@@ -57,7 +57,9 @@ class AccessReportLine:

 class AccessReport(List[AccessReportLine]):
     def __str__(self):
-        return "\n".join(f"{line.name}\n{line.status}: {line.message}" for line in self)
+        return "\n".join(
+            f"{line.name}\n{line.status}: {line.message}" for line in self
+        )

     def add_boto_method_call(
         self,
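
For reference, a minimal usage sketch of the reformatted `__str__` (only the `name`, `status`, and `message` attributes are confirmed by the diff; the `AccessReportLine` constructor arguments are assumed here):

```python
# Hypothetical sketch -- assumes AccessReportLine accepts these keyword arguments.
report = AccessReport(
    [AccessReportLine(name="S3 access", status="OK", message="bucket reachable")]
)
print(report)
# S3 access
# OK: bucket reachable
```
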
@@ -95,15 +97,15 @@ def __str__(self):


 class ProjectError(Error):
-    code: str = Field("Project Error", const=True)
+    code: str = "Project Error"


 class RecommendationError(Error):
-    code: str = Field("Recommendation Error", const=True)
+    code: str = "Recommendation Error"


 class SubmissionError(Error):
-    code: str = Field("Submission Error", const=True)
+    code: str = "Submission Error"


 @unique
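
If dropping `const=True` is aimed at Pydantic v2 compatibility (the keyword was removed there), note that a plain string default can be overridden by callers. A minimal sketch of one alternative that still pins the value, assuming an `Error` base model similar to the one in this module:

```python
from typing import Literal

from pydantic import BaseModel


class Error(BaseModel):  # assumed shape; the real base model is defined elsewhere in this file
    code: str = "Error"
    message: str = ""


class ProjectError(Error):
    # A plain default is overridable; a Literal annotation rejects any other value.
    code: Literal["Project Error"] = "Project Error"
```
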
@@ -139,13 +141,13 @@ class AzureDatabricksClusterReport(DatabricksClusterReport):


 class DatabricksError(Error):
-    code: str = Field("Databricks Error", const=True)
+    code: str = "Databricks Error"


 class MissingOrIncompleteEventlogError(Error):
     dbfs_eventlog_file_size: Union[int, None] = None
-    code: str = Field("Retryable Databricks Error", const=True)
-    message: str = Field("Event log was missing or incomplete. Please retry.", const=True)
+    code: str = "Retryable Databricks Error"
+    message: str = "Event log was missing or incomplete. Please retry."


 class DatabricksAPIError(Error):
@@ -162,8 +164,8 @@ def validate_error(cls, values):


 class Response(GenericModel, Generic[DataType]):
-    result: Union[DataType, None]
-    error: Union[Error, None]
+    result: Union[DataType, None] = None
+    error: Union[Error, None] = None

     @validator("error", always=True)
     def check_consistency(cls, err, values):
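
A self-contained sketch of what the `= None` defaults buy for the generic `Response` (the `Error` body is assumed; the real model and its consistency validator live elsewhere in this file):

```python
from typing import Generic, TypeVar, Union

from pydantic import BaseModel
from pydantic.generics import GenericModel  # Pydantic v1-style generics, as in the diff

DataType = TypeVar("DataType")


class Error(BaseModel):  # assumed shape
    code: str = "Error"
    message: str = ""


class Response(GenericModel, Generic[DataType]):
    result: Union[DataType, None] = None
    error: Union[Error, None] = None


# Pydantic v1 already treats un-defaulted Optional fields as defaulting to None, but v2
# treats them as required, so the explicit "= None" keeps these constructions valid.
ok = Response[int](result=42)
failed = Response[int](error=Error(code="Project Error", message="project not found"))
```
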
@@ -265,9 +267,13 @@ class AwsRegionEnum(str, Enum):
     "Statement": [
         {
             "Effect": "Allow",
-            "Principal": {"AWS": "arn:aws:iam::533267411813:role/sync-computing-collector"},
+            "Principal": {
+                "AWS": "arn:aws:iam::533267411813:role/sync-computing-collector"
+            },
             "Action": "sts:AssumeRole",
-            "Condition": {"StringEquals": {"sts:ExternalId": "PLACEHOLDER_EXTERNAL_ID"}},
+            "Condition": {
+                "StringEquals": {"sts:ExternalId": "PLACEHOLDER_EXTERNAL_ID"}
+            },
         }
     ],
 }
@@ -276,15 +282,15 @@ class AwsRegionEnum(str, Enum):
 class AwsHostedIAMInstructions(BaseModel):
     step_1_prompt: str = "Step 1: Copy the JSON and paste in AWS IAM Permissions page:"
     step_1_value: str = json.dumps(IAMRoleRequiredPermissions)
-    step_2_prompt: str = (
-        "Step 2: Copy the JSON and paste in AWS IAM Trust relationships page with External ID:"
-    )
+    step_2_prompt: str = "Step 2: Copy the JSON and paste in AWS IAM Trust relationships page with External ID:"
     external_id: str

     @property
     def step_2_value(self) -> str:
         policy = copy.deepcopy(IAMRoleTrustPolicy)
-        policy["Statement"][0]["Condition"]["StringEquals"]["sts:ExternalId"] = self.external_id
+        policy["Statement"][0]["Condition"]["StringEquals"]["sts:ExternalId"] = (
+            self.external_id
+        )
         return json.dumps(policy)

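
A short usage sketch of `AwsHostedIAMInstructions` as it reads after this hunk (the external ID is a made-up placeholder; `IAMRoleTrustPolicy` is the module-level dict shown in the previous hunk):

```python
import json

instructions = AwsHostedIAMInstructions(external_id="example-external-id")
print(instructions.step_1_value)   # permissions JSON to paste into the IAM policy page
print(instructions.step_2_prompt)

# step_2_value deep-copies IAMRoleTrustPolicy and swaps PLACEHOLDER_EXTERNAL_ID
# for this workspace's external ID before serializing it.
trust_policy = json.loads(instructions.step_2_value)
external = trust_policy["Statement"][0]["Condition"]["StringEquals"]["sts:ExternalId"]
assert external == "example-external-id"
```
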
@@ -299,16 +305,26 @@ class ComputeProviderHostedValues(BaseModel):
 class CreateWorkspaceConfig(BaseModel):
     workspace_id: str = Field(..., description="Unique identifier for the workspace")
     databricks_host: str = Field(..., description="Databricks service host URL")
-    databricks_token: str = Field(..., description="Authentication token for Databricks service")
-    sync_api_key_id: str = Field(..., description="API Key ID for synchronization service")
-    sync_api_key_secret: str = Field(..., description="API Key secret for synchronization service")
-    instance_profile_arn: Optional[str] = Field(None, description="AWS instance profile ARN")
+    databricks_token: str = Field(
+        ..., description="Authentication token for Databricks service"
+    )
+    sync_api_key_id: str = Field(
+        ..., description="API Key ID for synchronization service"
+    )
+    sync_api_key_secret: str = Field(
+        ..., description="API Key secret for synchronization service"
+    )
+    instance_profile_arn: Optional[str] = Field(
+        None, description="AWS instance profile ARN"
+    )
     webhook_id: Optional[str] = Field(None, description="Webhook ID for notifications")
     databricks_plan_type: DatabricksPlanType = Field(
         DatabricksPlanType.STANDARD, description="Plan type for Databricks deployment"
     )
     aws_region: Optional[str] = Field(None, description="AWS region if applicable")
-    cluster_policy_id: Optional[str] = Field(None, description="Cluster policy ID for Databricks")
+    cluster_policy_id: Optional[str] = Field(
+        None, description="Cluster policy ID for Databricks"
+    )
     collection_type: WorkspaceCollectionTypeEnum = Field(
         ..., description="Type of hosting for the workspace"
     )
@@ -318,10 +334,18 @@ class CreateWorkspaceConfig(BaseModel):
     compute_provider: ComputeProvider = Field(
         ..., description="Cloud provider for compute resources"
     )
-    external_id: Optional[str] = Field(None, description="External ID for AWS configurations")
-    aws_iam_role_arn: Optional[str] = Field(None, description="AWS IAM role ARN if needed")
-    azure_tenant_id: Optional[str] = Field(None, description="Azure tenant ID if using Azure")
-    azure_client_id: Optional[str] = Field(None, description="Azure client ID if using Azure")
+    external_id: Optional[str] = Field(
+        None, description="External ID for AWS configurations"
+    )
+    aws_iam_role_arn: Optional[str] = Field(
+        None, description="AWS IAM role ARN if needed"
+    )
+    azure_tenant_id: Optional[str] = Field(
+        None, description="Azure tenant ID if using Azure"
+    )
+    azure_client_id: Optional[str] = Field(
+        None, description="Azure client ID if using Azure"
+    )
     azure_client_secret: Optional[str] = Field(
         None, description="Azure client secret if using Azure"
     )
@@ -352,7 +376,9 @@ def check_aws_iam_role_arn(cls, aws_iam_role_arn, values):
         compute_provider = values.get("compute_provider")
         if values.get("collection_type") == WorkspaceCollectionTypeEnum.HOSTED:
             if compute_provider == ComputeProvider.AWS and not aws_iam_role_arn:
-                raise ValueError("AWS IAM Role ARN is required for AWS compute provider")
+                raise ValueError(
+                    "AWS IAM Role ARN is required for AWS compute provider"
+                )
         return aws_iam_role_arn

     @validator("compute_provider", pre=False)
@@ -367,7 +393,9 @@ def check_azure_hosted_fields(cls, compute_provider, values):
                 "azure_client_secret",
                 "azure_subscription_id",
             ]
-            missing_fields = [field for field in required_fields if not values.get(field)]
+            missing_fields = [
+                field for field in required_fields if not values.get(field)
+            ]
             if missing_fields:
                 raise ValueError(
                     f"Missing required fields for Azure compute provider: "