diff --git a/README.md b/README.md index 3b58aba..515856c 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,25 @@ It will be helpful to have a working knowledge of AWS services and the AWS Conso By default the database will not have a public IP address and will not be accessible outside of your VPC. You will need to configure network access to the database as appropriate for your situation. +### If you want to use the existing RDS cluster + +If you want to use the existing RDS cluster, you need to create the following AWS resources: +1. One parameter in the AWS Systems Manager Parameter Store for Canvas Data 2 DAP Client ID +2. One parameter in the AWS Systems Manager Parameter Store for Canvas Data 2 DAP Client Secret + +You also need the following information: +1. RDS Database Subnets where your existing RDS cluster is located. + - Parameter Name: `DatabaseSubnetListParameter` +2. Database Admin user name for the existing RDS cluster + - Parameter Name: `DatabaseAdminUserParameter` +3. The ARN of the RDS cluster + - Parameter Name: `DatabaseClusterArnParameter` +4. The ARN of the Database Admin Secret + - Parameter Name: `DatabaseAdminSecretArnParameter` +5. Database Security Group ID (ex: sg-.....) + - Parameter Name: `DatabaseSecurityGroupParameter` + + ## Deploying the application The Serverless Application Model Command Line Interface (SAM CLI) is an extension of the AWS CLI that adds functionality for building and testing Lambda applications. It uses Docker to run your functions in an Amazon Linux environment that matches Lambda. It can also emulate your application's build environment and API. @@ -62,7 +81,7 @@ Deploying this application will create: - An Aurora Postgres cluster - A database user credential in AWS Secrets Manager. -In order for the application to use that credential to connect to the database, a database user must be created and granted appropriate privileges. A helper script is included that will take care of this setup. 
+In order for the application to use that credential to connect to the database, a database user must be created and granted appropriate privileges. A helper script is included that will take care of this setup. After deploying the SAM app, run this script. You must have valid AWS credentials before running the script. @@ -71,6 +90,22 @@ pip install setup/requirements.txt -r ./setup/prepare_aurora_db.py --stack-name ``` +#### (Optional) If you are creating a new database user and schema for an additional Canvas instance: +1. If you create a new CloudFormation stack for an additional Canvas instance, you need to modify `secret_name_prefix` so that it can target the correct secrets for the RDS database credential for the new DB user. + +2. When you run the `prepare_aurora_db.py` script, add the `--is-additional-stack` argument. + +``` +pip install -r setup/requirements.txt +./setup/prepare_aurora_db.py --stack-name --is-additional-stack +``` + +3. You need to grant the database user `athena` access to the new schema. +- You need to run the following queries in order for the Athena connector to access all tables in the new schema: + - `GRANT SELECT ON ALL TABLES IN SCHEMA catalog TO athena;` + - `GRANT USAGE ON SCHEMA catalog TO athena;` + - `ALTER DEFAULT PRIVILEGES IN SCHEMA catalog GRANT SELECT ON TABLES TO athena;`: This ensures that any new tables created in `catalog` automatically grant SELECT to the DB user `athena`. + Occasionally the schema for a CD2 table will change. The DAP library will take care of applying these changes to the database, but they will not succeed if you have created views that depend on the table. To handle this situation, the `sync_table` Lambda function will attempt to drop and recreate any views that depend on the table being synced. The pgsql functions necessary to do this can be found in this repository: https://github.com/rvkulikov/pg-deps-management. 
You will need to run the `ddl.sql` script in your database to create the necessary functions. (details tbd) ## Configuration diff --git a/init_table/app.py b/init_table/app.py index ca07609..697fd02 100644 --- a/init_table/app.py +++ b/init_table/app.py @@ -22,9 +22,10 @@ logger = Logger() env = os.environ.get('ENV', 'dev') +ssm_parameter_name = os.environ.get('SSM_PARAMETER_NAME', 'canvas_data_2') db_user_secret_name = os.environ.get('DB_USER_SECRET_NAME') -param_path = f'/{env}/canvas_data_2' +param_path = f'/{env}/{ssm_parameter_name}' api_base_url = os.environ.get('API_BASE_URL', 'https://api-gateway.instructure.com') @@ -95,7 +96,7 @@ async def init_table(credentials, api_base_url, db_connection, namespace, table_ if token: stepfunctions.send_task_success( taskToken=token, - output=json.dumps(payload)) + output=json.dumps(payload)) """ if token and result['state'] == 'complete': diff --git a/list_tables/app.py b/list_tables/app.py index 3f91d5d..42dbd59 100644 --- a/list_tables/app.py +++ b/list_tables/app.py @@ -17,12 +17,14 @@ logger = Logger() env = os.environ.get('ENV', 'dev') +ssm_parameter_name = os.environ.get('SSM_PARAMETER_NAME', 'canvas_data_2') +db_user = os.environ.get('DB_CD2_USER', 'canvas') -param_path = f'/{env}/canvas_data_2' +param_path = f'/{env}/{ssm_parameter_name}' api_base_url = os.environ.get('API_BASE_URL', 'https://api-gateway.instructure.com') -namespace = 'canvas' +namespace = db_user REGION = os.environ["AWS_REGION"] SLACK_WEBHOOK_URL_SECRET_NAME = os.getenv("SLACK_WEBHOOK_SECRET_NAME") diff --git a/setup/prepare_aurora_db.py b/setup/prepare_aurora_db.py index 35a3b0b..3813bcc 100644 --- a/setup/prepare_aurora_db.py +++ b/setup/prepare_aurora_db.py @@ -12,6 +12,12 @@ help="The name of the Canvas Data 2 CloudFormation stack containing the Aurora database", required=True, ) +parser.add_argument( + "--is-additional-stack", + help="Whether this is for the DB changes for the additional CD2 stack", + action="store_true", # If specified, 
sets the value as True + default=False # Default is False when not passed +) args = parser.parse_args() console = Console() @@ -22,6 +28,8 @@ cf_resource = boto3.resource("cloudformation") stack = cf_resource.Stack(args.stack_name) +is_additional_stack = args.is_additional_stack + console.print("Starting database preparation", style="bold green") # Fetch stack outputs and parameters @@ -91,7 +99,7 @@ def create_user(username, password, database_name): def create_schema(schema_name, username, database_name): """Create a schema with user as owner""" try: - create_schema_sql = f"CREATE SCHEMA IF NOT EXISTS {username} AUTHORIZATION {username}" + create_schema_sql = f"CREATE SCHEMA IF NOT EXISTS {schema_name} AUTHORIZATION {username}" execute_statement(create_schema_sql, database_name) console.print(f" - Created schema {schema_name} with owner {username}", style="bold green") except ClientError as e: @@ -127,6 +135,27 @@ def grant_user_to_admin(username, admin_username, database_name): except ClientError as e: console.print(f" ! Error granting user {username} to user {admin_username}: {e}", style="bold red") +def grant_create_permission_on_db_to_db_user(username, database_name): + """Grant CREATE permission on the database to the DB user""" + try: + grant_create_permission_sql = f"GRANT CREATE ON DATABASE {database_name} TO {username}" + execute_statement(grant_create_permission_sql, database_name) + console.print(f" - Granted CREATE permission on the database {database_name} to user {username}", style="bold green") + except ClientError as e: + console.print(f" ! 
Error granting CREATE permission on the database {database_name} to user {username}: {e}", style="bold red") + +def grant_access_permission_on_instructure_dap_schema_to_db_user(username, database_name): + """Grant SELECT, INSERT, UPDATE, DELETE permissions on the instructure_dap schema to the DB user""" + try: + tables = ["database_version", "table_sync"] + + for tablename in tables: + grant_access_permission_sql = f"GRANT INSERT, SELECT, UPDATE, DELETE ON instructure_dap.{tablename} TO {username}" + execute_statement(grant_access_permission_sql, database_name) + console.print(f" - Granted SELECT, INSERT, UPDATE, and DELETE permission on the schema instructure_dap to user {username}", style="bold green") + except ClientError as e: + console.print(f" ! Error granting SELECT, INSERT, UPDATE, and DELETE permission on the schema instructure_dap to user {username}: {e}", style="bold red") + # Get all database user secrets secret_name_prefix = f"{prefix}-cd2-db-user-{env}-" user_secrets = secrets_client.list_secrets( @@ -140,17 +169,17 @@ def grant_user_to_admin(username, admin_username, database_name): secret_value = json.loads(secrets_client.get_secret_value(SecretId=secret_arn)["SecretString"]) username = secret_value["username"] database_name = secret_value["dbname"] - + # Create or update the user create_user(username, secret_value["password"], database_name) - + # Grant user to admin user grant_user_to_admin(username, admin_username, database_name) - + # Create schema for user (with them as owner) if they need a schema if username in users_to_create_schema: create_schema(username, username, database_name) - + # Create instructure_dap schema for the CD2 database user with them as owner if username == db_user_username: create_schema("instructure_dap", username, database_name) @@ -158,9 +187,16 @@ def grant_user_to_admin(username, admin_username, database_name): # Assign privileges to canvas and instructure_dap schemas # Defaults to read-only if user is not set in 
user_roles dict user_role = get_user_role(username) - - grant_usage_to_schema(username, "canvas", database_name) - assign_privileges(username, "canvas", user_role, database_name) - + + grant_usage_to_schema(username, username, database_name) + assign_privileges(username, username, user_role, database_name) + grant_usage_to_schema(username, "instructure_dap", database_name) - assign_privileges(username, "instructure_dap", user_role, database_name) \ No newline at end of file + assign_privileges(username, "instructure_dap", user_role, database_name) + + # Grant the CREATE privilege on the cd2 database. + grant_create_permission_on_db_to_db_user(username, database_name) + + # If this is for the new additional stack, grant the access permission to the instructure_dap schema. + if is_additional_stack: + grant_access_permission_on_instructure_dap_schema_to_db_user(username, database_name) \ No newline at end of file diff --git a/sync_table/app.py b/sync_table/app.py index ad84d8d..64fb2e7 100644 --- a/sync_table/app.py +++ b/sync_table/app.py @@ -29,7 +29,8 @@ db_cluster_arn = os.environ.get("DB_CLUSTER_ARN") db_user_secret_name = os.environ.get("DB_USER_SECRET_NAME") admin_secret_arn = os.environ.get("ADMIN_SECRET_ARN") -param_path = f"/{env}/canvas_data_2" +ssm_parameter_name = os.environ.get('SSM_PARAMETER_NAME', 'canvas_data_2') +param_path = f"/{env}/{ssm_parameter_name}" api_base_url = os.environ.get("API_BASE_URL", "https://api-gateway.instructure.com") FUNCTION_NAME = 'sync_table' diff --git a/template.yaml b/template.yaml index d25b622..dd5fc3c 100644 --- a/template.yaml +++ b/template.yaml @@ -129,6 +129,16 @@ Parameters: Description: (Optional) A security group ID for the database. Leave empty to create a new security group. Default: '' + DatabaseClientSecurityGroupParameter: + Type: String + Description: (Optional) A security group ID for the DatabaseClient. Leave empty to create a new security group. 
+ Default: '' + + ListTablesFunctionSecurityGroupParameter: + Type: String + Description: (Optional) A security group ID for the ListTables Lambda function. Leave empty to create a new security group. + Default: '' + AthenaConnectorParameter: Type: String Description: (Optional) Create an Athena connector for PostgreSQL. Default is false. Must be false when values are set for DatabaseAdminSecretArnParameter and DatabaseSecurityGroupParameter. @@ -166,6 +176,21 @@ Parameters: Description: The secret name for Slack Incoming Webhook URL for the notification Default: '' + SecretsKmsKeyIDParameter: + Type: String + Description: (Optional) The key ID for existing SecretsKmsKey. Leave empty to create a new KMS key. + Default: '' + + DataKmsKeyIDParameter: + Type: String + Description: (Optional) The Key ID for DataKmsKey. Leave empty to create a new KMS key. + Default: '' + + MainCD2StackNameParameter: + Type: String + Description: (Optional) The Cloudformation stack name of the main Canvas Data 2 Stack. 
+ Default: 'ubcla-canvas-data-2' + Conditions: # Conditions for Bring Your Own Aurora Database Cluster @@ -181,6 +206,14 @@ Conditions: ExistingDatabaseSecurityGroup: !Not [!Equals [!Ref DatabaseSecurityGroupParameter, '']] CreateDatabaseSecurityGroup: !Equals [!Ref DatabaseSecurityGroupParameter, ''] + # Conditions for Bring Your Own Database Client Security Group + ExistingDatabaseClientSecurityGroup: !Not [!Equals [!Ref DatabaseClientSecurityGroupParameter, '']] + CreateDatabaseClientSecurityGroup: !Equals [!Ref DatabaseClientSecurityGroupParameter, ''] + + # Conditions for Bring Your Own ListTables Lambda Function Security Group + ExistingListTablesFunctionSecurityGroup: !Not [!Equals [!Ref ListTablesFunctionSecurityGroupParameter, '']] + CreateListTablesFunctionSecurityGroup: !Equals [!Ref ListTablesFunctionSecurityGroupParameter, ''] + # Conditions for Bring Your Own Notification Topic ExistingNotificationTopic: !Not [!Equals [!Ref NotificationTopicParameter, '']] CreateNotificationTopic: !Equals [!Ref NotificationTopicParameter, ''] @@ -202,6 +235,7 @@ Resources: SecretsKmsKey: Type: AWS::KMS::Key + Condition: CreateDatabase Properties: Description: Key for encrypting Canvas Data 2 secrets Enabled: true @@ -211,12 +245,14 @@ Resources: SecretsKeyAlias: Type: AWS::KMS::Alias + Condition: CreateDatabase Properties: - AliasName: !Sub alias/${ResourcePrefixParameter}-cd2-secrets + AliasName: !Sub alias/${ResourcePrefixParameter}-${AWS::StackName}-cd2-secrets TargetKeyId: !Ref SecretsKmsKey DataKmsKey: Type: AWS::KMS::Key + Condition: CreateDatabase Properties: Description: Key for encrypting Canvas Data 2 data Enabled: true @@ -239,8 +275,10 @@ Resources: StringEquals: "kms:EncryptionContext:aws:ecs:clusterAccount": - !Sub ${AWS::AccountId} + ForAnyValue:StringEquals: "kms:EncryptionContext:aws:ecs:clusterName": - - !Sub ${ResourcePrefixParameter}-cd2-cluster + - !Sub ${ResourcePrefixParameter}-${AWS::StackName}-cd2-cluster + - !Sub 
${ResourcePrefixParameter}-ubcla-canvas-data-2-catalog-cd2-cluster Resource: '*' - Sid: Allow grant creation permission for Fargate tasks. Effect: Allow @@ -252,8 +290,10 @@ Resources: StringEquals: "kms:EncryptionContext:aws:ecs:clusterAccount": - !Sub ${AWS::AccountId} + ForAnyValue:StringEquals: "kms:EncryptionContext:aws:ecs:clusterName": - - !Sub ${ResourcePrefixParameter}-cd2-cluster + - !Sub ${ResourcePrefixParameter}-${AWS::StackName}-cd2-cluster + - !Sub ${ResourcePrefixParameter}-ubcla-canvas-data-2-catalog-cd2-cluster ForAllValues:StringEquals: "kms:GrantOperations": - "Decrypt" @@ -286,22 +326,23 @@ Resources: DataKeyAlias: Type: AWS::KMS::Alias + Condition: CreateDatabase Properties: - AliasName: !Sub alias/${ResourcePrefixParameter}-cd2-data + AliasName: !Sub alias/${ResourcePrefixParameter}-${AWS::StackName}-cd2-data TargetKeyId: !Ref DataKmsKey # Create a secret for the CD2 app "canvas" user DatabaseUserSecretCanvas: Type: AWS::SecretsManager::Secret Properties: - Name: !Sub ${ResourcePrefixParameter}-cd2-db-user-${EnvironmentParameter}-${DatabaseCd2UserParameter} + Name: !Sub ${AWS::StackName}-cd2-db-user-${EnvironmentParameter}-${DatabaseCd2UserParameter} Description: Database user for Canvas Data 2 canvas user GenerateSecretString: SecretStringTemplate: !Sub '{"username": "${DatabaseCd2UserParameter}"}' GenerateStringKey: password PasswordLength: 24 ExcludePunctuation: true - KmsKeyId: !Ref SecretsKmsKey + KmsKeyId: !If [CreateDatabase, !Ref SecretsKmsKey, !Ref SecretsKmsKeyIDParameter] Tags: - Key: !Sub ${TagNameParameter} Value: !Sub ${TagValueParameter} @@ -316,6 +357,7 @@ Resources: DatabaseSubnetGroup: Type: AWS::RDS::DBSubnetGroup + Condition: CreateDatabase Properties: DBSubnetGroupDescription: !Sub ${ResourcePrefixParameter}-cd2-subnetgroup-${EnvironmentParameter} SubnetIds: !Ref DatabaseSubnetListParameter @@ -325,6 +367,7 @@ Resources: DatabaseClientSecurityGroup: Type: AWS::EC2::SecurityGroup + Condition: 
CreateDatabaseClientSecurityGroup Properties: GroupDescription: !Sub ${ResourcePrefixParameter}-cd2-database-client-sg-${EnvironmentParameter} GroupName: Canvas Data 2 Database Client @@ -342,6 +385,7 @@ Resources: ListTablesFunctionSecurityGroup: Type: AWS::EC2::SecurityGroup + Condition: CreateListTablesFunctionSecurityGroup Properties: GroupDescription: !Sub ${ResourcePrefixParameter}-cd2-list-tables-function-sg-${EnvironmentParameter} GroupName: Canvas Data 2 ListTablesFunction @@ -368,7 +412,7 @@ Resources: - IpProtocol: tcp FromPort: 5432 ToPort: 5432 - SourceSecurityGroupId: !Ref DatabaseClientSecurityGroup + SourceSecurityGroupId: !If [ExistingDatabaseClientSecurityGroup, !Ref DatabaseClientSecurityGroupParameter, !Ref DatabaseClientSecurityGroup] # TODO: add more ingress rules based on parameter(s) Tags: - Key: Name @@ -380,7 +424,7 @@ Resources: Type: AWS::EC2::SecurityGroupEgress Condition: CreateDatabaseSecurityGroup Properties: - GroupId: !Ref DatabaseClientSecurityGroup + GroupId: !If [ExistingDatabaseClientSecurityGroup, !Ref DatabaseClientSecurityGroupParameter, !Ref DatabaseClientSecurityGroup] IpProtocol: tcp FromPort: 5432 ToPort: 5432 @@ -388,9 +432,9 @@ Resources: DatabaseClientEgressToExistingDatabase: Type: AWS::EC2::SecurityGroupEgress - Condition: ExistingDatabaseSecurityGroup + Condition: CreateDatabaseClientSecurityGroup Properties: - GroupId: !Ref DatabaseClientSecurityGroup + GroupId: !If [ExistingDatabaseClientSecurityGroup, !Ref DatabaseClientSecurityGroupParameter, !Ref DatabaseClientSecurityGroup] IpProtocol: tcp FromPort: 5432 ToPort: 5432 @@ -478,11 +522,13 @@ Resources: SKIP_TABLES: !Ref SkipTablesParameter SLACK_WEBHOOK_SECRET_NAME: !Sub ${SlackWebHookURLSecretNameParameter} STACK_NAME: !Sub ${AWS::StackName} + SSM_PARAMETER_NAME: !Sub ${SsmPathParameter} + DB_CD2_USER: !Sub ${DatabaseCd2UserParameter} Timeout: 120 MemorySize: 256 VpcConfig: SecurityGroupIds: - - !Ref ListTablesFunctionSecurityGroup + - !If 
[ExistingListTablesFunctionSecurityGroup, !Ref ListTablesFunctionSecurityGroupParameter, !Ref ListTablesFunctionSecurityGroup] SubnetIds: !Ref LambdaSubnetListParameter LoggingConfig: LogGroup: !Ref ListTablesLogGroup @@ -551,10 +597,10 @@ Resources: FargateCluster: Type: AWS::ECS::Cluster Properties: - ClusterName: !Sub ${ResourcePrefixParameter}-cd2-cluster + ClusterName: !Sub ${ResourcePrefixParameter}-${AWS::StackName}-cd2-cluster Configuration: ManagedStorageConfiguration: - FargateEphemeralStorageKmsKeyId: !GetAtt DataKmsKey.Arn + FargateEphemeralStorageKmsKeyId: !If [CreateDatabase, !GetAtt DataKmsKey.Arn, !Sub "arn:aws:kms:${AWS::Region}:${AWS::AccountId}:key/${DataKmsKeyIDParameter}"] Tags: - Key: !Sub ${TagNameParameter} Value: !Sub ${TagValueParameter} @@ -593,7 +639,7 @@ Resources: Action: - kms:* Resource: - - !GetAtt DataKmsKey.Arn + - !If [CreateDatabase, !GetAtt DataKmsKey.Arn, !Sub "arn:aws:kms:${AWS::Region}:${AWS::AccountId}:key/${DataKmsKeyIDParameter}"] - !If - ConfigureFalconSensor - PolicyName: falcon_parameter_store @@ -657,7 +703,7 @@ Resources: - Effect: Allow Action: - kms:Decrypt - Resource: !GetAtt SecretsKmsKey.Arn + Resource: !If [CreateDatabase, !GetAtt SecretsKmsKey.Arn, !Sub "arn:aws:kms:${AWS::Region}:${AWS::AccountId}:key/${SecretsKmsKeyIDParameter}"] - PolicyName: ecr PolicyDocument: Statement: @@ -698,7 +744,7 @@ Resources: Action: - kms:* Resource: - - !GetAtt DataKmsKey.Arn + - !If [CreateDatabase, !GetAtt DataKmsKey.Arn, !Sub "arn:aws:kms:${AWS::Region}:${AWS::AccountId}:key/${DataKmsKeyIDParameter}"] - PolicyName: logging PolicyDocument: Version: '2012-10-17' @@ -735,7 +781,11 @@ Resources: - Name: !Sub ${AWS::StackName}-InitTable Cpu: !Ref TaskCpuParameter Memory: !Ref TaskMemoryParameter - Image: !Sub ${EcrAccountNumberParameter}.dkr.ecr.${AWS::Region}.amazonaws.com/${AWS::StackName}/init-table:${EnvironmentParameter} + Image: + Fn::If: + - CreateDatabase + - Fn::Sub: 
${EcrAccountNumberParameter}.dkr.ecr.${AWS::Region}.amazonaws.com/${AWS::StackName}/init-table:${EnvironmentParameter} + - Fn::Sub: ${EcrAccountNumberParameter}.dkr.ecr.${AWS::Region}.amazonaws.com/${MainCD2StackNameParameter}/init-table:${EnvironmentParameter} Essential: true LogConfiguration: LogDriver: awslogs @@ -839,7 +889,11 @@ Resources: - Name: !Sub ${AWS::StackName}-SyncTable Cpu: !Ref TaskCpuParameter Memory: !Ref TaskMemoryParameter - Image: !Sub ${EcrAccountNumberParameter}.dkr.ecr.${AWS::Region}.amazonaws.com/${AWS::StackName}/sync-table:${EnvironmentParameter} + Image: + Fn::If: + - CreateDatabase + - Fn::Sub: ${EcrAccountNumberParameter}.dkr.ecr.${AWS::Region}.amazonaws.com/${AWS::StackName}/sync-table:${EnvironmentParameter} + - Fn::Sub: ${EcrAccountNumberParameter}.dkr.ecr.${AWS::Region}.amazonaws.com/${MainCD2StackNameParameter}/sync-table:${EnvironmentParameter} Essential: true LogConfiguration: LogDriver: awslogs @@ -927,7 +981,7 @@ Resources: Type: AWS::SNS::Topic Condition: CreateNotificationTopic Properties: - DisplayName: !Sub "Canvas Data 2 Synchronization Workflow (${EnvironmentParameter})" + DisplayName: !Sub "Canvas Data 2 Synchronization Workflow (${AWS::StackName} - ${EnvironmentParameter})" Tags: - Key: !Sub ${TagNameParameter} Value: !Sub ${TagValueParameter} @@ -1090,6 +1144,7 @@ Resources: Type: ScheduleV2 Properties: Description: Execute the Canvas Data 2 Step Function every 3 hours + Name: !Sub ${AWS::StackName}-CD2-Refresh-schedule FlexibleTimeWindow: Mode: FLEXIBLE MaximumWindowInMinutes: 10 @@ -1136,7 +1191,7 @@ Resources: AwsvpcConfiguration: AssignPublicIp: DISABLED SecurityGroups: - - !Ref DatabaseClientSecurityGroup + - !If [ExistingDatabaseClientSecurityGroup, !Ref DatabaseClientSecurityGroupParameter, !Ref DatabaseClientSecurityGroup] Subnets: - Fn::ImportValue: !Sub ${ResourcePrefixParameter}-vpc--privateSubnetA - Fn::ImportValue: !Sub ${ResourcePrefixParameter}-vpc--privateSubnetB @@ -1162,6 +1217,8 @@ Resources: 
Value: !If [ExistingDatabaseAdminSecret, !Ref DatabaseAdminSecretArnParameter, !GetAtt AuroraDatabaseCluster.MasterUserSecret.SecretArn] - Name: DB_CLUSTER_ARN Value: !If [ExistingDatabase, !Ref DatabaseClusterArnParameter, !GetAtt AuroraDatabaseCluster.DBClusterArn] + - Name: SSM_PARAMETER_NAME + Value: !Sub ${SsmPathParameter} TimeoutSeconds: 43200 Retry: - ErrorEquals: @@ -1198,7 +1255,7 @@ Resources: AwsvpcConfiguration: AssignPublicIp: DISABLED SecurityGroups: - - !Ref DatabaseClientSecurityGroup + - !If [ExistingDatabaseClientSecurityGroup, !Ref DatabaseClientSecurityGroupParameter, !Ref DatabaseClientSecurityGroup] Subnets: - Fn::ImportValue: !Sub ${ResourcePrefixParameter}-vpc--privateSubnetA - Fn::ImportValue: !Sub ${ResourcePrefixParameter}-vpc--privateSubnetB @@ -1220,6 +1277,8 @@ Resources: Value: init_table - Name: DB_USER_SECRET_NAME Value: !Ref DatabaseUserSecretCanvas + - Name: SSM_PARAMETER_NAME + Value: !Sub ${SsmPathParameter} TimeoutSeconds: 43200 Retry: - ErrorEquals: @@ -1292,7 +1351,10 @@ Resources: LambdaTimeout: '900' # PermissionsBoundaryARN: SecretNamePrefix: !Sub ${ResourcePrefixParameter}-cd2-db-user-${EnvironmentParameter}-canvas - SecurityGroupIds: !Ref DatabaseClientSecurityGroup + SecurityGroupIds: !If + - ExistingDatabaseClientSecurityGroup + - !Ref DatabaseClientSecurityGroupParameter + - !Ref DatabaseClientSecurityGroup SpillBucket: !Ref AthenaSpillBucket SpillPrefix: postgresql SubnetIds: !Join [ ",", !Ref LambdaSubnetListParameter ]