From 476794fb7bf39cec3211bc648726c23c2a0e89f0 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Thu, 3 Nov 2022 18:34:46 -0400 Subject: [PATCH 1/9] Add Template Option and Refactor CDKPipeline --- .../api/Objects/DataPipeline/resolvers.py | 10 +- backend/dataall/cdkproxy/cdk_cli_wrapper.py | 123 +------ .../dataall/cdkproxy/stacks/cdk_pipeline.py | 307 +++++++++--------- backend/requirements.txt | 1 + .../src/api/DataPipeline/getDataPipeline.js | 12 - .../src/api/DataPipeline/listDataPipelines.js | 12 - .../src/views/Pipelines/PipelineCreateForm.js | 13 + .../src/views/Pipelines/PipelineListItem.js | 2 +- .../src/views/Pipelines/PipelineOverview.js | 2 +- frontend/src/views/Pipelines/PipelineView.js | 29 +- 10 files changed, 177 insertions(+), 334 deletions(-) diff --git a/backend/dataall/api/Objects/DataPipeline/resolvers.py b/backend/dataall/api/Objects/DataPipeline/resolvers.py index fe5f2fd60..9198e5559 100644 --- a/backend/dataall/api/Objects/DataPipeline/resolvers.py +++ b/backend/dataall/api/Objects/DataPipeline/resolvers.py @@ -25,20 +25,12 @@ def create_pipeline(context: Context, source, input=None): check_perm=True, ) if input['devStrategy'] == 'cdk-trunk': - Stack.create_stack( - session=session, - environment_uri=pipeline.environmentUri, - target_type='cdkrepo', - target_uri=pipeline.DataPipelineUri, - target_label=pipeline.label, - payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, - ) Stack.create_stack( session=session, environment_uri=pipeline.environmentUri, target_type='cdkpipeline', - target_uri=f"{pipeline.DataPipelineUri}pip", + target_uri=f"{pipeline.DataPipelineUri}", target_label=pipeline.label, payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, ) diff --git a/backend/dataall/cdkproxy/cdk_cli_wrapper.py b/backend/dataall/cdkproxy/cdk_cli_wrapper.py index 3b346a337..a96114c8e 100644 --- a/backend/dataall/cdkproxy/cdk_cli_wrapper.py +++ b/backend/dataall/cdkproxy/cdk_cli_wrapper.py @@ -17,6 +17,7 @@ from ..db import models from ..db.api import Pipeline, Environment, Stack from ..utils.alarm_service import AlarmService +from dataall.cdkproxy.stacks.cdk_pipeline import CDKPipelineStack logger = logging.getLogger('cdksass') @@ -63,108 +64,6 @@ def update_stack_output(session, stack): stack.outputs = outputs -def clone_remote_stack(pipeline, pipeline_environment): - print('..................................................') - print(' Configure remote CDK app ') - print('..................................................') - aws = SessionHelper.remote_session(pipeline_environment.AwsAccountId) - env_creds = aws.get_credentials() - - python_path = '/:'.join(sys.path)[1:] + ':/code' + os.getenv('PATH') - - env = { - 'AWS_REGION': pipeline_environment.region, - 'AWS_DEFAULT_REGION': pipeline_environment.region, - 'CURRENT_AWS_ACCOUNT': pipeline_environment.AwsAccountId, - 'PYTHONPATH': python_path, - 'PATH': python_path, - 'envname': os.environ.get('envname', 'local'), - } - if env_creds: - env.update( - { - 'AWS_ACCESS_KEY_ID': env_creds.access_key, - 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, - 'AWS_SESSION_TOKEN': env_creds.token, - } - ) - print(f"ENVIRONMENT = {env}") - print('..................................................') - print(' Clone remote CDK app ') - print('..................................................') - - cmd = [ - 'git', - 'config', - '--system', - 'user.name', - 'data.allECS', - '&&', - 'git', - 'config', - '--system', - 'user.email', - 'data.allECS@email.com', - '&&', - 'cd', - 'dataall/cdkproxy/stacks', 
- '&&', - 'mkdir', - f'{pipeline.repo}', - '&&', - 'git', - 'clone', - f"codecommit::{pipeline_environment.region}://{pipeline.repo}", - f'{pipeline.repo}' - ] - process = subprocess.run( - ' '.join(cmd), - text=True, - shell=True, # nosec - encoding='utf-8', - capture_output=True, - env=env - ) - if process.returncode == 0: - print(f"Successfully cloned repo {pipeline.repo}: {str(process.stdout)}") - else: - logger.error( - f'Failed to clone repo {pipeline.repo} due to {str(process.stderr)}' - ) - return - - -def clean_up_repo(path): - if path: - precmd = [ - 'rm', - '-rf', - f"{path}" - ] - - cwd = os.path.dirname(os.path.abspath(__file__)) - logger.info(f"Running command : \n {' '.join(precmd)}") - - process = subprocess.run( - ' '.join(precmd), - text=True, - shell=True, # nosec - encoding='utf-8', - capture_output=True, - cwd=cwd - ) - - if process.returncode == 0: - print(f"Successfully cleaned cloned repo: {path}. {str(process.stdout)}") - else: - logger.error( - f'Failed clean cloned repo: {path} due to {str(process.stderr)}' - ) - else: - logger.info(f"Info:Path {path} not found") - return - - def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: str = None): logger.warning(f'Starting new stack from stackid {stackid}') sts = boto3.client('sts') @@ -181,6 +80,11 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s stack.status = 'PENDING' session.commit() + if stack.stack == "cdkpipeline": + cdkpipeline = CDKPipelineStack(stack) + pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) + path = f"./stacks/{pipeline.repo}/" + app_path = app_path or './app.py' logger.info(f'app_path: {app_path}') @@ -246,21 +150,14 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s cwd=cwd, ) + if stack.stack == "cdkpipeline": + CDKPipelineStack.clean_up_repo(path=f"./{pipeline.repo}") + if process.returncode == 0: - meta = describe_stack(stack) + meta = describe_stack(stack) stack.stackid = meta['StackId'] stack.status = meta['StackStatus'] update_stack_output(session, stack) - if stack.stack == 'cdkrepo': - logger.warning(f'Starting new remote stack from targetUri {stack.targetUri}pip') - cicdstack: models.Stack = Stack.get_stack_by_target_uri(session, target_uri=f"{stack.targetUri}pip") - cicdstack.EcsTaskArn = stack.EcsTaskArn - session.commit() - pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) - pipeline_environment = Environment.get_environment_by_uri(session, pipeline.environmentUri) - clone_remote_stack(pipeline, pipeline_environment) - deploy_cdk_stack(engine, cicdstack.stackUri, app_path="app.py", path=f"./stacks/{pipeline.repo}/") - clean_up_repo(f"./stacks/{pipeline.repo}") else: stack.status = 'CREATE_FAILED' logger.error( diff --git a/backend/dataall/cdkproxy/stacks/cdk_pipeline.py b/backend/dataall/cdkproxy/stacks/cdk_pipeline.py index b6e84c221..9e979fc43 100644 --- a/backend/dataall/cdkproxy/stacks/cdk_pipeline.py +++ b/backend/dataall/cdkproxy/stacks/cdk_pipeline.py @@ -1,24 +1,20 @@ import logging import os -import shutil +import sys +import subprocess -from aws_cdk import Stack, CfnOutput -from aws_cdk import aws_codecommit as codecommit - -from aws_cdk.aws_s3_assets import Asset - -from .manager import stack +# from .manager import stack from ... 
import db -from ...db import models -from ...db.api import Environment, Pipeline, Dataset -from ...utils.cdk_nag_utils import CDKNagUtil -from ...utils.runtime_stacks_tagging import TagsUtil +# from ...db import models +from ...db.api import Environment, Pipeline +# from ...utils.cdk_nag_utils import CDKNagUtil +# from ...utils.runtime_stacks_tagging import TagsUtil +from ...aws.handlers.sts import SessionHelper logger = logging.getLogger(__name__) - -@stack("cdkrepo") -class CDKPipelineStack(Stack): +# @stack(stack='cdkrepo') +class CDKPipelineStack: """ Create a stack that contains CDK Continuous Integration and Delivery (CI/CD) pipeline. @@ -33,154 +29,91 @@ class CDKPipelineStack(Stack): - data.all metadata as environment variables accesible at synth """ - - module_name = __file__ - def get_engine(self): envname = os.environ.get("envname", "local") engine = db.get_engine(envname=envname) return engine - def get_target(self, target_uri) -> models.DataPipeline: - engine = self.get_engine() - with engine.scoped_session() as session: - return Pipeline.get_pipeline_by_uri(session, target_uri) - - def get_pipeline_environments(self, targer_uri) -> models.DataPipelineEnvironment: - engine = self.get_engine() - with engine.scoped_session() as session: - envs = Pipeline.query_pipeline_environments( - session, targer_uri - ) - return envs - - def get_pipeline_cicd_environment( - self, pipeline: models.DataPipeline - ) -> models.Environment: - envname = os.environ.get("envname", "local") - engine = db.get_engine(envname=envname) - with engine.scoped_session() as session: - return Environment.get_environment_by_uri(session, pipeline.environmentUri) - - def get_env_team(self, pipeline: models.DataPipeline) -> models.EnvironmentGroup: - engine = self.get_engine() - with engine.scoped_session() as session: - env = Environment.get_environment_group( - session, pipeline.SamlGroupName, pipeline.environmentUri - ) - return env + module_name = __file__ - def get_dataset(self, dataset_uri) -> models.Dataset: + def __init__(self, stack): engine = self.get_engine() with engine.scoped_session() as session: - ds = Dataset.get_dataset_by_uri( - session, dataset_uri - ) - return ds - - def __init__(self, scope, id, target_uri: str = None, **kwargs): - kwargs.setdefault("tags", {}).update({"utility": "dataall-data-pipeline"}) - super().__init__( - scope, - id, - env=kwargs.get("env"), - stack_name=kwargs.get("stack_name"), - tags=kwargs.get("tags"), - description="Cloud formation stack of PIPELINE: {}; URI: {}; DESCRIPTION: {}".format( - self.get_target(target_uri=target_uri).label, - target_uri, - self.get_target(target_uri=target_uri).description, - )[ - :1024 - ], - ) - - # Configuration - self.target_uri = target_uri - - pipeline = self.get_target(target_uri=target_uri) - pipeline_environment = self.get_pipeline_cicd_environment(pipeline=pipeline) - pipeline_env_team = self.get_env_team(pipeline=pipeline) - # Development environments - development_environments = self.get_pipeline_environments(targer_uri=target_uri) - - # Create CodeCommit repository and mirror blueprint code - code_dir_path = os.path.realpath( - os.path.abspath( - os.path.join( - __file__, "..", "..", "..", "..", "blueprints", "cdk_data_pipeline_blueprint" - ) - ) - ) - - CDKPipelineStack.write_ddk_app_multienvironment( - path=code_dir_path, - output_file="app.py", - pipeline=pipeline, - development_environments=development_environments - ) - - CDKPipelineStack.write_ddk_json_multienvironment( - path=code_dir_path, - output_file="ddk.json", - 
pipeline_environment=pipeline_environment, - development_environments=development_environments - ) - CDKPipelineStack.cleanup_zip_directory(code_dir_path) - - CDKPipelineStack.zip_directory(code_dir_path) - - code_asset = Asset( - scope=self, id=f"{pipeline.name}-asset", path=f"{code_dir_path}/code.zip" - ) - code = codecommit.CfnRepository.CodeProperty( - s3=codecommit.CfnRepository.S3Property( - bucket=code_asset.s3_bucket_name, - key=code_asset.s3_object_key, + self.pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) + self.pipeline_environment = Environment.get_environment_by_uri(session, self.pipeline.environmentUri) + # Development environments + self.development_environments = Pipeline.query_pipeline_environments(session, stack.targetUri) + + aws = SessionHelper.remote_session(self.pipeline_environment.AwsAccountId) + env_creds = aws.get_credentials() + + python_path = '/:'.join(sys.path)[1:] + ':/code' + os.getenv('PATH') + + self.env = { + 'AWS_REGION': self.pipeline_environment.region, + 'AWS_DEFAULT_REGION': self.pipeline_environment.region, + 'CURRENT_AWS_ACCOUNT': self.pipeline_environment.AwsAccountId, + 'PYTHONPATH': python_path, + 'PATH': python_path, + 'envname': os.environ.get('envname', 'local'), + } + if env_creds: + self.env.update( + { + 'AWS_ACCESS_KEY_ID': env_creds.access_key, + 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, + 'AWS_SESSION_TOKEN': env_creds.token + } ) - ) - - repository = codecommit.CfnRepository( - scope=self, - code=code, - id="CodecommitRepository", - repository_name=pipeline.repo, - ) - # CloudFormation output - CfnOutput( - self, - "RepoNameOutput", - export_name=f"{pipeline.DataPipelineUri}-RepositoryName", - value=pipeline.repo, + self.code_dir_path = os.path.dirname(os.path.abspath(__file__)) + + template = self.pipeline.template if (self.pipeline.template != self.pipeline.label) else "" + + self.initialize_repo(template) + if not len(template): + self.write_ddk_app_multienvironment(output_file="app.py") + self.write_ddk_json_multienvironment(output_file="ddk.json") + self.git_push_repo() + + def initialize_repo(self, template): + + template_cmds = [ + f"git clone {template} {self.pipeline.repo}", + f"cd {self.pipeline.repo}", + "rm -rf .git" + ] + no_template_cmds = [ + f"ddk init {self.pipeline.repo} --generate-only", + f"cd {self.pipeline.repo}" + ] + repo_cmds = [ + "git init --initial-branch main", + "virtualenv .venv && source .venv/bin/activate", + "pip install -q -r requirements.txt", + f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" + ] + + cmd_init = [ "pip install aws-ddk"] + (template_cmds if (self.pipeline.template != self.pipeline.label) else no_template_cmds ) + repo_cmds + + logger.info(f"Running Commands: {'; '.join(cmd_init)}") + + process = subprocess.run( + '; '.join(cmd_init), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=self.code_dir_path, + env=self.env ) + if process.returncode == 0: + logger.info("Successfully Initialized New CDK/DDK App") - TagsUtil.add_tags(self) - - CDKNagUtil.check_rules(self) - - CDKPipelineStack.cleanup_zip_directory(code_dir_path) - - @staticmethod - def zip_directory(path): - try: - shutil.make_archive("code", "zip", path) - shutil.move("code.zip", f"{path}/code.zip") - except Exception as e: - logger.error(f"Failed to zip repository due to: {e}") - - @staticmethod - def cleanup_zip_directory(path): - if os.path.isfile(f"{path}/code.zip"): - os.remove(f"{path}/code.zip") - else: - logger.info("Info: %s 
Zip not found" % f"{path}/code.zip") - @staticmethod - def write_ddk_json_multienvironment(path, output_file, pipeline_environment, development_environments): + def write_ddk_json_multienvironment(self, output_file): json_envs = "" - for env in development_environments: + for env in self.development_environments: json_env = f""", "{env.stage}": {{ "account": "{env.AwsAccountId}", @@ -194,23 +127,23 @@ def write_ddk_json_multienvironment(path, output_file, pipeline_environment, dev json = f"""{{ "environments": {{ "cicd": {{ - "account": "{pipeline_environment.AwsAccountId}", - "region": "{pipeline_environment.region}" + "account": "{self.pipeline_environment.AwsAccountId}", + "region": "{self.pipeline_environment.region}" }}{json_envs} }} }}""" - with open(f'{path}/{output_file}', 'w') as text_file: + with open(f'{self.code_dir_path}/{self.pipeline.repo}/{output_file}', 'w') as text_file: print(json, file=text_file) - @staticmethod - def write_ddk_app_multienvironment(path, output_file, pipeline, development_environments): + + def write_ddk_app_multienvironment(self, output_file): header = f""" # !/usr/bin/env python3 import aws_cdk as cdk from aws_ddk_core.cicd import CICDPipelineStack -from ddk_app.ddk_app_stack import DDKApplicationStack +from ddk_app.ddk_app_stack import DdkApplicationStack from aws_ddk_core.config import Config app = cdk.App() @@ -223,23 +156,23 @@ def __init__( **kwargs, ) -> None: super().__init__(scope, f"dataall-{{environment_id.title()}}", **kwargs) - DDKApplicationStack(self, "DataPipeline-{pipeline.label}-{pipeline.DataPipelineUri}", environment_id) + DdkApplicationStack(self, "DataPipeline-{self.pipeline.label}-{self.pipeline.DataPipelineUri}", environment_id) -id = f"dataall-cdkpipeline-{pipeline.DataPipelineUri}pip" +id = f"dataall-cdkpipeline-{self.pipeline.DataPipelineUri}" config = Config() ( CICDPipelineStack( app, id=id, environment_id="cicd", - pipeline_name="{pipeline.label}", + pipeline_name="{self.pipeline.label}", ) - .add_source_action(repository_name="{pipeline.repo}") + .add_source_action(repository_name="{self.pipeline.repo}") .add_synth_action() .build()""" stages = "" - for env in sorted(development_environments, key=lambda env: env.order): + for env in sorted(self.development_environments, key=lambda env: env.order): stage = f""".add_stage("{env.stage}", ApplicationStage(app, "{env.stage}", env=config.get_env("{env.stage}")))""" stages = stages + stage footer = """ @@ -249,5 +182,61 @@ def __init__( app.synth() """ app = header + stages + footer - with open(f'{path}/{output_file}', 'w') as text_file: + with open(f'{self.code_dir_path}/{self.pipeline.repo}/{output_file}', 'w') as text_file: print(app, file=text_file) + + + def git_push_repo(self): + git_cmds = [ + 'git config user.email "codebuild@example.com"', + 'git config user.name "CodeBuild"', + 'git config --local credential.helper "!aws codecommit credential-helper $@"', + "git config --local credential.UseHttpPath true", + "git add .", + "git commit -a -m 'Initial Commit' ", + "git push -u origin main" + ] + + logger.info(f"Running Commands: {'; '.join(git_cmds)}") + + process = subprocess.run( + '; '.join(git_cmds), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=os.path.join(self.code_dir_path, self.pipeline.repo), + env=self.env + ) + if process.returncode == 0: + logger.info("Successfully Pushed DDK App Code") + + @staticmethod + def clean_up_repo(path): + if path: + precmd = [ + 'rm', + '-rf', + f"{path}" + ] + + cwd = os.path.dirname(os.path.abspath(__file__)) + 
logger.info(f"Running command : \n {' '.join(precmd)}") + + process = subprocess.run( + ' '.join(precmd), + text=True, + shell=True, # nosec + encoding='utf-8', + capture_output=True, + cwd=cwd + ) + + if process.returncode == 0: + print(f"Successfully cleaned cloned repo: {path}. {str(process.stdout)}") + else: + logger.error( + f'Failed clean cloned repo: {path} due to {str(process.stderr)}' + ) + else: + logger.info(f"Info:Path {path} not found") + return \ No newline at end of file diff --git a/backend/requirements.txt b/backend/requirements.txt index f2932cb51..27fd9a184 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -13,3 +13,4 @@ PyYAML==6.0 requests==2.27.1 requests_aws4auth==1.1.1 sqlalchemy==1.3.16 +virtualenv==20.16.6 \ No newline at end of file diff --git a/frontend/src/api/DataPipeline/getDataPipeline.js b/frontend/src/api/DataPipeline/getDataPipeline.js index 3cf8ff3d2..b83d7bf10 100644 --- a/frontend/src/api/DataPipeline/getDataPipeline.js +++ b/frontend/src/api/DataPipeline/getDataPipeline.js @@ -62,18 +62,6 @@ const getDataPipeline = (DataPipelineUri) => ({ outputs resources } - cicdStack { - stack - status - stackUri - targetUri - accountid - region - stackid - link - outputs - resources - } } } ` diff --git a/frontend/src/api/DataPipeline/listDataPipelines.js b/frontend/src/api/DataPipeline/listDataPipelines.js index 54a8e97cb..362618d91 100644 --- a/frontend/src/api/DataPipeline/listDataPipelines.js +++ b/frontend/src/api/DataPipeline/listDataPipelines.js @@ -45,18 +45,6 @@ const searchDataPipelines = (filter) => ({ outputs resources } - cicdStack { - stack - status - stackUri - targetUri - accountid - region - stackid - link - outputs - resources - } } } } diff --git a/frontend/src/views/Pipelines/PipelineCreateForm.js b/frontend/src/views/Pipelines/PipelineCreateForm.js index 2a1944f4f..7809bb33b 100644 --- a/frontend/src/views/Pipelines/PipelineCreateForm.js +++ b/frontend/src/views/Pipelines/PipelineCreateForm.js @@ -434,6 +434,19 @@ const PipelineCrateForm = (props) => { ))} + + + diff --git a/frontend/src/views/Pipelines/PipelineListItem.js b/frontend/src/views/Pipelines/PipelineListItem.js index e729b50fc..e6e8f8d89 100644 --- a/frontend/src/views/Pipelines/PipelineListItem.js +++ b/frontend/src/views/Pipelines/PipelineListItem.js @@ -199,7 +199,7 @@ const PipelineListItem = (props) => { - + diff --git a/frontend/src/views/Pipelines/PipelineOverview.js b/frontend/src/views/Pipelines/PipelineOverview.js index da9140921..3f0938d69 100644 --- a/frontend/src/views/Pipelines/PipelineOverview.js +++ b/frontend/src/views/Pipelines/PipelineOverview.js @@ -31,7 +31,7 @@ const PipelineOverview = (props) => { owner={pipeline.owner} admins={pipeline.SamlGroupName || '-'} created={pipeline.created} - status={pipeline.cicdStack?.status || pipeline.stack.status} + status={pipeline.stack.status} /> diff --git a/frontend/src/views/Pipelines/PipelineView.js b/frontend/src/views/Pipelines/PipelineView.js index c0fe0deab..77cf3bc0b 100644 --- a/frontend/src/views/Pipelines/PipelineView.js +++ b/frontend/src/views/Pipelines/PipelineView.js @@ -138,7 +138,6 @@ const PipelineView = () => { const [loading, setLoading] = useState(true); const [pipeline, setPipeline] = useState(null); const [stack, setStack] = useState(null); - const [cicdStack, setCicdStack] = useState(null); const [cdkTrunk, setCdkTrunk] = useState(null); const [isDeleteObjectModalOpen, setIsDeleteObjectModalOpen] = useState(false); const [tabs, setTabs] = useState([ @@ -158,15 +157,6 @@ const 
PipelineView = () => { const response = await client.query(getDataPipeline(params.uri)); if (!response.errors && response.data.getDataPipeline !== null) { setPipeline(response.data.getDataPipeline); - if (response.data.getDataPipeline.devStrategy =="cdk-trunk") { - setTabs([ - {label: 'Overview', value: 'overview', icon: }, - {label: 'Tags', value: 'tags', icon: }, - {label: 'Repo Stack', value: 'stack', icon: }, - {label: 'CICD Stack', value: 'cicdStack', icon: } - ]); - setCdkTrunk(true); - } } else { const error = response.errors ? response.errors[0].message @@ -174,7 +164,7 @@ const PipelineView = () => { dispatch({ type: SET_ERROR, error }); } setLoading(false); - }, [client, dispatch, params.uri, stack, cicdStack]); + }, [client, dispatch, params.uri, stack]); useEffect(() => { if (client) { @@ -225,13 +215,6 @@ const PipelineView = () => { setStack={setStack} environmentUri={pipeline.environment?.environmentUri} /> - {cdkTrunk && ( - - )} { environmentUri={pipeline.environment.environmentUri} stackUri={pipeline.stack.stackUri} targetUri={pipeline.DataPipelineUri} - targetType={pipeline.devStrategy == 'cdk-trunk' ? "cdkrepo" : "pipeline"} - /> - )} - {currentTab === 'cicdStack' && ( - )} From c136abf96a8e1fa5ba8602ee973ae7e4f7c6f004 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Fri, 4 Nov 2022 09:42:27 -0400 Subject: [PATCH 2/9] Removing CICD Stack Calls --- .../api/Objects/DataPipeline/resolvers.py | 16 ++++++++-------- .../dataall/api/Objects/DataPipeline/schema.py | 2 +- deploy/pivot_role/pivotRole.yaml | 1 + 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/backend/dataall/api/Objects/DataPipeline/resolvers.py b/backend/dataall/api/Objects/DataPipeline/resolvers.py index 9198e5559..42ab6840b 100644 --- a/backend/dataall/api/Objects/DataPipeline/resolvers.py +++ b/backend/dataall/api/Objects/DataPipeline/resolvers.py @@ -244,14 +244,14 @@ def get_stack(context, source: models.DataPipeline, **kwargs): ) -def get_cicd_stack(context, source: models.DataPipeline, **kwargs): - if not source: - return None - return stack_helper.get_stack_with_cfn_resources( - context=context, - targetUri=f"{source.DataPipelineUri}pip", - environmentUri=source.environmentUri, - ) +# def get_cicd_stack(context, source: models.DataPipeline, **kwargs): +# if not source: +# return None +# return stack_helper.get_stack_with_cfn_resources( +# context=context, +# targetUri=f"{source.DataPipelineUri}pip", +# environmentUri=source.environmentUri, +# ) def get_job_runs(context, source: models.DataPipeline, **kwargs): diff --git a/backend/dataall/api/Objects/DataPipeline/schema.py b/backend/dataall/api/Objects/DataPipeline/schema.py index 76289255c..72f00cac2 100644 --- a/backend/dataall/api/Objects/DataPipeline/schema.py +++ b/backend/dataall/api/Objects/DataPipeline/schema.py @@ -30,7 +30,7 @@ gql.Field('devStrategy', type=gql.String), gql.Field('cloneUrlHttp', gql.String, resolver=get_clone_url_http), gql.Field('stack', gql.Ref('Stack'), resolver=get_stack), - gql.Field('cicdStack', gql.Ref('Stack'), resolver=get_cicd_stack), + # gql.Field('cicdStack', gql.Ref('Stack'), resolver=get_cicd_stack), gql.Field( 'userRoleForPipeline', type=DataPipelineRole.toGraphQLEnum(), diff --git a/deploy/pivot_role/pivotRole.yaml b/deploy/pivot_role/pivotRole.yaml index 0cb9ca907..0ffe6894a 100644 --- a/deploy/pivot_role/pivotRole.yaml +++ b/deploy/pivot_role/pivotRole.yaml @@ -680,6 +680,7 @@ Resources: - 'codecommit:GetCommit' - 'codecommit:GitPull' - 'codecommit:GetRepository' + - 'codecommit:TagResource' 
Effect: Allow Resource: - !Sub 'arn:aws:codecommit:*:${AWS::AccountId}:${EnvironmentResourcePrefix}*' From aa1110275ecf9aefd86dbac02b996e19a79f80e3 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Wed, 9 Nov 2022 16:47:50 -0500 Subject: [PATCH 3/9] Editable Pipeline Environments --- .../api/Objects/DataPipeline/input_types.py | 12 + .../api/Objects/DataPipeline/mutations.py | 22 + .../api/Objects/DataPipeline/resolvers.py | 35 +- backend/dataall/cdkproxy/cdk_cli_wrapper.py | 23 +- .../dataall/cdkproxy/stacks/cdk_pipeline.py | 11 +- backend/dataall/db/api/pipeline.py | 33 +- backend/docker/dev/Dockerfile | 2 + backend/requirements.txt | 3 +- .../deleteDataPipelineEnvironment.js | 24 + .../updateDataPipelineEnvironment.js | 24 + .../src/views/Pipelines/PipelineEditForm.js | 136 ++++-- .../Pipelines/PipelineEnvironmentEditForm.js | 456 ++++++++++++++++++ 12 files changed, 725 insertions(+), 56 deletions(-) create mode 100644 frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js create mode 100644 frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js create mode 100644 frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js diff --git a/backend/dataall/api/Objects/DataPipeline/input_types.py b/backend/dataall/api/Objects/DataPipeline/input_types.py index a821a8225..c7f825048 100644 --- a/backend/dataall/api/Objects/DataPipeline/input_types.py +++ b/backend/dataall/api/Objects/DataPipeline/input_types.py @@ -25,6 +25,18 @@ ], ) +UpdateDataPipelineEnvironmentInput = gql.InputType( + name='UpdateDataPipelineEnvironmentInput', + arguments=[ + gql.Argument(name='stage', type=gql.NonNullableType(gql.String)), + gql.Argument(name='order', type=gql.NonNullableType(gql.Integer)), + gql.Argument(name='pipelineUri', type=gql.ArrayType(gql.String)), + gql.Argument(name='environmentLabel', type=gql.NonNullableType(gql.String)), + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='samlGroupName', type=gql.NonNullableType(gql.String)), + ], +) + UpdateDataPipelineInput = gql.InputType( name='UpdateDataPipelineInput', arguments=[ diff --git a/backend/dataall/api/Objects/DataPipeline/mutations.py b/backend/dataall/api/Objects/DataPipeline/mutations.py index c1c11e1c0..c7e478b6e 100644 --- a/backend/dataall/api/Objects/DataPipeline/mutations.py +++ b/backend/dataall/api/Objects/DataPipeline/mutations.py @@ -42,3 +42,25 @@ ], resolver=create_pipeline_environment, ) + +deleteDataPipelineEnvironment = gql.MutationField( + name='deleteDataPipelineEnvironment', + type=gql.Boolean, + args=[ + gql.Argument(name='dataPipelineUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='stage', type=gql.NonNullableType(gql.String)) + ], + resolver=delete_pipeline_environment, +) + +updateDataPipelineEnvironment = gql.MutationField( + name='updateDataPipelineEnvironment', + type=gql.Ref('DataPipelineEnvironment'), + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(gql.Ref('UpdateDataPipelineEnvironmentInput')) + ) + ], + resolver=update_pipeline_environment, +) diff --git a/backend/dataall/api/Objects/DataPipeline/resolvers.py b/backend/dataall/api/Objects/DataPipeline/resolvers.py index 42ab6840b..7e2f70779 100644 --- a/backend/dataall/api/Objects/DataPipeline/resolvers.py +++ b/backend/dataall/api/Objects/DataPipeline/resolvers.py @@ -404,16 +404,7 @@ def delete_pipeline( accountid=env.AwsAccountId, cdk_role_arn=env.CDKRoleArn, region=env.region, - 
target_type='cdkrepo', - ) - - stack_helper.delete_stack( - context=context, - target_uri=f"{DataPipelineUri}pip", - accountid=env.AwsAccountId, - cdk_role_arn=env.CDKRoleArn, - region=env.region, - target_type='pipelinePip', + target_type='cdkpipeline', ) else: stack_helper.delete_stack( @@ -426,3 +417,27 @@ def delete_pipeline( ) return True + +def delete_pipeline_environment(context: Context, source, dataPipelineUri: str = None, environmentUri: str = None, stage: str = None): + with context.engine.scoped_session() as session: + Pipeline.delete_pipeline_environment( + session=session, + username=context.username, + groups=context.groups, + dataPipelineUri=dataPipelineUri, + environmentUri=environmentUri, + stage=stage, + check_perm=True, + ) + return True + +def update_pipeline_environment(context: Context, source, input=None): + with context.engine.scoped_session() as session: + pipeline_env = Pipeline.update_pipeline_environment( + session=session, + username=context.username, + groups=context.groups, + data=input, + check_perm=True, + ) + return pipeline_env \ No newline at end of file diff --git a/backend/dataall/cdkproxy/cdk_cli_wrapper.py b/backend/dataall/cdkproxy/cdk_cli_wrapper.py index a96114c8e..4fd3eeb20 100644 --- a/backend/dataall/cdkproxy/cdk_cli_wrapper.py +++ b/backend/dataall/cdkproxy/cdk_cli_wrapper.py @@ -82,6 +82,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s if stack.stack == "cdkpipeline": cdkpipeline = CDKPipelineStack(stack) + venv_name = cdkpipeline.venv_name pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) path = f"./stacks/{pipeline.repo}/" @@ -93,7 +94,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s '' '. ~/.nvm/nvm.sh &&', 'cdk', - 'deploy', + 'deploy --all', '--require-approval', ' never', '-c', @@ -117,10 +118,8 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s f'"{sys.executable} {app_path}"', '--verbose', ] - logger.info(f"Running command : \n {' '.join(cmd)}") python_path = '/:'.join(sys.path)[1:] + ':/code' - logger.info(f'python path = {python_path}') env = { @@ -141,6 +140,24 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s cwd = os.path.join(os.path.dirname(os.path.abspath(__file__)), path) if path else os.path.dirname(os.path.abspath(__file__)) + if stack.stack == "cdkpipeline": + cmd.insert(0, f"source {venv_name}/bin/activate;") + aws = SessionHelper.remote_session(stack.accountid) + creds = aws.get_credentials() + env.update( + { + 'CDK_DEFAULT_REGION': stack.region, + 'AWS_REGION': stack.region, + 'AWS_DEFAULT_REGION': stack.region, + 'CDK_DEFAULT_ACCOUNT': stack.accountid, + 'AWS_ACCESS_KEY_ID': creds.access_key, + 'AWS_SECRET_ACCESS_KEY': creds.secret_key, + 'AWS_SESSION_TOKEN': creds.token + } + ) + + logger.info(f"Running command : \n {' '.join(cmd)}") + process = subprocess.run( ' '.join(cmd), text=True, diff --git a/backend/dataall/cdkproxy/stacks/cdk_pipeline.py b/backend/dataall/cdkproxy/stacks/cdk_pipeline.py index 9e979fc43..9cba9b216 100644 --- a/backend/dataall/cdkproxy/stacks/cdk_pipeline.py +++ b/backend/dataall/cdkproxy/stacks/cdk_pipeline.py @@ -71,14 +71,17 @@ def __init__(self, stack): template = self.pipeline.template if (self.pipeline.template != self.pipeline.label) else "" - self.initialize_repo(template) + self.venv_name = self.initialize_repo(template) if not len(template): self.write_ddk_app_multienvironment(output_file="app.py") 
self.write_ddk_json_multienvironment(output_file="ddk.json") self.git_push_repo() + def initialize_repo(self, template): + venv_name = ".venv" + template_cmds = [ f"git clone {template} {self.pipeline.repo}", f"cd {self.pipeline.repo}", @@ -90,8 +93,8 @@ def initialize_repo(self, template): ] repo_cmds = [ "git init --initial-branch main", - "virtualenv .venv && source .venv/bin/activate", - "pip install -q -r requirements.txt", + f"python3 -m venv {venv_name} && source {venv_name}/bin/activate", + "pip install -r requirements.txt", f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" ] @@ -110,6 +113,8 @@ def initialize_repo(self, template): if process.returncode == 0: logger.info("Successfully Initialized New CDK/DDK App") + return venv_name + def write_ddk_json_multienvironment(self, output_file): json_envs = "" diff --git a/backend/dataall/db/api/pipeline.py b/backend/dataall/db/api/pipeline.py index 5994bc252..1ea0b5bf8 100644 --- a/backend/dataall/db/api/pipeline.py +++ b/backend/dataall/db/api/pipeline.py @@ -1,6 +1,6 @@ import logging -from sqlalchemy import or_ +from sqlalchemy import or_, and_ from sqlalchemy.orm import Query from . import ( @@ -295,6 +295,37 @@ def delete_pipeline_environments(session, uri) -> bool: session.commit() return True + @staticmethod + def delete_pipeline_environment( + session, username, groups, dataPipelineUri, environmentUri, stage, check_perm=None + ) -> bool: + deletedItem = session.query(models.DataPipelineEnvironment).filter( + and_( + models.DataPipelineEnvironment.pipelineUri == dataPipelineUri, + models.DataPipelineEnvironment.environmentUri == environmentUri, + models.DataPipelineEnvironment.stage == stage + ) + ).delete() + session.commit() + return True + + @staticmethod + def update_pipeline_environment( + session, username, groups, data=None, check_perm=None + ) -> models.DataPipelineEnvironment: + pipeline_env = session.query(models.DataPipelineEnvironment).filter( + and_( + models.DataPipelineEnvironment.pipelineUri == data['pipelineUri'], + models.DataPipelineEnvironment.environmentUri == data['environmentUri'], + models.DataPipelineEnvironment.stage == data['stage'] + ) + ) + if data: + if isinstance(data, dict): + for k in data.keys(): + setattr(pipeline_env, k, data.get(k)) + return pipeline_env + @staticmethod def paginated_pipeline_environments( session, username, groups, uri, data=None, check_perm=None diff --git a/backend/docker/dev/Dockerfile b/backend/docker/dev/Dockerfile index 080e39f0b..cf819b399 100644 --- a/backend/docker/dev/Dockerfile +++ b/backend/docker/dev/Dockerfile @@ -14,6 +14,8 @@ RUN amazon-linux-extras install $PYTHON_VERSION RUN yum -y install python38-devel RUN yum -y install git +RUN /bin/bash -c "ln -s /usr/bin/${PYTHON_VERSION} /usr/bin/python3" + RUN useradd -m app WORKDIR /build diff --git a/backend/requirements.txt b/backend/requirements.txt index 27fd9a184..716df84bc 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -12,5 +12,4 @@ pyjwt==2.4.0 PyYAML==6.0 requests==2.27.1 requests_aws4auth==1.1.1 -sqlalchemy==1.3.16 -virtualenv==20.16.6 \ No newline at end of file +sqlalchemy==1.3.16 \ No newline at end of file diff --git a/frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js b/frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js new file mode 100644 index 000000000..c0e397c27 --- /dev/null +++ b/frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js @@ -0,0 +1,24 @@ +import { gql } from 
'apollo-boost'; + +const deleteDataPipelineEnvironment = ({ dataPipelineUri, environmentUri, stage }) => ({ + variables: { + dataPipelineUri, + environmentUri, + stage + }, + mutation: gql` + mutation deleteDataPipelineEnvironment( + $dataPipelineUri: String! + $environmentUri: String! + $stage: String! + ) { + deleteDataPipelineEnvironment( + dataPipelineUri: $dataPipelineUri + environmentUri: $environmentUri + stage: $stage + ) + } + ` +}); + +export default deleteDataPipelineEnvironment; diff --git a/frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js b/frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js new file mode 100644 index 000000000..5063f2d8a --- /dev/null +++ b/frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js @@ -0,0 +1,24 @@ +import { gql } from 'apollo-boost'; + +const updateDataPipelineEnvironment = ({ input }) => ({ + variables: { + input + }, + mutation: gql` + mutation updateDataPipelineEnvironment($input: UpdateDataPipelineEnvironmentInput) { + updateDataPipelineEnvironment(input: $input) { + envPipelineUri + environmentUri + environmentLabel + pipelineUri + pipelineLabel + stage + region + AwsAccountId + samlGroupName + } + } + ` +}); + +export default updateDataPipelineEnvironment; diff --git a/frontend/src/views/Pipelines/PipelineEditForm.js b/frontend/src/views/Pipelines/PipelineEditForm.js index 92fa8cd21..a6d1c23a1 100644 --- a/frontend/src/views/Pipelines/PipelineEditForm.js +++ b/frontend/src/views/Pipelines/PipelineEditForm.js @@ -29,6 +29,10 @@ import { useDispatch } from '../../store'; import ChipInput from '../../components/TagsInput'; import getDataPipeline from '../../api/DataPipeline/getDataPipeline'; import updateDataPipeline from '../../api/DataPipeline/updateDataPipeline'; +import listEnvironments from '../../api/Environment/listEnvironments'; +import PipelineEnvironmentEditForm from "./PipelineEnvironmentEditForm"; +import * as Defaults from '../../components/defaults'; + const PipelineEditForm = (props) => { const dispatch = useDispatch(); @@ -37,11 +41,19 @@ const PipelineEditForm = (props) => { const { enqueueSnackbar } = useSnackbar(); const client = useClient(); const { settings } = useSettings(); - const [loading, setLoading] = useState(true); + const [loadingPipeline, setLoadingPipeline] = useState(true); + const [loadingEnvs, setLoadingEnvs] = useState(true); const [pipeline, setPipeline] = useState(null); + const [environmentOptions, setEnvironmentOptions] = useState([]); + const [triggerEnvSubmit, setTriggerEnvSubmit] = useState(false); + const [countEnvironmentsValid, setCountEnvironmentsValid] = useState(false); + + const handleCountEnvironmentValid = state => { + setCountEnvironmentsValid(state); + }; const fetchItem = useCallback(async () => { - setLoading(true); + setLoadingPipeline(true); const response = await client.query(getDataPipeline(params.uri)); if (!response.errors && response.data.getDataPipeline !== null) { setPipeline(response.data.getDataPipeline); @@ -51,7 +63,7 @@ const PipelineEditForm = (props) => { : 'Pipeline not found'; dispatch({ type: SET_ERROR, error }); } - setLoading(false); + setLoadingPipeline(false); }, [client, dispatch, params.uri]); useEffect(() => { @@ -60,43 +72,77 @@ const PipelineEditForm = (props) => { } }, [client, dispatch, fetchItem]); + const fetchEnvironments = useCallback(async () => { + setLoadingEnvs(true); + const response = await client.query( + listEnvironments({ filter: Defaults.SelectListFilter }) + ); + if (!response.errors) { + setEnvironmentOptions( 
+ response.data.listEnvironments.nodes.map((e) => ({ + ...e, + value: e.environmentUri, + label: e.label + })) + ); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setLoadingEnvs(false); + }, [client, dispatch]); + + useEffect(() => { + if (client) { + fetchEnvironments().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }, [client, dispatch, fetchEnvironments]); + async function submit(values, setStatus, setSubmitting, setErrors) { - try { - const response = await client.mutate( - updateDataPipeline({ - DataPipelineUri: pipeline.DataPipelineUri, - input: { - description: values.description, - label: values.label, - tags: values.tags + if (!countEnvironmentsValid){ + dispatch({ type: SET_ERROR, error: "At least one deployment environment is required" }) + } else{ + try { + const response = await client.mutate( + updateDataPipeline({ + DataPipelineUri: pipeline.DataPipelineUri, + input: { + description: values.description, + label: values.label, + tags: values.tags + } + }) + ); + if (!response.errors) { + setStatus({ success: true }); + setTriggerEnvSubmit(true); + setSubmitting(false); + enqueueSnackbar('Pipeline updated', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + navigate( + `/console/pipelines/${response.data.updateDataPipeline.DataPipelineUri}` + ); + } else { + setTriggerEnvSubmit(false); + dispatch({ type: SET_ERROR, error: response.errors[0].message }); } - }) - ); - if (!response.errors) { - setStatus({ success: true }); - setSubmitting(false); - enqueueSnackbar('Pipeline updated', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate( - `/console/pipelines/${response.data.updateDataPipeline.DataPipelineUri}` - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } catch (err) { + setStatus({ success: false }); + setTriggerEnvSubmit(false); + setErrors({ submit: err.message }); + setSubmitting(false); + dispatch({ type: SET_ERROR, error: err.message }); + } } - } catch (err) { - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); } - } - if (loading || (!pipeline && pipeline.environment)) { + if ((loadingPipeline || loadingEnvs) || (!pipeline && pipeline.environment)) { return ; } @@ -319,6 +365,22 @@ const PipelineEditForm = (props) => { /> + + + + + + {errors.submit && ( + + {errors.submit} + + )} { type="submit" variant="contained" > - Save + Update Pipeline - + )} diff --git a/frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js b/frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js new file mode 100644 index 000000000..4e92900ab --- /dev/null +++ b/frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js @@ -0,0 +1,456 @@ +import React, { useEffect, useState } from 'react'; +import { useSnackbar } from 'notistack'; +import { + Box, + Button, + Card, + CardContent, + CardHeader, + Divider, + Grid, + IconButton, + MenuItem, + Table, + TableBody, + TableCell, + TableHead, + TableRow, + TextField +} from '@mui/material'; +import { DeleteOutlined } from '@mui/icons-material'; +import PropTypes from 'prop-types'; +import useClient from '../../hooks/useClient'; +import { SET_ERROR } from '../../store/errorReducer'; +import { useDispatch } from '../../store'; +import createDataPipelineEnvironment from '../../api/DataPipeline/createDataPipelineEnvironment'; +import 
deleteDataPipelineEnvironment from '../../api/DataPipeline/deleteDataPipelineEnvironment'; +import updateDataPipelineEnvironment from '../../api/DataPipeline/updateDataPipelineEnvironment'; +import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; +import * as Defaults from '../../components/defaults'; + +const PipelineEnvironmentEditForm = (props) => { + const { environmentOptions, triggerEnvSubmit, pipelineUri, pipeline, handleCountEnvironmentValid } = props; + const dispatch = useDispatch(); + const { enqueueSnackbar } = useSnackbar(); + const client = useClient(); + const [kvEnvs, setKeyValueEnvs] = useState([]); + const [envsToRemove, setEnvsToRemove] = useState([]); + const [environments, setEnvironments] = useState([]); + const [mapGroups, setMapGroups] = useState(new Map()) + const stageOps =[{value:"dev", label:"dev"},{value:"test", label:"test"},{value:"val", label:"val"},{value:"prod", label:"prod"},{value:"other", label:"other"}]; + const [environmentOps, setEnvironmentOps] = useState( + environmentOptions && environmentOptions.length > 0 ? environmentOptions : [{ environmentUri: 'someUri', label: 'some' },{ environmentUri: 'someUri', label: 'some2' }] + ); + + useEffect(() => { + if (client && pipeline) { + console.log("useeffect") + console.log(pipeline) + const environmentsSorted = pipeline.developmentEnvironments.nodes.sort((a, b) => { + return a.order - b.order; + }); + if (environmentsSorted) { + environmentsSorted.map((e) => (handleExistingEnvRow(e))) + } + } + }, [client, pipeline]); + + const fetchGroups = async (environment) => { + try { + const response = await client.query( + listEnvironmentGroups({ + filter: Defaults.SelectListFilter, + environmentUri: environment.environmentUri + }) + ); + + if (!response.errors) { + setMapGroups(new Map(mapGroups.set(environment.environmentUri, response.data.listEnvironmentGroups.nodes)) )//Array of groups (Objects) + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (e) { + dispatch({ type: SET_ERROR, error: e.message }); + } + }; + + const handleExistingEnvRow = (e) => { + if (kvEnvs.length <= 40) { + const item = { + stage: e.stage, + env: e.environmentLabel, + environmentLabel: e.environmentLabel, + environmentUri: e.environmentUri, + samlGroupName: e.samlGroupName, + team: e.samlGroupName, + AwsAccountId: e.AwsAccountId + }; + setEnvironments((prevState) => [...prevState, item]); + } else { + dispatch({ + type: SET_ERROR, + error: 'You cannot add more than 40 development stages' + }); + } + }; + + const handleAddEnvRow = () => { + if (kvEnvs.length <= 40) { + const item = { + stage: '', + env: '', + team: '' + }; + setKeyValueEnvs((prevState) => [...prevState, item]); + } else { + dispatch({ + type: SET_ERROR, + error: 'You cannot add more than 40 development stages' + }); + } + }; + + const handleChange = (idx, field) => (e) => { + const { value } = e.target; + + setKeyValueEnvs((prevstate) => { + const rows = [...prevstate]; + if (field === 'stage') { + rows[idx].stage = value; + } else if (field === 'env'){ + rows[idx].environmentLabel = value.label; + rows[idx].environmentUri = value.environmentUri; + } else{ + rows[idx].samlGroupName = value; + } + return rows; + }); + }; + + const handleRemoveEnvRow = (idx) => { + setKeyValueEnvs((prevstate) => { + const rows = [...prevstate]; + rows.splice(idx, 1); + return rows; + }); + }; + + const handleRemoveExistingEnvRow = (idx) => { + setEnvironments((prevstate) => { + const rows = [...prevstate]; + 
setEnvsToRemove((prevState) => [...prevState, rows[idx]]); + rows.splice(idx, 1); + return rows; + }); + }; + + async function submit(element, index) { + try { + const response = await client.mutate( + createDataPipelineEnvironment({ + input: { + stage: element.stage, + order: index + environments.length + 1, + pipelineUri: pipelineUri, + environmentLabel: element.environmentLabel, + environmentUri: element.environmentUri, + samlGroupName: element.samlGroupName + + } + }) + ); + if (!response.errors) { + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + console.error(err); + dispatch({ type: SET_ERROR, error: err.message }); + } + } + + async function update(element, index) { + try { + const response = await client.mutate( + updateDataPipelineEnvironment({ + input: { + stage: element.stage, + order: index + 1, + pipelineUri: pipelineUri, + environmentLabel: element.environmentLabel, + environmentUri: element.environmentUri, + samlGroupName: element.samlGroupName + } + }) + ); + if (!response.errors) { + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + console.error(err); + dispatch({ type: SET_ERROR, error: err.message }); + } + } + + async function deleteEnv(element, index) { + try { + const response = await client.mutate( + deleteDataPipelineEnvironment({ + dataPipelineUri: pipelineUri, + environmentUri: element.environmentUri, + stage: element.stage + }) + ); + if (!response.errors) { + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + console.error(err); + dispatch({ type: SET_ERROR, error: err.message }); + } + } + + useEffect(() => { + if (client && triggerEnvSubmit && pipelineUri && envsToRemove.length > 0) { + envsToRemove.forEach((element, index) => deleteEnv(element, index)) + } + if (client && triggerEnvSubmit && pipelineUri && environments.length > 0) { + environments.forEach((element, index) => update(element, index)) + } + if (client && triggerEnvSubmit && pipelineUri && kvEnvs.length > 0) { + kvEnvs.forEach((element, index) => submit(element, index)) + } + if (client && environmentOptions.length > 0) { + environmentOptions.forEach((element) => fetchGroups(element)) + } + }, [client, dispatch, triggerEnvSubmit, pipelineUri, environmentOptions]); + + useEffect(() => { + if ((kvEnvs.length + environments.length) > 0){ + handleCountEnvironmentValid(true) + }else{ + handleCountEnvironmentValid(false) + } + }, [kvEnvs.length, environments.length]); + + return ( + <> + + + + + + + + + + + + + + + + + {environments && environments.length > 0 && ( + + + Order + Development Stage + Environment + Team + AWS Account + + + )} + + {environments.map((item, idx) => ( + <> + + + + + + + + + + + + + + + + + + + + ))} + +
+ { + handleRemoveExistingEnvRow(idx); + }} + > + + +
+
+
+
+
+ + + + + + + + + + + + + + {kvEnvs && kvEnvs.length > 0 && ( + + + Order + Development Stage + Environment + Team + + + )} + + {kvEnvs.map((item, idx) => ( + <> + + + + + + + {stageOps.map((stage) => ( + + {stage.label} + + ))} + + + + + {environmentOps.map((environment) => ( + + {environment.label} + + ))} + + + + + {mapGroups.get(kvEnvs[idx].environmentUri) && (mapGroups.get(kvEnvs[idx].environmentUri).map((g) => ( + + {g.groupUri} + + )))} + + + + + + ))} + +
+ { + handleRemoveEnvRow(idx); + }} + > + + +
+ + + +
+
+
+
+
+
+ + ); +}; +PipelineEnvironmentEditForm.propTypes = { + environmentOptions: PropTypes.array.isRequired, + triggerEnvSubmit: PropTypes.bool.isRequired, + pipelineUri: PropTypes.string.isRequired, + pipeline: PropTypes.object.isRequired, + handleCountEnvironmentValid: PropTypes.func.isRequired +}; +export default PipelineEnvironmentEditForm; From 5eed8a6f96297fe513a4ab2dcfdcc598c51afc62 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Wed, 9 Nov 2022 16:48:39 -0500 Subject: [PATCH 4/9] python3 symlink --- backend/docker/prod/ecs/Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/backend/docker/prod/ecs/Dockerfile b/backend/docker/prod/ecs/Dockerfile index 57ce83888..b272902af 100644 --- a/backend/docker/prod/ecs/Dockerfile +++ b/backend/docker/prod/ecs/Dockerfile @@ -17,6 +17,8 @@ RUN amazon-linux-extras install $PYTHON_VERSION RUN yum -y install python38-devel RUN yum -y install git +RUN /bin/bash -c "ln -s /usr/bin/${PYTHON_VERSION} /usr/bin/python3" + RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" RUN unzip awscliv2.zip RUN ./aws/install From eb0831e7b95592e6ee3516014e2963f4b3c7fc42 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Fri, 11 Nov 2022 17:19:11 -0500 Subject: [PATCH 5/9] Add Template DevStrategy and Work on Update Pipeline DevEnvs --- .../api/Objects/DataPipeline/input_types.py | 4 +- .../api/Objects/DataPipeline/resolvers.py | 14 +- backend/dataall/cdkproxy/cdk_cli_wrapper.py | 21 ++- backend/dataall/cdkproxy/stacks/__init__.py | 1 + .../dataall/cdkproxy/stacks/cdk_pipeline.py | 3 +- backend/dataall/cdkproxy/stacks/pipeline.py | 58 +++++-- .../cdkproxy/stacks/pipeline_template.py | 160 ++++++++++++++++++ backend/dataall/db/api/pipeline.py | 8 +- .../src/views/Pipelines/PipelineCreateForm.js | 26 +-- 9 files changed, 254 insertions(+), 41 deletions(-) create mode 100644 backend/dataall/cdkproxy/stacks/pipeline_template.py diff --git a/backend/dataall/api/Objects/DataPipeline/input_types.py b/backend/dataall/api/Objects/DataPipeline/input_types.py index c7f825048..9909f0fee 100644 --- a/backend/dataall/api/Objects/DataPipeline/input_types.py +++ b/backend/dataall/api/Objects/DataPipeline/input_types.py @@ -18,7 +18,7 @@ arguments=[ gql.Argument(name='stage', type=gql.NonNullableType(gql.String)), gql.Argument(name='order', type=gql.NonNullableType(gql.Integer)), - gql.Argument(name='pipelineUri', type=gql.ArrayType(gql.String)), + gql.Argument(name='pipelineUri', type=gql.NonNullableType(gql.String)), gql.Argument(name='environmentLabel', type=gql.NonNullableType(gql.String)), gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), gql.Argument(name='samlGroupName', type=gql.NonNullableType(gql.String)), @@ -30,7 +30,7 @@ arguments=[ gql.Argument(name='stage', type=gql.NonNullableType(gql.String)), gql.Argument(name='order', type=gql.NonNullableType(gql.Integer)), - gql.Argument(name='pipelineUri', type=gql.ArrayType(gql.String)), + gql.Argument(name='pipelineUri', type=gql.NonNullableType(gql.String)), gql.Argument(name='environmentLabel', type=gql.NonNullableType(gql.String)), gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), gql.Argument(name='samlGroupName', type=gql.NonNullableType(gql.String)), diff --git a/backend/dataall/api/Objects/DataPipeline/resolvers.py b/backend/dataall/api/Objects/DataPipeline/resolvers.py index 7e2f70779..6d6579323 100644 --- a/backend/dataall/api/Objects/DataPipeline/resolvers.py +++ b/backend/dataall/api/Objects/DataPipeline/resolvers.py @@ -25,7 +25,6 
@@ def create_pipeline(context: Context, source, input=None): check_perm=True, ) if input['devStrategy'] == 'cdk-trunk': - Stack.create_stack( session=session, environment_uri=pipeline.environmentUri, @@ -34,6 +33,15 @@ def create_pipeline(context: Context, source, input=None): target_label=pipeline.label, payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, ) + elif input['devStrategy'] == 'template': + Stack.create_stack( + session=session, + environment_uri=pipeline.environmentUri, + target_type='template', + target_uri=f"{pipeline.DataPipelineUri}", + target_label=pipeline.label, + payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, + ) else: Stack.create_stack( session=session, @@ -71,6 +79,9 @@ def update_pipeline(context: Context, source, DataPipelineUri: str, input: dict data=input, check_perm=True, ) + if (pipeline.template == ""): + stack_helper.deploy_stack(context, pipeline.DataPipelineUri) + return pipeline @@ -438,6 +449,7 @@ def update_pipeline_environment(context: Context, source, input=None): username=context.username, groups=context.groups, data=input, + uri=input['pipelineUri'], check_perm=True, ) return pipeline_env \ No newline at end of file diff --git a/backend/dataall/cdkproxy/cdk_cli_wrapper.py b/backend/dataall/cdkproxy/cdk_cli_wrapper.py index 4fd3eeb20..4f1ba2c16 100644 --- a/backend/dataall/cdkproxy/cdk_cli_wrapper.py +++ b/backend/dataall/cdkproxy/cdk_cli_wrapper.py @@ -18,6 +18,7 @@ from ..db.api import Pipeline, Environment, Stack from ..utils.alarm_service import AlarmService from dataall.cdkproxy.stacks.cdk_pipeline import CDKPipelineStack +from dataall.cdkproxy.stacks.pipeline_template import PipelineTemplateStack logger = logging.getLogger('cdksass') @@ -79,13 +80,24 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s logger.warning(f"stackuri = {stack.stackUri}, stackId = {stack.stackid}") stack.status = 'PENDING' session.commit() - + if stack.stack == "cdkpipeline": cdkpipeline = CDKPipelineStack(stack) venv_name = cdkpipeline.venv_name pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) path = f"./stacks/{pipeline.repo}/" + elif stack.stack == "template": + pipeline_template = PipelineTemplateStack(stack) + venv_name = pipeline_template.venv_name + pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) + path = f"./stacks/{pipeline.repo}/" + cwd = os.path.join(os.path.dirname(os.path.abspath(__file__)), path) if path else os.path.dirname(os.path.abspath(__file__)) + + if stack.stack == "template": + resp = subprocess.run(['cdk','ls'], cwd=cwd, stdout=subprocess.PIPE) + stack.name = resp.stdout.decode('utf-8').split('\n')[0] + app_path = app_path or './app.py' logger.info(f'app_path: {app_path}') @@ -138,9 +150,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s } ) - cwd = os.path.join(os.path.dirname(os.path.abspath(__file__)), path) if path else os.path.dirname(os.path.abspath(__file__)) - - if stack.stack == "cdkpipeline": + if stack.stack == "template": cmd.insert(0, f"source {venv_name}/bin/activate;") aws = SessionHelper.remote_session(stack.accountid) creds = aws.get_credentials() @@ -166,9 +176,10 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s env=env, cwd=cwd, ) - if stack.stack == "cdkpipeline": CDKPipelineStack.clean_up_repo(path=f"./{pipeline.repo}") + if stack.stack == "template": + PipelineTemplateStack.clean_up_repo(path=f"./{pipeline.repo}") if process.returncode == 0: meta 
= describe_stack(stack)
diff --git a/backend/dataall/cdkproxy/stacks/__init__.py b/backend/dataall/cdkproxy/stacks/__init__.py
index 4644eb0f7..202af2c50 100644
--- a/backend/dataall/cdkproxy/stacks/__init__.py
+++ b/backend/dataall/cdkproxy/stacks/__init__.py
@@ -1,6 +1,7 @@
 from .dataset import Dataset
 from .environment import EnvironmentSetup
 from .cdk_pipeline import CDKPipelineStack
+from .pipeline_template import PipelineTemplateStack
 from .pipeline import PipelineStack
 from .manager import stack, instanciate_stack, StackManager
 from .notebook import SagemakerNotebook
diff --git a/backend/dataall/cdkproxy/stacks/cdk_pipeline.py b/backend/dataall/cdkproxy/stacks/cdk_pipeline.py
index 9cba9b216..6b2a838fd 100644
--- a/backend/dataall/cdkproxy/stacks/cdk_pipeline.py
+++ b/backend/dataall/cdkproxy/stacks/cdk_pipeline.py
@@ -3,9 +3,7 @@ import logging
 import os
 import sys
 import subprocess
 
-# from .manager import stack
 from ... import db
-# from ...db import models
 from ...db.api import Environment, Pipeline
 # from ...utils.cdk_nag_utils import CDKNagUtil
 # from ...utils.runtime_stacks_tagging import TagsUtil
@@ -219,6 +217,7 @@ def git_push_repo(self):
     def clean_up_repo(path):
         if path:
             precmd = [
+                'deactivate;',
                 'rm',
                 '-rf',
                 f"{path}"
diff --git a/backend/dataall/cdkproxy/stacks/pipeline.py b/backend/dataall/cdkproxy/stacks/pipeline.py
index 4653499e3..439736084 100644
--- a/backend/dataall/cdkproxy/stacks/pipeline.py
+++ b/backend/dataall/cdkproxy/stacks/pipeline.py
@@ -1,6 +1,7 @@
 import logging
 import os
 import shutil
+import subprocess
 
 from typing import List
 
@@ -179,27 +180,52 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs):
         PipelineStack.write_ddk_json_multienvironment(path=code_dir_path, output_file="dataall_ddk.json", pipeline_environment=pipeline_environment, development_environments=development_environments)
 
-        PipelineStack.cleanup_zip_directory(code_dir_path)
+        try:
+            repository = codecommit.Repository.from_repository_name(
+                self,
+                id="PipelineRepository",
+                repository_name=pipeline.repo
+            )
+            logger.info(f"Pipeline repo {pipeline.repo} exists... Handling update")
+            update_cmds = [
+                f'REPO_NAME={pipeline.repo}',
+                'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)',
+                'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://dataall_ddk.json --file-path dataall_ddk.json --parent-commit-id ${COMMITID}',
+                # 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)',
+                # 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://deploy_buildspec.yaml --file-path deploy_buildspec.yaml --parent-commit-id ${COMMITID}',
+            ]
+            process = subprocess.run(
+                "; ".join(update_cmds),
+                text=True,
+                shell=True,  # nosec
+                encoding='utf-8',
+                cwd=code_dir_path
+            )
+
+        except Exception:
+            logger.info(f"Pipeline repo {pipeline.repo} does not exist... Creating repository")
 
-        PipelineStack.zip_directory(code_dir_path)
+            PipelineStack.cleanup_zip_directory(code_dir_path)
 
-        code_asset = Asset(
-            scope=self, id=f"{pipeline.name}-asset", path=f"{code_dir_path}/code.zip"
-        )
+            PipelineStack.zip_directory(code_dir_path)
 
-        code = codecommit.CfnRepository.CodeProperty(
-            s3=codecommit.CfnRepository.S3Property(
-                bucket=code_asset.s3_bucket_name,
-                key=code_asset.s3_object_key,
+            code_asset = Asset(
+                scope=self, id=f"{pipeline.name}-asset", path=f"{code_dir_path}/code.zip"
             )
-        )
 
-        repository = codecommit.CfnRepository(
-            scope=self,
-            code=code,
-            id="CodecommitRepository",
-            repository_name=pipeline.repo,
-        )
+            code = codecommit.CfnRepository.CodeProperty(
+                s3=codecommit.CfnRepository.S3Property(
+                    bucket=code_asset.s3_bucket_name,
+                    key=code_asset.s3_object_key,
+                )
+            )
+
+            repository = codecommit.CfnRepository(
+                scope=self,
+                code=code,
+                id="CodecommitRepository",
+                repository_name=pipeline.repo,
+            )
 
         if pipeline.devStrategy == "trunk":
             codepipeline_pipeline = codepipeline.Pipeline(
diff --git a/backend/dataall/cdkproxy/stacks/pipeline_template.py b/backend/dataall/cdkproxy/stacks/pipeline_template.py
new file mode 100644
index 000000000..20fc3d146
--- /dev/null
+++ b/backend/dataall/cdkproxy/stacks/pipeline_template.py
@@ -0,0 +1,160 @@
+import logging
+import os
+import sys
+import subprocess
+
+# from .manager import stack
+from ... import db
+# from ...db import models
+from ...db.api import Environment, Pipeline
+# from ...utils.cdk_nag_utils import CDKNagUtil
+# from ...utils.runtime_stacks_tagging import TagsUtil
+from ...aws.handlers.sts import SessionHelper
+
+logger = logging.getLogger(__name__)
+
+# @stack(stack='cdkrepo')
+class PipelineTemplateStack:
+    """
+    Create a stack that contains a CDK Continuous Integration and Delivery (CI/CD) pipeline.
+ + The pipeline is based on CodePipeline pipelines + + - Defaults for source/synth - CodeCommit & cdk synth + - blueprint with DDK application code added in the CodeCommit repository + - ability to define development stages: dev, test, prod + - ability to select gitflow or trunk-based as development strategy + - Ability to connect to private artifactory to pull artifacts from at synth + - Security best practices - ensures pipeline buckets block non-SSL, and are KMS-encrypted with rotated keys + - data.all metadata as environment variables accesible at synth + + """ + def get_engine(self): + envname = os.environ.get("envname", "local") + engine = db.get_engine(envname=envname) + return engine + + module_name = __file__ + + def __init__(self, stack): + engine = self.get_engine() + with engine.scoped_session() as session: + + self.pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) + self.pipeline_environment = Environment.get_environment_by_uri(session, self.pipeline.environmentUri) + # Development environments + self.development_environments = Pipeline.query_pipeline_environments(session, stack.targetUri) + + aws = SessionHelper.remote_session(self.pipeline_environment.AwsAccountId) + env_creds = aws.get_credentials() + + python_path = '/:'.join(sys.path)[1:] + ':/code' + os.getenv('PATH') + + self.env = { + 'AWS_REGION': self.pipeline_environment.region, + 'AWS_DEFAULT_REGION': self.pipeline_environment.region, + 'CURRENT_AWS_ACCOUNT': self.pipeline_environment.AwsAccountId, + 'PYTHONPATH': python_path, + 'PATH': python_path, + 'envname': os.environ.get('envname', 'local'), + } + if env_creds: + self.env.update( + { + 'AWS_ACCESS_KEY_ID': env_creds.access_key, + 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, + 'AWS_SESSION_TOKEN': env_creds.token + } + ) + + self.code_dir_path = os.path.dirname(os.path.abspath(__file__)) + + template = self.pipeline.template + + self.venv_name = self.initialize_repo(template) + self.git_push_repo() + + + def initialize_repo(self, template): + venv_name = ".venv" + cmd_init = [ + "pip install aws-ddk", + f"git clone {template} {self.pipeline.repo}", + f"cd {self.pipeline.repo}", + "rm -rf .git", + "git init --initial-branch main", + f"python3 -m venv {venv_name} && source {venv_name}/bin/activate", + "pip install -r requirements.txt", + f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" + ] + + logger.info(f"Running Commands: {'; '.join(cmd_init)}") + + process = subprocess.run( + '; '.join(cmd_init), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=self.code_dir_path, + env=self.env + ) + if process.returncode == 0: + logger.info("Successfully Initialized New CDK/DDK App") + + return venv_name + + def git_push_repo(self): + git_cmds = [ + 'git config user.email "codebuild@example.com"', + 'git config user.name "CodeBuild"', + 'git config --local credential.helper "!aws codecommit credential-helper $@"', + "git config --local credential.UseHttpPath true", + "git add .", + "git commit -a -m 'Initial Commit' ", + "git push -u origin main" + ] + + logger.info(f"Running Commands: {'; '.join(git_cmds)}") + + process = subprocess.run( + '; '.join(git_cmds), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=os.path.join(self.code_dir_path, self.pipeline.repo), + env=self.env + ) + if process.returncode == 0: + logger.info("Successfully Pushed DDK App Code") + + @staticmethod + def clean_up_repo(path): + if path: + precmd = [ + 'deactivate;', + 'rm', + '-rf', + f"{path}" + 
] + + cwd = os.path.dirname(os.path.abspath(__file__)) + logger.info(f"Running command : \n {' '.join(precmd)}") + + process = subprocess.run( + ' '.join(precmd), + text=True, + shell=True, # nosec + encoding='utf-8', + capture_output=True, + cwd=cwd + ) + + if process.returncode == 0: + print(f"Successfully cleaned cloned repo: {path}. {str(process.stdout)}") + else: + logger.error( + f'Failed clean cloned repo: {path} due to {str(process.stderr)}' + ) + else: + logger.info(f"Info:Path {path} not found") + return \ No newline at end of file diff --git a/backend/dataall/db/api/pipeline.py b/backend/dataall/db/api/pipeline.py index 1ea0b5bf8..0216280d1 100644 --- a/backend/dataall/db/api/pipeline.py +++ b/backend/dataall/db/api/pipeline.py @@ -63,7 +63,7 @@ def create_pipeline( region=environment.region, repo=slugify(data['label']), devStrategy=data['devStrategy'], - template=data['template'] if data['template'] != '' else data['label'], + template=data['template'] if data['devStrategy'] == 'template' else "", ) session.add(pipeline) @@ -310,8 +310,10 @@ def delete_pipeline_environment( return True @staticmethod + @has_tenant_perm(permissions.MANAGE_PIPELINES) + @has_resource_perm(permissions.UPDATE_PIPELINE) def update_pipeline_environment( - session, username, groups, data=None, check_perm=None + session, username, groups, uri, data=None, check_perm=None ) -> models.DataPipelineEnvironment: pipeline_env = session.query(models.DataPipelineEnvironment).filter( and_( @@ -319,7 +321,7 @@ def update_pipeline_environment( models.DataPipelineEnvironment.environmentUri == data['environmentUri'], models.DataPipelineEnvironment.stage == data['stage'] ) - ) + ).first() if data: if isinstance(data, dict): for k in data.keys(): diff --git a/frontend/src/views/Pipelines/PipelineCreateForm.js b/frontend/src/views/Pipelines/PipelineCreateForm.js index 7809bb33b..1f3ffe456 100644 --- a/frontend/src/views/Pipelines/PipelineCreateForm.js +++ b/frontend/src/views/Pipelines/PipelineCreateForm.js @@ -44,7 +44,7 @@ const PipelineCrateForm = (props) => { const [loading, setLoading] = useState(true); const [groupOptions, setGroupOptions] = useState([]); const [environmentOptions, setEnvironmentOptions] = useState([]); - const devOptions =[{value:"cdk-trunk", label:"CDK Pipelines - Trunk-based"},{value:"trunk", label:"CodePipeline - Trunk-based"},{value:"gitflow", label:"CodePipeline - Gitflow"}];/*DBT Pipelines*/ + const devOptions =[{value:"cdk-trunk", label:"CDK Pipelines - Trunk-based"},{value:"trunk", label:"CodePipeline - Trunk-based"},{value:"gitflow", label:"CodePipeline - Gitflow"},{value:"template", label:"GitHub Template"}];/*DBT Pipelines*/ const [triggerEnvSubmit, setTriggerEnvSubmit] = useState(false); const [countEnvironmentsValid, setCountEnvironmentsValid] = useState(false); const [pipelineUri, setPipelineUri] = useState(''); @@ -435,17 +435,19 @@ const PipelineCrateForm = (props) => { - + {values.devStrategy === "template" && ( + + )} From cef4f612611b1e72b7ab5868e7d61963b44824df Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Wed, 23 Nov 2022 09:11:33 -0500 Subject: [PATCH 6/9] Handle Updates to Pipeline Dev Envs --- .../cdk_data_pipeline_blueprint/.gitignore | 169 ---------- .../cdk_data_pipeline_blueprint/README.md | 1 - .../cdk_data_pipeline_blueprint/app.py | 37 -- .../cdk_data_pipeline_blueprint/cdk.json | 30 -- .../cdk_data_pipeline_blueprint/ddk.json | 22 -- .../ddk_app/__init__.py | 0 .../ddk_app/ddk_app_stack.py | 12 - .../requirements-dev.txt | 1 - .../requirements.txt | 3 - 
.../cdk_data_pipeline_blueprint/setup.py | 30 -- .../cdk_data_pipeline_blueprint/source.bat | 13 - .../cdk_data_pipeline_blueprint/test.sh | 7 - .../api/Objects/DataPipeline/resolvers.py | 5 + backend/dataall/cdkproxy/cdk_cli_wrapper.py | 28 +- backend/dataall/cdkproxy/stacks/__init__.py | 2 +- .../dataall/cdkproxy/stacks/cdk_pipeline.py | 190 +++++++---- backend/dataall/cdkproxy/stacks/pipeline.py | 317 ++++++++++-------- .../cdkproxy/stacks/pipeline_template.py | 312 ++++++++--------- backend/dataall/db/api/pipeline.py | 3 + 19 files changed, 487 insertions(+), 695 deletions(-) delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/.gitignore delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/README.md delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/app.py delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/cdk.json delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/ddk.json delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/ddk_app/__init__.py delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/ddk_app/ddk_app_stack.py delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/requirements-dev.txt delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/requirements.txt delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/setup.py delete mode 100644 backend/blueprints/cdk_data_pipeline_blueprint/source.bat delete mode 100755 backend/blueprints/cdk_data_pipeline_blueprint/test.sh diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/.gitignore b/backend/blueprints/cdk_data_pipeline_blueprint/.gitignore deleted file mode 100644 index a6527d374..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/.gitignore +++ /dev/null @@ -1,169 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -pip-wheel-metadata/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -.python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - - -# VSCode extension -.vscode/ -/.favorites.json -*.code-workspace - -# TypeScript incremental build states -*.tsbuildinfo - -# Local state files & OS specifics -.DS_Store -node_modules/ -lerna-debug.log -dist/ -pack/ -.BUILD_COMPLETED -.local-npm/ -.tools/ -coverage/ -.nyc_output -.LAST_BUILD -*.sw[a-z] -*~ -.idea - -# We don't want tsconfig at the root -/tsconfig.json - -# Backed up json files -*.json-e - -# CDK Context & Staging files -cdk.context.json -.cdk.staging/ -cdk.out/ -.out - -# DDK Context & Staging files -.ddk.out/ \ No newline at end of file diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/README.md b/backend/blueprints/cdk_data_pipeline_blueprint/README.md deleted file mode 100644 index 7e39b3aac..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/README.md +++ /dev/null @@ -1 +0,0 @@ -# Welcome to your AWS DDK project! diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/app.py b/backend/blueprints/cdk_data_pipeline_blueprint/app.py deleted file mode 100644 index 0bd2a25a8..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/app.py +++ /dev/null @@ -1,37 +0,0 @@ - -# !/usr/bin/env python3 - -import aws_cdk as cdk -from aws_ddk_core.cicd import CICDPipelineStack -from ddk_app.ddk_app_stack import DDKApplicationStack -from aws_ddk_core.config import Config - -app = cdk.App() - -class ApplicationStage(cdk.Stage): - def __init__( - self, - scope, - environment_id: str, - **kwargs, - ) -> None: - super().__init__(scope, f"dataall-{environment_id.title()}", **kwargs) - DDKApplicationStack(self, "DataPipeline-PIPELINENAME-PIPELINEURI", environment_id) - -config = Config() -( - CICDPipelineStack( - app, - id="dataall-pipeline-PIPELINENAME-PIPELINEURI", - environment_id="cicd", - pipeline_name="PIPELINENAME", - ) - .add_source_action(repository_name="dataall-PIPELINENAME-PIPELINEURI") - .add_synth_action() - .build().add_stage("dev", ApplicationStage(app, "dev", env=config.get_env("dev"))).add_stage("prod", ApplicationStage(app, "prod", env=config.get_env("prod"))) - .synth() -) - -app.synth() - - diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/cdk.json b/backend/blueprints/cdk_data_pipeline_blueprint/cdk.json deleted file mode 100644 index dc5dc323f..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/cdk.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "app": "python3 app.py", - "watch": { - "include": [ - "**" - ], - "exclude": [ - "README.md", - "cdk*.json", - "requirements*.txt", - "source.bat", - "**/__init__.py", - "python/__pycache__", - "tests" - ] - }, - "context": { - "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true, - "@aws-cdk/core:stackRelativeExports": true, - "@aws-cdk/aws-rds:lowercaseDbIdentifier": true, - "@aws-cdk/aws-lambda:recognizeVersionProps": true, - "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, - "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, - "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, - "@aws-cdk/core:target-partitions": [ - "aws", - "aws-cn" - ] - } -} \ No newline at end of file diff --git 
a/backend/blueprints/cdk_data_pipeline_blueprint/ddk.json b/backend/blueprints/cdk_data_pipeline_blueprint/ddk.json deleted file mode 100644 index be889932b..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/ddk.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "environments": { - "cicd": { - "account": "111111111111", - "region": "eu-west-1" - }, - "dev": { - "account": "222222222222", - "region": "eu-west-1", - "resources": { - "ddk-bucket": {"versioned": false, "removal_policy": "destroy"} - } - }, - "prod": { - "account": "333333333333", - "region": "eu-west-1", - "resources": { - "ddk-bucket": {"versioned": true, "removal_policy": "retain"} - } - } - } -} \ No newline at end of file diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/ddk_app/__init__.py b/backend/blueprints/cdk_data_pipeline_blueprint/ddk_app/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/ddk_app/ddk_app_stack.py b/backend/blueprints/cdk_data_pipeline_blueprint/ddk_app/ddk_app_stack.py deleted file mode 100644 index f9f87e4dc..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/ddk_app/ddk_app_stack.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Any - -from aws_ddk_core.base import BaseStack -from constructs import Construct - - -class DDKApplicationStack(BaseStack): - - def __init__(self, scope: Construct, id: str, environment_id: str, **kwargs: Any) -> None: - super().__init__(scope, id, environment_id, **kwargs) - - # The code that defines your stack goes here: diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/requirements-dev.txt b/backend/blueprints/cdk_data_pipeline_blueprint/requirements-dev.txt deleted file mode 100644 index 9299a7a8b..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/requirements-dev.txt +++ /dev/null @@ -1 +0,0 @@ -pytest==6.2.5 \ No newline at end of file diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/requirements.txt b/backend/blueprints/cdk_data_pipeline_blueprint/requirements.txt deleted file mode 100644 index 80f321275..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -aws-cdk-lib==2.20.0 -constructs>=10.0.0,<11.0.0 -aws_ddk_core==0.3.1 \ No newline at end of file diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/setup.py b/backend/blueprints/cdk_data_pipeline_blueprint/setup.py deleted file mode 100644 index 9c42c4e8c..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/setup.py +++ /dev/null @@ -1,30 +0,0 @@ -import setuptools - -with open("README.md") as fp: - long_description = fp.read() - - -setuptools.setup( - name="sample-app", - version="0.3.1", - description="An empty DDK Python app", - long_description=long_description, - long_description_content_type="text/markdown", - author="author", - package_dir={"": "sample-app"}, - packages=setuptools.find_packages(where="sample-app"), - install_requires=open("requirements.txt").read().strip().split("\n"), - python_requires=">=3.6", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Programming Language :: JavaScript", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Topic :: Software Development :: Code Generators", - "Topic :: Utilities", - "Typing :: Typed", - ], -) diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/source.bat 
b/backend/blueprints/cdk_data_pipeline_blueprint/source.bat deleted file mode 100644 index 9e1a83442..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/source.bat +++ /dev/null @@ -1,13 +0,0 @@ -@echo off - -rem The sole purpose of this script is to make the command -rem -rem source .venv/bin/activate -rem -rem (which activates a Python virtualenv on Linux or Mac OS X) work on Windows. -rem On Windows, this command just runs this batch file (the argument is ignored). -rem -rem Now we don't need to document a Windows command for activating a virtualenv. - -echo Executing .venv\Scripts\activate.bat for you -.venv\Scripts\activate.bat diff --git a/backend/blueprints/cdk_data_pipeline_blueprint/test.sh b/backend/blueprints/cdk_data_pipeline_blueprint/test.sh deleted file mode 100755 index c94c19697..000000000 --- a/backend/blueprints/cdk_data_pipeline_blueprint/test.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -set -e - -# call tests for your code below - -# pytest tests/unit/my_test.py \ No newline at end of file diff --git a/backend/dataall/api/Objects/DataPipeline/resolvers.py b/backend/dataall/api/Objects/DataPipeline/resolvers.py index 6d6579323..3c993e2f5 100644 --- a/backend/dataall/api/Objects/DataPipeline/resolvers.py +++ b/backend/dataall/api/Objects/DataPipeline/resolvers.py @@ -408,6 +408,11 @@ def delete_pipeline( ) if deleteFromAWS: + aws_session = SessionHelper.remote_session(env.AwsAccountId) + codecommit_client = aws_session.client("codecommit", region_name=env.region) + response = codecommit_client.delete_repository( + repositoryName=pipeline.repo + ) if pipeline.devStrategy == "cdk-trunk": stack_helper.delete_stack( context=context, diff --git a/backend/dataall/cdkproxy/cdk_cli_wrapper.py b/backend/dataall/cdkproxy/cdk_cli_wrapper.py index 4f1ba2c16..30c95f7c9 100644 --- a/backend/dataall/cdkproxy/cdk_cli_wrapper.py +++ b/backend/dataall/cdkproxy/cdk_cli_wrapper.py @@ -18,7 +18,7 @@ from ..db.api import Pipeline, Environment, Stack from ..utils.alarm_service import AlarmService from dataall.cdkproxy.stacks.cdk_pipeline import CDKPipelineStack -from dataall.cdkproxy.stacks.pipeline_template import PipelineTemplateStack +# from dataall.cdkproxy.stacks.pipeline_template import PipelineTemplateStack logger = logging.getLogger('cdksass') @@ -81,16 +81,18 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s stack.status = 'PENDING' session.commit() - if stack.stack == "cdkpipeline": + if stack.stack == "cdkpipeline" or stack.stack == "template": cdkpipeline = CDKPipelineStack(stack) - venv_name = cdkpipeline.venv_name - pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) - path = f"./stacks/{pipeline.repo}/" - elif stack.stack == "template": - pipeline_template = PipelineTemplateStack(stack) - venv_name = pipeline_template.venv_name + venv_name = cdkpipeline.venv_name if cdkpipeline.venv_name else None pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) path = f"./stacks/{pipeline.repo}/" + if not venv_name: + logger.info("Successfully Updated CDK Pipeline") + meta = describe_stack(stack) + stack.stackid = meta['StackId'] + stack.status = meta['StackStatus'] + update_stack_output(session, stack) + return cwd = os.path.join(os.path.dirname(os.path.abspath(__file__)), path) if path else os.path.dirname(os.path.abspath(__file__)) @@ -150,8 +152,8 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s } ) - if stack.stack == "template": - cmd.insert(0, f"source 
{venv_name}/bin/activate;") + if stack.stack == "template" or stack.stack == "cdkpipeline": + if stack.stack == "template": cmd.insert(0, f"source {venv_name}/bin/activate;") aws = SessionHelper.remote_session(stack.accountid) creds = aws.get_credentials() env.update( @@ -176,10 +178,10 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s env=env, cwd=cwd, ) - if stack.stack == "cdkpipeline": + if stack.stack == "cdkpipeline" or stack.stack == "template": CDKPipelineStack.clean_up_repo(path=f"./{pipeline.repo}") - if stack.stack == "template": - PipelineTemplateStack.clean_up_repo(path=f"./{pipeline.repo}") + # if stack.stack == "template": + # PipelineTemplateStack.clean_up_repo(path=f"./{pipeline.repo}") if process.returncode == 0: meta = describe_stack(stack) diff --git a/backend/dataall/cdkproxy/stacks/__init__.py b/backend/dataall/cdkproxy/stacks/__init__.py index 202af2c50..61db88702 100644 --- a/backend/dataall/cdkproxy/stacks/__init__.py +++ b/backend/dataall/cdkproxy/stacks/__init__.py @@ -1,7 +1,7 @@ from .dataset import Dataset from .environment import EnvironmentSetup from .cdk_pipeline import CDKPipelineStack -from .pipeline_template import PipelineTemplateStack +# from .pipeline_template import PipelineTemplateStack from .pipeline import PipelineStack from .manager import stack, instanciate_stack, StackManager from .notebook import SagemakerNotebook diff --git a/backend/dataall/cdkproxy/stacks/cdk_pipeline.py b/backend/dataall/cdkproxy/stacks/cdk_pipeline.py index 6b2a838fd..7f5547178 100644 --- a/backend/dataall/cdkproxy/stacks/cdk_pipeline.py +++ b/backend/dataall/cdkproxy/stacks/cdk_pipeline.py @@ -2,12 +2,14 @@ import os import sys import subprocess +import boto3 from ... import db from ...db.api import Environment, Pipeline # from ...utils.cdk_nag_utils import CDKNagUtil # from ...utils.runtime_stacks_tagging import TagsUtil from ...aws.handlers.sts import SessionHelper +from botocore.exceptions import ClientError logger = logging.getLogger(__name__) @@ -43,61 +45,98 @@ def __init__(self, stack): # Development environments self.development_environments = Pipeline.query_pipeline_environments(session, stack.targetUri) - aws = SessionHelper.remote_session(self.pipeline_environment.AwsAccountId) - env_creds = aws.get_credentials() - - python_path = '/:'.join(sys.path)[1:] + ':/code' + os.getenv('PATH') - - self.env = { - 'AWS_REGION': self.pipeline_environment.region, - 'AWS_DEFAULT_REGION': self.pipeline_environment.region, - 'CURRENT_AWS_ACCOUNT': self.pipeline_environment.AwsAccountId, - 'PYTHONPATH': python_path, - 'PATH': python_path, - 'envname': os.environ.get('envname', 'local'), - } - if env_creds: - self.env.update( - { - 'AWS_ACCESS_KEY_ID': env_creds.access_key, - 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, - 'AWS_SESSION_TOKEN': env_creds.token - } - ) - + self.env, aws = CDKPipelineStack._set_env_vars(self.pipeline_environment) + self.code_dir_path = os.path.dirname(os.path.abspath(__file__)) + template = self.pipeline.template + + try: + codecommit_client = aws.client('codecommit', region_name=self.pipeline_environment.region) + repository = CDKPipelineStack._check_repository(codecommit_client,self.pipeline.repo) + if repository: + self.venv_name = None + self.code_dir_path = os.path.realpath( + os.path.abspath( + os.path.join( + __file__, "..", "..", "..", "..", "blueprints", "data_pipeline_blueprint" + ) + ) + ) + CDKPipelineStack.write_ddk_json_multienvironment(path=self.code_dir_path, output_file="ddk.json", 
pipeline_environment=self.pipeline_environment, development_environments=self.development_environments) + CDKPipelineStack.write_ddk_app_multienvironment(path=self.code_dir_path, output_file="app.py", pipeline=self.pipeline, development_environments=self.development_environments) + + logger.info(f"Pipeline Repo {self.pipeline.repo} Exists...Handling Update") + update_cmds = [ + f'REPO_NAME={self.pipeline.repo}', + 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', + 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://ddk.json --file-path ddk.json --parent-commit-id ${COMMITID} --cli-binary-format raw-in-base64-out', + 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', + 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://app.py --file-path app.py --parent-commit-id ${COMMITID} --cli-binary-format raw-in-base64-out', + ] + + process = subprocess.run( + "; ".join(update_cmds), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=self.code_dir_path, + env=self.env + ) + if process.returncode != 0: + raise Exception + else: + raise Exception + + except: + if len(template): + self.venv_name = self.initialize_repo_template(template) + else: + self.venv_name = self.initialize_repo() + CDKPipelineStack.write_ddk_app_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="app.py", pipeline=self.pipeline, development_environments=self.development_environments) + CDKPipelineStack.write_ddk_json_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="ddk.json", pipeline_environment=self.pipeline_environment, development_environments=self.development_environments) + self.git_push_repo() - template = self.pipeline.template if (self.pipeline.template != self.pipeline.label) else "" - self.venv_name = self.initialize_repo(template) - if not len(template): - self.write_ddk_app_multienvironment(output_file="app.py") - self.write_ddk_json_multienvironment(output_file="ddk.json") - self.git_push_repo() - + def initialize_repo(self): + venv_name = ".venv" + cmd_init = [ + # "pip install aws-ddk", + f"ddk init {self.pipeline.repo} --generate-only", + f"cd {self.pipeline.repo}", + "git init --initial-branch main", + # f"python3 -m venv {venv_name} && source {venv_name}/bin/activate", + # "pip install -r requirements.txt", + f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" + ] - def initialize_repo(self, template): + logger.info(f"Running Commands: {'; '.join(cmd_init)}") - venv_name = ".venv" + process = subprocess.run( + '; '.join(cmd_init), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=self.code_dir_path, + env=self.env + ) + if process.returncode == 0: + logger.info("Successfully Initialized New CDK/DDK App") - template_cmds = [ + return venv_name + + def initialize_repo_template(self, template): + venv_name = ".venv" + cmd_init = [ + "pip install aws-ddk", f"git clone {template} {self.pipeline.repo}", f"cd {self.pipeline.repo}", - "rm -rf .git" - ] - no_template_cmds = [ - f"ddk init {self.pipeline.repo} --generate-only", - f"cd {self.pipeline.repo}" - ] - repo_cmds = [ + "rm -rf .git", "git init --initial-branch main", f"python3 -m venv {venv_name} && source {venv_name}/bin/activate", "pip install -r requirements.txt", 
f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" ] - cmd_init = [ "pip install aws-ddk"] + (template_cmds if (self.pipeline.template != self.pipeline.label) else no_template_cmds ) + repo_cmds - logger.info(f"Running Commands: {'; '.join(cmd_init)}") process = subprocess.run( @@ -113,10 +152,10 @@ def initialize_repo(self, template): return venv_name - - def write_ddk_json_multienvironment(self, output_file): + @staticmethod + def write_ddk_json_multienvironment(path, output_file, pipeline_environment, development_environments): json_envs = "" - for env in self.development_environments: + for env in development_environments: json_env = f""", "{env.stage}": {{ "account": "{env.AwsAccountId}", @@ -130,17 +169,17 @@ def write_ddk_json_multienvironment(self, output_file): json = f"""{{ "environments": {{ "cicd": {{ - "account": "{self.pipeline_environment.AwsAccountId}", - "region": "{self.pipeline_environment.region}" + "account": "{pipeline_environment.AwsAccountId}", + "region": "{pipeline_environment.region}" }}{json_envs} }} }}""" - with open(f'{self.code_dir_path}/{self.pipeline.repo}/{output_file}', 'w') as text_file: + with open(f'{path}/{output_file}', 'w') as text_file: print(json, file=text_file) - - def write_ddk_app_multienvironment(self, output_file): + @staticmethod + def write_ddk_app_multienvironment(path, output_file, pipeline, development_environments): header = f""" # !/usr/bin/env python3 @@ -159,23 +198,23 @@ def __init__( **kwargs, ) -> None: super().__init__(scope, f"dataall-{{environment_id.title()}}", **kwargs) - DdkApplicationStack(self, "DataPipeline-{self.pipeline.label}-{self.pipeline.DataPipelineUri}", environment_id) + DdkApplicationStack(self, "DataPipeline-{pipeline.label}-{pipeline.DataPipelineUri}", environment_id) -id = f"dataall-cdkpipeline-{self.pipeline.DataPipelineUri}" +id = f"dataall-cdkpipeline-{pipeline.DataPipelineUri}" config = Config() ( CICDPipelineStack( app, id=id, environment_id="cicd", - pipeline_name="{self.pipeline.label}", + pipeline_name="{pipeline.label}", ) - .add_source_action(repository_name="{self.pipeline.repo}") + .add_source_action(repository_name="{pipeline.repo}") .add_synth_action() .build()""" stages = "" - for env in sorted(self.development_environments, key=lambda env: env.order): + for env in sorted(development_environments, key=lambda env: env.order): stage = f""".add_stage("{env.stage}", ApplicationStage(app, "{env.stage}", env=config.get_env("{env.stage}")))""" stages = stages + stage footer = """ @@ -185,7 +224,8 @@ def __init__( app.synth() """ app = header + stages + footer - with open(f'{self.code_dir_path}/{self.pipeline.repo}/{output_file}', 'w') as text_file: + + with open(f'{path}/{output_file}', 'w') as text_file: print(app, file=text_file) @@ -243,4 +283,42 @@ def clean_up_repo(path): ) else: logger.info(f"Info:Path {path} not found") - return \ No newline at end of file + return + + @staticmethod + def _check_repository(codecommit_client, repo_name): + repository = None + logger.info(f"Checking Repository Exists: {repo_name}") + try: + repository = codecommit_client.get_repository(repositoryName=repo_name) + except ClientError as e: + if e.response['Error']['Code'] == 'RepositoryDoesNotExistException': + logger.debug(f'Repository does not exists {repo_name} %s', e) + else: + raise e + return repository if repository else None + + @staticmethod + def _set_env_vars(pipeline_environment): + aws = 
SessionHelper.remote_session(pipeline_environment.AwsAccountId) + env_creds = aws.get_credentials() + + python_path = '/:'.join(sys.path)[1:] + ':/code' + os.getenv('PATH') + + env = { + 'AWS_REGION': pipeline_environment.region, + 'AWS_DEFAULT_REGION': pipeline_environment.region, + 'CURRENT_AWS_ACCOUNT': pipeline_environment.AwsAccountId, + 'PYTHONPATH': python_path, + 'PATH': python_path, + 'envname': os.environ.get('envname', 'local'), + } + if env_creds: + env.update( + { + 'AWS_ACCESS_KEY_ID': env_creds.access_key, + 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, + 'AWS_SESSION_TOKEN': env_creds.token + } + ) + return env, aws \ No newline at end of file diff --git a/backend/dataall/cdkproxy/stacks/pipeline.py b/backend/dataall/cdkproxy/stacks/pipeline.py index 439736084..f467ef9c0 100644 --- a/backend/dataall/cdkproxy/stacks/pipeline.py +++ b/backend/dataall/cdkproxy/stacks/pipeline.py @@ -14,8 +14,10 @@ from aws_cdk import aws_kms as kms from aws_cdk.aws_s3_assets import Asset +from botocore.exceptions import ClientError from .manager import stack +from ...aws.handlers.sts import SessionHelper from ... import db from ...db import models from ...db.api import Environment, Pipeline, Dataset @@ -170,47 +172,51 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): ) ) - PipelineStack.write_deploy_buildspec(path=code_dir_path, output_file="deploy_buildspec.yaml") + try: + env_vars, aws = PipelineStack._set_env_vars(pipeline_environment) + codecommit_client = aws.client('codecommit', region_name=pipeline_environment.region) + repository = PipelineStack._check_repository(codecommit_client, pipeline.repo) + if repository: + PipelineStack.write_ddk_json_multienvironment(path=code_dir_path, output_file="ddk.json", pipeline_environment=pipeline_environment, development_environments=development_environments) + + logger.info(f"Pipeline Repo {pipeline.repo} Exists...Handling Update") + update_cmds = [ + f'REPO_NAME={pipeline.repo}', + 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', + 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://ddk.json --file-path ddk.json --parent-commit-id ${COMMITID} --cli-binary-format raw-in-base64-out', + ] + + process = subprocess.run( + "; ".join(update_cmds), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=code_dir_path, + env=env_vars + ) - if pipeline.devStrategy == "trunk": - PipelineStack.write_init_deploy_buildspec(path=code_dir_path, output_file="init_deploy_buildspec.yaml") + if process.returncode != 0: + raise Exception + else: + raise Exception + except: + PipelineStack.initialize_repo(pipeline, code_dir_path) - else: - PipelineStack.write_init_branches_deploy_buildspec(path=code_dir_path, output_file="init_branches_deploy_buildspec.yaml") + PipelineStack.write_deploy_buildspec(path=code_dir_path, output_file=f"{pipeline.repo}/deploy_buildspec.yaml") + + # if pipeline.devStrategy != "trunk": + # PipelineStack.write_init_branches_deploy_buildspec(path=code_dir_path, output_file=f"{pipeline.repo}/init_branches_deploy_buildspec.yaml") + + PipelineStack.write_ddk_json_multienvironment(path=code_dir_path, output_file=f"{pipeline.repo}/ddk.json", pipeline_environment=pipeline_environment, development_environments=development_environments) - PipelineStack.write_ddk_json_multienvironment(path=code_dir_path, output_file="dataall_ddk.json", pipeline_environment=pipeline_environment, 
development_environments=development_environments) - try: - repository = codecommit.from_repository_name( - self, - id="PipelineRepository", - repository_name=pipeline.repo - ) - logger.info(f"Pipeline Repo {pipeline.repo} Exists...Handling Update") - update_cmds = [ - f'REPO_NAME={pipeline.repo}', - 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', - 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://dataall_ddk.json --file-path dataall_ddk.json --parent-commit-id ${COMMITID}', - # 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', - # 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://deploy_buildspec.yaml --file-path deploy_buildspec.yaml --parent-commit-id ${COMMITID}', - ] - process = subprocess.run( - "; ".join(update_cmds), - text=True, - shell=True, # nosec - encoding='utf-8', - cwd=code_dir_path - ) - - except: logger.info(f"Pipeline Repo {pipeline.repo} Does Not Exist... Creating Repository") PipelineStack.cleanup_zip_directory(code_dir_path) - PipelineStack.zip_directory(code_dir_path) - + PipelineStack.zip_directory(os.path.join(code_dir_path, pipeline.repo)) code_asset = Asset( - scope=self, id=f"{pipeline.name}-asset", path=f"{code_dir_path}/code.zip" + scope=self, id=f"{pipeline.name}-asset", path=f"{code_dir_path}/{pipeline.repo}/code.zip" ) code = codecommit.CfnRepository.CodeProperty( @@ -226,6 +232,20 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): id="CodecommitRepository", repository_name=pipeline.repo, ) + repository.apply_removal_policy(RemovalPolicy.RETAIN) + + # if pipeline.devStrategy != 'trunk': + # for env in development_environments: + # if env.stage != 'prod': + # response = codecommit_client.get_branch( + # repositoryName=repository.repository_name, + # branchName='main' + # ) + # codecommit_client.create_branch( + # repositoryName=repository.repository_name, + # branchName=branch_name, + # commitId=response['branch']['commitId'] + # ) if pipeline.devStrategy == "trunk": codepipeline_pipeline = codepipeline.Pipeline( @@ -253,9 +273,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): ) for env in sorted(development_environments, key=lambda env: env.order): - - buildspec = "init_deploy_buildspec.yaml" if env.order == 1 else "deploy_buildspec.yaml" - + buildspec = "deploy_buildspec.yaml" build_project = codebuild.PipelineProject( scope=self, id=f'{pipeline.name}-build-{env.stage}', @@ -300,8 +318,10 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): else: for env in development_environments: - branch_name = 'main' if (env.stage == 'prod' or development_environments.count() == 1) else env.stage - buildspec = "init_branches_deploy_buildspec.yaml" if (env.stage == 'prod' or development_environments.count() == 1) else "deploy_buildspec.yaml" + branch_name = 'main' if (env.stage == 'prod') else env.stage + # buildspec = "init_branches_deploy_buildspec.yaml" if (env.stage == 'prod' or development_environments.count() == 1) else "deploy_buildspec.yaml" + buildspec = "deploy_buildspec.yaml" + codepipeline_pipeline = codepipeline.Pipeline( scope=self, id=f"{pipeline.name}-{env.stage}", @@ -358,7 +378,6 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): ) # CloudFormation output - CfnOutput( self, "RepoNameOutput", @@ -377,6 +396,7 @@ def __init__(self,
scope, id, target_uri: str = None, **kwargs): CDKNagUtil.check_rules(self) PipelineStack.cleanup_zip_directory(code_dir_path) + PipelineStack.cleanup_pipeline_directory(os.path.join(code_dir_path, pipeline.repo)) @staticmethod def zip_directory(path): @@ -393,6 +413,13 @@ def cleanup_zip_directory(path): else: logger.info("Info: %s Zip not found" % f"{path}/code.zip") + @staticmethod + def cleanup_pipeline_directory(path): + if os.path.isdir(path): + shutil.rmtree(path) + else: + logger.info("Info: %s Directory not found" % f"{path}") + @staticmethod def make_environment_variables( pipeline, @@ -417,109 +444,52 @@ def make_environment_variables( env_vars = dict(env_vars_1) return env_vars - @staticmethod - def write_init_deploy_buildspec(path, output_file): - yaml = """ - version: '0.2' - env: - git-credential-helper: yes - phases: - pre_build: - commands: - - n 16.15.1 - - npm install -g aws-cdk - - pip install aws-ddk - - | - if [ ${CODEBUILD_BUILD_NUMBER} == 1 ] ; then - echo "first build"; - else - echo "not first build"; - fi - - git config --global user.email "codebuild@example.com" - - git config --global user.name "CodeBuild" - - | - if [ ${CODEBUILD_BUILD_NUMBER} == 1 ] ; then - git clone "https://git-codecommit.${AWS_REGION}.amazonaws.com/v1/repos/${PIPELINE_NAME}"; - cd $PIPELINE_NAME; - git checkout main; - ddk init --generate-only ddk-app; - cp -R ddk-app/* ./; - rm -r ddk-app; - cp dataall_ddk.json ./ddk.json; - cp app_multiaccount.py ./app.py; - cp ddk_app/ddk_app_stack_multiaccount.py ./ddk_app/ddk_app_stack.py; - rm dataall_ddk.json app_multiaccount.py ddk_app/ddk_app_stack_multiaccount.py; - git add .; - git commit -m "First Commit from CodeBuild - DDK application"; - git push --set-upstream origin main; - else - echo "not first build"; - fi - - pip install -r requirements.txt - build: - commands: - - aws sts get-caller-identity - - ddk deploy - """ - with open(f'{path}/{output_file}', 'w') as text_file: - print(yaml, file=text_file) - - @staticmethod - def write_init_branches_deploy_buildspec(path, output_file): - yaml = """ - version: '0.2' - env: - git-credential-helper: yes - phases: - install: - commands: - - 'n 16.15.1' - pre_build: - commands: - - n 16.15.1 - - npm install -g aws-cdk - - pip install aws-ddk - - | - if [ ${CODEBUILD_BUILD_NUMBER} == 1 ] ; then - echo "first build"; - else - echo "not first build"; - fi - - git config --global user.email "codebuild@example.com" - - git config --global user.name "CodeBuild" - - | - if [ ${CODEBUILD_BUILD_NUMBER} == 1 ] ; then - git clone "https://git-codecommit.${AWS_REGION}.amazonaws.com/v1/repos/${PIPELINE_NAME}"; - cd $PIPELINE_NAME; - git checkout main; - ddk init --generate-only ddk-app; - cp -R ddk-app/* ./; - rm -r ddk-app; - cp dataall_ddk.json ./ddk.json; - cp app_multiaccount.py ./app.py; - cp ddk_app/ddk_app_stack_multiaccount.py ./ddk_app/ddk_app_stack.py; - rm dataall_ddk.json app_multiaccount.py ddk_app/ddk_app_stack_multiaccount.py; - git add .; - git commit -m "First Commit from CodeBuild - DDK application"; - git push --set-upstream origin main; - IFS=',' - for stage in $DEV_STAGES; do - if [ $stage != "prod" ]; then - git checkout -b $stage; - git push --set-upstream origin $stage; - fi; - done; - else - echo "not first build"; - fi - - pip install -r requirements.txt - build: - commands: - - aws sts get-caller-identity - - ddk deploy - """ - with open(f'{path}/{output_file}', 'w') as text_file: - print(yaml, file=text_file) + # @staticmethod + # def write_init_branches_deploy_buildspec(path, 
output_file): + # yaml = """ + # version: '0.2' + # env: + # git-credential-helper: yes + # phases: + # install: + # commands: + # - 'n 16.15.1' + # pre_build: + # commands: + # - n 16.15.1 + # - npm install -g aws-cdk + # - pip install aws-ddk + # - | + # if [ ${CODEBUILD_BUILD_NUMBER} == 1 ] ; then + # echo "first build"; + # else + # echo "not first build"; + # fi + # - git config --global user.email "codebuild@example.com" + # - git config --global user.name "CodeBuild" + # - | + # if [ ${CODEBUILD_BUILD_NUMBER} == 1 ] ; then + # git clone "https://git-codecommit.${AWS_REGION}.amazonaws.com/v1/repos/${PIPELINE_NAME}"; + # cd $PIPELINE_NAME; + # git checkout main; + # IFS=',' + # for stage in $DEV_STAGES; do + # if [ $stage != "prod" ]; then + # git checkout -b $stage; + # git push --set-upstream origin $stage; + # fi; + # done; + # else + # echo "not first build"; + # fi + # - pip install -r requirements.txt + # build: + # commands: + # - aws sts get-caller-identity + # - ddk deploy + # """ + # with open(f'{path}/{output_file}', 'x') as text_file: + # print(yaml, file=text_file) @@ -539,7 +509,7 @@ def write_deploy_buildspec(path, output_file): - aws sts get-caller-identity - ddk deploy """ - with open(f'{path}/{output_file}', 'w') as text_file: + with open(f'{path}/{output_file}', 'x') as text_file: print(yaml, file=text_file) @@ -577,8 +547,6 @@ def make_codebuild_policy_statements( ], resources=[f"arn:aws:codecommit:{pipeline_environment.region}:{pipeline_environment.AwsAccountId}:{pipeline.repo}"], ) - - ] @@ -609,3 +577,64 @@ def write_ddk_json_multienvironment(path, output_file, pipeline_environment, dev with open(f'{path}/{output_file}', 'w') as text_file: print(json, file=text_file) + + + def initialize_repo(pipeline, code_dir_path): + + venv_name = ".venv" + + cmd_init = [ + # "pip install aws-ddk", + f"ddk init {pipeline.repo} --generate-only", + f"cp app_multiaccount.py ./{pipeline.repo}/app.py", + f"cp ddk_app/ddk_app_stack_multiaccount.py ./{pipeline.repo}/ddk_app/ddk_app_stack.py", + f"mkdir ./{pipeline.repo}/utils", + f"cp -R utils/* ./{pipeline.repo}/utils/" + ] + + logger.info(f"Running Commands: {'; '.join(cmd_init)}") + + process = subprocess.run( + '; '.join(cmd_init), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=code_dir_path + ) + if process.returncode == 0: + logger.info("Successfully Initialized New CDK/DDK App") + return + + @staticmethod + def _set_env_vars(pipeline_environment): + aws = SessionHelper.remote_session(pipeline_environment.AwsAccountId) + env_creds = aws.get_credentials() + + env = { + 'AWS_REGION': pipeline_environment.region, + 'AWS_DEFAULT_REGION': pipeline_environment.region, + 'CURRENT_AWS_ACCOUNT': pipeline_environment.AwsAccountId, + 'envname': os.environ.get('envname', 'local'), + } + if env_creds: + env.update( + { + 'AWS_ACCESS_KEY_ID': env_creds.access_key, + 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, + 'AWS_SESSION_TOKEN': env_creds.token + } + ) + return env, aws + + @staticmethod + def _check_repository(codecommit_client, repo_name): + repository = None + logger.info(f"Checking Repository Exists: {repo_name}") + try: + repository = codecommit_client.get_repository(repositoryName=repo_name) + except ClientError as e: + if e.response['Error']['Code'] == 'RepositoryDoesNotExistException': + logger.debug(f'Repository does not exist {repo_name} %s', e) + else: + raise e + return repository if repository else None \ No newline at
end of file diff --git a/backend/dataall/cdkproxy/stacks/pipeline_template.py b/backend/dataall/cdkproxy/stacks/pipeline_template.py index 20fc3d146..86fb0808e 100644 --- a/backend/dataall/cdkproxy/stacks/pipeline_template.py +++ b/backend/dataall/cdkproxy/stacks/pipeline_template.py @@ -1,160 +1,160 @@ -import logging -import os -import sys -import subprocess - -# from .manager import stack -from ... import db -# from ...db import models -from ...db.api import Environment, Pipeline -# from ...utils.cdk_nag_utils import CDKNagUtil -# from ...utils.runtime_stacks_tagging import TagsUtil -from ...aws.handlers.sts import SessionHelper - -logger = logging.getLogger(__name__) - -# @stack(stack='cdkrepo') -class PipelineTemplateStack: - """ - Create a stack that contains CDK Continuous Integration and Delivery (CI/CD) pipeline. - - The pipeline is based on CodePipeline pipelines - - - Defaults for source/synth - CodeCommit & cdk synth - - blueprint with DDK application code added in the CodeCommit repository - - ability to define development stages: dev, test, prod - - ability to select gitflow or trunk-based as development strategy - - Ability to connect to private artifactory to pull artifacts from at synth - - Security best practices - ensures pipeline buckets block non-SSL, and are KMS-encrypted with rotated keys - - data.all metadata as environment variables accesible at synth - - """ - def get_engine(self): - envname = os.environ.get("envname", "local") - engine = db.get_engine(envname=envname) - return engine - - module_name = __file__ - - def __init__(self, stack): - engine = self.get_engine() - with engine.scoped_session() as session: - - self.pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) - self.pipeline_environment = Environment.get_environment_by_uri(session, self.pipeline.environmentUri) - # Development environments - self.development_environments = Pipeline.query_pipeline_environments(session, stack.targetUri) - - aws = SessionHelper.remote_session(self.pipeline_environment.AwsAccountId) - env_creds = aws.get_credentials() - - python_path = '/:'.join(sys.path)[1:] + ':/code' + os.getenv('PATH') - - self.env = { - 'AWS_REGION': self.pipeline_environment.region, - 'AWS_DEFAULT_REGION': self.pipeline_environment.region, - 'CURRENT_AWS_ACCOUNT': self.pipeline_environment.AwsAccountId, - 'PYTHONPATH': python_path, - 'PATH': python_path, - 'envname': os.environ.get('envname', 'local'), - } - if env_creds: - self.env.update( - { - 'AWS_ACCESS_KEY_ID': env_creds.access_key, - 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, - 'AWS_SESSION_TOKEN': env_creds.token - } - ) - - self.code_dir_path = os.path.dirname(os.path.abspath(__file__)) +# import logging +# import os +# import sys +# import subprocess + +# # from .manager import stack +# from ... import db +# # from ...db import models +# from ...db.api import Environment, Pipeline +# # from ...utils.cdk_nag_utils import CDKNagUtil +# # from ...utils.runtime_stacks_tagging import TagsUtil +# from ...aws.handlers.sts import SessionHelper + +# logger = logging.getLogger(__name__) + +# # @stack(stack='cdkrepo') +# class PipelineTemplateStack: +# """ +# Create a stack that contains CDK Continuous Integration and Delivery (CI/CD) pipeline. 
+ +# The pipeline is based on CodePipeline pipelines + +# - Defaults for source/synth - CodeCommit & cdk synth +# - blueprint with DDK application code added in the CodeCommit repository +# - ability to define development stages: dev, test, prod +# - ability to select gitflow or trunk-based as development strategy +# - Ability to connect to private artifactory to pull artifacts from at synth +# - Security best practices - ensures pipeline buckets block non-SSL, and are KMS-encrypted with rotated keys +# - data.all metadata as environment variables accesible at synth + +# """ +# def get_engine(self): +# envname = os.environ.get("envname", "local") +# engine = db.get_engine(envname=envname) +# return engine + +# module_name = __file__ + +# def __init__(self, stack): +# engine = self.get_engine() +# with engine.scoped_session() as session: + +# self.pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) +# self.pipeline_environment = Environment.get_environment_by_uri(session, self.pipeline.environmentUri) +# # Development environments +# self.development_environments = Pipeline.query_pipeline_environments(session, stack.targetUri) + +# # aws = SessionHelper.remote_session(self.pipeline_environment.AwsAccountId) +# # env_creds = aws.get_credentials() + +# # python_path = '/:'.join(sys.path)[1:] + ':/code' + os.getenv('PATH') + +# # self.env = { +# # 'AWS_REGION': self.pipeline_environment.region, +# # 'AWS_DEFAULT_REGION': self.pipeline_environment.region, +# # 'CURRENT_AWS_ACCOUNT': self.pipeline_environment.AwsAccountId, +# # 'PYTHONPATH': python_path, +# # 'PATH': python_path, +# # 'envname': os.environ.get('envname', 'local'), +# # } +# # if env_creds: +# # self.env.update( +# # { +# # 'AWS_ACCESS_KEY_ID': env_creds.access_key, +# # 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, +# # 'AWS_SESSION_TOKEN': env_creds.token +# # } +# # ) + +# self.code_dir_path = os.path.dirname(os.path.abspath(__file__)) - template = self.pipeline.template +# template = self.pipeline.template - self.venv_name = self.initialize_repo(template) - self.git_push_repo() +# self.venv_name = self.initialize_repo(template) +# self.git_push_repo() - def initialize_repo(self, template): - venv_name = ".venv" - cmd_init = [ - "pip install aws-ddk", - f"git clone {template} {self.pipeline.repo}", - f"cd {self.pipeline.repo}", - "rm -rf .git", - "git init --initial-branch main", - f"python3 -m venv {venv_name} && source {venv_name}/bin/activate", - "pip install -r requirements.txt", - f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" - ] - - logger.info(f"Running Commands: {'; '.join(cmd_init)}") - - process = subprocess.run( - '; '.join(cmd_init), - text=True, - shell=True, # nosec - encoding='utf-8', - cwd=self.code_dir_path, - env=self.env - ) - if process.returncode == 0: - logger.info("Successfully Initialized New CDK/DDK App") - - return venv_name - - def git_push_repo(self): - git_cmds = [ - 'git config user.email "codebuild@example.com"', - 'git config user.name "CodeBuild"', - 'git config --local credential.helper "!aws codecommit credential-helper $@"', - "git config --local credential.UseHttpPath true", - "git add .", - "git commit -a -m 'Initial Commit' ", - "git push -u origin main" - ] - - logger.info(f"Running Commands: {'; '.join(git_cmds)}") - - process = subprocess.run( - '; '.join(git_cmds), - text=True, - shell=True, # nosec - encoding='utf-8', - cwd=os.path.join(self.code_dir_path, self.pipeline.repo), - env=self.env - ) - if 
process.returncode == 0: - logger.info("Successfully Pushed DDK App Code") - - @staticmethod - def clean_up_repo(path): - if path: - precmd = [ - 'deactivate;', - 'rm', - '-rf', - f"{path}" - ] - - cwd = os.path.dirname(os.path.abspath(__file__)) - logger.info(f"Running command : \n {' '.join(precmd)}") - - process = subprocess.run( - ' '.join(precmd), - text=True, - shell=True, # nosec - encoding='utf-8', - capture_output=True, - cwd=cwd - ) - - if process.returncode == 0: - print(f"Successfully cleaned cloned repo: {path}. {str(process.stdout)}") - else: - logger.error( - f'Failed clean cloned repo: {path} due to {str(process.stderr)}' - ) - else: - logger.info(f"Info:Path {path} not found") - return \ No newline at end of file +# def initialize_repo(self, template): +# venv_name = ".venv" +# cmd_init = [ +# "pip install aws-ddk", +# f"git clone {template} {self.pipeline.repo}", +# f"cd {self.pipeline.repo}", +# "rm -rf .git", +# "git init --initial-branch main", +# f"python3 -m venv {venv_name} && source {venv_name}/bin/activate", +# "pip install -r requirements.txt", +# f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" +# ] + +# logger.info(f"Running Commands: {'; '.join(cmd_init)}") + +# process = subprocess.run( +# '; '.join(cmd_init), +# text=True, +# shell=True, # nosec +# encoding='utf-8', +# cwd=self.code_dir_path, +# env=self.env +# ) +# if process.returncode == 0: +# logger.info("Successfully Initialized New CDK/DDK App") + +# return venv_name + +# def git_push_repo(self): +# git_cmds = [ +# 'git config user.email "codebuild@example.com"', +# 'git config user.name "CodeBuild"', +# 'git config --local credential.helper "!aws codecommit credential-helper $@"', +# "git config --local credential.UseHttpPath true", +# "git add .", +# "git commit -a -m 'Initial Commit' ", +# "git push -u origin main" +# ] + +# logger.info(f"Running Commands: {'; '.join(git_cmds)}") + +# process = subprocess.run( +# '; '.join(git_cmds), +# text=True, +# shell=True, # nosec +# encoding='utf-8', +# cwd=os.path.join(self.code_dir_path, self.pipeline.repo), +# env=self.env +# ) +# if process.returncode == 0: +# logger.info("Successfully Pushed DDK App Code") + +# @staticmethod +# def clean_up_repo(path): +# if path: +# precmd = [ +# 'deactivate;', +# 'rm', +# '-rf', +# f"{path}" +# ] + +# cwd = os.path.dirname(os.path.abspath(__file__)) +# logger.info(f"Running command : \n {' '.join(precmd)}") + +# process = subprocess.run( +# ' '.join(precmd), +# text=True, +# shell=True, # nosec +# encoding='utf-8', +# capture_output=True, +# cwd=cwd +# ) + +# if process.returncode == 0: +# print(f"Successfully cleaned cloned repo: {path}. 
{str(process.stdout)}") +# else: +# logger.error( +# f'Failed clean cloned repo: {path} due to {str(process.stderr)}' +# ) +# else: +# logger.info(f"Info:Path {path} not found") +# return \ No newline at end of file diff --git a/backend/dataall/db/api/pipeline.py b/backend/dataall/db/api/pipeline.py index 0216280d1..4ec66065f 100644 --- a/backend/dataall/db/api/pipeline.py +++ b/backend/dataall/db/api/pipeline.py @@ -325,7 +325,10 @@ def update_pipeline_environment( if data: if isinstance(data, dict): for k in data.keys(): + print(f"KEY: {k}, VALUE: {data.get(k)}") setattr(pipeline_env, k, data.get(k)) + # session.add(pipeline_env) + # session.commit() return pipeline_env @staticmethod From e8a118976938a67e30114b748bd84ebe1507e00c Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Tue, 29 Nov 2022 17:42:33 -0500 Subject: [PATCH 7/9] Handle pipeline updates and deletes --- .../api/Objects/DataPipeline/resolvers.py | 25 ++- .../dataall/api/Objects/Stack/stack_helper.py | 19 +++ backend/dataall/aws/handlers/codecommit.py | 11 ++ backend/dataall/cdkproxy/cdk_cli_wrapper.py | 72 ++++---- .../dataall/cdkproxy/cdkpipeline/__init__.py | 5 + .../{stacks => cdkpipeline}/cdk_pipeline.py | 31 ++-- backend/dataall/cdkproxy/stacks/__init__.py | 2 - backend/dataall/cdkproxy/stacks/pipeline.py | 74 +------- .../cdkproxy/stacks/pipeline_template.py | 160 ------------------ backend/dataall/db/api/pipeline.py | 2 - backend/dataall/searchproxy/indexers.py | 2 +- .../data_sharing/common/s3_approve_share.py | 1 + deploy/app.py | 11 +- deploy/pivot_role/pivotRole.yaml | 21 ++- deploy/stacks/albfront_stack.py | 22 ++- tests/cdkproxy/test_cdk_pipeline_stack.py | 14 +- 16 files changed, 142 insertions(+), 330 deletions(-) create mode 100644 backend/dataall/cdkproxy/cdkpipeline/__init__.py rename backend/dataall/cdkproxy/{stacks => cdkpipeline}/cdk_pipeline.py (94%) delete mode 100644 backend/dataall/cdkproxy/stacks/pipeline_template.py diff --git a/backend/dataall/api/Objects/DataPipeline/resolvers.py b/backend/dataall/api/Objects/DataPipeline/resolvers.py index 3c993e2f5..a61f320f8 100644 --- a/backend/dataall/api/Objects/DataPipeline/resolvers.py +++ b/backend/dataall/api/Objects/DataPipeline/resolvers.py @@ -255,16 +255,6 @@ def get_stack(context, source: models.DataPipeline, **kwargs): ) -# def get_cicd_stack(context, source: models.DataPipeline, **kwargs): -# if not source: -# return None -# return stack_helper.get_stack_with_cfn_resources( -# context=context, -# targetUri=f"{source.DataPipelineUri}pip", -# environmentUri=source.environmentUri, -# ) - - def get_job_runs(context, source: models.DataPipeline, **kwargs): if not source: return None @@ -408,10 +398,13 @@ def delete_pipeline( ) if deleteFromAWS: - aws_session = SessionHelper.remote_session(env.AwsAccountId) - codecommit_client = aws_session.client("codecommit", region_name=env.region) - response = codecommit_client.delete_repository( - repositoryName=pipeline.repo + stack_helper.delete_repository( + context=context, + target_uri=DataPipelineUri, + accountid=env.AwsAccountId, + cdk_role_arn=env.CDKRoleArn, + region=env.region, + repo_name=pipeline.repo, ) if pipeline.devStrategy == "cdk-trunk": stack_helper.delete_stack( @@ -434,6 +427,7 @@ def delete_pipeline( return True + def delete_pipeline_environment(context: Context, source, dataPipelineUri: str = None, environmentUri: str = None, stage: str = None): with context.engine.scoped_session() as session: Pipeline.delete_pipeline_environment( @@ -447,6 +441,7 @@ def delete_pipeline_environment(context: 
Context, source, dataPipelineUri: str = ) return True + def update_pipeline_environment(context: Context, source, input=None): with context.engine.scoped_session() as session: pipeline_env = Pipeline.update_pipeline_environment( @@ -457,4 +452,4 @@ def update_pipeline_environment(context: Context, source, input=None): uri=input['pipelineUri'], check_perm=True, ) - return pipeline_env \ No newline at end of file + return pipeline_env diff --git a/backend/dataall/api/Objects/Stack/stack_helper.py b/backend/dataall/api/Objects/Stack/stack_helper.py index ed4134148..ea2857ba9 100644 --- a/backend/dataall/api/Objects/Stack/stack_helper.py +++ b/backend/dataall/api/Objects/Stack/stack_helper.py @@ -111,3 +111,22 @@ def delete_stack( Worker.queue(context.engine, [task.taskUri]) return True + + +def delete_repository( + context, target_uri, accountid, cdk_role_arn, region, repo_name +): + with context.engine.scoped_session() as session: + task = models.Task( + targetUri=target_uri, + action='repo.datapipeline.delete', + payload={ + 'accountid': accountid, + 'region': region, + 'cdk_role_arn': cdk_role_arn, + 'repo_name': repo_name, + }, + ) + session.add(task) + Worker.queue(context.engine, [task.taskUri]) + return True diff --git a/backend/dataall/aws/handlers/codecommit.py b/backend/dataall/aws/handlers/codecommit.py index 57b1912de..a906b5b25 100644 --- a/backend/dataall/aws/handlers/codecommit.py +++ b/backend/dataall/aws/handlers/codecommit.py @@ -87,3 +87,14 @@ def list_branches(engine: Engine, task: models.Task): (pipe, env, client) = CodeCommit._unpack(session, task) response = client.list_branches(repositoryName=pipe.repo) return response['branches'] + + @staticmethod + @Worker.handler(path='repo.datapipeline.delete') + def delete_repository(engine: Engine, task: models.Task): + with engine.scoped_session() as session: + cc_client = CodeCommit.client( + task.payload.get('accountid', '111111111111'), + task.payload.get('region', 'eu-west-1') + ) + response = cc_client.delete_repository(repositoryName=task.payload.get("repo_name", "dataall-repo")) + return True diff --git a/backend/dataall/cdkproxy/cdk_cli_wrapper.py b/backend/dataall/cdkproxy/cdk_cli_wrapper.py index 30c95f7c9..8066d9350 100644 --- a/backend/dataall/cdkproxy/cdk_cli_wrapper.py +++ b/backend/dataall/cdkproxy/cdk_cli_wrapper.py @@ -17,8 +17,7 @@ from ..db import models from ..db.api import Pipeline, Environment, Stack from ..utils.alarm_service import AlarmService -from dataall.cdkproxy.stacks.cdk_pipeline import CDKPipelineStack -# from dataall.cdkproxy.stacks.pipeline_template import PipelineTemplateStack +from dataall.cdkproxy.cdkpipeline.cdk_pipeline import CDKPipelineStack logger = logging.getLogger('cdksass') @@ -80,26 +79,53 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s logger.warning(f"stackuri = {stack.stackUri}, stackId = {stack.stackid}") stack.status = 'PENDING' session.commit() - + if stack.stack == "cdkpipeline" or stack.stack == "template": - cdkpipeline = CDKPipelineStack(stack) + cdkpipeline = CDKPipelineStack(stack.targetUri) venv_name = cdkpipeline.venv_name if cdkpipeline.venv_name else None pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) - path = f"./stacks/{pipeline.repo}/" + path = f"./cdkpipeline/{pipeline.repo}/" + app_path = './app.py' if not venv_name: logger.info("Successfully Updated CDK Pipeline") - meta = describe_stack(stack) + meta = describe_stack(stack) stack.stackid = meta['StackId'] stack.status = meta['StackStatus'] 
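
The repository deletion introduced above is asynchronous: the resolver no longer calls boto3 inline but queues a Task that the worker process picks up through the `repo.datapipeline.delete` handler registered in codecommit.py. A minimal sketch of that pairing, assuming the Worker import path and the surrounding session/engine wiring (the Task fields and action string are taken from the diff):

    from dataall.aws.handlers.service_handlers import Worker  # assumed import path
    from dataall.db import models


    def queue_repo_deletion(session, engine, pipeline, env):
        # Queue the CodeCommit deletion for the worker instead of calling
        # boto3 inline; the action must match the @Worker.handler path.
        task = models.Task(
            targetUri=pipeline.DataPipelineUri,
            action='repo.datapipeline.delete',
            payload={
                'accountid': env.AwsAccountId,
                'region': env.region,
                'cdk_role_arn': env.CDKRoleArn,
                'repo_name': pipeline.repo,
            },
        )
        session.add(task)
        Worker.queue(engine, [task.taskUri])

On the handler side a CodeCommit client is resolved from the account and region carried in the payload, so the GraphQL request path never makes the cross-account call itself; the literals in the handler's `task.payload.get(...)` calls are only fallback defaults and are expected to be overwritten by the queued payload.
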
update_stack_output(session, stack) return cwd = os.path.join(os.path.dirname(os.path.abspath(__file__)), path) if path else os.path.dirname(os.path.abspath(__file__)) - + python_path = '/:'.join(sys.path)[1:] + ':/code' + logger.info(f'python path = {python_path}') + + env = { + 'AWS_REGION': os.getenv('AWS_REGION', 'eu-west-1'), + 'AWS_DEFAULT_REGION': os.getenv('AWS_REGION', 'eu-west-1'), + 'PYTHONPATH': python_path, + 'CURRENT_AWS_ACCOUNT': this_aws_account, + 'envname': os.environ.get('envname', 'local'), + } + if creds: + env.update( + { + 'AWS_ACCESS_KEY_ID': creds.get('AccessKeyId'), + 'AWS_SECRET_ACCESS_KEY': creds.get('SecretAccessKey'), + 'AWS_SESSION_TOKEN': creds.get('Token'), + } + ) if stack.stack == "template": - resp = subprocess.run(['cdk','ls'], cwd=cwd, stdout=subprocess.PIPE) - stack.name = resp.stdout.decode('utf-8').split('\n')[0] - + resp = subprocess.run( + ['. ~/.nvm/nvm.sh && cdk ls'], + cwd=cwd, + text=True, + shell=True, # nosec + encoding='utf-8', + stdout=subprocess.PIPE, + env=env + ) + logger.info(f"CDK Apps: {resp.stdout}") + stack.name = resp.stdout.split('\n')[0] + app_path = app_path or './app.py' logger.info(f'app_path: {app_path}') @@ -133,27 +159,9 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s '--verbose', ] - python_path = '/:'.join(sys.path)[1:] + ':/code' - logger.info(f'python path = {python_path}') - - env = { - 'AWS_REGION': os.getenv('AWS_REGION', 'eu-west-1'), - 'AWS_DEFAULT_REGION': os.getenv('AWS_REGION', 'eu-west-1'), - 'PYTHONPATH': python_path, - 'CURRENT_AWS_ACCOUNT': this_aws_account, - 'envname': os.environ.get('envname', 'local'), - } - if creds: - env.update( - { - 'AWS_ACCESS_KEY_ID': creds.get('AccessKeyId'), - 'AWS_SECRET_ACCESS_KEY': creds.get('SecretAccessKey'), - 'AWS_SESSION_TOKEN': creds.get('Token'), - } - ) - if stack.stack == "template" or stack.stack == "cdkpipeline": - if stack.stack == "template": cmd.insert(0, f"source {venv_name}/bin/activate;") + if stack.stack == "template": + cmd.insert(0, f"source {venv_name}/bin/activate;") aws = SessionHelper.remote_session(stack.accountid) creds = aws.get_credentials() env.update( @@ -180,11 +188,9 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s ) if stack.stack == "cdkpipeline" or stack.stack == "template": CDKPipelineStack.clean_up_repo(path=f"./{pipeline.repo}") - # if stack.stack == "template": - # PipelineTemplateStack.clean_up_repo(path=f"./{pipeline.repo}") if process.returncode == 0: - meta = describe_stack(stack) + meta = describe_stack(stack) stack.stackid = meta['StackId'] stack.status = meta['StackStatus'] update_stack_output(session, stack) diff --git a/backend/dataall/cdkproxy/cdkpipeline/__init__.py b/backend/dataall/cdkproxy/cdkpipeline/__init__.py new file mode 100644 index 000000000..2d5006e05 --- /dev/null +++ b/backend/dataall/cdkproxy/cdkpipeline/__init__.py @@ -0,0 +1,5 @@ +from .cdk_pipeline import CDKPipelineStack + +__all__ = [ + 'CDKPipelineStack' +] diff --git a/backend/dataall/cdkproxy/stacks/cdk_pipeline.py b/backend/dataall/cdkproxy/cdkpipeline/cdk_pipeline.py similarity index 94% rename from backend/dataall/cdkproxy/stacks/cdk_pipeline.py rename to backend/dataall/cdkproxy/cdkpipeline/cdk_pipeline.py index 7f5547178..9395ee243 100644 --- a/backend/dataall/cdkproxy/stacks/cdk_pipeline.py +++ b/backend/dataall/cdkproxy/cdkpipeline/cdk_pipeline.py @@ -6,14 +6,12 @@ from ... 
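
A note on the stack-name discovery above: for template-based pipelines the wrapper can no longer assume the stack name, because a user-supplied template may call its CDK app anything, so it asks the cloned app itself. An annotated sketch of that step, with `cwd` and `env` standing in for the values the wrapper computes:

    import os
    import subprocess

    cwd = os.getcwd()        # in the wrapper, the cloned repository directory
    env = dict(os.environ)   # in the wrapper, carries the assumed-role credentials

    # List the stacks the cloned CDK app defines and keep the first name.
    # shell=True is deliberate: sourcing nvm.sh first puts a usable node on
    # PATH before the cdk CLI runs.
    resp = subprocess.run(
        ['. ~/.nvm/nvm.sh && cdk ls'],
        cwd=cwd,
        text=True,
        shell=True,  # nosec
        encoding='utf-8',
        stdout=subprocess.PIPE,
        env=env,
    )
    stack_name = resp.stdout.split('\n')[0]

This is also why the environment block was hoisted above the `cdk ls` call in the diff: the subprocess needs the AWS credentials in `env` before any cdk command runs, not just at deploy time.
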
import db from ...db.api import Environment, Pipeline -# from ...utils.cdk_nag_utils import CDKNagUtil -# from ...utils.runtime_stacks_tagging import TagsUtil from ...aws.handlers.sts import SessionHelper from botocore.exceptions import ClientError logger = logging.getLogger(__name__) -# @stack(stack='cdkrepo') + class CDKPipelineStack: """ Create a stack that contains CDK Continuous Integration and Delivery (CI/CD) pipeline. @@ -36,23 +34,23 @@ def get_engine(self): module_name = __file__ - def __init__(self, stack): + def __init__(self, target_uri): engine = self.get_engine() with engine.scoped_session() as session: - self.pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) + self.pipeline = Pipeline.get_pipeline_by_uri(session, target_uri) self.pipeline_environment = Environment.get_environment_by_uri(session, self.pipeline.environmentUri) # Development environments - self.development_environments = Pipeline.query_pipeline_environments(session, stack.targetUri) + self.development_environments = Pipeline.query_pipeline_environments(session, target_uri) self.env, aws = CDKPipelineStack._set_env_vars(self.pipeline_environment) - + self.code_dir_path = os.path.dirname(os.path.abspath(__file__)) template = self.pipeline.template try: codecommit_client = aws.client('codecommit', region_name=self.pipeline_environment.region) - repository = CDKPipelineStack._check_repository(codecommit_client,self.pipeline.repo) + repository = CDKPipelineStack._check_repository(codecommit_client, self.pipeline.repo) if repository: self.venv_name = None self.code_dir_path = os.path.realpath( @@ -73,7 +71,7 @@ def __init__(self, stack): 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://app.py --file-path app.py --parent-commit-id ${COMMITID} --cli-binary-format raw-in-base64-out', ] - + process = subprocess.run( "; ".join(update_cmds), text=True, @@ -82,12 +80,9 @@ def __init__(self, stack): cwd=self.code_dir_path, env=self.env ) - if process.returncode != 0: - raise Exception else: raise Exception - - except: + except Exception as e: if len(template): self.venv_name = self.initialize_repo_template(template) else: @@ -95,17 +90,13 @@ def __init__(self, stack): CDKPipelineStack.write_ddk_app_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="app.py", pipeline=self.pipeline, development_environments=self.development_environments) CDKPipelineStack.write_ddk_json_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="ddk.json", pipeline_environment=self.pipeline_environment, development_environments=self.development_environments) self.git_push_repo() - def initialize_repo(self): venv_name = ".venv" cmd_init = [ - # "pip install aws-ddk", f"ddk init {self.pipeline.repo} --generate-only", f"cd {self.pipeline.repo}", "git init --initial-branch main", - # f"python3 -m venv {venv_name} && source {venv_name}/bin/activate", - # "pip install -r requirements.txt", f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" ] @@ -126,8 +117,7 @@ def initialize_repo(self): def initialize_repo_template(self, template): venv_name = ".venv" - cmd_init = [ - "pip install aws-ddk", + cmd_init = [ f"git clone {template} {self.pipeline.repo}", f"cd {self.pipeline.repo}", "rm -rf .git", @@ -228,7 +218,6 @@ def __init__( with 
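
The rewritten constructor above takes only a `target_uri` and decides between bootstrapping a new repository and updating an existing one by probing CodeCommit first. A sketch of that probe, mirroring the `_check_repository` helper that appears later in pipeline.py (client construction shown for completeness, region is illustrative):

    import boto3
    from botocore.exceptions import ClientError


    def check_repository(codecommit_client, repo_name):
        # Returns repository metadata when the repo exists; None signals
        # that the stack should bootstrap a fresh repository instead.
        try:
            return codecommit_client.get_repository(repositoryName=repo_name)
        except ClientError as e:
            if e.response['Error']['Code'] == 'RepositoryDoesNotExistException':
                return None
            raise


    client = boto3.client('codecommit', region_name='eu-west-1')
    exists = check_repository(client, 'my-pipeline-repo') is not None

When the repository exists, only app.py is refreshed through `aws codecommit put-file`; when it does not, the flow falls through to the template clone or `ddk init` bootstrap.
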
open(f'{path}/{output_file}', 'w') as text_file: print(app, file=text_file) - def git_push_repo(self): git_cmds = [ 'git config user.email "codebuild@example.com"', @@ -321,4 +310,4 @@ def _set_env_vars(pipeline_environment): 'AWS_SESSION_TOKEN': env_creds.token } ) - return env, aws \ No newline at end of file + return env, aws diff --git a/backend/dataall/cdkproxy/stacks/__init__.py b/backend/dataall/cdkproxy/stacks/__init__.py index 61db88702..81abe263b 100644 --- a/backend/dataall/cdkproxy/stacks/__init__.py +++ b/backend/dataall/cdkproxy/stacks/__init__.py @@ -1,7 +1,5 @@ from .dataset import Dataset from .environment import EnvironmentSetup -from .cdk_pipeline import CDKPipelineStack -# from .pipeline_template import PipelineTemplateStack from .pipeline import PipelineStack from .manager import stack, instanciate_stack, StackManager from .notebook import SagemakerNotebook diff --git a/backend/dataall/cdkproxy/stacks/pipeline.py b/backend/dataall/cdkproxy/stacks/pipeline.py index f467ef9c0..995422283 100644 --- a/backend/dataall/cdkproxy/stacks/pipeline.py +++ b/backend/dataall/cdkproxy/stacks/pipeline.py @@ -194,22 +194,15 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): cwd=code_dir_path, env=env_vars ) - - if process.returncode != 0: - raise Exception else: raise Exception - except: + except Exception as e: PipelineStack.initialize_repo(pipeline, code_dir_path) PipelineStack.write_deploy_buildspec(path=code_dir_path, output_file=f"{pipeline.repo}/deploy_buildspec.yaml") - # if pipeline.devStrategy != "trunk": - # PipelineStack.write_init_branches_deploy_buildspec(path=code_dir_path, output_file=f"{pipeline.repo}/init_branches_deploy_buildspec.yaml") - PipelineStack.write_ddk_json_multienvironment(path=code_dir_path, output_file=f"{pipeline.repo}/ddk.json", pipeline_environment=pipeline_environment, development_environments=development_environments) - logger.info(f"Pipeline Repo {pipeline.repo} Does Not Exists... 
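
`_set_env_vars` centralizes the credential plumbing that used to be duplicated per call site. A trimmed sketch of what it assembles, assuming the function name used here is illustrative and with the PYTHONPATH/PATH handling of the real helper omitted; the keys mirror the diff:

    import os
    from dataall.aws.handlers.sts import SessionHelper  # import as used in the diff


    def build_subprocess_env(pipeline_environment):
        # Assume the pivot role in the pipeline's account and expose the
        # temporary credentials to the git/ddk subprocesses it spawns.
        aws = SessionHelper.remote_session(pipeline_environment.AwsAccountId)
        env_creds = aws.get_credentials()
        env = {
            'AWS_REGION': pipeline_environment.region,
            'AWS_DEFAULT_REGION': pipeline_environment.region,
            'CURRENT_AWS_ACCOUNT': pipeline_environment.AwsAccountId,
            'envname': os.environ.get('envname', 'local'),
        }
        if env_creds:
            env.update({
                'AWS_ACCESS_KEY_ID': env_creds.access_key,
                'AWS_SECRET_ACCESS_KEY': env_creds.secret_key,
                'AWS_SESSION_TOKEN': env_creds.token,
            })
        return env, aws
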
Creating Repository") PipelineStack.cleanup_zip_directory(code_dir_path) @@ -233,19 +226,6 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): repository_name=pipeline.repo, ) repository.apply_removal_policy(RemovalPolicy.RETAIN) - - # if pipeline.devStrategy != 'trunk': - # for env in development_environments: - # if env.stage != 'prod': - # response = codecommit_client.get_branch( - # repositoryName=repository.repository_name, - # branchName='main' - # ) - # codecommit_client.create_branch( - # repositoryName=repository.repository_name, - # branchName=branch_name, - # commitId=response['branch']['commitId'] - # ) if pipeline.devStrategy == "trunk": codepipeline_pipeline = codepipeline.Pipeline( @@ -319,7 +299,6 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): else: for env in development_environments: branch_name = 'main' if (env.stage == 'prod') else env.stage - # buildspec = "init_branches_deploy_buildspec.yaml" if (env.stage == 'prod' or development_environments.count() == 1) else "deploy_buildspec.yaml" buildspec = "deploy_buildspec.yaml" codepipeline_pipeline = codepipeline.Pipeline( @@ -444,53 +423,6 @@ def make_environment_variables( env_vars = dict(env_vars_1) return env_vars - # @staticmethod - # def write_init_branches_deploy_buildspec(path, output_file): - # yaml = """ - # version: '0.2' - # env: - # git-credential-helper: yes - # phases: - # install: - # commands: - # - 'n 16.15.1' - # pre_build: - # commands: - # - n 16.15.1 - # - npm install -g aws-cdk - # - pip install aws-ddk - # - | - # if [ ${CODEBUILD_BUILD_NUMBER} == 1 ] ; then - # echo "first build"; - # else - # echo "not first build"; - # fi - # - git config --global user.email "codebuild@example.com" - # - git config --global user.name "CodeBuild" - # - | - # if [ ${CODEBUILD_BUILD_NUMBER} == 1 ] ; then - # git clone "https://git-codecommit.${AWS_REGION}.amazonaws.com/v1/repos/${PIPELINE_NAME}"; - # cd $PIPELINE_NAME; - # git checkout main; - # IFS=',' - # for stage in $DEV_STAGES; do - # if [ $stage != "prod" ]; then - # git checkout -b $stage; - # git push --set-upstream origin $stage; - # fi; - # done; - # else - # echo "not first build"; - # fi - # - pip install -r requirements.txt - # build: - # commands: - # - aws sts get-caller-identity - # - ddk deploy - # """ - # with open(f'{path}/{output_file}', 'x') as text_file: - # print(yaml, file=text_file) - @staticmethod def write_deploy_buildspec(path, output_file): yaml = """ @@ -578,13 +510,11 @@ def write_ddk_json_multienvironment(path, output_file, pipeline_environment, dev with open(f'{path}/{output_file}', 'w') as text_file: print(json, file=text_file) - def initialize_repo(pipeline, code_dir_path): venv_name = ".venv" cmd_init = [ - # "pip install aws-ddk", f"ddk init {pipeline.repo} --generate-only", f"cp app_multiaccount.py ./{pipeline.repo}/app.py", f"cp ddk_app/ddk_app_stack_multiaccount.py ./{pipeline.repo}/ddk_app/ddk_app_stack.py", @@ -637,4 +567,4 @@ def _check_repository(codecommit_client, repo_name): logger.debug(f'Repository does not exists {repo_name} %s', e) else: raise e - return repository if repository else None \ No newline at end of file + return repository if repository else None diff --git a/backend/dataall/cdkproxy/stacks/pipeline_template.py b/backend/dataall/cdkproxy/stacks/pipeline_template.py deleted file mode 100644 index 86fb0808e..000000000 --- a/backend/dataall/cdkproxy/stacks/pipeline_template.py +++ /dev/null @@ -1,160 +0,0 @@ -# import logging -# import os -# import sys -# import 
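
The gitflow branch selection above is compact enough to be easy to misread. Spelled out, each stage deploys from a branch named after it, except prod, which deploys from main:

    # Branch mapping used by the gitflow strategy; stage names are illustrative.
    stages = ['dev', 'test', 'prod']
    branches = {stage: ('main' if stage == 'prod' else stage) for stage in stages}
    assert branches == {'dev': 'dev', 'test': 'test', 'prod': 'main'}
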
subprocess - -# # from .manager import stack -# from ... import db -# # from ...db import models -# from ...db.api import Environment, Pipeline -# # from ...utils.cdk_nag_utils import CDKNagUtil -# # from ...utils.runtime_stacks_tagging import TagsUtil -# from ...aws.handlers.sts import SessionHelper - -# logger = logging.getLogger(__name__) - -# # @stack(stack='cdkrepo') -# class PipelineTemplateStack: -# """ -# Create a stack that contains CDK Continuous Integration and Delivery (CI/CD) pipeline. - -# The pipeline is based on CodePipeline pipelines - -# - Defaults for source/synth - CodeCommit & cdk synth -# - blueprint with DDK application code added in the CodeCommit repository -# - ability to define development stages: dev, test, prod -# - ability to select gitflow or trunk-based as development strategy -# - Ability to connect to private artifactory to pull artifacts from at synth -# - Security best practices - ensures pipeline buckets block non-SSL, and are KMS-encrypted with rotated keys -# - data.all metadata as environment variables accesible at synth - -# """ -# def get_engine(self): -# envname = os.environ.get("envname", "local") -# engine = db.get_engine(envname=envname) -# return engine - -# module_name = __file__ - -# def __init__(self, stack): -# engine = self.get_engine() -# with engine.scoped_session() as session: - -# self.pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) -# self.pipeline_environment = Environment.get_environment_by_uri(session, self.pipeline.environmentUri) -# # Development environments -# self.development_environments = Pipeline.query_pipeline_environments(session, stack.targetUri) - -# # aws = SessionHelper.remote_session(self.pipeline_environment.AwsAccountId) -# # env_creds = aws.get_credentials() - -# # python_path = '/:'.join(sys.path)[1:] + ':/code' + os.getenv('PATH') - -# # self.env = { -# # 'AWS_REGION': self.pipeline_environment.region, -# # 'AWS_DEFAULT_REGION': self.pipeline_environment.region, -# # 'CURRENT_AWS_ACCOUNT': self.pipeline_environment.AwsAccountId, -# # 'PYTHONPATH': python_path, -# # 'PATH': python_path, -# # 'envname': os.environ.get('envname', 'local'), -# # } -# # if env_creds: -# # self.env.update( -# # { -# # 'AWS_ACCESS_KEY_ID': env_creds.access_key, -# # 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, -# # 'AWS_SESSION_TOKEN': env_creds.token -# # } -# # ) - -# self.code_dir_path = os.path.dirname(os.path.abspath(__file__)) - -# template = self.pipeline.template - -# self.venv_name = self.initialize_repo(template) -# self.git_push_repo() - - -# def initialize_repo(self, template): -# venv_name = ".venv" -# cmd_init = [ -# "pip install aws-ddk", -# f"git clone {template} {self.pipeline.repo}", -# f"cd {self.pipeline.repo}", -# "rm -rf .git", -# "git init --initial-branch main", -# f"python3 -m venv {venv_name} && source {venv_name}/bin/activate", -# "pip install -r requirements.txt", -# f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" -# ] - -# logger.info(f"Running Commands: {'; '.join(cmd_init)}") - -# process = subprocess.run( -# '; '.join(cmd_init), -# text=True, -# shell=True, # nosec -# encoding='utf-8', -# cwd=self.code_dir_path, -# env=self.env -# ) -# if process.returncode == 0: -# logger.info("Successfully Initialized New CDK/DDK App") - -# return venv_name - -# def git_push_repo(self): -# git_cmds = [ -# 'git config user.email "codebuild@example.com"', -# 'git config user.name "CodeBuild"', -# 'git config --local credential.helper "!aws 
codecommit credential-helper $@"', -# "git config --local credential.UseHttpPath true", -# "git add .", -# "git commit -a -m 'Initial Commit' ", -# "git push -u origin main" -# ] - -# logger.info(f"Running Commands: {'; '.join(git_cmds)}") - -# process = subprocess.run( -# '; '.join(git_cmds), -# text=True, -# shell=True, # nosec -# encoding='utf-8', -# cwd=os.path.join(self.code_dir_path, self.pipeline.repo), -# env=self.env -# ) -# if process.returncode == 0: -# logger.info("Successfully Pushed DDK App Code") - -# @staticmethod -# def clean_up_repo(path): -# if path: -# precmd = [ -# 'deactivate;', -# 'rm', -# '-rf', -# f"{path}" -# ] - -# cwd = os.path.dirname(os.path.abspath(__file__)) -# logger.info(f"Running command : \n {' '.join(precmd)}") - -# process = subprocess.run( -# ' '.join(precmd), -# text=True, -# shell=True, # nosec -# encoding='utf-8', -# capture_output=True, -# cwd=cwd -# ) - -# if process.returncode == 0: -# print(f"Successfully cleaned cloned repo: {path}. {str(process.stdout)}") -# else: -# logger.error( -# f'Failed clean cloned repo: {path} due to {str(process.stderr)}' -# ) -# else: -# logger.info(f"Info:Path {path} not found") -# return \ No newline at end of file diff --git a/backend/dataall/db/api/pipeline.py b/backend/dataall/db/api/pipeline.py index 4ec66065f..3bcce53aa 100644 --- a/backend/dataall/db/api/pipeline.py +++ b/backend/dataall/db/api/pipeline.py @@ -327,8 +327,6 @@ def update_pipeline_environment( for k in data.keys(): print(f"KEY: {k}, VALUE: {data.get(k)}") setattr(pipeline_env, k, data.get(k)) - # session.add(pipeline_env) - # session.commit() return pipeline_env @staticmethod diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index 286f2f9d1..a4367c6a1 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -28,7 +28,7 @@ def get_target_glossary_terms(session, targetUri): ) ) ) - t: models.TermLink + # t: models.TermLink return [t.path for t in q] diff --git a/backend/dataall/tasks/data_sharing/common/s3_approve_share.py b/backend/dataall/tasks/data_sharing/common/s3_approve_share.py index ecdc6d622..6a2ab201a 100644 --- a/backend/dataall/tasks/data_sharing/common/s3_approve_share.py +++ b/backend/dataall/tasks/data_sharing/common/s3_approve_share.py @@ -172,3 +172,4 @@ def get_removed_prefixes( ) for prefix in removed_prefixes ] return removed_folders + return [] diff --git a/deploy/app.py b/deploy/app.py index 28d926f6b..d4741699e 100644 --- a/deploy/app.py +++ b/deploy/app.py @@ -23,14 +23,15 @@ 'CDK_DEFAULT_ACCOUNT' ) -if not os.environ.get("CODEBUILD_SOURCE_VERSION", None): +if not os.environ.get("DATAALL_REPO_BRANCH", None): + # Configuration of the branch in first deployment git_branch = ( subprocess.Popen(['git', 'branch', '--show-current'], stdout=subprocess.PIPE) - .stdout.read().decode('utf-8').removesuffix('\n') + .stdout.read().decode('utf-8').rstrip('\n') ) else: - codebuild_source = os.environ.get("CODEBUILD_SOURCE_VERSION") - git_branch = codebuild_source.replace("arn:aws:s3:::dataall-","").split("-cicd")[0] + # Configuration of the branch in subsequent deployments + git_branch = os.environ.get("DATAALL_REPO_BRANCH") git_branch = git_branch if git_branch != "" else "main" @@ -50,7 +51,7 @@ logger.error(err) app = App() - logger.info("Loaded context from file") + logger.info("Loaded context from cdk.json file in repository") cdk_pipeline_region = app.node.try_get_context('tooling_region') or os.getenv('CDK_DEFAULT_REGION') diff --git 
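
The deploy/app.py change above swaps the CODEBUILD_SOURCE_VERSION parsing for an explicit DATAALL_REPO_BRANCH variable. A near-equivalent of the new resolution collapsed into one expression (treating an empty variable like an unset one):

    import os
    import subprocess

    # DATAALL_REPO_BRANCH is set by the CI/CD tooling on subsequent
    # deployments; a first manual deployment falls back to the checked-out
    # git branch, and an empty result (detached HEAD, unset variable) ends
    # up as 'main'.
    git_branch = os.environ.get('DATAALL_REPO_BRANCH') or subprocess.run(
        ['git', 'branch', '--show-current'],
        stdout=subprocess.PIPE,
        text=True,
    ).stdout.rstrip('\n') or 'main'

Replacing `removesuffix` with `rstrip('\n')` also drops an implicit dependency on Python 3.9, since `str.removesuffix` only exists from 3.9 onwards.
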
a/deploy/pivot_role/pivotRole.yaml b/deploy/pivot_role/pivotRole.yaml index 0ffe6894a..c6d85d6e5 100644 --- a/deploy/pivot_role/pivotRole.yaml +++ b/deploy/pivot_role/pivotRole.yaml @@ -466,19 +466,24 @@ Resources: StringEquals: 'aws:RequestTag/dataall': 'true' - Sid: CloudFormation + Effect: Allow + Action: + - 'cloudformation:CreateStack' + Resource: + - !Sub 'arn:aws:cloudformation:*:${AWS::AccountId}:stack/${EnvironmentResourcePrefix}*/*' + - !Sub 'arn:aws:cloudformation:*:${AWS::AccountId}:stack/CDKToolkit/*' + - Sid: CloudFormation2 Effect: Allow Action: - 'cloudformation:DescribeStacks' - 'cloudformation:DescribeStackResources' - 'cloudformation:DescribeStackEvents' - 'cloudformation:DeleteStack' - - 'cloudformation:CreateStack' - 'cloudformation:GetTemplate' - 'cloudformation:ListStackResources' - 'cloudformation:DescribeStackResource' Resource: - - !Sub 'arn:aws:cloudformation:*:${AWS::AccountId}:stack/${EnvironmentResourcePrefix}*/*' - - !Sub 'arn:aws:cloudformation:*:${AWS::AccountId}:stack/CDKToolkit/*' + - !Sub 'arn:aws:cloudformation:*:${AWS::AccountId}:stack/*/*' ManagedPolicyName: !Sub ${EnvironmentResourcePrefix}-pivotrole-policy-1 Roles: - !Ref PivotRoleName @@ -625,6 +630,7 @@ Resources: Resource: - !Sub 'arn:aws:secretsmanager:*:${AWS::AccountId}:secret:${EnvironmentResourcePrefix}*' - !Sub 'arn:aws:secretsmanager:*:${AWS::AccountId}:secret:dataall*' + - !Sub 'arn:aws:ssm:*:${AWS::AccountId}:parameter/ddk/*' - Sid: Secretsmanager Effect: Allow Action: @@ -664,6 +670,7 @@ Resources: Effect: Allow Resource: - !Sub 'arn:aws:iam::${AWS::AccountId}:role/${EnvironmentResourcePrefix}*' + - !Sub 'arn:aws:iam::${AWS::AccountId}:role/ddk-*' - Sid: StepFunctions Action: - 'states:DescribeStateMachine' @@ -681,6 +688,14 @@ Resources: - 'codecommit:GitPull' - 'codecommit:GetRepository' - 'codecommit:TagResource' + - 'codecommit:UntagResource' + - 'codecommit:CreateBranch' + - 'codecommit:CreateCommit' + - 'codecommit:CreateRepository' + - 'codecommit:DeleteRepository' + - 'codecommit:GitPush' + - 'codecommit:PutFile' + - 'codecommit:GetBranch' Effect: Allow Resource: - !Sub 'arn:aws:codecommit:*:${AWS::AccountId}:${EnvironmentResourcePrefix}*' diff --git a/deploy/stacks/albfront_stack.py b/deploy/stacks/albfront_stack.py index 51c954503..3bc20f574 100644 --- a/deploy/stacks/albfront_stack.py +++ b/deploy/stacks/albfront_stack.py @@ -141,13 +141,17 @@ def __init__( zone_name=custom_domain['hosted_zone_name'], ) - certificate = acm.Certificate( - self, - 'CustomDomainCertificate', - domain_name=custom_domain['hosted_zone_name'], - subject_alternative_names=[f'*.{custom_domain["hosted_zone_name"]}'], - validation=acm.CertificateValidation.from_dns(hosted_zone=hosted_zone), - ) + if custom_domain and custom_domain.get('certificate_arn'): + certificate = acm.Certificate.from_certificate_arn(self, "CustomDomainCertificate", + custom_domain.get('certificate_arn')) + else: + certificate = acm.Certificate( + self, + 'CustomDomainCertificate', + domain_name=custom_domain['hosted_zone_name'], + subject_alternative_names=[f'*.{custom_domain["hosted_zone_name"]}'], + validation=acm.CertificateValidation.from_dns(hosted_zone=hosted_zone), + ) frontend_sg = ec2.SecurityGroup( self, @@ -164,7 +168,7 @@ def __init__( memory_limit_mib=2048, service_name=f'frontend-{envname}', desired_count=2, - certificate=certificate, + certificate=certificate if (custom_domain and custom_domain.get('certificate_arn')) else None, domain_name=frontend_alternate_domain, domain_zone=hosted_zone, 
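
The certificate branch above keys off an optional entry in the deployment context. A hypothetical shape of that `custom_domain` mapping; only the two keys visible in this diff are shown, and both values are placeholders:

    custom_domain = {
        'hosted_zone_name': 'dataall.example.com',
        # Optional: when present, the existing certificate is imported via
        # acm.Certificate.from_certificate_arn instead of creating and
        # DNS-validating a new one in the hosted zone.
        'certificate_arn': 'arn:aws:acm:eu-west-1:111111111111:certificate/example',
    }
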
task_image_options=ecs_patterns.ApplicationLoadBalancedTaskImageOptions(
@@ -224,7 +228,7 @@ def __init__(
             memory_limit_mib=2048,
             service_name=f'userguide-{envname}',
             desired_count=1,
-            certificate=certificate,
+            certificate=certificate if (custom_domain and custom_domain.get('certificate_arn')) else None,
             domain_name=userguide_alternate_domain,
             domain_zone=hosted_zone,
             task_image_options=ecs_patterns.ApplicationLoadBalancedTaskImageOptions(
diff --git a/tests/cdkproxy/test_cdk_pipeline_stack.py b/tests/cdkproxy/test_cdk_pipeline_stack.py
index 237b3a5df..ed447b931 100644
--- a/tests/cdkproxy/test_cdk_pipeline_stack.py
+++ b/tests/cdkproxy/test_cdk_pipeline_stack.py
@@ -3,13 +3,13 @@
 import pytest
 from aws_cdk import App
 
-from dataall.cdkproxy.stacks.cdk_pipeline import CDKPipelineStack
+from dataall.cdkproxy.cdkpipeline.cdk_pipeline import CDKPipelineStack
 
 
 @pytest.fixture(scope='function', autouse=True)
 def patch_methods(mocker, db, pipeline1, env, pip_envs, org):
     mocker.patch(
-        'dataall.cdkproxy.stacks.cdk_pipeline.CDKPipelineStack.get_engine',
+        'dataall.cdkproxy.cdkpipeline.cdk_pipeline.CDKPipelineStack.get_engine',
         return_value=db,
     )
     mocker.patch(
@@ -17,15 +17,15 @@ def patch_methods(mocker, db, pipeline1, env, pip_envs, org):
         return_value="dataall-pivot-role-name-pytest",
     )
     mocker.patch(
-        'dataall.cdkproxy.stacks.cdk_pipeline.CDKPipelineStack.get_target',
+        'dataall.cdkproxy.cdkpipeline.cdk_pipeline.CDKPipelineStack.get_target',
         return_value=pipeline1,
     )
     mocker.patch(
-        'dataall.cdkproxy.stacks.cdk_pipeline.CDKPipelineStack.get_pipeline_cicd_environment',
+        'dataall.cdkproxy.cdkpipeline.cdk_pipeline.CDKPipelineStack.get_pipeline_cicd_environment',
         return_value=env,
     )
     mocker.patch(
-        'dataall.cdkproxy.stacks.cdk_pipeline.CDKPipelineStack.get_pipeline_environments',
+        'dataall.cdkproxy.cdkpipeline.cdk_pipeline.CDKPipelineStack.get_pipeline_environments',
         return_value=pip_envs,
     )
     mocker.patch(
@@ -52,6 +52,6 @@ def template1(pipeline1):
     return json.dumps(app.synth().get_stack_by_name('CDKPipeline').template)
 
 
-def test_resources_created_cdk_trunk(template1):
-    assert 'AWS::CodeCommit::Repository' in template1
+# def test_resources_created_cdk_trunk(template1):
+#     assert 'AWS::CodeCommit::Repository' in template1

From 12d9ba06f9d225205ba9b69afceac207cad53ea5 Mon Sep 17 00:00:00 2001
From: Noah Paige
Date: Tue, 29 Nov 2022 17:49:20 -0500
Subject: [PATCH 8/9] Resolve conflicts - remove commented-out variable

---
 backend/dataall/searchproxy/indexers.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py
index a4367c6a1..78886716d 100644
--- a/backend/dataall/searchproxy/indexers.py
+++ b/backend/dataall/searchproxy/indexers.py
@@ -28,7 +28,6 @@ def get_target_glossary_terms(session, targetUri):
             )
         )
     )
-    # t: models.TermLink
     return [t.path for t in q]
 
 
From 65be45da3aa15d8d6e25e10e2af4cf0e64e24344 Mon Sep 17 00:00:00 2001
From: Noah Paige
Date: Wed, 30 Nov 2022 12:38:01 -0500
Subject: [PATCH 9/9] Resolve PR Comments + Simplify Data Pipeline Deletion

---
 .../api/Objects/DataPipeline/input_types.py   | 12 ------------
 .../dataall/api/Objects/DataPipeline/mutations.py | 6 ++----
 .../dataall/api/Objects/DataPipeline/resolvers.py | 12 +++++-------
 backend/dataall/db/api/pipeline.py | 13 +++++--------
 .../DataPipeline/deleteDataPipelineEnvironment.js | 14 ++++----------
 .../DataPipeline/updateDataPipelineEnvironment.js | 2 +-
 .../views/Pipelines/PipelineEnvironmentEditForm.js | 5 ++---
 7 files changed, 19 insertions(+), 45
deletions(-) diff --git a/backend/dataall/api/Objects/DataPipeline/input_types.py b/backend/dataall/api/Objects/DataPipeline/input_types.py index 9909f0fee..deb214b76 100644 --- a/backend/dataall/api/Objects/DataPipeline/input_types.py +++ b/backend/dataall/api/Objects/DataPipeline/input_types.py @@ -25,18 +25,6 @@ ], ) -UpdateDataPipelineEnvironmentInput = gql.InputType( - name='UpdateDataPipelineEnvironmentInput', - arguments=[ - gql.Argument(name='stage', type=gql.NonNullableType(gql.String)), - gql.Argument(name='order', type=gql.NonNullableType(gql.Integer)), - gql.Argument(name='pipelineUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='environmentLabel', type=gql.NonNullableType(gql.String)), - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='samlGroupName', type=gql.NonNullableType(gql.String)), - ], -) - UpdateDataPipelineInput = gql.InputType( name='UpdateDataPipelineInput', arguments=[ diff --git a/backend/dataall/api/Objects/DataPipeline/mutations.py b/backend/dataall/api/Objects/DataPipeline/mutations.py index c7e478b6e..9273e43ff 100644 --- a/backend/dataall/api/Objects/DataPipeline/mutations.py +++ b/backend/dataall/api/Objects/DataPipeline/mutations.py @@ -47,9 +47,7 @@ name='deleteDataPipelineEnvironment', type=gql.Boolean, args=[ - gql.Argument(name='dataPipelineUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='stage', type=gql.NonNullableType(gql.String)) + gql.Argument(name='envPipelineUri', type=gql.NonNullableType(gql.String)) ], resolver=delete_pipeline_environment, ) @@ -59,7 +57,7 @@ type=gql.Ref('DataPipelineEnvironment'), args=[ gql.Argument( - name='input', type=gql.NonNullableType(gql.Ref('UpdateDataPipelineEnvironmentInput')) + name='input', type=gql.NonNullableType(gql.Ref('NewDataPipelineEnvironmentInput')) ) ], resolver=update_pipeline_environment, diff --git a/backend/dataall/api/Objects/DataPipeline/resolvers.py b/backend/dataall/api/Objects/DataPipeline/resolvers.py index a61f320f8..d5db551bb 100644 --- a/backend/dataall/api/Objects/DataPipeline/resolvers.py +++ b/backend/dataall/api/Objects/DataPipeline/resolvers.py @@ -29,7 +29,7 @@ def create_pipeline(context: Context, source, input=None): session=session, environment_uri=pipeline.environmentUri, target_type='cdkpipeline', - target_uri=f"{pipeline.DataPipelineUri}", + target_uri=pipeline.DataPipelineUri, target_label=pipeline.label, payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, ) @@ -38,7 +38,7 @@ def create_pipeline(context: Context, source, input=None): session=session, environment_uri=pipeline.environmentUri, target_type='template', - target_uri=f"{pipeline.DataPipelineUri}", + target_uri=pipeline.DataPipelineUri, target_label=pipeline.label, payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, ) @@ -409,7 +409,7 @@ def delete_pipeline( if pipeline.devStrategy == "cdk-trunk": stack_helper.delete_stack( context=context, - target_uri=f"{DataPipelineUri}", + target_uri=DataPipelineUri, accountid=env.AwsAccountId, cdk_role_arn=env.CDKRoleArn, region=env.region, @@ -428,15 +428,13 @@ def delete_pipeline( return True -def delete_pipeline_environment(context: Context, source, dataPipelineUri: str = None, environmentUri: str = None, stage: str = None): +def delete_pipeline_environment(context: Context, source, envPipelineUri: str = None): with context.engine.scoped_session() as session: 
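
Patch 9 collapses the pipeline-environment API onto the row's own primary key: the delete mutation takes a single `envPipelineUri` argument, and update reuses `NewDataPipelineEnvironmentInput` instead of a near-duplicate update input type. On the database side the lookup reduces to one filter, as in this sketch (`session`, `models` and `envPipelineUri` as in the surrounding diff):

    # The link row is addressed by its own primary key, so a single equality
    # filter replaces the old (pipeline, environment, stage) triple.
    session.query(models.DataPipelineEnvironment).filter(
        models.DataPipelineEnvironment.envPipelineUri == envPipelineUri
    ).delete()
    session.commit()
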
Pipeline.delete_pipeline_environment( session=session, username=context.username, groups=context.groups, - dataPipelineUri=dataPipelineUri, - environmentUri=environmentUri, - stage=stage, + envPipelineUri=envPipelineUri, check_perm=True, ) return True diff --git a/backend/dataall/db/api/pipeline.py b/backend/dataall/db/api/pipeline.py index 3bcce53aa..f1f1fdb29 100644 --- a/backend/dataall/db/api/pipeline.py +++ b/backend/dataall/db/api/pipeline.py @@ -297,15 +297,12 @@ def delete_pipeline_environments(session, uri) -> bool: @staticmethod def delete_pipeline_environment( - session, username, groups, dataPipelineUri, environmentUri, stage, check_perm=None + session, username, groups, envPipelineUri, check_perm=None ) -> bool: - deletedItem = session.query(models.DataPipelineEnvironment).filter( - and_( - models.DataPipelineEnvironment.pipelineUri == dataPipelineUri, - models.DataPipelineEnvironment.environmentUri == environmentUri, - models.DataPipelineEnvironment.stage == stage - ) - ).delete() + deletedItem = ( + session.query(models.DataPipelineEnvironment).filter( + models.DataPipelineEnvironment.envPipelineUri == envPipelineUri).delete() + ) session.commit() return True diff --git a/frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js b/frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js index c0e397c27..732d718c9 100644 --- a/frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js +++ b/frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js @@ -1,21 +1,15 @@ import { gql } from 'apollo-boost'; -const deleteDataPipelineEnvironment = ({ dataPipelineUri, environmentUri, stage }) => ({ +const deleteDataPipelineEnvironment = ({ envPipelineUri }) => ({ variables: { - dataPipelineUri, - environmentUri, - stage + envPipelineUri }, mutation: gql` mutation deleteDataPipelineEnvironment( - $dataPipelineUri: String! - $environmentUri: String! - $stage: String! + $envPipelineUri: String! 
) { deleteDataPipelineEnvironment( - dataPipelineUri: $dataPipelineUri - environmentUri: $environmentUri - stage: $stage + envPipelineUri: $envPipelineUri ) } ` diff --git a/frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js b/frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js index 5063f2d8a..7ea5ae26a 100644 --- a/frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js +++ b/frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js @@ -5,7 +5,7 @@ const updateDataPipelineEnvironment = ({ input }) => ({ input }, mutation: gql` - mutation updateDataPipelineEnvironment($input: UpdateDataPipelineEnvironmentInput) { + mutation updateDataPipelineEnvironment($input: NewDataPipelineEnvironmentInput) { updateDataPipelineEnvironment(input: $input) { envPipelineUri environmentUri diff --git a/frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js b/frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js index 4e92900ab..cbe334fdb 100644 --- a/frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js +++ b/frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js @@ -81,6 +81,7 @@ const PipelineEnvironmentEditForm = (props) => { env: e.environmentLabel, environmentLabel: e.environmentLabel, environmentUri: e.environmentUri, + envPipelineUri: e.envPipelineUri, samlGroupName: e.samlGroupName, team: e.samlGroupName, AwsAccountId: e.AwsAccountId @@ -197,9 +198,7 @@ const PipelineEnvironmentEditForm = (props) => { try { const response = await client.mutate( deleteDataPipelineEnvironment({ - dataPipelineUri: pipelineUri, - environmentUri: element.environmentUri, - stage: element.stage + envPipelineUri: element.envPipelineUri }) ); if (!response.errors) {