From 45a1b919b0fe6498ea4121348c484f3b5aeca1ef Mon Sep 17 00:00:00 2001 From: Nghi Ho Date: Tue, 4 Feb 2025 17:48:54 -0800 Subject: [PATCH 01/10] Create pipeline --- tests/ci/cdk/README.md | 20 ++ tests/ci/cdk/app.py | 79 ++--- tests/ci/cdk/cdk/aws_lc_analytics_stack.py | 28 +- tests/ci/cdk/cdk/aws_lc_android_ci_stack.py | 28 +- .../cdk/aws_lc_ec2_test_framework_ci_stack.py | 47 +-- tests/ci/cdk/cdk/aws_lc_github_ci_stack.py | 47 ++- .../ci/cdk/cdk/aws_lc_github_ci_x509_stack.py | 24 +- .../ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py | 31 +- tests/ci/cdk/cdk/bm_framework_stack.py | 2 +- .../codebuild/ec2_test_framework_omnibus.yaml | 8 +- tests/ci/cdk/cdk/components.py | 22 +- tests/ci/cdk/cdk/ecr_stack.py | 12 +- .../linux_docker_image_batch_build_stack.py | 43 ++- .../windows_docker_build_ssm_document.yaml | 8 + .../cdk/windows_docker_image_build_stack.py | 44 ++- tests/ci/cdk/pipeline/__init__.py | 0 tests/ci/cdk/pipeline/ci_stage.py | 238 +++++++++++++++ tests/ci/cdk/pipeline/codebuild_batch_step.py | 94 ++++++ tests/ci/cdk/pipeline/deploy_util.py | 6 + .../linux_docker_image_build_stage.py | 107 +++++++ tests/ci/cdk/pipeline/pipeline_stack.py | 289 ++++++++++++++++++ tests/ci/cdk/pipeline/scripts/build_target.sh | 168 ++++++++++ .../scripts/check_trigger_conditions.sh | 124 ++++++++ .../scripts/cleanup_orphaned_images.sh | 81 +++++ .../cdk/pipeline/scripts/finalize_images.sh | 115 +++++++ tests/ci/cdk/pipeline/scripts/util.sh | 149 +++++++++ tests/ci/cdk/pipeline/setup_stage.py | 80 +++++ .../windows_docker_image_build_stage.py | 100 ++++++ tests/ci/cdk/run-cdk.sh | 71 +++-- tests/ci/cdk/util/build_spec_loader.py | 15 +- tests/ci/cdk/util/ecr_util.py | 7 +- tests/ci/cdk/util/env_util.py | 9 +- tests/ci/cdk/util/iam_policies.py | 70 +++-- tests/ci/cdk/util/metadata.py | 29 +- .../docker_images/linux-aarch/build_images.sh | 2 + tests/ci/docker_images/linux-aarch/common.sh | 12 +- tests/ci/docker_images/linux-x86/common.sh | 12 +- 
.../ci/docker_images/windows/push_images.ps1 | 14 +- 38 files changed, 2012 insertions(+), 223 deletions(-) create mode 100644 tests/ci/cdk/pipeline/__init__.py create mode 100644 tests/ci/cdk/pipeline/ci_stage.py create mode 100644 tests/ci/cdk/pipeline/codebuild_batch_step.py create mode 100644 tests/ci/cdk/pipeline/deploy_util.py create mode 100644 tests/ci/cdk/pipeline/linux_docker_image_build_stage.py create mode 100644 tests/ci/cdk/pipeline/pipeline_stack.py create mode 100644 tests/ci/cdk/pipeline/scripts/build_target.sh create mode 100644 tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh create mode 100644 tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh create mode 100644 tests/ci/cdk/pipeline/scripts/finalize_images.sh create mode 100644 tests/ci/cdk/pipeline/scripts/util.sh create mode 100644 tests/ci/cdk/pipeline/setup_stage.py create mode 100644 tests/ci/cdk/pipeline/windows_docker_image_build_stage.py diff --git a/tests/ci/cdk/README.md b/tests/ci/cdk/README.md index c03b920a9d..e9883a7c05 100644 --- a/tests/ci/cdk/README.md +++ b/tests/ci/cdk/README.md @@ -63,6 +63,26 @@ To setup or update the CI in your account you will need the following IAM permis * secretsmanager:DeleteSecret * secretsmanager:GetSecretValue +### Pipeline Commands +Bootstrap pipeline account +``` +AWS_ACCOUNT_ID=183295444613 +PIPELINE_ACCOUNT_ID=774305600158 +cdk bootstrap aws://${PIPELINE_ACCOUNT_ID}/us-west-2 +``` + +Give pipeline account administrator access to deployment account's CloudFormation +``` +cdk bootstrap aws://${AWS_ACCOUNT_ID}/us-west-2 --trust ${PIPELINE_ACCOUNT_ID} --trust-for-lookup ${PIPELINE_ACCOUNT_ID} --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess +``` + +Deploy pipeline +``` +GITHUB_REPO_OWNER=nhatnghiho +GITHUB_SOURCE_VERSION=ci-pipeline +./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --github-source-version ${GITHUB_SOURCE_VERSION} --aws-account ${AWS_ACCOUNT_ID} --action invoke --command "cdk deploy 
AwsLcCiPipeline --require-approval never" +``` + ### Commands These commands are run from `aws-lc/tests/ci/cdk`. \ diff --git a/tests/ci/cdk/app.py b/tests/ci/cdk/app.py index 74f4e0352f..8db6537e67 100644 --- a/tests/ci/cdk/app.py +++ b/tests/ci/cdk/app.py @@ -12,49 +12,54 @@ from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack from cdk.aws_lc_ec2_test_framework_ci_stack import AwsLcEC2TestingCIStack from cdk.linux_docker_image_batch_build_stack import LinuxDockerImageBatchBuildStack +from pipeline.pipeline_stack import AwsLcCiPipeline from cdk.windows_docker_image_build_stack import WindowsDockerImageBuildStack from cdk.aws_lc_github_ci_x509_stack import AwsLcGitHubX509CIStack from cdk.ecr_stack import EcrStack -from util.metadata import AWS_ACCOUNT, AWS_REGION, LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO, WINDOWS_X86_ECR_REPO +from util.metadata import LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO, WINDOWS_X86_ECR_REPO, \ + PIPELINE_ACCOUNT, PIPELINE_REGION, DEPLOY_ACCOUNT, DEPLOY_REGION # Initialize app. app = App() -# Initialize env. -env = Environment(account=AWS_ACCOUNT, region=AWS_REGION) - -# Define AWS ECR stacks. -# ECR holds the docker images, which are pre-built to accelerate the code builds/tests of git pull requests. -EcrStack(app, "aws-lc-ecr-linux-x86", LINUX_X86_ECR_REPO, env=env) -EcrStack(app, "aws-lc-ecr-linux-aarch", LINUX_AARCH_ECR_REPO, env=env) -EcrStack(app, "aws-lc-ecr-windows-x86", WINDOWS_X86_ECR_REPO, env=env) - -# Define CodeBuild Batch job for building Docker images. -LinuxDockerImageBatchBuildStack(app, "aws-lc-docker-image-build-linux", env=env) - -# AWS CodeBuild cannot build Windows Docker images because DIND (Docker In Docker) is not supported on Windows. -# Windows Docker images are created by running commands in Windows EC2 instance. -WindowsDockerImageBuildStack(app, "aws-lc-docker-image-build-windows", env=env) - -# Define CodeBuild Batch job for testing code. 
-x86_build_spec_file = "cdk/codebuild/github_ci_linux_x86_omnibus.yaml" -AwsLcGitHubCIStack(app, "aws-lc-ci-linux-x86", x86_build_spec_file, env=env) -arm_build_spec_file = "cdk/codebuild/github_ci_linux_arm_omnibus.yaml" -AwsLcGitHubCIStack(app, "aws-lc-ci-linux-arm", arm_build_spec_file, env=env) -integration_build_spec_file = "cdk/codebuild/github_ci_integration_omnibus.yaml" -AwsLcGitHubCIStack(app, "aws-lc-ci-integration", integration_build_spec_file, env=env) -win_x86_build_spec_file = "cdk/codebuild/github_ci_windows_x86_omnibus.yaml" -AwsLcGitHubCIStack(app, "aws-lc-ci-windows-x86", win_x86_build_spec_file, env=env) -fuzz_build_spec_file = "cdk/codebuild/github_ci_fuzzing_omnibus.yaml" -AwsLcGitHubFuzzCIStack(app, "aws-lc-ci-fuzzing", fuzz_build_spec_file, env=env) -analytics_build_spec_file = "cdk/codebuild/github_ci_analytics_omnibus.yaml" -AwsLcGitHubAnalyticsStack(app, "aws-lc-ci-analytics", analytics_build_spec_file, env=env) -# bm_framework_build_spec_file = "cdk/codebuild/bm_framework_omnibus.yaml" -# BmFrameworkStack(app, "aws-lc-ci-bm-framework", bm_framework_build_spec_file, env=env) -ec2_test_framework_build_spec_file = "cdk/codebuild/ec2_test_framework_omnibus.yaml" -AwsLcEC2TestingCIStack(app, "aws-lc-ci-ec2-test-framework", ec2_test_framework_build_spec_file, env=env) -android_build_spec_file = "cdk/codebuild/github_ci_android_omnibus.yaml" -AwsLcAndroidCIStack(app, "aws-lc-ci-devicefarm-android", android_build_spec_file, env=env) -AwsLcGitHubX509CIStack(app, "aws-lc-ci-x509") +AwsLcCiPipeline(app, "AwsLcCiPipeline", env=Environment(account=PIPELINE_ACCOUNT, region=PIPELINE_REGION)) + +if DEPLOY_ACCOUNT is not None and DEPLOY_REGION is not None: + # Initialize env. + env = Environment(account=DEPLOY_ACCOUNT, region=DEPLOY_REGION) + + # Define AWS ECR stacks. + # ECR holds the docker images, which are pre-built to accelerate the code builds/tests of git pull requests. 
+ EcrStack(app, "aws-lc-ecr-linux-x86", LINUX_X86_ECR_REPO, env=env) + EcrStack(app, "aws-lc-ecr-linux-aarch", LINUX_AARCH_ECR_REPO, env=env) + EcrStack(app, "aws-lc-ecr-windows-x86", WINDOWS_X86_ECR_REPO, env=env) + + # Define CodeBuild Batch job for building Docker images. + LinuxDockerImageBatchBuildStack(app, "aws-lc-docker-image-build-linux", env=env) + + # AWS CodeBuild cannot build Windows Docker images because DIND (Docker In Docker) is not supported on Windows. + # Windows Docker images are created by running commands in Windows EC2 instance. + WindowsDockerImageBuildStack(app, "aws-lc-docker-image-build-windows", env=env) + + # Define CodeBuild Batch job for testing code. + x86_build_spec_file = "cdk/codebuild/github_ci_linux_x86_omnibus.yaml" + AwsLcGitHubCIStack(app, "aws-lc-ci-linux-x86", x86_build_spec_file, env=env) + arm_build_spec_file = "cdk/codebuild/github_ci_linux_arm_omnibus.yaml" + AwsLcGitHubCIStack(app, "aws-lc-ci-linux-arm", arm_build_spec_file, env=env) + integration_build_spec_file = "cdk/codebuild/github_ci_integration_omnibus.yaml" + AwsLcGitHubCIStack(app, "aws-lc-ci-integration", integration_build_spec_file, env=env) + win_x86_build_spec_file = "cdk/codebuild/github_ci_windows_x86_omnibus.yaml" + AwsLcGitHubCIStack(app, "aws-lc-ci-windows-x86", win_x86_build_spec_file, env=env) + fuzz_build_spec_file = "cdk/codebuild/github_ci_fuzzing_omnibus.yaml" + AwsLcGitHubFuzzCIStack(app, "aws-lc-ci-fuzzing", fuzz_build_spec_file, env=env) + analytics_build_spec_file = "cdk/codebuild/github_ci_analytics_omnibus.yaml" + AwsLcGitHubAnalyticsStack(app, "aws-lc-ci-analytics", analytics_build_spec_file, env=env) + # bm_framework_build_spec_file = "cdk/codebuild/bm_framework_omnibus.yaml" + # BmFrameworkStack(app, "aws-lc-ci-bm-framework", bm_framework_build_spec_file, env=env) + ec2_test_framework_build_spec_file = "cdk/codebuild/ec2_test_framework_omnibus.yaml" + AwsLcEC2TestingCIStack(app, "aws-lc-ci-ec2-test-framework", 
ec2_test_framework_build_spec_file, env=env) + android_build_spec_file = "cdk/codebuild/github_ci_android_omnibus.yaml" + AwsLcAndroidCIStack(app, "aws-lc-ci-devicefarm-android", android_build_spec_file, env=env) + AwsLcGitHubX509CIStack(app, "aws-lc-ci-x509", env=env) app.synth() diff --git a/tests/ci/cdk/cdk/aws_lc_analytics_stack.py b/tests/ci/cdk/cdk/aws_lc_analytics_stack.py index 216c33e4db..e52144d190 100644 --- a/tests/ci/cdk/cdk/aws_lc_analytics_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_analytics_stack.py @@ -1,12 +1,15 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 OR ISC +import typing -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_ec2 as ec2, aws_efs as efs +from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_ec2 as ec2, aws_efs as efs, \ + Environment from constructs import Construct from cdk.components import PruneStaleGitHubBuilds from util.iam_policies import code_build_publish_metrics_in_json -from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME +from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, \ + STAGING_GITHUB_REPO_NAME from util.build_spec_loader import BuildSpecLoader @@ -17,13 +20,22 @@ def __init__(self, scope: Construct, id: str, spec_file_path: str, + env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], **kwargs) -> None: - super().__init__(scope, id, **kwargs) + super().__init__(scope, id, env=env, **kwargs) + + # Define CodeBuild resource. + github_repo_owner = GITHUB_REPO_OWNER + github_repo_name = GITHUB_REPO_NAME + + if env.account == PRE_PROD_ACCOUNT: + github_repo_owner = STAGING_GITHUB_REPO_OWNER + github_repo_name = STAGING_GITHUB_REPO_NAME # Define CodeBuild resource. 
git_hub_source = codebuild.Source.git_hub( - owner=GITHUB_REPO_OWNER, - repo=GITHUB_REPO_NAME, + owner=github_repo_owner, + repo=github_repo_name, webhook=True, webhook_filters=[ codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH) @@ -34,7 +46,7 @@ def __init__(self, webhook_triggers_batch_build=True) # Define a IAM role for this stack. - metrics_policy = iam.PolicyDocument.from_json(code_build_publish_metrics_in_json()) + metrics_policy = iam.PolicyDocument.from_json(code_build_publish_metrics_in_json(env)) inline_policies = {"metric_policy": metrics_policy} role = iam.Role(scope=self, id="{}-role".format(id), @@ -52,7 +64,7 @@ def __init__(self, environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.LARGE, privileged=True, build_image=codebuild.LinuxBuildImage.STANDARD_4_0), - build_spec=BuildSpecLoader.load(spec_file_path)) + build_spec=BuildSpecLoader.load(spec_file_path, env)) analytics.enable_batch_builds() - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=analytics, ec2_permissions=False) + PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=analytics, ec2_permissions=False, env=env) diff --git a/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py index 9a3fc00a61..a548a26eed 100644 --- a/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py @@ -1,12 +1,14 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 OR ISC +import typing -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam +from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, Environment from constructs import Construct from cdk.components import PruneStaleGitHubBuilds from util.iam_policies import code_build_batch_policy_in_json, device_farm_access_policy_in_json -from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME, GITHUB_PUSH_CI_BRANCH_TARGETS +from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME, GITHUB_PUSH_CI_BRANCH_TARGETS, PRE_PROD_ACCOUNT, \ + STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME from util.build_spec_loader import BuildSpecLoader @@ -20,13 +22,21 @@ def __init__(self, scope: Construct, id: str, spec_file_path: str, + env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], **kwargs) -> None: - super().__init__(scope, id, **kwargs) + super().__init__(scope, id, env=env, **kwargs) + + github_repo_owner = GITHUB_REPO_OWNER + github_repo_name = GITHUB_REPO_NAME + + if env.account == PRE_PROD_ACCOUNT: + github_repo_owner = STAGING_GITHUB_REPO_OWNER + github_repo_name = STAGING_GITHUB_REPO_NAME # Define CodeBuild resource. git_hub_source = codebuild.Source.git_hub( - owner=GITHUB_REPO_OWNER, - repo=GITHUB_REPO_NAME, + owner=github_repo_owner, + repo=github_repo_name, webhook=True, webhook_filters=[ codebuild.FilterGroup.in_event_of( @@ -40,10 +50,10 @@ def __init__(self, # Define a IAM role for this stack. 
code_build_batch_policy = iam.PolicyDocument.from_json( - code_build_batch_policy_in_json([id]) + code_build_batch_policy_in_json([id], env) ) device_farm_policy = iam.PolicyDocument.from_json( - device_farm_access_policy_in_json() + device_farm_access_policy_in_json(env) ) inline_policies = {"code_build_batch_policy": code_build_batch_policy, "device_farm_policy": device_farm_policy} role = iam.Role(scope=self, @@ -62,7 +72,7 @@ def __init__(self, environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.SMALL, privileged=False, build_image=codebuild.LinuxBuildImage.STANDARD_4_0), - build_spec=BuildSpecLoader.load(spec_file_path)) + build_spec=BuildSpecLoader.load(spec_file_path, env)) project.enable_batch_builds() - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=False) + PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=False, env=env) diff --git a/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py index 0dccf5b02a..cd7533f1d7 100644 --- a/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py @@ -2,15 +2,19 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC import subprocess +import typing + import boto3 from botocore.exceptions import ClientError -from aws_cdk import CfnTag, Duration, Stack, Tags, aws_ec2 as ec2, aws_codebuild as codebuild, aws_iam as iam, aws_s3 as s3, aws_logs as logs +from aws_cdk import CfnTag, Duration, Stack, Tags, aws_ec2 as ec2, aws_codebuild as codebuild, aws_iam as iam, \ + aws_s3 as s3, aws_logs as logs, Environment from constructs import Construct from cdk.components import PruneStaleGitHubBuilds -from util.metadata import AWS_ACCOUNT, AWS_REGION, GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME, LINUX_AARCH_ECR_REPO, \ - LINUX_X86_ECR_REPO +from util.metadata import 
GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME, \ + LINUX_AARCH_ECR_REPO, \ + LINUX_X86_ECR_REPO, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME from util.iam_policies import code_build_batch_policy_in_json, ec2_policies_in_json, ssm_policies_in_json, s3_read_write_policy_in_json, ecr_power_user_policy_in_json from util.build_spec_loader import BuildSpecLoader @@ -23,13 +27,21 @@ def __init__(self, scope: Construct, id: str, spec_file_path: str, + env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], **kwargs) -> None: - super().__init__(scope, id, **kwargs) + super().__init__(scope, id, env=env, **kwargs) + + github_repo_owner = GITHUB_REPO_OWNER + github_repo_name = GITHUB_REPO_NAME + + if env.account == PRE_PROD_ACCOUNT: + github_repo_owner = STAGING_GITHUB_REPO_OWNER + github_repo_name = STAGING_GITHUB_REPO_NAME # Define CodeBuild resource. git_hub_source = codebuild.Source.git_hub( - owner=GITHUB_REPO_OWNER, - repo=GITHUB_REPO_NAME, + owner=github_repo_owner, + repo=github_repo_name, webhook=True, webhook_filters=[ codebuild.FilterGroup.in_event_of( @@ -43,7 +55,7 @@ def __init__(self, # S3 bucket for testing internal fixes. 
s3_read_write_policy = iam.PolicyDocument.from_json(s3_read_write_policy_in_json("aws-lc-codebuild")) - ecr_power_user_policy = iam.PolicyDocument.from_json(ecr_power_user_policy_in_json([LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO])) + ecr_power_user_policy = iam.PolicyDocument.from_json(ecr_power_user_policy_in_json([LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO], env)) ec2_inline_policies = {"s3_read_write_policy": s3_read_write_policy, "ecr_power_user_policy": ecr_power_user_policy} ec2_role = iam.Role(scope=self, id="{}-ec2-role".format(id), role_name="{}-ec2-role".format(id), @@ -62,16 +74,15 @@ def __init__(self, selected_subnets = vpc.select_subnets(subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS) # create security group with default rules - security_group = ec2.SecurityGroup(self, id="{}-ec2-sg".format(id), - allow_all_outbound=True, - vpc=vpc, - security_group_name='codebuild_ec2_sg') - + # security_group = ec2.SecurityGroup(self, id="{}-ec2-sg".format(id), + # allow_all_outbound=True, + # vpc=vpc, + # security_group_name='codebuild_ec2_sg') # Define a IAM role for this stack. 
- code_build_batch_policy = iam.PolicyDocument.from_json(code_build_batch_policy_in_json([id])) - ec2_policy = iam.PolicyDocument.from_json(ec2_policies_in_json(ec2_role.role_name, security_group.security_group_id, selected_subnets.subnets[0].subnet_id, vpc.vpc_id)) - ssm_policy = iam.PolicyDocument.from_json(ssm_policies_in_json()) + code_build_batch_policy = iam.PolicyDocument.from_json(code_build_batch_policy_in_json([id], env)) + ec2_policy = iam.PolicyDocument.from_json(ec2_policies_in_json(ec2_role.role_name, vpc.vpc_default_security_group, selected_subnets.subnets[0].subnet_id, vpc.vpc_id, env)) + ssm_policy = iam.PolicyDocument.from_json(ssm_policies_in_json(env)) codebuild_inline_policies = {"code_build_batch_policy": code_build_batch_policy, "ec2_policy": ec2_policy, "ssm_policy": ssm_policy} @@ -94,10 +105,10 @@ def __init__(self, environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.SMALL, privileged=False, build_image=codebuild.LinuxBuildImage.STANDARD_4_0), - build_spec=BuildSpecLoader.load(spec_file_path), + build_spec=BuildSpecLoader.load(spec_file_path, env), environment_variables= { "EC2_SECURITY_GROUP_ID": codebuild.BuildEnvironmentVariable( - value=security_group.security_group_id + value=vpc.vpc_default_security_group ), "EC2_SUBNET_ID": codebuild.BuildEnvironmentVariable( value=selected_subnets.subnets[0].subnet_id @@ -108,7 +119,7 @@ def __init__(self, }) project.enable_batch_builds() - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=True) + PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=True, env=env) # Define logs for SSM. 
log_group_name = "{}-cw-logs".format(id) diff --git a/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py index f3a262af3d..6789d9048b 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py @@ -1,12 +1,16 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 OR ISC +import typing -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_s3_assets, aws_logs as logs +from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_s3_assets, aws_logs as logs, \ + Environment from constructs import Construct from cdk.components import PruneStaleGitHubBuilds -from util.iam_policies import code_build_batch_policy_in_json, code_build_publish_metrics_in_json, code_build_cloudwatch_logs_policy_in_json -from util.metadata import CAN_AUTOLOAD, GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME +from util.iam_policies import code_build_batch_policy_in_json, code_build_publish_metrics_in_json, \ + code_build_cloudwatch_logs_policy_in_json, s3_read_policy_in_json +from util.metadata import GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME, \ + PIPELINE_ACCOUNT, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME from util.build_spec_loader import BuildSpecLoader @@ -17,13 +21,21 @@ def __init__(self, scope: Construct, id: str, spec_file_path: str, + env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], **kwargs) -> None: - super().__init__(scope, id, **kwargs) + super().__init__(scope, id, env=env, **kwargs) + + github_repo_owner = GITHUB_REPO_OWNER + github_repo_name = GITHUB_REPO_NAME + + if env.account == PRE_PROD_ACCOUNT: + github_repo_owner = STAGING_GITHUB_REPO_OWNER + github_repo_name = STAGING_GITHUB_REPO_NAME # Define CodeBuild resource. 
git_hub_source = codebuild.Source.git_hub( - owner=GITHUB_REPO_OWNER, - repo=GITHUB_REPO_NAME, + owner=github_repo_owner, + repo=github_repo_name, webhook=True, webhook_filters=[ codebuild.FilterGroup.in_event_of( @@ -40,18 +52,24 @@ def __init__(self, code_build_cloudwatch_logs_policy = iam.PolicyDocument.from_json( code_build_cloudwatch_logs_policy_in_json([log_group]) ) + s3_assets_policy = iam.PolicyDocument.from_json(s3_read_policy_in_json()) resource_access_role = iam.Role(scope=self, id="{}-resource-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + assumed_by=iam.CompositePrincipal( + iam.ServicePrincipal("codebuild.amazonaws.com"), + iam.ArnPrincipal(f'arn:aws:iam::{PIPELINE_ACCOUNT}:role/CrossAccountCodeBuildRole') + ), inline_policies={ - "code_build_cloudwatch_logs_policy": code_build_cloudwatch_logs_policy + "code_build_cloudwatch_logs_policy": code_build_cloudwatch_logs_policy, + "s3_assets_policy": s3_assets_policy }) # Define a IAM role for this stack. code_build_batch_policy = iam.PolicyDocument.from_json( - code_build_batch_policy_in_json([id]) + code_build_batch_policy_in_json([id], env) ) - metrics_policy = iam.PolicyDocument.from_json(code_build_publish_metrics_in_json()) + metrics_policy = iam.PolicyDocument.from_json(code_build_publish_metrics_in_json(env)) + inline_policies = {"code_build_batch_policy": code_build_batch_policy, "metrics_policy": metrics_policy, } @@ -66,6 +84,11 @@ def __init__(self, ) ) + # test = iam.Role(scope=self, + # id="test", + # assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + # inline_policies=inline_policies) + # Define CodeBuild. 
project = codebuild.Project( scope=self, @@ -78,10 +101,10 @@ def __init__(self, environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.SMALL, privileged=False, build_image=codebuild.LinuxBuildImage.STANDARD_4_0), - build_spec=BuildSpecLoader.load(spec_file_path)) + build_spec=BuildSpecLoader.load(spec_file_path, env=env)) cfn_project = project.node.default_child cfn_project.add_property_override("Visibility", "PUBLIC_READ") cfn_project.add_property_override("ResourceAccessRole", resource_access_role.role_arn) project.enable_batch_builds() - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=False) + PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=False, env=env) diff --git a/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py b/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py index a6e4faf400..c71bd372f8 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py @@ -1,10 +1,12 @@ -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_s3 as s3 +import typing + +from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_s3 as s3, Environment from constructs import Construct from util.build_spec_loader import BuildSpecLoader from util.metadata import ( GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_NAME, - GITHUB_REPO_OWNER, + GITHUB_REPO_OWNER, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME, ) @@ -13,13 +15,22 @@ def __init__( self, scope: Construct, id: str, + env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], **kwargs, ) -> None: - super().__init__(scope, id, **kwargs) + super().__init__(scope, id, env=env, **kwargs) + + github_repo_owner = GITHUB_REPO_OWNER + github_repo_name = GITHUB_REPO_NAME + + if env.account == PRE_PROD_ACCOUNT: + github_repo_owner = STAGING_GITHUB_REPO_OWNER + github_repo_name = STAGING_GITHUB_REPO_NAME + # 
Define CodeBuild resource. git_hub_source = codebuild.Source.git_hub( - owner=GITHUB_REPO_OWNER, - repo=GITHUB_REPO_NAME, + owner=github_repo_owner, + repo=github_repo_name, webhook=True, webhook_filters=[ codebuild.FilterGroup.in_event_of( @@ -78,7 +89,8 @@ def __init__( project_name=id, source=git_hub_source, build_spec=BuildSpecLoader.load( - "cdk/codebuild/github_ci_x509_omnibus.yaml" + "cdk/codebuild/github_ci_x509_omnibus.yaml", + env ), environment=codebuild.BuildEnvironment( build_image=codebuild.LinuxBuildImage.STANDARD_6_0, diff --git a/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py index 6f6e2d4d06..2cea259269 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py @@ -1,14 +1,17 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 OR ISC +import typing -from aws_cdk import Duration, Size, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_ec2 as ec2, aws_efs as efs +from aws_cdk import Duration, Size, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_ec2 as ec2, aws_efs as efs, \ + Environment from constructs import Construct from cdk.components import PruneStaleGitHubBuilds from util.ecr_util import ecr_arn from util.iam_policies import code_build_batch_policy_in_json, \ code_build_publish_metrics_in_json -from util.metadata import AWS_ACCOUNT, AWS_REGION, GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME +from util.metadata import GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME, \ + PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME from util.build_spec_loader import BuildSpecLoader @@ -19,13 +22,21 @@ def __init__(self, scope: Construct, id: str, spec_file_path: str, + env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], **kwargs) -> None: - super().__init__(scope, id, **kwargs) + 
super().__init__(scope, id, env=env, **kwargs) + + github_repo_owner = GITHUB_REPO_OWNER + github_repo_name = GITHUB_REPO_NAME + + if env.account == PRE_PROD_ACCOUNT: + github_repo_owner = STAGING_GITHUB_REPO_OWNER + github_repo_name = STAGING_GITHUB_REPO_NAME # Define CodeBuild resource. git_hub_source = codebuild.Source.git_hub( - owner=GITHUB_REPO_OWNER, - repo=GITHUB_REPO_NAME, + owner=github_repo_owner, + repo=github_repo_name, webhook=True, webhook_filters=[ codebuild.FilterGroup.in_event_of( @@ -39,9 +50,9 @@ def __init__(self, # Define a IAM role for this stack. code_build_batch_policy = iam.PolicyDocument.from_json( - code_build_batch_policy_in_json([id]) + code_build_batch_policy_in_json([id], env) ) - fuzz_policy = iam.PolicyDocument.from_json(code_build_publish_metrics_in_json()) + fuzz_policy = iam.PolicyDocument.from_json(code_build_publish_metrics_in_json(env)) inline_policies = {"code_build_batch_policy": code_build_batch_policy, "fuzz_policy": fuzz_policy} role = iam.Role(scope=self, @@ -108,7 +119,7 @@ def __init__(self, environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.LARGE, privileged=True, build_image=codebuild.LinuxBuildImage.STANDARD_4_0), - build_spec=BuildSpecLoader.load(spec_file_path), + build_spec=BuildSpecLoader.load(spec_file_path, env), vpc=fuzz_vpc, security_groups=[build_security_group]) fuzz_codebuild.enable_batch_builds() @@ -123,9 +134,9 @@ def __init__(self, cfn_codebuild = fuzz_codebuild.node.default_child cfn_codebuild.add_override("Properties.FileSystemLocations", [{ "Identifier": "fuzzing_root", - "Location": "%s.efs.%s.amazonaws.com:/" % (fuzz_filesystem.file_system_id, AWS_REGION), + "Location": "%s.efs.%s.amazonaws.com:/" % (fuzz_filesystem.file_system_id, env.region), "MountPoint": "/efs_fuzzing_root", "Type": "EFS" }]) - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=fuzz_codebuild, ec2_permissions=False) + PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", 
project=fuzz_codebuild, ec2_permissions=False, env=env) diff --git a/tests/ci/cdk/cdk/bm_framework_stack.py b/tests/ci/cdk/cdk/bm_framework_stack.py index c9a880ab45..1b59ac159a 100644 --- a/tests/ci/cdk/cdk/bm_framework_stack.py +++ b/tests/ci/cdk/cdk/bm_framework_stack.py @@ -9,7 +9,7 @@ from constructs import Construct from cdk.components import PruneStaleGitHubBuilds -from util.metadata import AWS_ACCOUNT, AWS_REGION, GITHUB_REPO_OWNER, GITHUB_REPO_NAME +from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME from util.iam_policies import code_build_batch_policy_in_json, ec2_bm_framework_policies_in_json, \ ssm_bm_framework_policies_in_json, ecr_power_user_policy_in_json from util.build_spec_loader import BuildSpecLoader diff --git a/tests/ci/cdk/cdk/codebuild/ec2_test_framework_omnibus.yaml b/tests/ci/cdk/cdk/codebuild/ec2_test_framework_omnibus.yaml index 69b8ea8292..355c2742e0 100644 --- a/tests/ci/cdk/cdk/codebuild/ec2_test_framework_omnibus.yaml +++ b/tests/ci/cdk/cdk/codebuild/ec2_test_framework_omnibus.yaml @@ -13,7 +13,7 @@ batch: type: LINUX_CONTAINER privileged-mode: false compute-type: BUILD_GENERAL1_SMALL - image: 620771051181.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest + image: 183295444613.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest variables: EC2_AMI: "ami-0c29a2c5cf69b5a9c" EC2_INSTANCE_TYPE: "c6g.2xlarge" @@ -26,7 +26,7 @@ batch: type: LINUX_CONTAINER privileged-mode: false compute-type: BUILD_GENERAL1_SMALL - image: 620771051181.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest + image: 183295444613.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest variables: EC2_AMI: "ami-0c29a2c5cf69b5a9c" EC2_INSTANCE_TYPE: "c6g.4xlarge" @@ -40,7 +40,7 @@ batch: type: LINUX_CONTAINER privileged-mode: false compute-type: 
BUILD_GENERAL1_SMALL - image: 620771051181.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest + image: 183295444613.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest variables: EC2_AMI: "ami-0c29a2c5cf69b5a9c" EC2_INSTANCE_TYPE: "r8g.2xlarge" @@ -53,7 +53,7 @@ batch: type: LINUX_CONTAINER privileged-mode: false compute-type: BUILD_GENERAL1_SMALL - image: 620771051181.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest + image: 183295444613.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest variables: EC2_AMI: "ami-0c29a2c5cf69b5a9c" EC2_INSTANCE_TYPE: "r8g.2xlarge" diff --git a/tests/ci/cdk/cdk/components.py b/tests/ci/cdk/cdk/components.py index dd4a7bfab5..5aacf19c3e 100644 --- a/tests/ci/cdk/cdk/components.py +++ b/tests/ci/cdk/cdk/components.py @@ -1,14 +1,22 @@ import pathlib -from aws_cdk import aws_codebuild as codebuild, aws_lambda as lambda_, aws_ecr as ecr, aws_secretsmanager as sm, \ +from aws_cdk import aws_codebuild as codebuild, aws_lambda as lambda_, aws_ecr_assets as ecr_assets, aws_secretsmanager as sm, \ aws_events as events, aws_events_targets as events_targets, aws_iam as iam, Duration from constructs import Construct -from util.metadata import AWS_REGION, AWS_ACCOUNT, GITHUB_REPO_OWNER, GITHUB_TOKEN_SECRET_NAME +from util.metadata import GITHUB_REPO_OWNER, GITHUB_TOKEN_SECRET_NAME class PruneStaleGitHubBuilds(Construct): - def __init__(self, scope: Construct, id: str, *, project: codebuild.IProject, ec2_permissions: bool) -> None: + def __init__( + self, + scope: Construct, + id: str, + *, + project: codebuild.IProject, + env, + ec2_permissions: bool + ) -> None: super().__init__(scope, id) github_token_secret = sm.Secret.from_secret_name_v2(scope=self, @@ -19,7 +27,9 @@ def __init__(self, scope: Construct, id: str, *, project: 
codebuild.IProject, ec id="LambdaFunction", code=lambda_.Code.from_asset_image( directory=str(pathlib.Path().joinpath("..", "lambda")), - target="purge-stale-builds"), + target="purge-stale-builds", + platform=ecr_assets.Platform.LINUX_AMD64 + ), handler=lambda_.Handler.FROM_IMAGE, runtime=lambda_.Runtime.FROM_IMAGE, environment={ @@ -46,7 +56,7 @@ def __init__(self, scope: Construct, id: str, *, project: codebuild.IProject, ec actions=[ "ec2:TerminateInstances", ], - resources=["arn:aws:ec2:{}:{}:instance/*".format(AWS_REGION, AWS_ACCOUNT)], + resources=["arn:aws:ec2:{}:{}:instance/*".format(env.region, env.account)], conditions={ "StringEquals": { "ec2:ResourceTag/ec2-framework-host": "ec2-framework-host" @@ -65,7 +75,7 @@ def __init__(self, scope: Construct, id: str, *, project: codebuild.IProject, ec "ssm:ListDocuments", "ssm:DeleteDocument", ], - resources=["arn:aws:ssm:{}:{}:*".format(AWS_REGION, AWS_ACCOUNT)])) + resources=["arn:aws:ssm:{}:{}:*".format(env.region, env.account)])) events.Rule(scope=self, id="PurgeEventRule", diff --git a/tests/ci/cdk/cdk/ecr_stack.py b/tests/ci/cdk/cdk/ecr_stack.py index ff8a4b54ed..e6afa5cb51 100644 --- a/tests/ci/cdk/cdk/ecr_stack.py +++ b/tests/ci/cdk/cdk/ecr_stack.py @@ -1,7 +1,7 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 OR ISC -from aws_cdk import Stack, aws_ecr as ecr, aws_iam as iam +from aws_cdk import Stack, Duration, aws_ecr as ecr, aws_iam as iam from constructs import Construct @@ -14,3 +14,13 @@ def __init__(self, scope: Construct, id: str, repo_name: str, **kwargs) -> None: repo = ecr.Repository(scope=self, id=id, repository_name=repo_name) repo.grant_pull_push(iam.ServicePrincipal("codebuild.amazonaws.com")) repo.grant_pull(iam.ArnPrincipal("arn:aws:iam::222961743098:role/scrutini-ecr")) + repo.add_lifecycle_rule( + description="Expire images older than 1 month", + max_image_age=Duration.days(30), + ) + + repo.add_lifecycle_rule( + description="Remove untagged images after 1 day", + tag_status=ecr.TagStatus.UNTAGGED, + max_image_age=Duration.days(1) + ) diff --git a/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py b/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py index c350d0a22e..184eb4e5ae 100644 --- a/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py +++ b/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py @@ -1,33 +1,54 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 OR ISC +import typing -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_ec2 as ec2 +from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_ec2 as ec2, Environment from constructs import Construct -from util.metadata import AWS_ACCOUNT, GITHUB_REPO_OWNER, GITHUB_REPO_NAME, GITHUB_SOURCE_VERSION, LINUX_AARCH_ECR_REPO, \ - LINUX_X86_ECR_REPO +from util.metadata import ( + GITHUB_REPO_OWNER, + GITHUB_REPO_NAME, + GITHUB_SOURCE_VERSION, + LINUX_AARCH_ECR_REPO, + LINUX_X86_ECR_REPO, + PRE_PROD_ACCOUNT, + STAGING_GITHUB_REPO_OWNER, + STAGING_GITHUB_REPO_NAME +) from util.iam_policies import code_build_batch_policy_in_json, ecr_power_user_policy_in_json from util.yml_loader import YmlLoader class LinuxDockerImageBatchBuildStack(Stack): - """Define a temporary stack used to batch build Linux Docker images. After build, this stack will be destroyed.""" + """Define a temporary stack used to batch build Linux Docker images.""" - def __init__(self, scope: Construct, id: str, **kwargs) -> None: - super().__init__(scope, id, **kwargs) + def __init__( + self, + scope: Construct, + id: str, + env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + **kwargs) -> None: + super().__init__(scope, id, env=env, **kwargs) + + github_repo_owner = GITHUB_REPO_OWNER + github_repo_name = GITHUB_REPO_NAME + + if env.account == PRE_PROD_ACCOUNT: + github_repo_owner = STAGING_GITHUB_REPO_OWNER + github_repo_name = STAGING_GITHUB_REPO_NAME # Define CodeBuild resource. git_hub_source = codebuild.Source.git_hub( - owner=GITHUB_REPO_OWNER, - repo=GITHUB_REPO_NAME, + owner=github_repo_owner, + repo=github_repo_name, webhook=False, branch_or_ref=GITHUB_SOURCE_VERSION, clone_depth=1) # Define a role. 
- code_build_batch_policy = iam.PolicyDocument.from_json(code_build_batch_policy_in_json([id])) + code_build_batch_policy = iam.PolicyDocument.from_json(code_build_batch_policy_in_json([id], env)) ecr_repo_names = [LINUX_AARCH_ECR_REPO, LINUX_X86_ECR_REPO] - ecr_power_user_policy = iam.PolicyDocument.from_json(ecr_power_user_policy_in_json(ecr_repo_names)) + ecr_power_user_policy = iam.PolicyDocument.from_json(ecr_power_user_policy_in_json(ecr_repo_names, env)) inline_policies = {"code_build_batch_policy": code_build_batch_policy, "ecr_power_user_policy": ecr_power_user_policy} role = iam.Role(scope=self, @@ -40,7 +61,7 @@ def __init__(self, scope: Construct, id: str, **kwargs) -> None: # Define environment variables. environment_variables = { - "AWS_ACCOUNT_ID": codebuild.BuildEnvironmentVariable(value=AWS_ACCOUNT), + "AWS_ACCOUNT_ID": codebuild.BuildEnvironmentVariable(value=env.account), "AWS_ECR_REPO_X86": codebuild.BuildEnvironmentVariable(value=LINUX_X86_ECR_REPO), "AWS_ECR_REPO_AARCH": codebuild.BuildEnvironmentVariable(value=LINUX_AARCH_ECR_REPO), "GITHUB_REPO_OWNER": codebuild.BuildEnvironmentVariable(value=GITHUB_REPO_OWNER), diff --git a/tests/ci/cdk/cdk/ssm/windows_docker_build_ssm_document.yaml b/tests/ci/cdk/cdk/ssm/windows_docker_build_ssm_document.yaml index b5288d9370..94ac25ef36 100644 --- a/tests/ci/cdk/cdk/ssm/windows_docker_build_ssm_document.yaml +++ b/tests/ci/cdk/cdk/ssm/windows_docker_build_ssm_document.yaml @@ -10,6 +10,10 @@ mainSteps: inputs: timeoutSeconds: '7200' runCommand: + - $ErrorActionPreference = 'Stop' + - $TRIGGER_TYPE = '{{ TriggerType }}' + - "Get-ChildItem Env: | Sort-Object Name" + - rm -Recurse -Force docker-images - mkdir docker-images - cd docker-images - Set-ExecutionPolicy Bypass -Scope Process -Force; [Net.ServicePointManager]::SecurityProtocol = [Net.ServicePointManager]::SecurityProtocol -bor [Net.SecurityProtocolType]::Tls12; $env:chocolateyUseWindowsCompression = 'true'; 
[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1')) | Out-Null @@ -23,3 +27,7 @@ mainSteps: - Invoke-Expression -Command (Get-ECRLoginCommand -Region REGION_PLACEHOLDER).Command - .\build_images.ps1 - .\push_images.ps1 ECR_PLACEHOLDER +parameters: + TriggerType: + type: String + description: Specifies how this command was initiated. diff --git a/tests/ci/cdk/cdk/windows_docker_image_build_stack.py b/tests/ci/cdk/cdk/windows_docker_image_build_stack.py index fa1079c7ed..4b6cc8affb 100644 --- a/tests/ci/cdk/cdk/windows_docker_image_build_stack.py +++ b/tests/ci/cdk/cdk/windows_docker_image_build_stack.py @@ -1,5 +1,6 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 OR ISC +import typing from aws_cdk import ( Stack, @@ -7,18 +8,16 @@ aws_ec2 as ec2, aws_s3 as s3, aws_iam as iam, - aws_ssm as ssm, + aws_ssm as ssm, PhysicalName, CfnOutput, CfnParameter, Environment ) from constructs import Construct + from util.iam_policies import ( ecr_power_user_policy_in_json, s3_read_write_policy_in_json, ) from util.metadata import ( - AWS_ACCOUNT, - AWS_REGION, WINDOWS_X86_ECR_REPO, - S3_BUCKET_NAME, GITHUB_REPO_OWNER, WIN_EC2_TAG_KEY, WIN_EC2_TAG_VALUE, @@ -29,46 +28,60 @@ class WindowsDockerImageBuildStack(Stack): - """Define a temporary stack used to build Windows Docker images. After build, this stack will be destroyed.""" + """Define a temporary stack used to build Windows Docker images.""" - def __init__(self, scope: Construct, id: str, **kwargs) -> None: - super().__init__(scope, id, **kwargs) + def __init__( + self, + scope: Construct, + id: str, + env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + **kwargs) -> None: + super().__init__( + scope, + id, + env=env, + **kwargs + ) # Define SSM command document. 
+ # ecr_uri = ecr_windows_x86.ecr_repo.repository_uri ecr_repo = "{}.dkr.ecr.{}.amazonaws.com/{}".format( - AWS_ACCOUNT, AWS_REGION, WINDOWS_X86_ECR_REPO + env.account, env.region, WINDOWS_X86_ECR_REPO ) + placeholder_map = { "ECR_PLACEHOLDER": ecr_repo, "GITHUB_OWNER_PLACEHOLDER": GITHUB_REPO_OWNER, - "REGION_PLACEHOLDER": AWS_REGION, + "REGION_PLACEHOLDER": env.region, "GITHUB_SOURCE_VERSION_PLACEHOLDER": GITHUB_SOURCE_VERSION, } content = YmlLoader.load( "./cdk/ssm/windows_docker_build_ssm_document.yaml", placeholder_map ) + ssm.CfnDocument( scope=self, id="{}-ssm-document".format(id), name=SSM_DOCUMENT_NAME, content=content, document_type="Command", + update_method="NewVersion", ) # Define a S3 bucket to store windows docker files and build scripts. - s3.Bucket( + bucket = s3.Bucket( scope=self, id="{}-s3".format(id), - bucket_name=S3_BUCKET_NAME, + bucket_name=PhysicalName.GENERATE_IF_NEEDED, block_public_access=s3.BlockPublicAccess.BLOCK_ALL, ) # Define a role for EC2. ecr_power_user_policy = iam.PolicyDocument.from_json( - ecr_power_user_policy_in_json([WINDOWS_X86_ECR_REPO]) + ecr_power_user_policy_in_json([WINDOWS_X86_ECR_REPO], env) ) s3_read_write_policy = iam.PolicyDocument.from_json( - s3_read_write_policy_in_json(S3_BUCKET_NAME) + s3_read_write_policy_in_json(bucket.bucket_name) ) inline_policies = { "ecr_power_user_policy": ecr_power_user_policy, @@ -119,6 +132,11 @@ def __init__(self, scope: Construct, id: str, **kwargs) -> None: vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PUBLIC), machine_image=machine_image, user_data=setup_user_data, + instance_name="{}-instance".format(id) ) Tags.of(instance).add(WIN_EC2_TAG_KEY, WIN_EC2_TAG_VALUE) + + self.output = { + "s3_bucket_name": bucket.bucket_name, + } diff --git a/tests/ci/cdk/pipeline/__init__.py b/tests/ci/cdk/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/ci/cdk/pipeline/ci_stage.py b/tests/ci/cdk/pipeline/ci_stage.py new file mode 100644 
index 0000000000..9906b3ad3b --- /dev/null +++ b/tests/ci/cdk/pipeline/ci_stage.py @@ -0,0 +1,238 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 OR ISC + +from aws_cdk import Stage, Environment, Duration, pipelines, aws_iam as iam, Stack +from constructs import Construct + +from cdk.aws_lc_analytics_stack import AwsLcGitHubAnalyticsStack +from cdk.aws_lc_android_ci_stack import AwsLcAndroidCIStack +from cdk.aws_lc_ec2_test_framework_ci_stack import AwsLcEC2TestingCIStack +from cdk.aws_lc_github_ci_stack import AwsLcGitHubCIStack +from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack +from pipeline.codebuild_batch_step import BatchBuildTargetOptions, CodeBuildBatchStep + + +class CiStage(Stage): + def __init__( + self, + scope: Construct, + id, + pipeline_environment, + deploy_environment, + **kwargs + ): + super().__init__( + scope, + id, + env=pipeline_environment, + **kwargs, + ) + + self.build_targets = [] + + # Define CodeBuild Batch job for testing code. 
+ x86_build_spec_file = "cdk/codebuild/github_ci_linux_x86_omnibus.yaml" + self.ci_linux_x86_stack = AwsLcGitHubCIStack( + self, + "aws-lc-ci-linux-x86", + x86_build_spec_file, + env=deploy_environment, + stack_name="aws-lc-ci-linux-x86", + ) + self.build_targets.append(BatchBuildTargetOptions( + target="aws-lc-ci-linux-x86", + ignore_failure=False, + )) + + arm_stack_name = "aws-lc-ci-linux-arm" + arm_build_spec_file = "cdk/codebuild/github_ci_linux_arm_omnibus.yaml" + self.ci_linux_aarch_stack = AwsLcGitHubCIStack( + self, + arm_stack_name, + arm_build_spec_file, + env=deploy_environment, + stack_name=arm_stack_name, + ) + self.build_targets.append(BatchBuildTargetOptions( + target="aws-lc-ci-linux-arm", + ignore_failure=False, + )) + + integration_build_spec_file = "cdk/codebuild/github_ci_integration_omnibus.yaml" + self.ci_integration_stack = AwsLcGitHubCIStack( + self, + "aws-lc-ci-integration", + integration_build_spec_file, + env=deploy_environment, + stack_name="aws-lc-ci-integration", + ) + self.build_targets.append(BatchBuildTargetOptions( + target="aws-lc-ci-integration", + ignore_failure=True, + )) + + fuzz_build_spec_file = "cdk/codebuild/github_ci_fuzzing_omnibus.yaml" + self.ci_fuzzing_stack = AwsLcGitHubFuzzCIStack( + self, + "aws-lc-ci-fuzzing", + fuzz_build_spec_file, + env=deploy_environment, + stack_name="aws-lc-ci-fuzzing", + ) + self.build_targets.append(BatchBuildTargetOptions( + target="aws-lc-ci-fuzzing", + ignore_failure=False, + )) + + analytics_build_spec_file = "cdk/codebuild/github_ci_analytics_omnibus.yaml" + self.ci_analytics_stack = AwsLcGitHubAnalyticsStack( + self, + "aws-lc-ci-analytics", + analytics_build_spec_file, + env=deploy_environment, + stack_name="aws-lc-ci-analytics", + ) + self.build_targets.append(BatchBuildTargetOptions( + target="aws-lc-ci-analytics", + ignore_failure=True, + )) + + # bm_framework_build_spec_file = "cdk/codebuild/bm_framework_omnibus.yaml" + # BmFrameworkStack(app, "aws-lc-ci-bm-framework", 
bm_framework_build_spec_file, env=env) + ec2_test_framework_build_spec_file = ( + "cdk/codebuild/ec2_test_framework_omnibus.yaml" + ) + self.ci_ec2_test_framework_stack = AwsLcEC2TestingCIStack( + self, + "aws-lc-ci-ec2-test-framework", + ec2_test_framework_build_spec_file, + env=deploy_environment, + stack_name="aws-lc-ci-ec2-test-framework", + ) + self.build_targets.append(BatchBuildTargetOptions( + target="aws-lc-ci-ec2-test-framework", + ignore_failure=True, + )) + + android_build_spec_file = "cdk/codebuild/github_ci_android_omnibus.yaml" + self.ci_android_stack = AwsLcAndroidCIStack( + self, + "aws-lc-ci-devicefarm-android", + android_build_spec_file, + env=deploy_environment, + stack_name="aws-lc-ci-devicefarm-android", + ) + self.build_targets.append(BatchBuildTargetOptions( + target="aws-lc-ci-devicefarm-android", + ignore_failure=False, + )) + + win_x86_build_spec_file = "cdk/codebuild/github_ci_windows_x86_omnibus.yaml" + self.ci_windows_x86_stack = AwsLcGitHubCIStack( + self, + "aws-lc-ci-windows-x86", + win_x86_build_spec_file, + env=deploy_environment, + stack_name="aws-lc-ci-windows-x86", + ) + self.build_targets.append(BatchBuildTargetOptions( + target="aws-lc-ci-windows-x86", + ignore_failure=False, + )) + + @property + def stacks(self): + return [child for child in self.node.children if isinstance(child, Stack)] + + def add_stage_to_pipeline( + self, + pipeline: pipelines.CodePipeline, + input: pipelines.FileSet, + role: iam.Role, + max_retry: int=2, + env={}, + ): + stack_names = [stack.stack_name for stack in self.stacks] + + prebuild_check_step = pipelines.CodeBuildStep( + "PrebuildCheck", + input=input, + commands=[ + "cd tests/ci/cdk/pipeline/scripts", + "chmod +x check_trigger_conditions.sh", + "trigger_conditions=$(./check_trigger_conditions.sh --build-type ci --stacks \"${STACKS}\")", + "export NEED_REBUILD=$(echo $trigger_conditions | sed -n 's/.*\(NEED_REBUILD=[0-9]*\).*/\\1/p' | cut -d'=' -f2 )" + ], + env={ + **env, + "STACKS": " 
".join(stack_names), + }, + role=role, + timeout=Duration.minutes(180) + # project_name=f"{self.stage_name}-PrebuildCheck" + ) + + batch_build_jobs = { + "build-list": [ + { + "identifier": options.identifier, + "ignore-failure": options.ignore_failure, + "env": { + "variables": { + "PROJECT": options.target, + "TIMEOUT": options.timeout, + **options.env, + } + } + } + for options in self.build_targets + ] + } + + ci_run_step = CodeBuildBatchStep( + f"BuildStep", + action_name="StartWait", + input=input, + commands=[ + "cd tests/ci/cdk/pipeline/scripts", + "chmod +x build_target.sh", + "./build_target.sh --build-type ci --project ${PROJECT} --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}" + ], + role=role, + partial_batch_buildspec=batch_build_jobs, + env={ + **env, + "MAX_RETRY": max_retry, + "NEED_REBUILD": prebuild_check_step.exported_variable("NEED_REBUILD") + }, + ) + + ci_run_step.add_step_dependency(prebuild_check_step) + + # pipeline.add_stage( + # self, + # post=[ + # CodeBuildRunStep( + # f"{self.stage_name}-BuildStep", + # name_prefix=self.stage_name, + # input=input, + # role=role, + # stacks=[stack.stack_name for stack in self.stacks], + # build_targets=self.build_targets, + # max_retry=max_retry, + # env=env, + # ) + # ] + # ) + + pipeline.add_stage( + self, + post=[ + prebuild_check_step, + ci_run_step + ] + ) + + + + diff --git a/tests/ci/cdk/pipeline/codebuild_batch_step.py b/tests/ci/cdk/pipeline/codebuild_batch_step.py new file mode 100644 index 0000000000..472574cfca --- /dev/null +++ b/tests/ci/cdk/pipeline/codebuild_batch_step.py @@ -0,0 +1,94 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 OR ISC +import builtins +import re +import typing +from typing import Mapping + +import jsii +from aws_cdk import ( + Duration, + pipelines, + aws_codepipeline_actions as cp_actions, + aws_codebuild as codebuild, + aws_codepipeline as codepipeline, + aws_iam as iam +) + +class BatchBuildTargetOptions: + def __init__( + self, + target: str, + identifier: str = None, + ignore_failure: bool = False, + timeout: int = 180, + env: Mapping[str, str] = {} + ): + self.target = target + self.identifier = identifier or re.sub(r'[^a-zA-Z0-9]', '_', target) + self.ignore_failure = ignore_failure + self.timeout = timeout + self.env = env + +@jsii.implements(pipelines.ICodePipelineActionFactory) +class CodeBuildBatchStep(pipelines.Step): + def __init__(self, + id, + # input: pipelines.IFileSetProducer, + input: pipelines.FileSet, + action_name: str, + commands: list[str], + role: iam.Role, + partial_batch_buildspec: typing.Mapping[builtins.str, typing.Any], + env: Mapping[str, str] = {}): + super().__init__(id) + + self._discover_referenced_outputs(env) + + self.input = input + self.action_name = action_name + self.commands = commands + self.partial_batch_buildspec = partial_batch_buildspec + self.role = role + self.env = { + key: codebuild.BuildEnvironmentVariable(value=value) + for key, value in env.items() + } + + @jsii.member(jsii_name="produceAction") + def produce_action( + self, + stage: codepipeline.IStage, + options: pipelines.ProduceActionOptions, + ) -> pipelines.CodePipelineActionFactoryResult: + build_target_project = codebuild.PipelineProject( + options.scope, + "StartWait", + build_spec=codebuild.BuildSpec.from_object({ + "version": 0.2, + "batch": self.partial_batch_buildspec, + "phases": { + "build": { + "commands": self.commands + } + } + }), + role=self.role, + timeout=Duration.minutes(180) + ) + + build_target_action = cp_actions.CodeBuildAction( + action_name=self.action_name, + # 
input=artifacts.to_code_pipeline(self.input.primary_output), + input=options.artifacts.to_code_pipeline(self.input), + run_order=options.run_order, + project=build_target_project, + execute_batch_build=True, + environment_variables=self.env + ) + + stage.add_action(build_target_action) + + return pipelines.CodePipelineActionFactoryResult( + run_orders_consumed=1 + ) \ No newline at end of file diff --git a/tests/ci/cdk/pipeline/deploy_util.py b/tests/ci/cdk/pipeline/deploy_util.py new file mode 100644 index 0000000000..d283b80d51 --- /dev/null +++ b/tests/ci/cdk/pipeline/deploy_util.py @@ -0,0 +1,6 @@ +from enum import Enum + +class DeployEnvironmentType(Enum): + PRE_PROD="Staging" + PROD="Prod" + DEV="Dev" \ No newline at end of file diff --git a/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py b/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py new file mode 100644 index 0000000000..832ccd967c --- /dev/null +++ b/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py @@ -0,0 +1,107 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 OR ISC + +from aws_cdk import Stage, Environment, Stack, Duration, aws_iam as iam, pipelines +from aws_cdk.pipelines import CodeBuildStep +from constructs import Construct + +from cdk.ecr_stack import EcrStack +from cdk.linux_docker_image_batch_build_stack import LinuxDockerImageBatchBuildStack +from pipeline.deploy_util import DeployEnvironmentType +from util.metadata import LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO + + +class LinuxDockerImageBuildStage(Stage): + def __init__( + self, + scope: Construct, + id, + pipeline_environment, + deploy_environment, + **kwargs + ): + super().__init__( + scope, + id, + env=pipeline_environment, + **kwargs, + ) + + # Define AWS ECR stacks. + # ECR holds the docker images, which are pre-built to accelerate the code builds/tests of git pull requests. 
+ self.ecr_linux_x86_stack = EcrStack( + self, + "aws-lc-ecr-linux-x86", + LINUX_X86_ECR_REPO, + env=deploy_environment, + stack_name="aws-lc-ecr-linux-x86" + ) + self.ecr_linux_aarch_stack = EcrStack( + self, + "aws-lc-ecr-linux-aarch", + LINUX_AARCH_ECR_REPO, + env=deploy_environment, + stack_name="aws-lc-ecr-linux-aarch" + ) + + # Define CodeBuild Batch job for building Docker images. + self.linux_docker_build_stack = LinuxDockerImageBatchBuildStack( + self, + "aws-lc-docker-image-build-linux", + env=deploy_environment, + stack_name="aws-lc-docker-image-build-linux" + ) + self.linux_docker_build_stack.add_dependency(self.ecr_linux_x86_stack) + self.linux_docker_build_stack.add_dependency(self.ecr_linux_aarch_stack) + + self.ecr_repo_names = [LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO] + self.need_rebuild = None + + @property + def stacks(self): + return [child for child in self.node.children if isinstance(child, Stack)] + + def add_stage_to_wave( + self, + wave: pipelines.Wave, + input: pipelines.FileSet, + role: iam.Role, + max_retry: int=2, + additional_stacks: list[Stack]=[], + env={}, + ): + stacks = self.stacks + additional_stacks + stack_names = [stack.stack_name for stack in stacks] + + docker_build_step = CodeBuildStep( + "StartWait", + input=input, + commands=[ + "cd tests/ci/cdk/pipeline/scripts", + "chmod +x cleanup_orphaned_images.sh check_trigger_conditions.sh build_target.sh", + "./cleanup_orphaned_images.sh --repos \"${ECR_REPOS}\"", + "trigger_conditions=$(./check_trigger_conditions.sh --build-type docker --platform linux --stacks \"${STACKS}\")", + "export NEED_REBUILD=$(echo $trigger_conditions | sed -n -e 's/.*\(NEED_REBUILD=[0-9]*\).*/\\1/p' | cut -d'=' -f2 )", + "./build_target.sh --build-type docker --platform linux --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}" + ], + env={ + **env, + "STACKS": " ".join(stack_names), + "ECR_REPOS": " ".join(self.ecr_repo_names), + "MAX_RETRY": str(max_retry), + "TIMEOUT": str(180), # 3 hours + }, + 
role=role, + timeout=Duration.minutes(180) + # project_name=f"{self.stage_name}-StartWait" + ) + + wave.add_stage( + self, + post=[ + docker_build_step + ] + ) + + self.need_rebuild = docker_build_step.exported_variable("NEED_REBUILD") + diff --git a/tests/ci/cdk/pipeline/pipeline_stack.py b/tests/ci/cdk/pipeline/pipeline_stack.py new file mode 100644 index 0000000000..07db4ce8be --- /dev/null +++ b/tests/ci/cdk/pipeline/pipeline_stack.py @@ -0,0 +1,289 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 OR ISC + +from aws_cdk import Stack, Environment, Duration +from aws_cdk import ( + pipelines, + aws_codestarconnections as codestarconnections, + aws_codepipeline as codepipeline, + aws_iam as iam, + aws_events as events, + aws_events_targets as targets, + aws_cloudwatch as cloudwatch +) +from aws_cdk.pipelines import CodeBuildStep +from constructs import Construct + +from pipeline.ci_stage import CiStage +from pipeline.deploy_util import DeployEnvironmentType +from pipeline.linux_docker_image_build_stage import LinuxDockerImageBuildStage +from pipeline.setup_stage import SetupStage +from pipeline.windows_docker_image_build_stage import WindowsDockerImageBuildStage +from util.metadata import * + +class AwsLcCiPipeline(Stack): + def __init__( + self, + scope: Construct, + id: str, + **kwargs, + ) -> None: + super().__init__( + scope, + id, + **kwargs, + ) + + gh_connection = codestarconnections.CfnConnection( + self, + "GitHubConnection", + connection_name="AwsLcCiPipelineGitHubConnection", + provider_type="GitHub", + ) + + cross_account_role = iam.Role( + self, + "CrossAccountPipelineRole", + role_name="CrossAccountPipelineRole", + assumed_by=iam.CompositePrincipal( + iam.ServicePrincipal("codebuild.amazonaws.com"), + iam.ServicePrincipal("codepipeline.amazonaws.com") + ), + ) + + cross_account_role.add_to_policy( + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + resources=['*'], + 
actions=["codepipeline:GetPipelineExecution"], + ) + ) + + source = pipelines.CodePipelineSource.connection( + f"{GITHUB_REPO_OWNER}/{GITHUB_REPO_NAME}", + GITHUB_SOURCE_VERSION, + connection_arn=gh_connection.attr_connection_arn, + code_build_clone_output=True, + ) + + # Create a base pipeline to upgrade the default pipeline type + base_pipeline = codepipeline.Pipeline( + self, + "AwsLcCiPipeline", + execution_mode=codepipeline.ExecutionMode.QUEUED, + pipeline_type=codepipeline.PipelineType.V2, + pipeline_name="AwsLcCiPipeline", + cross_account_keys=True, + enable_key_rotation=True, + restart_execution_on_update=True + ) + + # Bucket contains artifacts from old pipeline executions + # These artifacts are kept for 60 days in case we need to do a rollback + base_pipeline.artifact_bucket.add_lifecycle_rule( + enabled=True, + expiration=Duration.days(60), + ) + + cdk_env = { + "GITHUB_REPO_OWNER": GITHUB_REPO_OWNER, + "GITHUB_REPO_NAME": GITHUB_REPO_NAME, + "GITHUB_SOURCE_VERSION": GITHUB_SOURCE_VERSION, + "GITHUB_TOKEN_SECRET_NAME": GITHUB_TOKEN_SECRET_NAME, + "PIPELINE_ACCOUNT": PIPELINE_ACCOUNT, + "PIPELINE_REGION": PIPELINE_REGION, + "WIN_EC2_TAG_KEY": WIN_EC2_TAG_KEY, + "WIN_EC2_TAG_VALUE": WIN_EC2_TAG_VALUE, + "WIN_DOCKER_BUILD_SSM_DOCUMENT": SSM_DOCUMENT_NAME, + "LINUX_AARCH_ECR_REPO": LINUX_AARCH_ECR_REPO, + "LINUX_X86_ECR_REPO": LINUX_X86_ECR_REPO, + "WINDOWS_X86_ECR_REPO": WINDOWS_X86_ECR_REPO, + "IS_DEV": str(IS_DEV) + } + + if DEPLOY_ACCOUNT is not None and DEPLOY_REGION is not None: + cdk_env["DEPLOY_ACCOUNT"] = DEPLOY_ACCOUNT + cdk_env["DEPLOY_REGION"] = DEPLOY_REGION + + pipeline = pipelines.CodePipeline( + self, + "CdkPipeline", + code_pipeline=base_pipeline, + # pipeline_name="AwsLcCiPipeline", + synth=pipelines.ShellStep( + "Synth", + input=source, + commands=[ + "echo \"Environment variables:\"", + "env", + "npm install -g aws-cdk", + "cd tests/ci", + "python -m pip install -r requirements.txt", + "cd cdk", + "cdk synth" + ], + env=cdk_env, + 
primary_output_directory="tests/ci/cdk/cdk.out", + ), + self_mutation=True, + code_build_defaults=pipelines.CodeBuildOptions( + role_policy=[ + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + resources=["*"], + actions=["sts:AssumeRole"], + conditions={ + "StringEquals": { + "iam:ResourceTag/aws-cdk:bootstrap-role": "lookup", + } + } + ), + ], + ), + ) + + if IS_DEV: + self.deploy_to_environment( + DeployEnvironmentType.DEV, + pipeline=pipeline, + source=source, + cross_account_role=cross_account_role, + ) + else: + self.deploy_to_environment( + DeployEnvironmentType.PRE_PROD, + pipeline=pipeline, + source=source, + cross_account_role=cross_account_role, + ) + + #TODO: add prod env + + pipeline.build_pipeline() + + # Schedule pipeline to run every Tuesday 15:00 UTC or 7:00 PST + events.Rule( + self, "WeeklyCodePipelineRun", + schedule=events.Schedule.cron( + minute="0", + hour="15", + # weekday="TUE", #TODO: Uncomment this line. It's running everyday now to make sure I didn't break anything + ), + targets=[ + targets.CodePipeline( + pipeline=base_pipeline + ) + ] + ) + + def deploy_to_environment( + self, + deploy_environment_type: DeployEnvironmentType, + pipeline: pipelines.CodePipeline, + source: pipelines.CodePipelineSource, + cross_account_role: iam.Role, + codebuild_environment_variables = {}, + ): + pipeline_environment = Environment(account=PIPELINE_ACCOUNT, region=PIPELINE_REGION) + + if deploy_environment_type == DeployEnvironmentType.PRE_PROD: + deploy_environment = Environment(account=PRE_PROD_ACCOUNT, region=PRE_PROD_REGION) + elif deploy_environment_type == DeployEnvironmentType.DEV: + deploy_environment = Environment(account=DEPLOY_ACCOUNT, region=DEPLOY_REGION) + else: + deploy_environment = Environment(account=PROD_ACCOUNT, region=PROD_REGION) + + codebuild_environment_variables = { + **codebuild_environment_variables, + "PIPELINE_EXECUTION_ID": "#{codepipeline.PipelineExecutionId}", + "DEPLOY_ACCOUNT": deploy_environment.account, + 
"DEPLOY_REGION": deploy_environment.region + } + + cross_account_role.add_to_policy( + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + resources=[ + f'arn:aws:iam::{deploy_environment.account}:role/CrossAccountCodeBuildRole' + ], + actions=["sts:AssumeRole"], + ) + ) + + setup_stage = SetupStage( + self, + f"{deploy_environment_type.value}-Setup", + pipeline_environment=pipeline_environment, + deploy_environment=deploy_environment, + ) + + pipeline.add_stage(setup_stage) + + docker_build_wave = pipeline.add_wave(f"{deploy_environment_type.value}-DockerImageBuild") + + linux_stage = LinuxDockerImageBuildStage( + self, + f"{deploy_environment_type.value}-LinuxDockerImageBuild", + pipeline_environment=pipeline_environment, + deploy_environment=deploy_environment, + ) + + linux_stage.add_stage_to_wave( + wave=docker_build_wave, + input=source.primary_output, + role=cross_account_role, + additional_stacks=setup_stage.stacks, + max_retry=MAX_TEST_RETRY, + env=codebuild_environment_variables + ) + + windows_stage = WindowsDockerImageBuildStage( + self, + f"{deploy_environment_type.value}-WindowsDockerImageBuild", + pipeline_environment=pipeline_environment, + deploy_environment=deploy_environment, + ) + + windows_stage.add_stage_to_wave( + wave=docker_build_wave, + input=source.primary_output, + role=cross_account_role, + additional_stacks=setup_stage.stacks, + max_retry=MAX_TEST_RETRY, + env=codebuild_environment_variables + ) + + docker_build_wave.add_post( + CodeBuildStep( + f"{deploy_environment_type.value}-CompleteDockerBuild", + input=source, + commands=[ + "cd tests/ci/cdk/pipeline/scripts", + "chmod +x finalize_images.sh", + # "./finalize_images.sh --repos \"${ECR_REPOS}\"", + ], + env={ + **codebuild_environment_variables, + "ECR_REPOS": f"{' '.join(linux_stage.ecr_repo_names)} {' '.join(windows_stage.ecr_repo_names)}" + }, + role=cross_account_role, + ) + ) + + ci_stage = CiStage( + self, + f"{deploy_environment_type.value}-CiTests", + 
pipeline_environment=pipeline_environment,
+            deploy_environment=deploy_environment,
+        )
+
+        ci_stage.add_stage_to_pipeline(
+            pipeline=pipeline,
+            input=source.primary_output,
+            role=cross_account_role,
+            max_retry=MAX_TEST_RETRY,
+            env={
+                **codebuild_environment_variables,
+                "PREVIOUS_REBUILDS": f'{linux_stage.need_rebuild} {windows_stage.need_rebuild}'
+            },
+        )
\ No newline at end of file
diff --git a/tests/ci/cdk/pipeline/scripts/build_target.sh b/tests/ci/cdk/pipeline/scripts/build_target.sh
new file mode 100644
index 0000000000..8b29151e10
--- /dev/null
+++ b/tests/ci/cdk/pipeline/scripts/build_target.sh
@@ -0,0 +1,168 @@
+#!/usr/bin/env bash
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0 OR ISC
+
+set -exuo pipefail
+
+source util.sh
+
+echo "Environment variables:"
+env
+
+if [[ -z "${NEED_REBUILD+x}" || -z "${NEED_REBUILD}" || ${NEED_REBUILD} -eq 0 ]]; then
+  echo "No rebuild needed"
+  exit 0
+fi
+
+export COMMIT_HASH=${COMMIT_HASH:-$CODEBUILD_RESOLVED_SOURCE_VERSION}
+export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountCodeBuildRole"
+export CROSS_ACCOUNT_BUILD_SESSION="pipeline-${COMMIT_HASH}"
+
+function build_codebuild_ci_project() {
+  local attempt=0
+  local project=${1}
+
+  if [[ -z ${project} ]]; then
+    echo "No project name provided."
+    exit 1
+  fi
+
+  echo "Starting CI tests in ${project}"
+  start_codebuild_project "${project}" "${COMMIT_HASH}"
+
+  while [[ ${attempt} -le ${MAX_RETRY} ]]; do
+    attempt=$((attempt + 1))
+
+    echo "Waiting for CI tests to complete. This may take anywhere from 15 minutes to 1 hour"
+    if ! codebuild_build_status_check "${TIMEOUT}"; then
+      echo "Tests failed, retrying ${attempt}/${MAX_RETRY}..."
+ if [[ ${attempt} -le ${MAX_RETRY} ]]; then + retry_batch_build + else + echo "CI tests failed" + exit 1 + fi + fi + done + + echo "All tests completed successfully" +} + +function build_linux_docker_images() { + local attempt=0 + + echo "Activating AWS CodeBuild to build Linux aarch & x86 docker images." + start_codebuild_project aws-lc-docker-image-build-linux "${COMMIT_HASH}" + + while [[ ${attempt} -le ${MAX_RETRY} ]]; do + attempt=$((attempt + 1)) + + echo "Waiting for docker images creation. Building the docker images need to take 1 hour." + # TODO(CryptoAlg-624): These image build may fail due to the Docker Hub pull limits made on 2020-11-01. + if ! codebuild_build_status_check "${TIMEOUT}"; then + echo "Build failed, retrying ${attempt}/${MAX_RETRY}..." + if [[ ${attempt} -le ${MAX_RETRY} ]]; then + retry_batch_build + else + echo "Failed to build Linux docker images" + exit 1 + fi + fi + done + + echo "Successfully built Linux docker images" +} + +function build_win_docker_images() { + local attempt=0 + + while [[ ${attempt} -le ${MAX_RETRY} ]]; do + attempt=$((attempt + 1)) + echo "Executing AWS SSM commands to build Windows docker images." + if ! start_windows_img_build; then + echo "Failed to start build, retrying ${attempt}/${MAX_RETRY}..." + continue + fi + + echo "Waiting for docker images creation. Building the docker images need to take 1 hour." + # TODO(CryptoAlg-624): These image build may fail due to the Docker Hub pull limits made on 2020-11-01. + if ! win_docker_img_build_status_check "${TIMEOUT}"; then + echo "Build failed, retrying ${attempt}/${MAX_RETRY}..." 
+ continue + fi + + echo "Successfully built Windows docker images" + exit 0 + done + + echo "Failed to build Windows docker images" + exit 1 +} + +while [[ $# -gt 0 ]]; do + case ${1} in + --build-type) + BUILD_TYPE="${2}" + shift + ;; + --platform) + PLATFORM="${2}" + shift + ;; + --project) + PROJECT="${2}" + shift + ;; + --max-retry) + MAX_RETRY="${2}" + shift + ;; + --timeout) + TIMEOUT="${2}" + shift + ;; + *) + echo "${1} is not supported." + exit 1 + ;; + esac + shift +done + +MAX_RETRY=${MAX_RETRY:-0} +TIMEOUT=${TIMEOUT:-180} # 3 hours + +if [[ -z ${BUILD_TYPE} ]]; then + echo "No build type provided." + exit 1 +fi + +assume_role + +if [[ -z "${BUILD_TYPE+x}" || -z "${BUILD_TYPE}" ]]; then + echo "No build type provided." + exit 1 +fi + +if [[ ${BUILD_TYPE} == "docker" ]]; then + if [[ -z "${PLATFORM+x}" || -z "${PLATFORM}" ]]; then + echo "When building Docker images, a platform must be specified" + exit 1 + fi + +# if [[ ${PLATFORM} == "linux" ]]; then +# build_linux_docker_images +# elif [[ ${PLATFORM} == "windows" ]]; then +# build_win_docker_images +# fi + exit 0 +fi + +if [[ ${BUILD_TYPE} == "ci" ]]; then + if [[ -z "${PROJECT+x}" || -z "${PROJECT}" ]]; then + echo "When building CI tests, a project name must be specified" + exit 1 + fi + + build_codebuild_ci_project "${PROJECT}" +fi \ No newline at end of file diff --git a/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh b/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh new file mode 100644 index 0000000000..b21cecfcd8 --- /dev/null +++ b/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh @@ -0,0 +1,124 @@ +#!/usr/bin/env bash +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 OR ISC + +set -exuo pipefail + +source util.sh + +NEED_REBUILD=${NEED_REBUILD:-1} +COMMIT_HASH=${COMMIT_HASH:-$CODEBUILD_RESOLVED_SOURCE_VERSION} + +LINUX_DOCKER_PATH="tests/ci/docker_images/(dependencies|linux)" +WINDOWS_DOCKER_PATH="tests/ci/docker_images/windows" +PIPELINE_PATH="tests/ci/cdk/pipeline" + +export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountCodeBuildRole" +export CROSS_ACCOUNT_BUILD_SESSION="pipeline-${COMMIT_HASH}" + +function check_pipeline_trigger_type() { + trigger_type=$(aws codepipeline get-pipeline-execution \ + --pipeline-name AwsLcCiPipeline \ + --pipeline-execution-id ${PIPELINE_EXECUTION_ID} \ + --query 'pipelineExecution.trigger.triggerType' \ + --output text) + + # unblock execution for self-mutation, weekly cron job, and manual start/forced deploy + if [[ "$trigger_type" == "StartPipelineExecution" || "$trigger_type" == "CloudWatchEvent" ]]; then + NEED_REBUILD=$((NEED_REBUILD + 1)) + fi +} + +function get_commit_changed_files() { + local file_paths + if [[ ${PLATFORM} == "linux" ]]; then + file_paths=("${LINUX_DOCKER_PATH}" "${PIPELINE_PATH}") + elif [[ ${PLATFORM} == "windows" ]]; then + file_paths=("${WINDOWS_DOCKER_PATH}" "${PIPELINE_PATH}") + fi + + changed_files=$(git diff-tree --no-commit-id --name-only -r "${COMMIT_HASH}") + + for file_path in "${file_paths[@]}"; do + if (echo "$changed_files" | grep -E "^${file_path}"); then + NEED_REBUILD=$((NEED_REBUILD + 1)) + break + fi + done +} + +function get_cfn_changeset() { + for stack in ${STACKS}; do + change_set_arn=$(aws cloudformation describe-stacks \ + --stack-name "${stack}" \ + --query "Stacks[0].ChangeSetId" \ + --output text) + changes_count=$(aws cloudformation describe-change-set \ + --change-set-name "${change_set_arn}" \ + --stack-name "${stack}" \ + --query "Changes" | jq 'length') + if [ "$changes_count" -gt 0 ]; then + NEED_REBUILD=$((NEED_REBUILD + 1)) + break + fi + done +} + +echo 
\"Environment variables:\" +env + +while [[ $# -gt 0 ]]; do + case ${1} in + --stacks) + STACKS="${2}" + shift + ;; + --build-type) + BUILD_TYPE="${2}" + shift + ;; + --platform) + PLATFORM="${2}" + shift + ;; + *) + echo "${1} is not supported." + exit 1 + ;; + esac + shift +done + +if [[ -z "${BUILD_TYPE+x}" || -z "${BUILD_TYPE}" ]]; then + echo "No build type provided." + exit 1 +fi + +if [[ -z "${STACKS+x}" || -z "${STACKS}" ]]; then + echo "No stacks provided." + exit 1 +fi + +if [[ -n "${PREVIOUS_REBUILDS+x}" && -n "${PREVIOUS_REBUILDS}" ]]; then + for previous_rebuild in ${PREVIOUS_REBUILDS}; do + NEED_REBUILD=$((NEED_REBUILD + previous_rebuild)) + done +fi + +if [[ ${BUILD_TYPE} == "docker" ]]; then + if [[ -z "${PLATFORM+x}" || -z "${PLATFORM}" ]]; then + echo "A platform must be specified" + exit 1 + fi + + check_pipeline_trigger_type + + assume_role + get_commit_changed_files + get_cfn_changeset +elif [[ ${BUILD_TYPE} == "ci" ]]; then + assume_role + get_cfn_changeset +fi + +echo "NEED_REBUILD=$NEED_REBUILD" \ No newline at end of file diff --git a/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh b/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh new file mode 100644 index 0000000000..9e33dd5cd2 --- /dev/null +++ b/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh @@ -0,0 +1,81 @@ +#!/usr/bin/env bash +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 OR ISC + +set -exuo pipefail + +source util.sh + +export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountCodeBuildRole" +export CROSS_ACCOUNT_BUILD_SESSION="pipeline-${CODEBUILD_RESOLVED_SOURCE_VERSION}" + +function remove_pending_images() { + local repo=${1} + + # List all images in the repository and filter the ones with any tag ending with '_pending' + image_details=$(aws ecr describe-images --repository-name "$repo" --query 'imageDetails[?length(imageTags) > `0` && imageTags[?ends_with(@, `_pending`)]].{ImageDigest:imageDigest,Tags:imageTags}' --output json) + + if [ -z "$image_details" ]; then + echo "No images found with tags ending in '_pending'." + exit 0 + fi + + # Loop through and delete each image by its digest + for image in $(echo "${image_details}" | jq -c '.[]'); do + image_digest=$(echo "$image" | jq -r '.ImageDigest') + tags=$(echo "$image" | jq -r '.Tags[]') + + for tag in $tags; do + if [[ "$tag" == *"_pending" ]]; then + new_tag="${tag%_pending}_latest" # Replace '_pending' with '_latest' + + if echo "${tags}" | grep -q "${new_tag}"; then + echo "Image with digest $image_digest is tagged as latest. Will only be removing pending tag..." + # Delete the pending tag + aws ecr batch-delete-image --repository-name "$repo" --image-ids imageTag="$tag" + else + echo "Deleting image with digest: $image_digest..." + + # Delete the image by its digest + aws ecr batch-delete-image --repository-name "$repo" --image-ids imageDigest="$image_digest" + fi + + if [ $? -eq 0 ]; then + echo "Image $image_digest with _pending tag removed successfully." + else + echo "Failed to cleanup image $image_digest." + fi + break + fi + done + done + + echo "Cleanup complete." +} + +while [[ $# -gt 0 ]]; do + case ${1} in + --repos) + REPOS="${2}" + shift + ;; + *) + echo "${1} is not supported." 
+      exit 1
+      ;;
+  esac
+  shift
+done
+
+if [[ -z "${REPOS+x}" || -z "${REPOS}" ]]; then
+  echo "No repos provided."
+  exit 1
+fi
+
+assume_role
+
+for repo in ${REPOS}; do
+  remove_pending_images "${repo}" &
+done
+
+wait
diff --git a/tests/ci/cdk/pipeline/scripts/finalize_images.sh b/tests/ci/cdk/pipeline/scripts/finalize_images.sh
new file mode 100644
index 0000000000..207660a894
--- /dev/null
+++ b/tests/ci/cdk/pipeline/scripts/finalize_images.sh
@@ -0,0 +1,115 @@
+#!/usr/bin/env bash
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0 OR ISC
+
+set -exuo pipefail
+
+source util.sh
+
+export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountCodeBuildRole"
+export CROSS_ACCOUNT_BUILD_SESSION="pipeline-${CODEBUILD_RESOLVED_SOURCE_VERSION}"
+
+function promote_pending_tags_to_latest() {
+  local repo=${1}
+
+  # Get the list of images with tags ending in "_pending"
+  echo "Fetching images from repository '$repo'..."
+
+  # List all images in the repository and filter the ones with any tag ending with '_pending'
+  image_details=$(aws ecr describe-images --repository-name "$repo" --query 'imageDetails[?length(imageTags) > `0` && imageTags[?ends_with(@, `_pending`)]].{ImageDigest:imageDigest,Tags:imageTags}' --output json)
+
+  if [ -z "$image_details" ]; then
+    echo "No images found with tags ending in '_pending'."
+    exit 0
+  fi
+
+  # Loop through each image and update the tags
+  for image in $(echo "${image_details}" | jq -c '.[]'); do
+    image_digest=$(echo "$image" | jq -r '.ImageDigest')
+    tags=$(echo "$image" | jq -r '.Tags[]')
+
+    # Check if any tag ends with '_pending'
+    for tag in $tags; do
+      if [[ "$tag" == *"_pending" ]]; then
+        new_tag="${tag%_pending}_latest" # Replace '_pending' with '_latest'
+
+        if echo "${tags}" | grep -q "${new_tag}"; then
+          echo "Image with digest $image_digest already has tag '$new_tag' - skipping tag update"
+          # Delete the pending tag
+          aws ecr batch-delete-image --repository-name "$repo" --image-ids imageTag="$tag"
+          break
+        else
+          echo "Updating tag '$tag' to '$new_tag' for image with digest: $image_digest"
+
+          # Get the image manifest using the image digest
+          image_manifest=$(aws ecr batch-get-image --repository-name "$repo" --image-ids imageDigest="$image_digest" --query 'images[0].imageManifest' --output text)
+
+          # Push the new tag using put-image
+          aws ecr put-image --repository-name "$repo" --image-manifest "$image_manifest" --image-tag "$new_tag"
+          aws ecr batch-delete-image --repository-name "$repo" --image-ids imageTag="$tag"
+        fi
+
+        if [ $? -eq 0 ]; then
+          echo "Successfully updated tag '$tag' to '$new_tag'."
+        else
+          echo "Failed to update tag '$tag' to '$new_tag'."
+        fi
+        break
+      fi
+    done
+  done
+
+  echo "Tag update complete."
+}
+
+while [[ $# -gt 0 ]]; do
+  case ${1} in
+    --repos)
+      REPOS="${2}"
+      shift
+      ;;
+    *)
+      echo "${1} is not supported."
+      exit 1
+      ;;
+  esac
+  shift
+done
+
+if [[ -z "${REPOS+x}" || -z "${REPOS}" ]]; then
+  echo "No repos provided."
+ exit 1 +fi + +assume_role + +for repo in ${REPOS}; do + promote_pending_tags_to_latest "${repo}" & +done + +wait + + +# List all images in the repository and filter the ones with any tag ending with '_pending' +#image_digests=$(aws ecr describe-images --repository-name "$repo" --region "$REGION" --query "imageDetails[?length(imageTags) > 0 && imageTags[?ends_with(@, '_pending')]].imageDigest" --output text) +# +#if [ -z "$image_digests" ]; then +# echo "No images found with tags ending in '_pending'." +# exit 0 +#fi + +## Loop through and delete each image by its digest +#for image_digest in $image_digests; do +# echo "Deleting image with digest: $image_digest..." +# +# # Delete the image by its digest +# aws ecr batch-delete-image --repository-name "$repo" --region "$REGION" --image-ids imageDigest="$image_digest" +# +# if [ $? -eq 0 ]; then +# echo "Image $image_digest deleted successfully." +# else +# echo "Failed to delete image $image_digest." +# fi +#done +# +#echo "Cleanup complete." 
diff --git a/tests/ci/cdk/pipeline/scripts/util.sh b/tests/ci/cdk/pipeline/scripts/util.sh new file mode 100644 index 0000000000..6429d44389 --- /dev/null +++ b/tests/ci/cdk/pipeline/scripts/util.sh @@ -0,0 +1,149 @@ +if [[ -z "${PIPELINE_EXECUTION_ID+x}" || -z "${PIPELINE_EXECUTION_ID}" ]]; then + TRIGGER_TYPE="manual" +else + TRIGGER_TYPE="pipeline" +fi + +function assume_role() { + if [[ -z ${CROSS_ACCOUNT_BUILD_ROLE_ARN} ]]; then + echo "No role arn provided" + return 1 + fi + + local session_name=${CROSS_ACCOUNT_BUILD_SESSION:-"build-session"} + CREDENTIALS=$(aws sts assume-role --role-arn "${CROSS_ACCOUNT_BUILD_ROLE_ARN}" --role-session-name "${session_name}") + export AWS_ACCESS_KEY_ID=$(echo $CREDENTIALS | jq -r .Credentials.AccessKeyId) + export AWS_SECRET_ACCESS_KEY=$(echo $CREDENTIALS | jq -r .Credentials.SecretAccessKey) + export AWS_SESSION_TOKEN=$(echo $CREDENTIALS | jq -r .Credentials.SessionToken) +} + +function refresh_session() { + unset AWS_ACCESS_KEY_ID + unset AWS_SECRET_ACCESS_KEY + unset AWS_SESSION_TOKEN + + if [[ -z "${PIPELINE_EXECUTION_ID+x}" || -z "${PIPELINE_EXECUTION_ID}" ]]; then + echo "Security token expired. Please monitor build progress on the console" + exit 1 + fi + + assume_role +} + +function start_codebuild_project() { + local project=${1} + local commit_hash=${2:-HEAD} + + if [[ -z ${project} ]]; then + echo "No project name provided." 
+ exit 1 + fi + + # https://awscli.amazonaws.com/v2/documentation/api/latest/reference/codebuild/start-build-batch.html + build_id=$(aws codebuild start-build-batch --project-name ${project} \ + --source-version ${commit_hash} \ + --environment-variables-override "name=TRIGGER_TYPE,value=${TRIGGER_TYPE},type=PLAINTEXT" \ + --query "buildBatch.id" \ + --output text) + export BUILD_BATCH_ID=${build_id} +} + +function retry_batch_build() { + aws codebuild retry-build-batch --id "${BUILD_BATCH_ID}" \ + --retry-type RETRY_FAILED_BUILDS +} + +function codebuild_build_status_check() { + # Every 5 min, this function checks if the linux docker image batch code build finished successfully. + # Normally, docker img build can take up to 1 hour. By default, we wait up to 30 * 5 min. + local timeout=${1:-180} + local status_check_max=$((timeout / 5)) + for i in $(seq 1 ${status_check_max}); do + # https://docs.aws.amazon.com/cli/latest/reference/codebuild/batch-get-build-batches.html + build_batch_status=$(aws codebuild batch-get-build-batches --ids "${BUILD_BATCH_ID}" \ + --query "buildBatches[0].buildBatchStatus" \ + --output text 2>&1) + if [[ ${build_batch_status} == "SUCCEEDED" ]]; then + echo "Build ${BUILD_BATCH_ID} finished successfully." + return 0 + elif [[ ${build_batch_status} == "FAILED" ]]; then + echo "Build ${BUILD_BATCH_ID} failed." + return 1 + elif [[ ${build_batch_status} == "IN_PROGRESS" ]]; then + echo "${i}: Wait 5 min for build job finish." + sleep 300 + elif echo "${build_batch_status}" | grep -q "ExpiredTokenException"; then + refresh_session + else + echo "Build ${BUILD_BATCH_ID} returns: ${build_batch_status}. Exiting..." + return 1 + fi + done + echo "Build ${BUILD_BATCH_ID} takes more time than expected." + return 1 +} + +function start_windows_img_build() { + # EC2 takes several minutes to be ready for running command. + echo "Wait 3 min for EC2 ready for SSM command execution." 
+ sleep 180 + + # Run commands on windows EC2 instance to build windows docker images. + for i in {1..60}; do + instance_id=$(aws ec2 describe-instances \ + --filters "Name=tag:${WIN_EC2_TAG_KEY},Values=${WIN_EC2_TAG_VALUE}" | jq -r '.Reservations[0].Instances[0].InstanceId') + if [[ "${instance_id}" == "null" ]]; then + sleep 60 + continue + fi + instance_ping_status=$(aws ssm describe-instance-information \ + --filters "Key=InstanceIds,Values=${instance_id}" | jq -r '.InstanceInformationList[0].PingStatus') + if [[ "${instance_ping_status}" == "Online" ]]; then + # https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ssm/send-command.html + command_id=$(aws ssm send-command \ + --instance-ids "${instance_id}" \ + --document-name "${WIN_DOCKER_BUILD_SSM_DOCUMENT}" \ + --output-s3-bucket-name "${S3_FOR_WIN_DOCKER_IMG_BUILD}" \ + --output-s3-key-prefix 'runcommand' \ + --parameters "TriggerType=[\"${TRIGGER_TYPE}\"]" \ + | jq -r '.Command.CommandId') + # Export for checking command run status. + export WINDOWS_DOCKER_IMG_BUILD_COMMAND_ID="${command_id}" + echo "Windows ec2 is executing SSM command." + return 0 + else + echo "${i}: Current instance ping status: ${instance_ping_status}. Wait 1 minute to retry SSM command execution." + sleep 60 + fi + done + echo "After 60 minutes, Windows ec2 is still not ready for SSM commands execution. Exit." + return 1 +} + +function win_docker_img_build_status_check() { + # Every 5 min, this function checks if the windows docker image build is finished successfully. + # Normally, docker img build can take up to 1 hour. 
+ local timeout=${1:-150} + local status_check_max=$((timeout / 5)) + for i in $(seq 1 ${status_check_max}); do + # https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ssm/list-commands.html + command_run_status=$(aws ssm list-commands --command-id "${WINDOWS_DOCKER_IMG_BUILD_COMMAND_ID}" | jq -r '.Commands[0].Status') + if [[ ${command_run_status} == "Success" ]]; then + echo "SSM command ${WINDOWS_DOCKER_IMG_BUILD_COMMAND_ID} finished successfully." + return 0 + elif [[ ${command_run_status} == "Failed" ]]; then + echo "SSM command ${WINDOWS_DOCKER_IMG_BUILD_COMMAND_ID} failed." + return 1 + elif [[ ${command_run_status} == "InProgress" ]]; then + echo "${i}: Wait 5 min for build job finish." + sleep 300 + elif echo "${command_run_status}" | grep -q "ExpiredTokenException"; then + refresh_session + else + echo "SSM commands ${WINDOWS_DOCKER_IMG_BUILD_COMMAND_ID} returns: ${command_run_status}. Exiting..." + return 1 + fi + done + echo "SSM command ${WINDOWS_DOCKER_IMG_BUILD_COMMAND_ID} takes more time than expected." 
+ return 1 +} \ No newline at end of file diff --git a/tests/ci/cdk/pipeline/setup_stage.py b/tests/ci/cdk/pipeline/setup_stage.py new file mode 100644 index 0000000000..80de35308f --- /dev/null +++ b/tests/ci/cdk/pipeline/setup_stage.py @@ -0,0 +1,80 @@ +from aws_cdk import Stage, aws_codebuild as codebuild, Environment, Stack, aws_iam as iam +from constructs import Construct + +from cdk.ecr_stack import EcrStack +from cdk.linux_docker_image_batch_build_stack import LinuxDockerImageBatchBuildStack +from cdk.windows_docker_image_build_stack import WindowsDockerImageBuildStack +from util.metadata import LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO, WINDOWS_X86_ECR_REPO, PIPELINE_ACCOUNT + + +class SetupStage(Stage): + def __init__( + self, + scope: Construct, + id, + pipeline_environment, + deploy_environment, + **kwargs + ): + super().__init__( + scope, + id, + env=pipeline_environment, + **kwargs, + ) + + self.setup_stack = SetupStack( + self, + "aws-lc-ci-pipeline-setup", + pipeline_environment=pipeline_environment, + deploy_environment=deploy_environment, + stack_name="aws-lc-ci-pipeline-setup", + **kwargs + ) + + @property + def stacks(self): + return [child for child in self.node.children if isinstance(child, Stack)] + +class SetupStack(Stack): + def __init__( + self, + scope: Construct, + id: str, + pipeline_environment, + deploy_environment, + **kwargs) -> None: + super().__init__(scope, id, env=deploy_environment, **kwargs) + + cross_account_role = iam.Role( + self, + 'CrossAccountCodeBuildRole', + role_name='CrossAccountCodeBuildRole', + assumed_by=iam.ArnPrincipal(f'arn:aws:iam::{pipeline_environment.account}:role/CrossAccountPipelineRole'), #TODO: add a conditional to exclude this in dev env + ) + + # Grant access to all CodeBuild projects + cross_account_role.add_to_policy(iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + 'codebuild:*' + ], + 
resources=[f'arn:aws:codebuild:{deploy_environment.region}:{deploy_environment.account}:project/aws-lc-*'] + )) + + cross_account_role.add_to_policy(iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + 'cloudformation:DescribeChangeSet', + 'cloudformation:DescribeStacks', + 'ec2:DescribeInstances', + 'ssm:DescribeInstanceInformation', + 'ssm:SendCommand', + 'ssm:ListCommands', + 'ecr:DescribeImages', + 'ecr:BatchGetImage', + 'ecr:PutImage', + 'ecr:BatchDeleteImage' + ], + resources=['*'] + )) \ No newline at end of file diff --git a/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py b/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py new file mode 100644 index 0000000000..701dc5cec5 --- /dev/null +++ b/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py @@ -0,0 +1,100 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 OR ISC + +from aws_cdk import Stage, Environment, Stack, Duration, aws_iam as iam, pipelines, Fn +from aws_cdk.pipelines import CodeBuildStep +from constructs import Construct + +from cdk.ecr_stack import EcrStack +from cdk.windows_docker_image_build_stack import WindowsDockerImageBuildStack +from pipeline.deploy_util import DeployEnvironmentType +from util.metadata import WINDOWS_X86_ECR_REPO, WIN_EC2_TAG_KEY, WIN_EC2_TAG_VALUE, SSM_DOCUMENT_NAME + + +class WindowsDockerImageBuildStage(Stage): + def __init__( + self, + scope: Construct, + id, + pipeline_environment, + deploy_environment, + **kwargs + ): + super().__init__( + scope, + id, + env=pipeline_environment, + **kwargs, + ) + + self.ecr_windows_x86 = EcrStack( + self, + "aws-lc-ecr-windows-x86", + WINDOWS_X86_ECR_REPO, + env=deploy_environment, + stack_name="aws-lc-ecr-windows-x86" + ) + + self.windows_docker_build_stack = WindowsDockerImageBuildStack( + self, + "aws-lc-docker-image-build-windows", + env=deploy_environment, + stack_name="aws-lc-docker-image-build-windows", + ) + 
self.windows_docker_build_stack.add_dependency(self.ecr_windows_x86) + + self.ecr_repo_names = [WINDOWS_X86_ECR_REPO] + self.s3_bucket_name = self.windows_docker_build_stack.output["s3_bucket_name"] + + self.need_rebuild = None + + @property + def stacks(self): + return [child for child in self.node.children if isinstance(child, Stack)] + + def add_stage_to_wave( + self, + wave: pipelines.Wave, + input: pipelines.FileSet, + role: iam.Role, + max_retry: int=2, + additional_stacks: list[Construct]=[], + env=None, + ): + stacks = self.stacks + additional_stacks + stack_names = [stack.stack_name for stack in stacks] + + docker_build_step = CodeBuildStep( + "StartWait", + input=input, + commands=[ + "cd tests/ci/cdk/pipeline/scripts", + "chmod +x cleanup_orphaned_images.sh check_trigger_conditions.sh build_target.sh", + "./cleanup_orphaned_images.sh --repos \"${ECR_REPOS}\"", + "trigger_conditions=$(./check_trigger_conditions.sh --build-type docker --platform windows --stacks \"${STACKS}\")", + "export NEED_REBUILD=$(echo $trigger_conditions | sed -n -e 's/.*\(NEED_REBUILD=[0-9]*\).*/\\1/p' | cut -d'=' -f2 )", + "./build_target.sh --build-type docker --platform windows --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}" + ], + env={ + **env, + "STACKS": " ".join(stack_names), + "ECR_REPOS": " ".join(self.ecr_repo_names), + "MAX_RETRY": str(max_retry), + "TIMEOUT": str(180), # 3 hours + "WIN_EC2_TAG_KEY": WIN_EC2_TAG_KEY, + "WIN_EC2_TAG_VALUE": WIN_EC2_TAG_VALUE, + "WIN_DOCKER_BUILD_SSM_DOCUMENT": SSM_DOCUMENT_NAME, + "S3_FOR_WIN_DOCKER_IMG_BUILD": self.s3_bucket_name, + }, + role=role, + timeout=Duration.minutes(180) + ) + + wave.add_stage( + self, + post=[ + docker_build_step + ] + ) + + self.need_rebuild = docker_build_step.exported_variable("NEED_REBUILD") \ No newline at end of file diff --git a/tests/ci/cdk/run-cdk.sh b/tests/ci/cdk/run-cdk.sh index 983c7beef4..a1f039a05a 100755 --- a/tests/ci/cdk/run-cdk.sh +++ b/tests/ci/cdk/run-cdk.sh @@ -4,6 +4,8 @@ set -exuo 
pipefail +source pipeline/scripts/util.sh + # -e: Exit on any failure # -x: Print the command before running # -u: Any variable that is not set will cause an error if used @@ -14,11 +16,11 @@ function delete_s3_buckets() { aws s3api list-buckets --query "Buckets[].Name" | jq '.[]' | while read -r i; do bucket_name=$(echo "${i}" | tr -d '"') # Delete the bucket if its name uses AWS_LC_S3_BUCKET_PREFIX. - if [[ "${bucket_name}" == *"${AWS_LC_S3_BUCKET_PREFIX}"* ]]; then - aws s3 rm "s3://${bucket_name}" --recursive - aws s3api delete-bucket --bucket "${bucket_name}" +# if [[ "${bucket_name}" == *"${AWS_LC_S3_BUCKET_PREFIX}"* ]]; then +# aws s3 rm "s3://${bucket_name}" --recursive +# aws s3api delete-bucket --bucket "${bucket_name}" # Delete bm-framework buckets if we're not on the team account - elif [[ "${CDK_DEPLOY_ACCOUNT}" != "620771051181" ]] && [[ "${bucket_name}" == *"${aws-lc-ci-bm-framework}"* ]]; then + if [[ "${DEPLOY_ACCOUNT}" != "620771051181" ]] && [[ "${bucket_name}" == *"${aws-lc-ci-bm-framework}"* ]]; then aws s3 rm "s3://${bucket_name}" --recursive aws s3api delete-bucket --bucket "${bucket_name}" fi @@ -39,7 +41,7 @@ function delete_container_repositories() { } function destroy_ci() { - if [[ "${CDK_DEPLOY_ACCOUNT}" == "620771051181" ]]; then + if [[ "${DEPLOY_ACCOUNT}" == "620771051181" ]]; then echo "destroy_ci should not be executed on team account." exit 1 fi @@ -62,8 +64,6 @@ function destroy_docker_img_build_stack() { } function create_linux_docker_img_build_stack() { - # Clean up build stacks if exists. - destroy_docker_img_build_stack # Deploy aws-lc ci stacks. # When repeatedly deploy, error 'EIP failed Reason: Maximum number of addresses has been reached' can happen. # @@ -74,8 +74,6 @@ function create_linux_docker_img_build_stack() { } function create_win_docker_img_build_stack() { - # Clean up build stacks if exists. - destroy_docker_img_build_stack # Deploy aws-lc ci stacks. 
# When repeatedly deploy, error 'EIP failed Reason: Maximum number of addresses has been reached' can happen. # @@ -97,8 +95,8 @@ function run_linux_img_build() { function run_windows_img_build() { # EC2 takes several minutes to be ready for running command. - echo "Wait 3 min for EC2 ready for SSM command execution." - sleep 180 +# echo "Wait 3 min for EC2 ready for SSM command execution." +# sleep 180 # Run commands on windows EC2 instance to build windows docker images. for i in {1..60}; do @@ -116,7 +114,9 @@ function run_windows_img_build() { --instance-ids "${instance_id}" \ --document-name "${WIN_DOCKER_BUILD_SSM_DOCUMENT}" \ --output-s3-bucket-name "${S3_FOR_WIN_DOCKER_IMG_BUILD}" \ - --output-s3-key-prefix 'runcommand' | jq -r '.Command.CommandId') + --output-s3-key-prefix 'runcommand' \ + --parameters "TriggerType=[\"pipeline\"]" | \ + jq -r '.Command.CommandId') # Export for checking command run status. export WINDOWS_DOCKER_IMG_BUILD_COMMAND_ID="${command_id}" echo "Windows ec2 is executing SSM command." @@ -178,7 +178,7 @@ function win_docker_img_build_status_check() { function build_linux_docker_images() { # Always destroy docker build stacks (which include EC2 instance) on EXIT. - trap destroy_docker_img_build_stack EXIT +# trap destroy_docker_img_build_stack EXIT # Create/update aws-ecr repo. cdk deploy 'aws-lc-ecr-linux-*' --require-approval never @@ -196,7 +196,7 @@ function build_linux_docker_images() { function build_win_docker_images() { # Always destroy docker build stacks (which include EC2 instance) on EXIT. - trap destroy_docker_img_build_stack EXIT +# trap destroy_docker_img_build_stack EXIT # Create/update aws-ecr repo. 
cdk deploy 'aws-lc-ecr-windows-*' --require-approval never @@ -204,6 +204,11 @@ function build_win_docker_images() { # Create aws windows build stack create_win_docker_img_build_stack + S3_FOR_WIN_DOCKER_IMG_BUILD=$(aws cloudformation describe-stack-resources \ + --stack-name aws-lc-docker-image-build-windows \ + --query "StackResources[?ResourceType=='AWS::S3::Bucket'].PhysicalResourceId" \ + --output text) + echo "Executing AWS SSM commands to build Windows docker images." run_windows_img_build @@ -285,12 +290,12 @@ EOF function export_global_variables() { # If these variables are not set or empty, defaults are export. - if [[ -z "${CDK_DEPLOY_ACCOUNT+x}" || -z "${CDK_DEPLOY_ACCOUNT}" ]]; then - export CDK_DEPLOY_ACCOUNT='620771051181' + if [[ -z "${DEPLOY_ACCOUNT+x}" || -z "${DEPLOY_ACCOUNT}" ]]; then + export DEPLOY_ACCOUNT='620771051181' fi - if [[ -z "${CDK_DEPLOY_REGION+x}" || -z "${CDK_DEPLOY_REGION}" ]]; then - export CDK_DEPLOY_REGION='us-west-2' - export AWS_DEFAULT_REGION="${CDK_DEPLOY_REGION}" + if [[ -z "${DEPLOY_REGION+x}" || -z "${DEPLOY_REGION}" ]]; then + export DEPLOY_REGION='us-west-2' + export AWS_DEFAULT_REGION="${DEPLOY_REGION}" fi if [[ -z "${GITHUB_REPO_OWNER+x}" || -z "${GITHUB_REPO_OWNER}" ]]; then export GITHUB_REPO_OWNER='aws' @@ -299,19 +304,18 @@ function export_global_variables() { export GITHUB_SOURCE_VERSION='main' fi # Other variables for managing resources. 
- DATE_NOW="$(date +%Y-%m-%d-%H-%M)" - export GITHUB_REPO='aws-lc' +# DATE_NOW="$(date +%Y-%m-%d-%H-%M)" + export GITHUB_REPO_NAME='aws-lc' export ECR_LINUX_AARCH_REPO_NAME='aws-lc-docker-images-linux-aarch' export ECR_LINUX_X86_REPO_NAME='aws-lc-docker-images-linux-x86' export ECR_WINDOWS_X86_REPO_NAME='aws-lc-docker-images-windows-x86' export AWS_LC_S3_BUCKET_PREFIX='aws-lc-windows-docker-image-build-s3' - export S3_FOR_WIN_DOCKER_IMG_BUILD="${AWS_LC_S3_BUCKET_PREFIX}-${DATE_NOW}" export WIN_EC2_TAG_KEY='aws-lc' - export WIN_EC2_TAG_VALUE="aws-lc-windows-docker-image-build-${DATE_NOW}" - export WIN_DOCKER_BUILD_SSM_DOCUMENT="windows-ssm-document-${DATE_NOW}" + export WIN_EC2_TAG_VALUE="aws-lc-windows-docker-image-build" + export WIN_DOCKER_BUILD_SSM_DOCUMENT="AWSLC-BuildWindowsDockerImagesTEST" export IMG_BUILD_STATUS='unknown' - # 620771051181 is AWS-LC team AWS account. - if [[ "${CDK_DEPLOY_ACCOUNT}" != "620771051181" ]] && [[ "${GITHUB_REPO_OWNER}" == 'aws' ]]; then + # 620771051181 and 351119683581 is AWS-LC team AWS account. + if [[ "${DEPLOY_ACCOUNT}" != "620771051181" && "${DEPLOY_ACCOUNT}" != "351119683581" ]] && [[ "${GITHUB_REPO_OWNER}" == 'aws' ]]; then echo "Only team account is allowed to create CI stacks on aws repo." exit 1 fi @@ -326,12 +330,12 @@ function main() { exit 0 ;; --aws-account) - export CDK_DEPLOY_ACCOUNT="${2}" + export DEPLOY_ACCOUNT="${2}" shift ;; --aws-region) - export CDK_DEPLOY_REGION="${2}" - export AWS_DEFAULT_REGION="${CDK_DEPLOY_REGION}" + export DEPLOY_REGION="${2}" + export AWS_DEFAULT_REGION="${DEPLOY_REGION}" shift ;; --github-repo-owner) @@ -346,6 +350,10 @@ function main() { export ACTION="${2}" shift ;; + --command) + COMMAND="${2}" + shift + ;; *) echo "${1} is not supported." 
exit 1 @@ -388,7 +396,7 @@ function main() { build_win_docker_images ;; synth) - cdk synth 'aws-lc-ci-*' + cdk synth '*' ;; diff) cdk diff aws-lc-ci-* @@ -396,6 +404,9 @@ function main() { bootstrap) cdk bootstrap ;; + invoke) + ${COMMAND:?} + ;; *) echo "--action is required. Use '--help' to see allowed actions." exit 1 diff --git a/tests/ci/cdk/util/build_spec_loader.py b/tests/ci/cdk/util/build_spec_loader.py index 7ce522e4d9..f0eb27c9d4 100644 --- a/tests/ci/cdk/util/build_spec_loader.py +++ b/tests/ci/cdk/util/build_spec_loader.py @@ -3,8 +3,8 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 OR ISC -from aws_cdk import aws_codebuild as codebuild, aws_s3_assets -from util.metadata import CAN_AUTOLOAD, TEAM_ACCOUNT, AWS_ACCOUNT, DEFAULT_REGION, AWS_REGION +from aws_cdk import aws_codebuild as codebuild +from util.metadata import PROD_ACCOUNT, PROD_REGION import tempfile @@ -12,20 +12,23 @@ class BuildSpecLoader(object): """Responsible for loading the BuildSpec yml file as python object.""" @staticmethod - def load(file_path): + def load(file_path, env): """ Used to load yml file and replace some placeholders if needed. :param file_path: path to the yml file. :return: python object. """ + # Indicate if the BuildSpec files can be automatically loaded without manual deployment. + can_autoload = (env.account == PROD_ACCOUNT) and (env.region == PROD_REGION) + # If the deployment uses team account, the change of batch BuildSpec file is loaded automatically without deployment. # else, the change will require manual deployment via CDK command. - if CAN_AUTOLOAD: + if can_autoload: return codebuild.BuildSpec.from_source_filename("tests/ci/cdk/{}".format(file_path)) # TODO(CryptoAlg-1276): remove below when the batch BuildSpec supports the env variable of account and region. 
placeholder_map = { - TEAM_ACCOUNT: AWS_ACCOUNT, - DEFAULT_REGION: AWS_REGION, + PROD_ACCOUNT: env.account, + PROD_REGION: env.region, } with open(file_path) as original_file: file_text = original_file.read() diff --git a/tests/ci/cdk/util/ecr_util.py b/tests/ci/cdk/util/ecr_util.py index 2dfd3d521d..cbbd0016ca 100644 --- a/tests/ci/cdk/util/ecr_util.py +++ b/tests/ci/cdk/util/ecr_util.py @@ -1,5 +1,2 @@ -from util.metadata import AWS_ACCOUNT, AWS_REGION - - -def ecr_arn(ecr_repo_name): - return "{}.dkr.ecr.{}.amazonaws.com/{}".format(AWS_ACCOUNT, AWS_REGION, ecr_repo_name) +def ecr_arn(ecr_repo_name, env): + return "{}.dkr.ecr.{}.amazonaws.com/{}".format(env.account, env.region, ecr_repo_name) diff --git a/tests/ci/cdk/util/env_util.py b/tests/ci/cdk/util/env_util.py index 700b0569d4..3e44cdccdd 100644 --- a/tests/ci/cdk/util/env_util.py +++ b/tests/ci/cdk/util/env_util.py @@ -11,11 +11,16 @@ class EnvUtil(object): """An util helps get environment variable.""" @staticmethod - def get(key, defalut_value: typing.Optional[str] = None): + def get(key, default_value: typing.Optional[str] = None): val = os.environ.get(key) if val is None: - val = defalut_value + val = default_value if val is None: raise ValueError("{} env variable is not set.".format(key)) else: return val + + @staticmethod + def get_optional(key): + return os.environ.get(key) + diff --git a/tests/ci/cdk/util/iam_policies.py b/tests/ci/cdk/util/iam_policies.py index 2558fff73a..6606cfe4c7 100644 --- a/tests/ci/cdk/util/iam_policies.py +++ b/tests/ci/cdk/util/iam_policies.py @@ -3,9 +3,7 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 OR ISC -from util.metadata import AWS_REGION, AWS_ACCOUNT - -def ec2_policies_in_json(ec2_role_name, ec2_security_group_id, ec2_subnet_id, ec2_vpc_id): +def ec2_policies_in_json(ec2_role_name, ec2_security_group_id, ec2_subnet_id, ec2_vpc_id, env): """ Define an IAM policy that gives permissions for starting, stopping, and getting details of EC2 instances and their Vpcs :return: an IAM policy statement in json. @@ -23,20 +21,20 @@ def ec2_policies_in_json(ec2_role_name, ec2_security_group_id, ec2_subnet_id, ec "ec2:DescribeInstances", ], "Resource": [ - "arn:aws:iam::{}:role/{}".format(AWS_ACCOUNT, ec2_role_name), - "arn:aws:ec2:{}:{}:instance/*".format(AWS_REGION, AWS_ACCOUNT), - "arn:aws:ec2:{}::image/*".format(AWS_REGION), - "arn:aws:ec2:{}:{}:network-interface/*".format(AWS_REGION, AWS_ACCOUNT), - "arn:aws:ec2:{}:{}:volume/*".format(AWS_REGION, AWS_ACCOUNT), - "arn:aws:ec2:{}:{}:security-group/{}".format(AWS_REGION, AWS_ACCOUNT, ec2_security_group_id), - "arn:aws:ec2:{}:{}:subnet/{}".format(AWS_REGION, AWS_ACCOUNT, ec2_subnet_id), - "arn:aws:ec2:{}:{}:vpc/{}".format(AWS_REGION, AWS_ACCOUNT, ec2_vpc_id), + "arn:aws:iam::{}:role/{}".format(env.account, ec2_role_name), + "arn:aws:ec2:{}:{}:instance/*".format(env.region, env.account), + "arn:aws:ec2:{}::image/*".format(env.region), + "arn:aws:ec2:{}:{}:network-interface/*".format(env.region, env.account), + "arn:aws:ec2:{}:{}:volume/*".format(env.region, env.account), + "arn:aws:ec2:{}:{}:security-group/{}".format(env.region, env.account, ec2_security_group_id), + "arn:aws:ec2:{}:{}:subnet/{}".format(env.region, env.account, ec2_subnet_id), + "arn:aws:ec2:{}:{}:vpc/{}".format(env.region, env.account, ec2_vpc_id), ] }] } -def ssm_policies_in_json(): +def ssm_policies_in_json(env): """ Define an IAM policy that gives permissions to creating documents and running commands. :return: an IAM policy statement in json. 
@@ -54,14 +52,14 @@ def ssm_policies_in_json(): "ssm:DescribeInstanceInformation" ], "Resource": [ - "arn:aws:ec2:{}:{}:instance/*".format(AWS_REGION, AWS_ACCOUNT), # Needed for ssm:SendCommand - "arn:aws:ssm:{}:{}:*".format(AWS_REGION, AWS_ACCOUNT), - "arn:aws:ssm:{}:{}:document/*".format(AWS_REGION, AWS_ACCOUNT), + "arn:aws:ec2:{}:{}:instance/*".format(env.region, env.account), # Needed for ssm:SendCommand + "arn:aws:ssm:{}:{}:*".format(env.region, env.account), + "arn:aws:ssm:{}:{}:document/*".format(env.region, env.account), ] }] } -def code_build_batch_policy_in_json(project_ids): +def code_build_batch_policy_in_json(project_ids, env): """ Define an IAM policy statement for CodeBuild batch operation. :param project_ids: a list of CodeBuild project id. @@ -69,7 +67,7 @@ def code_build_batch_policy_in_json(project_ids): """ resources = [] for project_id in project_ids: - resources.append("arn:aws:codebuild:{}:{}:project/{}*".format(AWS_REGION, AWS_ACCOUNT, project_id)) + resources.append("arn:aws:codebuild:{}:{}:project/{}*".format(env.region, env.account, project_id)) return { "Version": "2012-10-17", "Statement": [ @@ -107,7 +105,7 @@ def code_build_cloudwatch_logs_policy_in_json(log_groups): ] } -def code_build_publish_metrics_in_json(): +def code_build_publish_metrics_in_json(env): """ Define an IAM policy that only grants access to publish CloudWatch metrics to the current region in the same namespace used in the calls to PutMetricData in tests/ci/common_fuzz.sh. @@ -122,7 +120,7 @@ def code_build_publish_metrics_in_json(): "Condition": { "StringEquals": { "aws:RequestedRegion": [ - AWS_REGION + env.region ], "cloudwatch:namespace": [ "AWS-LC-Fuzz", @@ -157,26 +155,48 @@ def s3_read_write_policy_in_json(s3_bucket_name): ] } +def s3_read_policy_in_json(): + """ + Define an IAM policy statement for reading from S3 bucket. + :return: an IAM policy statement in json. 
+ """ + return { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:GetObjectVersion", + "s3:ListBucket" + ], + "Resource": [ + "*" + ] + } + ] + } + -def ecr_repo_arn(repo_name): +def ecr_repo_arn(repo_name, env): """ Create a ECR repository arn. See https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonelasticcontainerregistry.html :param repo_name: repository name. :return: arn:aws:ecr:${Region}:${Account}:repository/${RepositoryName} """ - ecr_arn_prefix = "arn:aws:ecr:{}:{}:repository".format(AWS_REGION, AWS_ACCOUNT) + ecr_arn_prefix = "arn:aws:ecr:{}:{}:repository".format(env.region, env.account) return "{}/{}".format(ecr_arn_prefix, repo_name) -def ecr_power_user_policy_in_json(ecr_repo_names): +def ecr_power_user_policy_in_json(ecr_repo_names, env): """ Define an AWS-LC specific IAM policy statement for AWS ECR power user used to create new docker images. :return: an IAM policy statement in json. """ ecr_arns = [] for ecr_repo_name in ecr_repo_names: - ecr_arns.append(ecr_repo_arn(ecr_repo_name)) + ecr_arns.append(ecr_repo_arn(ecr_repo_name, env)) return { "Version": "2012-10-17", "Statement": [ @@ -211,13 +231,13 @@ def ecr_power_user_policy_in_json(ecr_repo_names): ] } -def device_farm_access_policy_in_json(): +def device_farm_access_policy_in_json(env): """ Define an IAM policy statement for Device Farm operations. :return: an IAM policy statement in json. 
""" resources = [] - resources.append("arn:aws:devicefarm:{}:{}:*:*".format(AWS_REGION, AWS_ACCOUNT)) + resources.append("arn:aws:devicefarm:{}:{}:*:*".format(env.region, env.account)) return { "Version": "2012-10-17", "Statement": [ diff --git a/tests/ci/cdk/util/metadata.py b/tests/ci/cdk/util/metadata.py index 7c45210078..1c85f54b45 100644 --- a/tests/ci/cdk/util/metadata.py +++ b/tests/ci/cdk/util/metadata.py @@ -4,14 +4,26 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC from util.env_util import EnvUtil +from datetime import datetime + +# timestamp = datetime.now().strftime('%Y-%m-%d-%H-%M') # Used when AWS CDK defines AWS resources. -TEAM_ACCOUNT = "620771051181" -DEFAULT_REGION = "us-west-2" -AWS_ACCOUNT = EnvUtil.get("CDK_DEPLOY_ACCOUNT", TEAM_ACCOUNT) -AWS_REGION = EnvUtil.get("CDK_DEPLOY_REGION", DEFAULT_REGION) -# Indicate if the BuildSpec files can be automatically loaded without manualy deployment. -CAN_AUTOLOAD = (AWS_ACCOUNT == TEAM_ACCOUNT) and (AWS_REGION == DEFAULT_REGION) +PROD_ACCOUNT = "620771051181" +PROD_REGION = "us-west-2" +PRE_PROD_ACCOUNT = "351119683581" +PRE_PROD_REGION = "us-west-2" +PIPELINE_ACCOUNT = EnvUtil.get("PIPELINE_ACCOUNT", "774305600158") +PIPELINE_REGION = EnvUtil.get("PIPELINE_REGION", "us-west-2") + +DEPLOY_ACCOUNT = EnvUtil.get_optional("DEPLOY_ACCOUNT") +DEPLOY_REGION = EnvUtil.get_optional("DEPLOY_REGION") + +STAGING_GITHUB_REPO_OWNER = "aws" +STAGING_GITHUB_REPO_NAME = "private-aws-lc-staging" + +IS_DEV = EnvUtil.get("IS_DEV", "True") == "True" #TODO: change default value to true +MAX_TEST_RETRY = int(EnvUtil.get("MAX_TEST_RETRY", "2")) # Used when AWS CDK defines ECR repos. LINUX_AARCH_ECR_REPO = EnvUtil.get("ECR_LINUX_AARCH_REPO_NAME", "aws-lc-docker-images-linux-aarch") @@ -25,9 +37,8 @@ GITHUB_TOKEN_SECRET_NAME = EnvUtil.get("GITHUB_TOKEN_SECRET_NAME", "aws-lc/ci/github/token") # Used when AWS CDK defines resources for Windows docker image build. 
-S3_BUCKET_NAME = EnvUtil.get("S3_FOR_WIN_DOCKER_IMG_BUILD", "aws-lc-windows-docker-image-build") WIN_EC2_TAG_KEY = EnvUtil.get("WIN_EC2_TAG_KEY", "aws-lc") WIN_EC2_TAG_VALUE = EnvUtil.get("WIN_EC2_TAG_VALUE", "aws-lc-windows-docker-image-build") -SSM_DOCUMENT_NAME = EnvUtil.get("WIN_DOCKER_BUILD_SSM_DOCUMENT", "windows-ssm-document") +SSM_DOCUMENT_NAME = EnvUtil.get("WIN_DOCKER_BUILD_SSM_DOCUMENT", "AWSLC-BuildWindowsDockerImages") -GITHUB_PUSH_CI_BRANCH_TARGETS = r"(main|fips-\d{4}-\d{2}-\d{2}.*)" +GITHUB_PUSH_CI_BRANCH_TARGETS = r"(main|fips-\d{4}-\d{2}-\d{2}.*)" \ No newline at end of file diff --git a/tests/ci/docker_images/linux-aarch/build_images.sh b/tests/ci/docker_images/linux-aarch/build_images.sh index 86de2ee111..21d8b9ea57 100755 --- a/tests/ci/docker_images/linux-aarch/build_images.sh +++ b/tests/ci/docker_images/linux-aarch/build_images.sh @@ -4,6 +4,8 @@ set -ex +env + # Log Docker hub limit https://docs.docker.com/docker-hub/download-rate-limit/#how-can-i-check-my-current-rate TOKEN=$(curl "https://auth.docker.io/token?service=registry.docker.io&scope=repository:ratelimitpreview/test:pull" | jq -r .token) curl --head -H "Authorization: Bearer $TOKEN" https://registry-1.docker.io/v2/ratelimitpreview/test/manifests/latest diff --git a/tests/ci/docker_images/linux-aarch/common.sh b/tests/ci/docker_images/linux-aarch/common.sh index d1390f9a65..dc7fb407d7 100755 --- a/tests/ci/docker_images/linux-aarch/common.sh +++ b/tests/ci/docker_images/linux-aarch/common.sh @@ -4,6 +4,12 @@ set -ex +if [[ -n "${TRIGGER_TYPE:+x}" && "${TRIGGER_TYPE}" == "pipeline" ]]; then + TAG="pending" +else + TAG="latest" +fi + function validate_input() { key="${1}" value="${2}" @@ -20,10 +26,10 @@ function tag_and_push_img() { target="${2}" validate_input 'target' "${target}" img_push_date=$(date +%Y-%m-%d) - docker_img_with_latest="${target}_latest" + docker_img_with_tag="${target}_${TAG}" docker_img_with_date="${target}_${img_push_date}" - docker tag "${source}" 
"${docker_img_with_latest}" + docker tag "${source}" "${docker_img_with_tag}" docker tag "${source}" "${docker_img_with_date}" - docker push "${docker_img_with_latest}" + docker push "${docker_img_with_tag}" docker push "${docker_img_with_date}" } diff --git a/tests/ci/docker_images/linux-x86/common.sh b/tests/ci/docker_images/linux-x86/common.sh index d1390f9a65..dc7fb407d7 100755 --- a/tests/ci/docker_images/linux-x86/common.sh +++ b/tests/ci/docker_images/linux-x86/common.sh @@ -4,6 +4,12 @@ set -ex +if [[ -n "${TRIGGER_TYPE:+x}" && "${TRIGGER_TYPE}" == "pipeline" ]]; then + TAG="pending" +else + TAG="latest" +fi + function validate_input() { key="${1}" value="${2}" @@ -20,10 +26,10 @@ function tag_and_push_img() { target="${2}" validate_input 'target' "${target}" img_push_date=$(date +%Y-%m-%d) - docker_img_with_latest="${target}_latest" + docker_img_with_tag="${target}_${TAG}" docker_img_with_date="${target}_${img_push_date}" - docker tag "${source}" "${docker_img_with_latest}" + docker tag "${source}" "${docker_img_with_tag}" docker tag "${source}" "${docker_img_with_date}" - docker push "${docker_img_with_latest}" + docker push "${docker_img_with_tag}" docker push "${docker_img_with_date}" } diff --git a/tests/ci/docker_images/windows/push_images.ps1 b/tests/ci/docker_images/windows/push_images.ps1 index 4fde9b1f83..c114e2745b 100644 --- a/tests/ci/docker_images/windows/push_images.ps1 +++ b/tests/ci/docker_images/windows/push_images.ps1 @@ -2,6 +2,12 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC $ECS_REPO=$args[0] +Write-Host "TRIGGER_TYPE: $TRIGGER_TYPE" +$TAG = if (-not [string]::IsNullOrEmpty($TRIGGER_TYPE) -and $TRIGGER_TYPE -eq "pipeline") { + "pending" +} else { + "latest" +} if ($args[0] -eq $null) { # This is a ECS repository in our CI account @@ -10,12 +16,12 @@ if ($args[0] -eq $null) { Write-Host "$ECS_REPO" -docker tag vs2015 ${ECS_REPO}:vs2015_latest +docker tag vs2015 ${ECS_REPO}:vs2015_${TAG} docker tag vs2015 ${ECS_REPO}:vs2015-$(Get-Date 
-UFormat %Y-%m-%d-%H) -docker push ${ECS_REPO}:vs2015_latest +docker push ${ECS_REPO}:vs2015_${TAG} docker push ${ECS_REPO}:vs2015-$(Get-Date -UFormat %Y-%m-%d-%H) -docker tag vs2017 ${ECS_REPO}:vs2017_latest +docker tag vs2017 ${ECS_REPO}:vs2017_${TAG} docker tag vs2017 ${ECS_REPO}:vs2017-$(Get-Date -UFormat %Y-%m-%d-%H) -docker push ${ECS_REPO}:vs2017_latest +docker push ${ECS_REPO}:vs2017_${TAG} docker push ${ECS_REPO}:vs2017-$(Get-Date -UFormat %Y-%m-%d-%H) From 310e3752cddf699e38ee2d3e623d48aa9453d718 Mon Sep 17 00:00:00 2001 From: Nghi Ho Date: Tue, 25 Mar 2025 17:40:13 -0700 Subject: [PATCH 02/10] Update cdk instructions --- tests/ci/cdk/README.md | 82 ++++++++++++++++++++++++++++++++++------- tests/ci/cdk/run-cdk.sh | 59 ++++++++++++++++++++++++++--- 2 files changed, 123 insertions(+), 18 deletions(-) diff --git a/tests/ci/cdk/README.md b/tests/ci/cdk/README.md index e9883a7c05..4b1d0ed764 100644 --- a/tests/ci/cdk/README.md +++ b/tests/ci/cdk/README.md @@ -2,7 +2,26 @@ AWS-LC CI uses AWS CDK to define and deploy AWS resources (e.g. AWS CodeBuild, ECR). 
-## CI Setup +## Table of Contents +- [CI Setup](#ci-setup) + - [Before running CDK command](#before-running-cdk-command) + - [Minimal permissions](#minimal-permissions) + - [Pipeline Commands](#pipeline-commands) + - [CI Commands](#ci-commands) +- [AWS-LC Benchmarking Framework](#aws-lc-benchmarking-framework) + - [Framework Setup](#framework-setup) + - [How to Use](#how-to-use) + - [Start from Pull Request](#start-from-pull-request) + - [Start Locally](#start-locally) + - [Examine Output](#examine-output) +- [Files](#files) +- [Development Reference](#development-reference) + - [Useful commands](#useful-commands) + - [Useful Docker image build commands](#useful-docker-image-build-commands) + - [Linux Docker image build](#linux-docker-image-build) + - [Windows Docker image build (DEPRECATED)](#windows-docker-image-build-deprecated) + +## CDK Setup ### Before running CDK command: @@ -64,7 +83,36 @@ To setup or update the CI in your account you will need the following IAM permis * secretsmanager:GetSecretValue ### Pipeline Commands -Bootstrap pipeline account +Use these commands to deploy the CI pipeline. Any changes to the CI or Docker images will be updated automatically after the pipeline is deployed. + +These commands are run from `aws-lc/tests/ci/cdk`. + +If not done previously, bootstrap cdk for the pipeline account before running the next commands. +``` +cdk bootstrap aws://${PIPELINE_ACCOUNT_ID}/us-west-2 +``` + +[SKIP IF NO CROSS-ACCOUNT DEPLOYMENT] Give the pipeline account administrator access to the deployment account's CloudFormation. Repeat this step depending on how many deployment environment there are. You only need to run this step once when the pipeline is deploying to a new account for the first time. 
+``` +cdk bootstrap aws://${DEPLOY_ACCOUNT_ID}/us-west-2 --trust ${PIPELINE_ACCOUNT_ID} --trust-for-lookup ${PIPELINE_ACCOUNT_ID} --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess +``` + +To deploy dev pipeline to the same account as your CI: +``` +./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --github-source-version ${GITHUB_SOURCE_VERSION} --deploy-account ${DEPLOY_ACCOUNT_ID} --action deploy-dev-pipeline +``` + +To deploy dev pipeline but pipeline is hosted in a separate account: +``` +./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --github-source-version ${GITHUB_SOURCE_VERSION} --pipeline-acount ${PIPELINE_ACCOUNT_ID} --deploy-account ${DEPLOY_ACCOUNT_ID} --action deploy-dev-pipeline +``` + +To deploy production pipeline using default parameters: +``` +./run-cdk.sh --action deploy-production-pipeline +``` + + + +### CI Commands +Use these commands if you wish to deploy individual stacks instead of the entire pipeline. + +These commands are run from `aws-lc/tests/ci/cdk`. -### Commands +If not done previously, bootstrap cdk before running the commands below. Make sure that AWS_ACCOUNT_ID is the AWS account you wish to deploy the CI stacks to. -These commands are run from `aws-lc/tests/ci/cdk`. \ -If not done previously, bootstrap cdk before running the commands below: ```shell cdk bootstrap aws://${AWS_ACCOUNT_ID}/us-west-2 ``` You may also need to request an increase to certain account quotas: ```shell -open https://${CDK_DEPLOY_REGION}.console.aws.amazon.com/servicequotas/home/services/ec2/quotas +open https://${DEPLOY_REGION}.console.aws.amazon.com/servicequotas/home/services/ec2/quotas ``` * **EC2-VPC Elastic IPs** = 20 @@ -102,23 +153,23 @@ Note: `GITHUB_REPO_OWNER` specifies the GitHub repo targeted by this CI setup. 
To set up AWS-LC CI, run command: ``` -./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --action deploy-ci --aws-account ${AWS_ACCOUNT_ID} +./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --action deploy-ci --deploy-account ${AWS_ACCOUNT_ID} ``` To update AWS-LC CI, run command: ``` -./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --action update-ci --aws-account ${AWS_ACCOUNT_ID} +./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --action update-ci --deploy-account ${AWS_ACCOUNT_ID} ``` To create/update Linux Docker images, run command: ``` -./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --action build-linux-img --aws-account ${AWS_ACCOUNT_ID} +./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --action build-linux-img --deploy-account ${AWS_ACCOUNT_ID} ``` To destroy AWS-LC CI resources created above, run command: ``` # NOTE: this command will destroy all resources (AWS CodeBuild and ECR). -./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --action destroy-ci --aws-account ${AWS_ACCOUNT_ID} +./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --action destroy-ci --deploy-account ${AWS_ACCOUNT_ID} ``` For help, run command: @@ -175,6 +226,10 @@ Below is CI file structure. │   ├── __init__.py │   ├── ecr_stack.py │   ├── ... +├── pipeline +│   ├── __init__.py +│   ├── pipeline_stack.py +│   ├── ... ├── cdk.json ├── requirements.txt ├── run-cdk.sh @@ -187,7 +242,8 @@ Below is CI file structure. * `README.md` — The introductory README for this project. * `app.py` — The “main” for this sample application. * `cdk.json` — A configuration file for CDK that defines what executable CDK should run to generate the CDK construct tree. -* `cdk` — A CDK module directory +* `cdk` — A module directory that contains all CI-related stacks and utilities +* `pipeline` - A module directory that defines a continuous deployment pipeline for the CI. 
* `requirements.txt` — This file is used by pip to install all of the dependencies for your application. In this case, it contains only -e . This tells pip to install the requirements specified in setup.py. It also tells pip to run python setup.py develop to install the code in the cdk module so that it can be edited in place. * `setup.py` — Defines how this Python package would be constructed and what the dependencies are. @@ -264,7 +320,7 @@ aws codebuild start-build-batch --project-name aws-lc-docker-image-build-linux # Go to AWS console, you can check CodeBuild by clicking "Developer Tools > CodeBuild > Build projects". ``` -#### Windows Docker image build +#### Windows Docker image build (DEPRECATED) Windows docker image build requires more resources (like EC2 host, S3, SSM and so on) set up because DIND (Docker in Docker) is not supported by Windows. Below are some commands specific to windows docker image build. diff --git a/tests/ci/cdk/run-cdk.sh b/tests/ci/cdk/run-cdk.sh index a1f039a05a..a9e53046cf 100755 --- a/tests/ci/cdk/run-cdk.sh +++ b/tests/ci/cdk/run-cdk.sh @@ -225,6 +225,33 @@ function setup_ci() { create_android_resources } +function deploy_production_pipeline() { + cdk deploy AwsLcCiPipeline --require-approval never +} + +function deploy_dev_pipeline() { + if [[ -z "${DEPLOY_ACCOUNT:+x}" || -z "${PIPELINE_ACCOUNT}" ]]; then + echo "The pipeline needs a deployment acount to know where to deploy the CI to." + exit 1 + fi + + if [[ ${DEPLOY_ACCOUNT} == '620771051181' ]]; then + echo "Dev pipeline cannot deploy to production account." + exit 1 + fi + + if [[ -z "${PIPELINE_ACCOUNT+x}" || -z "${PIPELINE_ACCOUNT}" ]]; then + export PIPELINE_ACCOUNT=DEPLOY_ACCOUNT + fi + + if [[ ${PIPELINE_ACCOUNT+x} == '774305600158' ]]; then + echo "Cannot deploy. The production pipeline is hosted with the same name in this pipeline account." 
+ exit 1 + fi + + cdk deploy AwsLcCiPipeline --require-approval never +} + function create_android_resources() { # Use aws cli to create Device Farm project and get project arn to create device pools. # TODO: Move resource creation to aws cdk when cdk has support for device form resource constructs. @@ -285,6 +312,7 @@ Options: 'diff': compares the specified stack with the deployed stack. 'synth': synthesizes and prints the CloudFormation template for the stacks. 'bootstrap': Bootstraps the CDK stack. This is needed before deployment or updating the CI. + 'invoke': invoke a custom command. Provide the custom command through '--command ' EOF } @@ -311,11 +339,12 @@ function export_global_variables() { export ECR_WINDOWS_X86_REPO_NAME='aws-lc-docker-images-windows-x86' export AWS_LC_S3_BUCKET_PREFIX='aws-lc-windows-docker-image-build-s3' export WIN_EC2_TAG_KEY='aws-lc' - export WIN_EC2_TAG_VALUE="aws-lc-windows-docker-image-build" - export WIN_DOCKER_BUILD_SSM_DOCUMENT="AWSLC-BuildWindowsDockerImagesTEST" + export WIN_EC2_TAG_VALUE='aws-lc-windows-docker-image-build' + export WIN_DOCKER_BUILD_SSM_DOCUMENT='AWSLC-BuildWindowsDockerImages' + export MAX_TEST_RETRY=2 export IMG_BUILD_STATUS='unknown' # 620771051181 and 351119683581 is AWS-LC team AWS account. - if [[ "${DEPLOY_ACCOUNT}" != "620771051181" && "${DEPLOY_ACCOUNT}" != "351119683581" ]] && [[ "${GITHUB_REPO_OWNER}" == 'aws' ]]; then + if [[ "${DEPLOY_ACCOUNT}" != "620771051181" && "${DEPLOY_ACCOUNT}" != '351119683581' ]] && [[ "${GITHUB_REPO_OWNER}" == 'aws' ]]; then echo "Only team account is allowed to create CI stacks on aws repo." 
exit 1 fi @@ -329,15 +358,23 @@ function main() { script_helper exit 0 ;; - --aws-account) + --deploy-account) export DEPLOY_ACCOUNT="${2}" shift ;; - --aws-region) + --deploy-region) export DEPLOY_REGION="${2}" export AWS_DEFAULT_REGION="${DEPLOY_REGION}" shift ;; + --pipeline-account) + export PIPELINE_ACCOUNT="${2}" + shift + ;; + --pipeline-region) + export PIPELINE_REGION="${2}" + shift + ;; --github-repo-owner) export GITHUB_REPO_OWNER="${2}" shift @@ -374,6 +411,14 @@ function main() { # Execute the action. case ${ACTION} in + deploy-production-pipeline) + export IS_DEV="False" + deploy_production_pipeline + ;; + deploy-dev-pipeline) + export IS_DEV="True" + deploy_dev_pipeline + ;; deploy-ci) setup_ci ;; @@ -405,6 +450,10 @@ function main() { cdk bootstrap ;; invoke) + if [[ -z "${COMMAND+x}" || -z "${COMMAND}" ]]; then + echo "--action invoke requires a command." + exit 1 + fi ${COMMAND:?} ;; *) From 4fadbc452205d92ddd5742b5c1aca86499ee2119 Mon Sep 17 00:00:00 2001 From: Nghi Ho Date: Tue, 25 Mar 2025 18:59:25 -0700 Subject: [PATCH 03/10] Add typing and cleanup --- tests/ci/cdk/README.md | 27 +---- tests/ci/cdk/cdk/aws_lc_analytics_stack.py | 2 +- tests/ci/cdk/cdk/aws_lc_android_ci_stack.py | 2 +- .../cdk/aws_lc_ec2_test_framework_ci_stack.py | 2 +- tests/ci/cdk/cdk/aws_lc_github_ci_stack.py | 16 +-- .../ci/cdk/cdk/aws_lc_github_ci_x509_stack.py | 3 +- .../ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py | 2 +- tests/ci/cdk/cdk/components.py | 8 +- .../linux_docker_image_batch_build_stack.py | 6 +- .../cdk/windows_docker_image_build_stack.py | 2 +- tests/ci/cdk/pipeline/ci_stage.py | 101 +++++++++--------- tests/ci/cdk/pipeline/codebuild_batch_step.py | 57 +++++----- .../linux_docker_image_build_stage.py | 25 +++-- tests/ci/cdk/pipeline/pipeline_stack.py | 5 +- tests/ci/cdk/pipeline/scripts/build_target.sh | 8 +- .../cdk/pipeline/scripts/finalize_images.sh | 25 ----- tests/ci/cdk/pipeline/setup_stage.py | 17 +-- .../windows_docker_image_build_stage.py | 25 +++-- 
tests/ci/cdk/run-cdk.sh | 4 +- tests/ci/cdk/util/iam_policies.py | 22 ---- tests/ci/cdk/util/metadata.py | 2 +- tests/ci/cdk/util/yml_loader.py | 3 +- .../docker_images/linux-aarch/build_images.sh | 2 - .../ci/docker_images/windows/push_images.ps1 | 1 - 24 files changed, 152 insertions(+), 215 deletions(-) diff --git a/tests/ci/cdk/README.md b/tests/ci/cdk/README.md index 4b1d0ed764..85eeb50b03 100644 --- a/tests/ci/cdk/README.md +++ b/tests/ci/cdk/README.md @@ -87,14 +87,14 @@ Use these commands to deploy the CI pipeline. Any changes to the CI or Docker im These commands are run from `aws-lc/tests/ci/cdk`. -If not done previously, bootstrap cdk for the pipeline account before running the next commands. +[SKIP IF NO CROSS-ACCOUNT DEPLOYMENT] Give the pipeline account administrator access to the deployment account's CloudFormation. Repeat this step depending on how many deployment environment there are. You only need to run this step once when the pipeline is deploying to a new account for the first time. ``` -cdk bootstrap aws://${PIPELINE_ACCOUNT_ID}/us-west-2 +cdk bootstrap aws://${DEPLOY_ACCOUNT_ID}/us-west-2 --trust ${PIPELINE_ACCOUNT_ID} --trust-for-lookup ${PIPELINE_ACCOUNT_ID} --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess ``` -[SKIP IF NO CROSS-ACCOUNT DEPLOYMENT] Give the pipeline account administrator access to the deployment account's CloudFormation. Repeat this step depending on how many deployment environment there are. You only need to run this step once when the pipeline is deploying to a new account for the first time. +If not done previously, bootstrap cdk for the pipeline account before running the next commands. 
``` -cdk bootstrap aws://${DEPLOY_ACCOUNT_ID}/us-west-2 --trust ${PIPELINE_ACCOUNT_ID} --trust-for-lookup ${PIPELINE_ACCOUNT_ID} --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess +cdk bootstrap aws://${PIPELINE_ACCOUNT_ID}/us-west-2 ``` To deploy dev pipeline to the same account as your CI: @@ -112,25 +112,6 @@ To deploy production pipeline using default parameters: ./run-cdk.sh --action deploy-production-pipeline ``` - - ### CI Commands Use these commands if you wish to deploy individual stacks instead of the entire pipeline. diff --git a/tests/ci/cdk/cdk/aws_lc_analytics_stack.py b/tests/ci/cdk/cdk/aws_lc_analytics_stack.py index e52144d190..8eb046bf7f 100644 --- a/tests/ci/cdk/cdk/aws_lc_analytics_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_analytics_stack.py @@ -20,7 +20,7 @@ def __init__(self, scope: Construct, id: str, spec_file_path: str, - env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs) -> None: super().__init__(scope, id, env=env, **kwargs) diff --git a/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py index a548a26eed..1ed3a424db 100644 --- a/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py @@ -22,7 +22,7 @@ def __init__(self, scope: Construct, id: str, spec_file_path: str, - env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs) -> None: super().__init__(scope, id, env=env, **kwargs) diff --git a/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py index cd7533f1d7..3b65c914d6 100644 --- a/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py @@ -27,7 +27,7 @@ def __init__(self, scope: Construct, id: str, spec_file_path: str, - 
env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs) -> None: super().__init__(scope, id, env=env, **kwargs) diff --git a/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py index 6789d9048b..bd502e3019 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py @@ -8,7 +8,7 @@ from cdk.components import PruneStaleGitHubBuilds from util.iam_policies import code_build_batch_policy_in_json, code_build_publish_metrics_in_json, \ - code_build_cloudwatch_logs_policy_in_json, s3_read_policy_in_json + code_build_cloudwatch_logs_policy_in_json from util.metadata import GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME, \ PIPELINE_ACCOUNT, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME from util.build_spec_loader import BuildSpecLoader @@ -21,7 +21,7 @@ def __init__(self, scope: Construct, id: str, spec_file_path: str, - env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs) -> None: super().__init__(scope, id, env=env, **kwargs) @@ -52,16 +52,11 @@ def __init__(self, code_build_cloudwatch_logs_policy = iam.PolicyDocument.from_json( code_build_cloudwatch_logs_policy_in_json([log_group]) ) - s3_assets_policy = iam.PolicyDocument.from_json(s3_read_policy_in_json()) resource_access_role = iam.Role(scope=self, id="{}-resource-role".format(id), - assumed_by=iam.CompositePrincipal( - iam.ServicePrincipal("codebuild.amazonaws.com"), - iam.ArnPrincipal(f'arn:aws:iam::{PIPELINE_ACCOUNT}:role/CrossAccountCodeBuildRole') - ), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), inline_policies={ "code_build_cloudwatch_logs_policy": code_build_cloudwatch_logs_policy, - "s3_assets_policy": s3_assets_policy }) # Define a IAM role for this stack. 
@@ -84,11 +79,6 @@ def __init__(self, ) ) - # test = iam.Role(scope=self, - # id="test", - # assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - # inline_policies=inline_policies) - # Define CodeBuild. project = codebuild.Project( scope=self, diff --git a/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py b/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py index c71bd372f8..44aeae454c 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py @@ -9,13 +9,12 @@ GITHUB_REPO_OWNER, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME, ) - class AwsLcGitHubX509CIStack(Stack): def __init__( self, scope: Construct, id: str, - env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs, ) -> None: super().__init__(scope, id, env=env, **kwargs) diff --git a/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py index 2cea259269..c0f9bc32b2 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py @@ -22,7 +22,7 @@ def __init__(self, scope: Construct, id: str, spec_file_path: str, - env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs) -> None: super().__init__(scope, id, env=env, **kwargs) diff --git a/tests/ci/cdk/cdk/components.py b/tests/ci/cdk/cdk/components.py index 5aacf19c3e..a1d8a6a81b 100644 --- a/tests/ci/cdk/cdk/components.py +++ b/tests/ci/cdk/cdk/components.py @@ -1,7 +1,9 @@ import pathlib +import typing -from aws_cdk import aws_codebuild as codebuild, aws_lambda as lambda_, aws_ecr_assets as ecr_assets, aws_secretsmanager as sm, \ - aws_events as events, aws_events_targets as events_targets, aws_iam as iam, Duration +from aws_cdk import aws_codebuild as codebuild, aws_lambda as lambda_, 
aws_ecr_assets as ecr_assets, \ + aws_secretsmanager as sm, \ + aws_events as events, aws_events_targets as events_targets, aws_iam as iam, Duration, Environment from constructs import Construct from util.metadata import GITHUB_REPO_OWNER, GITHUB_TOKEN_SECRET_NAME @@ -14,7 +16,7 @@ def __init__( id: str, *, project: codebuild.IProject, - env, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], ec2_permissions: bool ) -> None: super().__init__(scope, id) diff --git a/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py b/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py index 184eb4e5ae..631fed814c 100644 --- a/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py +++ b/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py @@ -26,17 +26,13 @@ def __init__( self, scope: Construct, id: str, - env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs) -> None: super().__init__(scope, id, env=env, **kwargs) github_repo_owner = GITHUB_REPO_OWNER github_repo_name = GITHUB_REPO_NAME - if env.account == PRE_PROD_ACCOUNT: - github_repo_owner = STAGING_GITHUB_REPO_OWNER - github_repo_name = STAGING_GITHUB_REPO_NAME - # Define CodeBuild resource. 
git_hub_source = codebuild.Source.git_hub( owner=github_repo_owner, diff --git a/tests/ci/cdk/cdk/windows_docker_image_build_stack.py b/tests/ci/cdk/cdk/windows_docker_image_build_stack.py index 4b6cc8affb..558c2aeeef 100644 --- a/tests/ci/cdk/cdk/windows_docker_image_build_stack.py +++ b/tests/ci/cdk/cdk/windows_docker_image_build_stack.py @@ -34,7 +34,7 @@ def __init__( self, scope: Construct, id: str, - env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs) -> None: super().__init__( scope, diff --git a/tests/ci/cdk/pipeline/ci_stage.py b/tests/ci/cdk/pipeline/ci_stage.py index 9906b3ad3b..c249cbaaf5 100644 --- a/tests/ci/cdk/pipeline/ci_stage.py +++ b/tests/ci/cdk/pipeline/ci_stage.py @@ -1,5 +1,8 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 OR ISC +import builtins +import re +import typing from aws_cdk import Stage, Environment, Duration, pipelines, aws_iam as iam, Stack from constructs import Construct @@ -9,16 +12,15 @@ from cdk.aws_lc_ec2_test_framework_ci_stack import AwsLcEC2TestingCIStack from cdk.aws_lc_github_ci_stack import AwsLcGitHubCIStack from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack -from pipeline.codebuild_batch_step import BatchBuildTargetOptions, CodeBuildBatchStep - +from pipeline.codebuild_batch_step import CodeBuildBatchStep class CiStage(Stage): def __init__( self, scope: Construct, - id, - pipeline_environment, - deploy_environment, + id: str, + pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs ): super().__init__( @@ -28,7 +30,7 @@ def __init__( **kwargs, ) - self.build_targets = [] + self.build_options = [] # Define CodeBuild Batch job for testing code. 
x86_build_spec_file = "cdk/codebuild/github_ci_linux_x86_omnibus.yaml" @@ -39,8 +41,8 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-linux-x86", ) - self.build_targets.append(BatchBuildTargetOptions( - target="aws-lc-ci-linux-x86", + self.build_options.append(BatchBuildOptions( + project="aws-lc-ci-linux-x86", ignore_failure=False, )) @@ -53,8 +55,8 @@ def __init__( env=deploy_environment, stack_name=arm_stack_name, ) - self.build_targets.append(BatchBuildTargetOptions( - target="aws-lc-ci-linux-arm", + self.build_options.append(BatchBuildOptions( + project="aws-lc-ci-linux-arm", ignore_failure=False, )) @@ -66,8 +68,8 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-integration", ) - self.build_targets.append(BatchBuildTargetOptions( - target="aws-lc-ci-integration", + self.build_options.append(BatchBuildOptions( + project="aws-lc-ci-integration", ignore_failure=True, )) @@ -79,8 +81,8 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-fuzzing", ) - self.build_targets.append(BatchBuildTargetOptions( - target="aws-lc-ci-fuzzing", + self.build_options.append(BatchBuildOptions( + project="aws-lc-ci-fuzzing", ignore_failure=False, )) @@ -92,8 +94,8 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-analytics", ) - self.build_targets.append(BatchBuildTargetOptions( - target="aws-lc-ci-analytics", + self.build_options.append(BatchBuildOptions( + project="aws-lc-ci-analytics", ignore_failure=True, )) @@ -109,8 +111,8 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-ec2-test-framework", ) - self.build_targets.append(BatchBuildTargetOptions( - target="aws-lc-ci-ec2-test-framework", + self.build_options.append(BatchBuildOptions( + project="aws-lc-ci-ec2-test-framework", ignore_failure=True, )) @@ -122,8 +124,8 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-devicefarm-android", ) - self.build_targets.append(BatchBuildTargetOptions( - target="aws-lc-ci-devicefarm-android", + 
self.build_options.append(BatchBuildOptions( + project="aws-lc-ci-devicefarm-android", ignore_failure=False, )) @@ -135,13 +137,13 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-windows-x86", ) - self.build_targets.append(BatchBuildTargetOptions( - target="aws-lc-ci-windows-x86", + self.build_options.append(BatchBuildOptions( + project="aws-lc-ci-windows-x86", ignore_failure=False, )) @property - def stacks(self): + def stacks(self) -> typing.List[Stack]: return [child for child in self.node.children if isinstance(child, Stack)] def add_stage_to_pipeline( @@ -149,11 +151,13 @@ def add_stage_to_pipeline( pipeline: pipelines.CodePipeline, input: pipelines.FileSet, role: iam.Role, - max_retry: int=2, - env={}, + max_retry: typing.Optional[int] = 2, + env: typing.Optional[typing.Mapping[str, str]] = None, ): stack_names = [stack.stack_name for stack in self.stacks] + env = env or {} + prebuild_check_step = pipelines.CodeBuildStep( "PrebuildCheck", input=input, @@ -168,8 +172,7 @@ def add_stage_to_pipeline( "STACKS": " ".join(stack_names), }, role=role, - timeout=Duration.minutes(180) - # project_name=f"{self.stage_name}-PrebuildCheck" + timeout=Duration.minutes(60) ) batch_build_jobs = { @@ -179,13 +182,13 @@ def add_stage_to_pipeline( "ignore-failure": options.ignore_failure, "env": { "variables": { - "PROJECT": options.target, - "TIMEOUT": options.timeout, + "PROJECT": options.project, + "TIMEOUT": str(max_retry * options.timeout), **options.env, } } } - for options in self.build_targets + for options in self.build_options ] } @@ -199,32 +202,17 @@ def add_stage_to_pipeline( "./build_target.sh --build-type ci --project ${PROJECT} --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}" ], role=role, - partial_batch_buildspec=batch_build_jobs, + timeout=300, + partial_batch_build_spec=batch_build_jobs, env={ **env, "MAX_RETRY": max_retry, - "NEED_REBUILD": prebuild_check_step.exported_variable("NEED_REBUILD") + "NEED_REBUILD": 
prebuild_check_step.exported_variable("NEED_REBUILD"), }, ) ci_run_step.add_step_dependency(prebuild_check_step) - # pipeline.add_stage( - # self, - # post=[ - # CodeBuildRunStep( - # f"{self.stage_name}-BuildStep", - # name_prefix=self.stage_name, - # input=input, - # role=role, - # stacks=[stack.stack_name for stack in self.stacks], - # build_targets=self.build_targets, - # max_retry=max_retry, - # env=env, - # ) - # ] - # ) - pipeline.add_stage( self, post=[ @@ -233,6 +221,17 @@ def add_stage_to_pipeline( ] ) - - - +class BatchBuildOptions: + def __init__( + self, + project: str, + identifier: str = None, + ignore_failure: bool = False, + timeout: int = 120, + env: typing.Optional[typing.Mapping[str, str]] = None + ): + self.project = project + self.identifier = identifier or re.sub(r'[^a-zA-Z0-9]', '_', project) + self.ignore_failure = ignore_failure + self.timeout = timeout + self.env = env or {} \ No newline at end of file diff --git a/tests/ci/cdk/pipeline/codebuild_batch_step.py b/tests/ci/cdk/pipeline/codebuild_batch_step.py index 472574cfca..59a4ac6f5e 100644 --- a/tests/ci/cdk/pipeline/codebuild_batch_step.py +++ b/tests/ci/cdk/pipeline/codebuild_batch_step.py @@ -15,32 +15,31 @@ aws_iam as iam ) -class BatchBuildTargetOptions: - def __init__( - self, - target: str, - identifier: str = None, - ignore_failure: bool = False, - timeout: int = 180, - env: Mapping[str, str] = {} - ): - self.target = target - self.identifier = identifier or re.sub(r'[^a-zA-Z0-9]', '_', target) - self.ignore_failure = ignore_failure - self.timeout = timeout - self.env = env - @jsii.implements(pipelines.ICodePipelineActionFactory) class CodeBuildBatchStep(pipelines.Step): + """ + Create a CodeBuildBatchStep given shell commands and batch build settings. + + :param id: The id of the step. + :param input: The input file set producer. + :param action_name: Name of the action produced by this step. + :param commands: The CodeBuild commands to be run. 
+ :param partial_batch_build_spec: The batch build settings for the project. + :param role: The role to use for the CodeBuild project. + :param timeout: Timeout of the batch build project, in minutes. + :param env: The environment variables to use for the CodeBuild project. + + :return: A new CodeBuildBatchStep. + """ def __init__(self, id, - # input: pipelines.IFileSetProducer, input: pipelines.FileSet, action_name: str, - commands: list[str], + commands: typing.List[str], + partial_batch_build_spec: typing.Mapping[builtins.str, typing.Any], role: iam.Role, - partial_batch_buildspec: typing.Mapping[builtins.str, typing.Any], - env: Mapping[str, str] = {}): + timeout: int = 300, + env: typing.Optional[typing.Mapping[str, str]]=None): super().__init__(id) self._discover_referenced_outputs(env) @@ -48,12 +47,13 @@ def __init__(self, self.input = input self.action_name = action_name self.commands = commands - self.partial_batch_buildspec = partial_batch_buildspec + self.partial_batch_build_spec = partial_batch_build_spec self.role = role + self.timeout = timeout self.env = { key: codebuild.BuildEnvironmentVariable(value=value) for key, value in env.items() - } + } if env else {} @jsii.member(jsii_name="produceAction") def produce_action( @@ -61,12 +61,12 @@ def produce_action( stage: codepipeline.IStage, options: pipelines.ProduceActionOptions, ) -> pipelines.CodePipelineActionFactoryResult: - build_target_project = codebuild.PipelineProject( + batch_build_project = codebuild.PipelineProject( options.scope, - "StartWait", + self.action_name, build_spec=codebuild.BuildSpec.from_object({ "version": 0.2, - "batch": self.partial_batch_buildspec, + "batch": self.partial_batch_build_spec, "phases": { "build": { "commands": self.commands @@ -74,20 +74,19 @@ def produce_action( } }), role=self.role, - timeout=Duration.minutes(180) + timeout=Duration.minutes(self.timeout) ) - build_target_action = cp_actions.CodeBuildAction( + batch_build_action = cp_actions.CodeBuildAction( 
action_name=self.action_name, - # input=artifacts.to_code_pipeline(self.input.primary_output), input=options.artifacts.to_code_pipeline(self.input), run_order=options.run_order, - project=build_target_project, + project=batch_build_project, execute_batch_build=True, environment_variables=self.env ) - stage.add_action(build_target_action) + stage.add_action(batch_build_action) return pipelines.CodePipelineActionFactoryResult( run_orders_consumed=1 diff --git a/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py b/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py index 832ccd967c..00236e4f50 100644 --- a/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py +++ b/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py @@ -1,5 +1,6 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 OR ISC +import typing from aws_cdk import Stage, Environment, Stack, Duration, aws_iam as iam, pipelines from aws_cdk.pipelines import CodeBuildStep @@ -15,9 +16,9 @@ class LinuxDockerImageBuildStage(Stage): def __init__( self, scope: Construct, - id, - pipeline_environment, - deploy_environment, + id: str, + pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs ): super().__init__( @@ -58,7 +59,7 @@ def __init__( self.need_rebuild = None @property - def stacks(self): + def stacks(self) -> typing.List[Stack]: return [child for child in self.node.children if isinstance(child, Stack)] def add_stage_to_wave( @@ -66,13 +67,16 @@ def add_stage_to_wave( wave: pipelines.Wave, input: pipelines.FileSet, role: iam.Role, - max_retry: int=2, - additional_stacks: list[Stack]=[], - env={}, + max_retry: typing.Optional[int] = 2, + additional_stacks: typing.Optional[typing.List[str]] = None, + env: typing.Optional[typing.Mapping[str, str]] = None ): - stacks = self.stacks + additional_stacks + stacks = self.stacks + 
(additional_stacks if additional_stacks else []) stack_names = [stack.stack_name for stack in stacks] + env = env if env else {} + timeout = (max_retry + 1) * 120 + docker_build_step = CodeBuildStep( "StartWait", input=input, @@ -89,11 +93,10 @@ def add_stage_to_wave( "STACKS": " ".join(stack_names), "ECR_REPOS": " ".join(self.ecr_repo_names), "MAX_RETRY": str(max_retry), - "TIMEOUT": str(180), # 3 hours + "TIMEOUT": str(timeout), }, role=role, - timeout=Duration.minutes(180) - # project_name=f"{self.stage_name}-StartWait" + timeout=Duration.minutes(timeout) ) wave.add_stage( diff --git a/tests/ci/cdk/pipeline/pipeline_stack.py b/tests/ci/cdk/pipeline/pipeline_stack.py index 07db4ce8be..fd0abc43a1 100644 --- a/tests/ci/cdk/pipeline/pipeline_stack.py +++ b/tests/ci/cdk/pipeline/pipeline_stack.py @@ -1,5 +1,6 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 OR ISC +import typing from aws_cdk import Stack, Environment, Duration from aws_cdk import ( @@ -182,7 +183,7 @@ def deploy_to_environment( pipeline: pipelines.CodePipeline, source: pipelines.CodePipelineSource, cross_account_role: iam.Role, - codebuild_environment_variables = {}, + codebuild_environment_variables: typing.Optional[typing.Mapping[str, str]] = None, ): pipeline_environment = Environment(account=PIPELINE_ACCOUNT, region=PIPELINE_REGION) @@ -193,6 +194,8 @@ def deploy_to_environment( else: deploy_environment = Environment(account=PROD_ACCOUNT, region=PROD_REGION) + codebuild_environment_variables = codebuild_environment_variables if codebuild_environment_variables else {} + codebuild_environment_variables = { **codebuild_environment_variables, "PIPELINE_EXECUTION_ID": "#{codepipeline.PipelineExecutionId}", diff --git a/tests/ci/cdk/pipeline/scripts/build_target.sh b/tests/ci/cdk/pipeline/scripts/build_target.sh index 8b29151e10..47e2c37b8a 100644 --- a/tests/ci/cdk/pipeline/scripts/build_target.sh +++ 
b/tests/ci/cdk/pipeline/scripts/build_target.sh @@ -27,8 +27,14 @@ function build_codebuild_ci_project() { exit 1 fi + if [[ ${DEPLOY_ACCOUNT} == '351119683581' ]]; then + source_version="main" + else + source_version=${COMMIT_HASH} + fi + echo "Starting CI tests in ${project}" - start_codebuild_project "${project}" "${COMMIT_HASH}" + start_codebuild_project "${project}" "${source_version}" while [[ ${attempt} -le ${MAX_RETRY} ]]; do attempt=$((attempt + 1)) diff --git a/tests/ci/cdk/pipeline/scripts/finalize_images.sh b/tests/ci/cdk/pipeline/scripts/finalize_images.sh index 207660a894..b313ee98ca 100644 --- a/tests/ci/cdk/pipeline/scripts/finalize_images.sh +++ b/tests/ci/cdk/pipeline/scripts/finalize_images.sh @@ -88,28 +88,3 @@ for repo in ${REPOS}; do done wait - - -# List all images in the repository and filter the ones with any tag ending with '_pending' -#image_digests=$(aws ecr describe-images --repository-name "$repo" --region "$REGION" --query "imageDetails[?length(imageTags) > 0 && imageTags[?ends_with(@, '_pending')]].imageDigest" --output text) -# -#if [ -z "$image_digests" ]; then -# echo "No images found with tags ending in '_pending'." -# exit 0 -#fi - -## Loop through and delete each image by its digest -#for image_digest in $image_digests; do -# echo "Deleting image with digest: $image_digest..." -# -# # Delete the image by its digest -# aws ecr batch-delete-image --repository-name "$repo" --region "$REGION" --image-ids imageDigest="$image_digest" -# -# if [ $? -eq 0 ]; then -# echo "Image $image_digest deleted successfully." -# else -# echo "Failed to delete image $image_digest." -# fi -#done -# -#echo "Cleanup complete." 
diff --git a/tests/ci/cdk/pipeline/setup_stage.py b/tests/ci/cdk/pipeline/setup_stage.py index 80de35308f..f7276d46d7 100644 --- a/tests/ci/cdk/pipeline/setup_stage.py +++ b/tests/ci/cdk/pipeline/setup_stage.py @@ -1,3 +1,5 @@ +import typing + from aws_cdk import Stage, aws_codebuild as codebuild, Environment, Stack, aws_iam as iam from constructs import Construct @@ -8,12 +10,13 @@ class SetupStage(Stage): + """Define a stack of IAM role to allow cross-account deployment""" def __init__( self, scope: Construct, - id, - pipeline_environment, - deploy_environment, + id: str, + pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs ): super().__init__( @@ -37,13 +40,15 @@ def stacks(self): return [child for child in self.node.children if isinstance(child, Stack)] class SetupStack(Stack): + """Define a stack of IAM role to allow cross-account deployment""" def __init__( self, scope: Construct, id: str, - pipeline_environment, - deploy_environment, - **kwargs) -> None: + pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs + ) -> None: super().__init__(scope, id, env=deploy_environment, **kwargs) cross_account_role = iam.Role( diff --git a/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py b/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py index 701dc5cec5..4e37cd469f 100644 --- a/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py +++ b/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py @@ -1,5 +1,6 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 OR ISC +import typing from aws_cdk import Stage, Environment, Stack, Duration, aws_iam as iam, pipelines, Fn from aws_cdk.pipelines import CodeBuildStep @@ -7,7 +8,6 @@ from cdk.ecr_stack import EcrStack from cdk.windows_docker_image_build_stack import WindowsDockerImageBuildStack -from pipeline.deploy_util import DeployEnvironmentType from util.metadata import WINDOWS_X86_ECR_REPO, WIN_EC2_TAG_KEY, WIN_EC2_TAG_VALUE, SSM_DOCUMENT_NAME @@ -15,9 +15,9 @@ class WindowsDockerImageBuildStage(Stage): def __init__( self, scope: Construct, - id, - pipeline_environment, - deploy_environment, + id: str, + pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs ): super().__init__( @@ -49,7 +49,7 @@ def __init__( self.need_rebuild = None @property - def stacks(self): + def stacks(self) -> typing.List[Stack]: return [child for child in self.node.children if isinstance(child, Stack)] def add_stage_to_wave( @@ -57,13 +57,16 @@ def add_stage_to_wave( wave: pipelines.Wave, input: pipelines.FileSet, role: iam.Role, - max_retry: int=2, - additional_stacks: list[Construct]=[], - env=None, + max_retry: typing.Optional[int] = 2, + additional_stacks: typing.Optional[typing.List[str]] = None, + env: typing.Optional[typing.Mapping[str, str]] = None ): - stacks = self.stacks + additional_stacks + stacks = self.stacks + (additional_stacks if additional_stacks else []) stack_names = [stack.stack_name for stack in stacks] + env = env if env else {} + timeout = (max_retry + 1) * 120 + docker_build_step = CodeBuildStep( "StartWait", input=input, @@ -80,14 +83,14 @@ def add_stage_to_wave( "STACKS": " ".join(stack_names), "ECR_REPOS": " ".join(self.ecr_repo_names), "MAX_RETRY": str(max_retry), - "TIMEOUT": str(180), # 3 hours + "TIMEOUT": str(timeout), "WIN_EC2_TAG_KEY": WIN_EC2_TAG_KEY, "WIN_EC2_TAG_VALUE": WIN_EC2_TAG_VALUE, 
"WIN_DOCKER_BUILD_SSM_DOCUMENT": SSM_DOCUMENT_NAME, "S3_FOR_WIN_DOCKER_IMG_BUILD": self.s3_bucket_name, }, role=role, - timeout=Duration.minutes(180) + timeout=Duration.minutes(timeout) ) wave.add_stage( diff --git a/tests/ci/cdk/run-cdk.sh b/tests/ci/cdk/run-cdk.sh index a9e53046cf..593bd0ecff 100755 --- a/tests/ci/cdk/run-cdk.sh +++ b/tests/ci/cdk/run-cdk.sh @@ -47,9 +47,9 @@ function destroy_ci() { fi cdk destroy 'aws-lc-*' --force # CDK stack destroy does not delete s3 bucket automatically. - delete_s3_buckets +# delete_s3_buckets # CDK stack destroy does not delete ecr automatically. - delete_container_repositories +# delete_container_repositories } function destroy_docker_img_build_stack() { diff --git a/tests/ci/cdk/util/iam_policies.py b/tests/ci/cdk/util/iam_policies.py index 6606cfe4c7..2158d105d5 100644 --- a/tests/ci/cdk/util/iam_policies.py +++ b/tests/ci/cdk/util/iam_policies.py @@ -155,28 +155,6 @@ def s3_read_write_policy_in_json(s3_bucket_name): ] } -def s3_read_policy_in_json(): - """ - Define an IAM policy statement for reading from S3 bucket. - :return: an IAM policy statement in json. - """ - return { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Action": [ - "s3:GetObject", - "s3:GetObjectVersion", - "s3:ListBucket" - ], - "Resource": [ - "*" - ] - } - ] - } - def ecr_repo_arn(repo_name, env): """ diff --git a/tests/ci/cdk/util/metadata.py b/tests/ci/cdk/util/metadata.py index 1c85f54b45..7135f70349 100644 --- a/tests/ci/cdk/util/metadata.py +++ b/tests/ci/cdk/util/metadata.py @@ -22,7 +22,7 @@ STAGING_GITHUB_REPO_OWNER = "aws" STAGING_GITHUB_REPO_NAME = "private-aws-lc-staging" -IS_DEV = EnvUtil.get("IS_DEV", "True") == "True" #TODO: change default value to true +IS_DEV = EnvUtil.get("IS_DEV", "False") == "True" #TODO: change default value to true MAX_TEST_RETRY = int(EnvUtil.get("MAX_TEST_RETRY", "2")) # Used when AWS CDK defines ECR repos. 
diff --git a/tests/ci/cdk/util/yml_loader.py b/tests/ci/cdk/util/yml_loader.py index bef17e3e38..da70888a6e 100644 --- a/tests/ci/cdk/util/yml_loader.py +++ b/tests/ci/cdk/util/yml_loader.py @@ -12,13 +12,14 @@ class YmlLoader(object): """Responsible for loading yml file as python object.""" @staticmethod - def load(file_path, placeholder_map: typing.Optional[typing.Mapping[str, str]] = {}): + def load(file_path, placeholder_map: typing.Optional[typing.Mapping[str, str]] = None): """ Used to load yml file and replace some placeholders if needed. :param file_path: path to the yml file. :param placeholder_map: a mapping from placeholder to corresponding value. :return: python object. """ + placeholder_map = placeholder_map or {} with open(file_path) as file: file_text = file.read() for key in placeholder_map.keys(): diff --git a/tests/ci/docker_images/linux-aarch/build_images.sh b/tests/ci/docker_images/linux-aarch/build_images.sh index 21d8b9ea57..86de2ee111 100755 --- a/tests/ci/docker_images/linux-aarch/build_images.sh +++ b/tests/ci/docker_images/linux-aarch/build_images.sh @@ -4,8 +4,6 @@ set -ex -env - # Log Docker hub limit https://docs.docker.com/docker-hub/download-rate-limit/#how-can-i-check-my-current-rate TOKEN=$(curl "https://auth.docker.io/token?service=registry.docker.io&scope=repository:ratelimitpreview/test:pull" | jq -r .token) curl --head -H "Authorization: Bearer $TOKEN" https://registry-1.docker.io/v2/ratelimitpreview/test/manifests/latest diff --git a/tests/ci/docker_images/windows/push_images.ps1 b/tests/ci/docker_images/windows/push_images.ps1 index c114e2745b..ae57eb977a 100644 --- a/tests/ci/docker_images/windows/push_images.ps1 +++ b/tests/ci/docker_images/windows/push_images.ps1 @@ -2,7 +2,6 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC $ECS_REPO=$args[0] -Write-Host "TRIGGER_TYPE: $TRIGGER_TYPE" $TAG = if (-not [string]::IsNullOrEmpty($TRIGGER_TYPE) -and $TRIGGER_TYPE -eq "pipeline") { "pending" } else { From 
10d511fc27ad08d64131e8626fbe5fe9a341123c Mon Sep 17 00:00:00 2001 From: Nghi Ho Date: Wed, 26 Mar 2025 14:54:21 -0700 Subject: [PATCH 04/10] Reformat code --- tests/ci/cdk/app.py | 37 +++- tests/ci/cdk/cdk/aws_lc_analytics_stack.py | 70 +++++-- tests/ci/cdk/cdk/aws_lc_android_ci_stack.py | 82 +++++--- .../cdk/aws_lc_ec2_test_framework_ci_stack.py | 182 ++++++++++++------ tests/ci/cdk/cdk/aws_lc_github_ci_stack.py | 116 +++++++---- .../ci/cdk/cdk/aws_lc_github_ci_x509_stack.py | 17 +- .../ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py | 133 ++++++++----- tests/ci/cdk/cdk/bm_framework_stack.py | 93 ++++++--- tests/ci/cdk/cdk/components.py | 152 +++++++++------ tests/ci/cdk/cdk/ecr_stack.py | 2 +- .../linux_docker_image_batch_build_stack.py | 77 +++++--- .../cdk/windows_docker_image_build_stack.py | 24 ++- tests/ci/cdk/pipeline/ci_stage.py | 140 ++++++++------ tests/ci/cdk/pipeline/codebuild_batch_step.py | 64 +++--- tests/ci/cdk/pipeline/deploy_util.py | 7 +- .../linux_docker_image_build_stage.py | 48 ++--- tests/ci/cdk/pipeline/pipeline_stack.py | 84 ++++---- tests/ci/cdk/pipeline/scripts/build_target.sh | 56 +++--- .../scripts/check_trigger_conditions.sh | 28 +-- .../scripts/cleanup_orphaned_images.sh | 2 +- .../cdk/pipeline/scripts/finalize_images.sh | 4 +- tests/ci/cdk/pipeline/scripts/util.sh | 34 ++-- tests/ci/cdk/pipeline/setup_stage.py | 99 +++++----- .../windows_docker_image_build_stage.py | 52 ++--- tests/ci/cdk/util/build_spec_loader.py | 6 +- tests/ci/cdk/util/devicefarm_util.py | 2 +- tests/ci/cdk/util/ecr_util.py | 4 +- tests/ci/cdk/util/env_util.py | 1 - tests/ci/cdk/util/iam_policies.py | 106 +++++----- tests/ci/cdk/util/metadata.py | 30 ++- tests/ci/cdk/util/yml_loader.py | 4 +- 31 files changed, 1072 insertions(+), 684 deletions(-) diff --git a/tests/ci/cdk/app.py b/tests/ci/cdk/app.py index 8db6537e67..efd6f52997 100644 --- a/tests/ci/cdk/app.py +++ b/tests/ci/cdk/app.py @@ -6,23 +6,34 @@ from aws_cdk import Environment, App # from 
cdk.bm_framework_stack import BmFrameworkStack -from cdk.aws_lc_analytics_stack import AwsLcGitHubAnalyticsStack +from cdk.aws_lc_analytics_stack import AwsLcGitHubAnalyticsStack from cdk.aws_lc_android_ci_stack import AwsLcAndroidCIStack from cdk.aws_lc_github_ci_stack import AwsLcGitHubCIStack -from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack +from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack from cdk.aws_lc_ec2_test_framework_ci_stack import AwsLcEC2TestingCIStack from cdk.linux_docker_image_batch_build_stack import LinuxDockerImageBatchBuildStack from pipeline.pipeline_stack import AwsLcCiPipeline from cdk.windows_docker_image_build_stack import WindowsDockerImageBuildStack from cdk.aws_lc_github_ci_x509_stack import AwsLcGitHubX509CIStack from cdk.ecr_stack import EcrStack -from util.metadata import LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO, WINDOWS_X86_ECR_REPO, \ - PIPELINE_ACCOUNT, PIPELINE_REGION, DEPLOY_ACCOUNT, DEPLOY_REGION +from util.metadata import ( + LINUX_X86_ECR_REPO, + LINUX_AARCH_ECR_REPO, + WINDOWS_X86_ECR_REPO, + PIPELINE_ACCOUNT, + PIPELINE_REGION, + DEPLOY_ACCOUNT, + DEPLOY_REGION, +) # Initialize app. app = App() -AwsLcCiPipeline(app, "AwsLcCiPipeline", env=Environment(account=PIPELINE_ACCOUNT, region=PIPELINE_REGION)) +AwsLcCiPipeline( + app, + "AwsLcCiPipeline", + env=Environment(account=PIPELINE_ACCOUNT, region=PIPELINE_REGION), +) if DEPLOY_ACCOUNT is not None and DEPLOY_REGION is not None: # Initialize env. 
@@ -47,19 +58,27 @@ arm_build_spec_file = "cdk/codebuild/github_ci_linux_arm_omnibus.yaml" AwsLcGitHubCIStack(app, "aws-lc-ci-linux-arm", arm_build_spec_file, env=env) integration_build_spec_file = "cdk/codebuild/github_ci_integration_omnibus.yaml" - AwsLcGitHubCIStack(app, "aws-lc-ci-integration", integration_build_spec_file, env=env) + AwsLcGitHubCIStack( + app, "aws-lc-ci-integration", integration_build_spec_file, env=env + ) win_x86_build_spec_file = "cdk/codebuild/github_ci_windows_x86_omnibus.yaml" AwsLcGitHubCIStack(app, "aws-lc-ci-windows-x86", win_x86_build_spec_file, env=env) fuzz_build_spec_file = "cdk/codebuild/github_ci_fuzzing_omnibus.yaml" AwsLcGitHubFuzzCIStack(app, "aws-lc-ci-fuzzing", fuzz_build_spec_file, env=env) analytics_build_spec_file = "cdk/codebuild/github_ci_analytics_omnibus.yaml" - AwsLcGitHubAnalyticsStack(app, "aws-lc-ci-analytics", analytics_build_spec_file, env=env) + AwsLcGitHubAnalyticsStack( + app, "aws-lc-ci-analytics", analytics_build_spec_file, env=env + ) # bm_framework_build_spec_file = "cdk/codebuild/bm_framework_omnibus.yaml" # BmFrameworkStack(app, "aws-lc-ci-bm-framework", bm_framework_build_spec_file, env=env) ec2_test_framework_build_spec_file = "cdk/codebuild/ec2_test_framework_omnibus.yaml" - AwsLcEC2TestingCIStack(app, "aws-lc-ci-ec2-test-framework", ec2_test_framework_build_spec_file, env=env) + AwsLcEC2TestingCIStack( + app, "aws-lc-ci-ec2-test-framework", ec2_test_framework_build_spec_file, env=env + ) android_build_spec_file = "cdk/codebuild/github_ci_android_omnibus.yaml" - AwsLcAndroidCIStack(app, "aws-lc-ci-devicefarm-android", android_build_spec_file, env=env) + AwsLcAndroidCIStack( + app, "aws-lc-ci-devicefarm-android", android_build_spec_file, env=env + ) AwsLcGitHubX509CIStack(app, "aws-lc-ci-x509", env=env) app.synth() diff --git a/tests/ci/cdk/cdk/aws_lc_analytics_stack.py b/tests/ci/cdk/cdk/aws_lc_analytics_stack.py index 8eb046bf7f..4691835fd5 100644 --- a/tests/ci/cdk/cdk/aws_lc_analytics_stack.py 
+++ b/tests/ci/cdk/cdk/aws_lc_analytics_stack.py @@ -2,26 +2,40 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC import typing -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_ec2 as ec2, aws_efs as efs, \ - Environment +from aws_cdk import ( + Duration, + Stack, + aws_codebuild as codebuild, + aws_iam as iam, + aws_ec2 as ec2, + aws_efs as efs, + Environment, +) from constructs import Construct from cdk.components import PruneStaleGitHubBuilds from util.iam_policies import code_build_publish_metrics_in_json -from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, \ - STAGING_GITHUB_REPO_NAME +from util.metadata import ( + GITHUB_REPO_OWNER, + GITHUB_REPO_NAME, + PRE_PROD_ACCOUNT, + STAGING_GITHUB_REPO_OWNER, + STAGING_GITHUB_REPO_NAME, +) from util.build_spec_loader import BuildSpecLoader class AwsLcGitHubAnalyticsStack(Stack): """Define a stack used to batch execute AWS-LC tests in GitHub.""" - def __init__(self, - scope: Construct, - id: str, - spec_file_path: str, - env: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs) -> None: + def __init__( + self, + scope: Construct, + id: str, + spec_file_path: str, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs + ) -> None: super().__init__(scope, id, env=env, **kwargs) # Define CodeBuild resource. @@ -43,15 +57,20 @@ def __init__(self, # the branch or create a new FIPS branch it should be updated to '(main)|(fips.*)' .and_branch_is("main") ], - webhook_triggers_batch_build=True) + webhook_triggers_batch_build=True, + ) # Define a IAM role for this stack. 
- metrics_policy = iam.PolicyDocument.from_json(code_build_publish_metrics_in_json(env)) + metrics_policy = iam.PolicyDocument.from_json( + code_build_publish_metrics_in_json(env) + ) inline_policies = {"metric_policy": metrics_policy} - role = iam.Role(scope=self, - id="{}-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies=inline_policies) + role = iam.Role( + scope=self, + id="{}-role".format(id), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + inline_policies=inline_policies, + ) # Define CodeBuild. analytics = codebuild.Project( @@ -61,10 +80,19 @@ def __init__(self, source=git_hub_source, role=role, timeout=Duration.minutes(120), - environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.LARGE, - privileged=True, - build_image=codebuild.LinuxBuildImage.STANDARD_4_0), - build_spec=BuildSpecLoader.load(spec_file_path, env)) + environment=codebuild.BuildEnvironment( + compute_type=codebuild.ComputeType.LARGE, + privileged=True, + build_image=codebuild.LinuxBuildImage.STANDARD_4_0, + ), + build_spec=BuildSpecLoader.load(spec_file_path, env), + ) analytics.enable_batch_builds() - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=analytics, ec2_permissions=False, env=env) + PruneStaleGitHubBuilds( + scope=self, + id="PruneStaleGitHubBuilds", + project=analytics, + ec2_permissions=False, + env=env, + ) diff --git a/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py index 1ed3a424db..c191894ee6 100644 --- a/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py @@ -2,13 +2,28 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC import typing -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, Environment +from aws_cdk import ( + Duration, + Environment, + Stack, + aws_codebuild as codebuild, + aws_iam as iam, +) from constructs import Construct from cdk.components 
import PruneStaleGitHubBuilds -from util.iam_policies import code_build_batch_policy_in_json, device_farm_access_policy_in_json -from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME, GITHUB_PUSH_CI_BRANCH_TARGETS, PRE_PROD_ACCOUNT, \ - STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME +from util.iam_policies import ( + code_build_batch_policy_in_json, + device_farm_access_policy_in_json, +) +from util.metadata import ( + GITHUB_REPO_OWNER, + GITHUB_REPO_NAME, + GITHUB_PUSH_CI_BRANCH_TARGETS, + PRE_PROD_ACCOUNT, + STAGING_GITHUB_REPO_OWNER, + STAGING_GITHUB_REPO_NAME, +) from util.build_spec_loader import BuildSpecLoader @@ -18,12 +33,14 @@ class AwsLcAndroidCIStack(Stack): # The Device Farm resource used to in this CI spec, must be manually created. # TODO: Automate Device Farm creation with cdk script. - def __init__(self, - scope: Construct, - id: str, - spec_file_path: str, - env: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs) -> None: + def __init__( + self, + scope: Construct, + id: str, + spec_file_path: str, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs + ) -> None: super().__init__(scope, id, env=env, **kwargs) github_repo_owner = GITHUB_REPO_OWNER @@ -42,11 +59,14 @@ def __init__(self, codebuild.FilterGroup.in_event_of( codebuild.EventAction.PULL_REQUEST_CREATED, codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED), - codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH).and_branch_is( - GITHUB_PUSH_CI_BRANCH_TARGETS), + codebuild.EventAction.PULL_REQUEST_REOPENED, + ), + codebuild.FilterGroup.in_event_of( + codebuild.EventAction.PUSH + ).and_branch_is(GITHUB_PUSH_CI_BRANCH_TARGETS), ], - webhook_triggers_batch_build=True) + webhook_triggers_batch_build=True, + ) # Define a IAM role for this stack. 
code_build_batch_policy = iam.PolicyDocument.from_json( @@ -55,11 +75,16 @@ def __init__(self, device_farm_policy = iam.PolicyDocument.from_json( device_farm_access_policy_in_json(env) ) - inline_policies = {"code_build_batch_policy": code_build_batch_policy, "device_farm_policy": device_farm_policy} - role = iam.Role(scope=self, - id="{}-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies=inline_policies) + inline_policies = { + "code_build_batch_policy": code_build_batch_policy, + "device_farm_policy": device_farm_policy, + } + role = iam.Role( + scope=self, + id="{}-role".format(id), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + inline_policies=inline_policies, + ) # Define CodeBuild. project = codebuild.Project( @@ -69,10 +94,19 @@ def __init__(self, source=git_hub_source, role=role, timeout=Duration.minutes(180), - environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.SMALL, - privileged=False, - build_image=codebuild.LinuxBuildImage.STANDARD_4_0), - build_spec=BuildSpecLoader.load(spec_file_path, env)) + environment=codebuild.BuildEnvironment( + compute_type=codebuild.ComputeType.SMALL, + privileged=False, + build_image=codebuild.LinuxBuildImage.STANDARD_4_0, + ), + build_spec=BuildSpecLoader.load(spec_file_path, env), + ) project.enable_batch_builds() - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=False, env=env) + PruneStaleGitHubBuilds( + scope=self, + id="PruneStaleGitHubBuilds", + project=project, + ec2_permissions=False, + env=env, + ) diff --git a/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py index 3b65c914d6..5cb35275e3 100644 --- a/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py @@ -7,28 +7,54 @@ import boto3 from botocore.exceptions import ClientError -from aws_cdk import CfnTag, 
Duration, Stack, Tags, aws_ec2 as ec2, aws_codebuild as codebuild, aws_iam as iam, \ - aws_s3 as s3, aws_logs as logs, Environment +from aws_cdk import ( + CfnTag, + Duration, + Stack, + Tags, + aws_ec2 as ec2, + aws_codebuild as codebuild, + aws_iam as iam, + aws_s3 as s3, + aws_logs as logs, + Environment, +) from constructs import Construct from cdk.components import PruneStaleGitHubBuilds -from util.metadata import GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME, \ - LINUX_AARCH_ECR_REPO, \ - LINUX_X86_ECR_REPO, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME -from util.iam_policies import code_build_batch_policy_in_json, ec2_policies_in_json, ssm_policies_in_json, s3_read_write_policy_in_json, ecr_power_user_policy_in_json +from util.metadata import ( + GITHUB_PUSH_CI_BRANCH_TARGETS, + GITHUB_REPO_OWNER, + GITHUB_REPO_NAME, + LINUX_AARCH_ECR_REPO, + LINUX_X86_ECR_REPO, + PRE_PROD_ACCOUNT, + STAGING_GITHUB_REPO_OWNER, + STAGING_GITHUB_REPO_NAME, +) +from util.iam_policies import ( + code_build_batch_policy_in_json, + ec2_policies_in_json, + ssm_policies_in_json, + s3_read_write_policy_in_json, + ecr_power_user_policy_in_json, +) from util.build_spec_loader import BuildSpecLoader # detailed documentation can be found here: https://docs.aws.amazon.com/cdk/api/latest/docs/aws-ec2-readme.html + class AwsLcEC2TestingCIStack(Stack): """Define a stack used to create a CodeBuild instance on which to execute the AWS-LC m1 ci ec2 instance""" - def __init__(self, - scope: Construct, - id: str, - spec_file_path: str, - env: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs) -> None: + def __init__( + self, + scope: Construct, + id: str, + spec_file_path: str, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs + ) -> None: super().__init__(scope, id, env=env, **kwargs) github_repo_owner = GITHUB_REPO_OWNER @@ -47,31 +73,55 @@ def __init__(self, codebuild.FilterGroup.in_event_of( 
codebuild.EventAction.PULL_REQUEST_CREATED, codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED), - codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH).and_branch_is( - GITHUB_PUSH_CI_BRANCH_TARGETS), + codebuild.EventAction.PULL_REQUEST_REOPENED, + ), + codebuild.FilterGroup.in_event_of( + codebuild.EventAction.PUSH + ).and_branch_is(GITHUB_PUSH_CI_BRANCH_TARGETS), ], - webhook_triggers_batch_build=True) + webhook_triggers_batch_build=True, + ) # S3 bucket for testing internal fixes. - s3_read_write_policy = iam.PolicyDocument.from_json(s3_read_write_policy_in_json("aws-lc-codebuild")) - ecr_power_user_policy = iam.PolicyDocument.from_json(ecr_power_user_policy_in_json([LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO], env)) - ec2_inline_policies = {"s3_read_write_policy": s3_read_write_policy, "ecr_power_user_policy": ecr_power_user_policy} - ec2_role = iam.Role(scope=self, id="{}-ec2-role".format(id), - role_name="{}-ec2-role".format(id), - assumed_by=iam.ServicePrincipal("ec2.amazonaws.com"), - inline_policies=ec2_inline_policies, - managed_policies=[ - iam.ManagedPolicy.from_aws_managed_policy_name("AmazonSSMManagedInstanceCore"), - iam.ManagedPolicy.from_aws_managed_policy_name("CloudWatchAgentServerPolicy") - ]) - iam.CfnInstanceProfile(scope=self, id="{}-ec2-profile".format(id), - roles=[ec2_role.role_name], - instance_profile_name="{}-ec2-profile".format(id)) + s3_read_write_policy = iam.PolicyDocument.from_json( + s3_read_write_policy_in_json("aws-lc-codebuild") + ) + ecr_power_user_policy = iam.PolicyDocument.from_json( + ecr_power_user_policy_in_json( + [LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO], env + ) + ) + ec2_inline_policies = { + "s3_read_write_policy": s3_read_write_policy, + "ecr_power_user_policy": ecr_power_user_policy, + } + ec2_role = iam.Role( + scope=self, + id="{}-ec2-role".format(id), + role_name="{}-ec2-role".format(id), + assumed_by=iam.ServicePrincipal("ec2.amazonaws.com"), + 
inline_policies=ec2_inline_policies, + managed_policies=[ + iam.ManagedPolicy.from_aws_managed_policy_name( + "AmazonSSMManagedInstanceCore" + ), + iam.ManagedPolicy.from_aws_managed_policy_name( + "CloudWatchAgentServerPolicy" + ), + ], + ) + iam.CfnInstanceProfile( + scope=self, + id="{}-ec2-profile".format(id), + roles=[ec2_role.role_name], + instance_profile_name="{}-ec2-profile".format(id), + ) # create vpc for ec2s vpc = ec2.Vpc(self, id="{}-ec2-vpc".format(id)) - selected_subnets = vpc.select_subnets(subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS) + selected_subnets = vpc.select_subnets( + subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS + ) # create security group with default rules # security_group = ec2.SecurityGroup(self, id="{}-ec2-sg".format(id), @@ -80,19 +130,35 @@ def __init__(self, # security_group_name='codebuild_ec2_sg') # Define a IAM role for this stack. - code_build_batch_policy = iam.PolicyDocument.from_json(code_build_batch_policy_in_json([id], env)) - ec2_policy = iam.PolicyDocument.from_json(ec2_policies_in_json(ec2_role.role_name, vpc.vpc_default_security_group, selected_subnets.subnets[0].subnet_id, vpc.vpc_id, env)) + code_build_batch_policy = iam.PolicyDocument.from_json( + code_build_batch_policy_in_json([id], env) + ) + ec2_policy = iam.PolicyDocument.from_json( + ec2_policies_in_json( + ec2_role.role_name, + vpc.vpc_default_security_group, + selected_subnets.subnets[0].subnet_id, + vpc.vpc_id, + env, + ) + ) ssm_policy = iam.PolicyDocument.from_json(ssm_policies_in_json(env)) - codebuild_inline_policies = {"code_build_batch_policy": code_build_batch_policy, - "ec2_policy": ec2_policy, - "ssm_policy": ssm_policy} - codebuild_role = iam.Role(scope=self, - id="{}-codebuild-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies=codebuild_inline_policies, - managed_policies=[ - iam.ManagedPolicy.from_aws_managed_policy_name("CloudWatchAgentServerPolicy") - ]) + codebuild_inline_policies = { + 
"code_build_batch_policy": code_build_batch_policy, + "ec2_policy": ec2_policy, + "ssm_policy": ssm_policy, + } + codebuild_role = iam.Role( + scope=self, + id="{}-codebuild-role".format(id), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + inline_policies=codebuild_inline_policies, + managed_policies=[ + iam.ManagedPolicy.from_aws_managed_policy_name( + "CloudWatchAgentServerPolicy" + ) + ], + ) # Define CodeBuild. project = codebuild.Project( @@ -102,29 +168,37 @@ def __init__(self, source=git_hub_source, role=codebuild_role, timeout=Duration.minutes(120), - environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.SMALL, - privileged=False, - build_image=codebuild.LinuxBuildImage.STANDARD_4_0), + environment=codebuild.BuildEnvironment( + compute_type=codebuild.ComputeType.SMALL, + privileged=False, + build_image=codebuild.LinuxBuildImage.STANDARD_4_0, + ), build_spec=BuildSpecLoader.load(spec_file_path, env), - environment_variables= { + environment_variables={ "EC2_SECURITY_GROUP_ID": codebuild.BuildEnvironmentVariable( value=vpc.vpc_default_security_group ), "EC2_SUBNET_ID": codebuild.BuildEnvironmentVariable( value=selected_subnets.subnets[0].subnet_id ), - "EC2_VPC_ID": codebuild.BuildEnvironmentVariable( - value=vpc.vpc_id - ), - }) + "EC2_VPC_ID": codebuild.BuildEnvironmentVariable(value=vpc.vpc_id), + }, + ) project.enable_batch_builds() - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=True, env=env) + PruneStaleGitHubBuilds( + scope=self, + id="PruneStaleGitHubBuilds", + project=project, + ec2_permissions=True, + env=env, + ) # Define logs for SSM. 
log_group_name = "{}-cw-logs".format(id) - log_group = logs.CfnLogGroup(self, log_group_name, + log_group = logs.CfnLogGroup( + self, + log_group_name, log_group_name=log_group_name, retention_in_days=365, ) - diff --git a/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py index bd502e3019..7492064e1d 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py @@ -2,27 +2,46 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC import typing -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_s3_assets, aws_logs as logs, \ - Environment +from aws_cdk import ( + Duration, + Stack, + aws_codebuild as codebuild, + aws_iam as iam, + aws_s3_assets, + aws_logs as logs, + Environment, +) from constructs import Construct from cdk.components import PruneStaleGitHubBuilds -from util.iam_policies import code_build_batch_policy_in_json, code_build_publish_metrics_in_json, \ - code_build_cloudwatch_logs_policy_in_json -from util.metadata import GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME, \ - PIPELINE_ACCOUNT, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME +from util.iam_policies import ( + code_build_batch_policy_in_json, + code_build_publish_metrics_in_json, + code_build_cloudwatch_logs_policy_in_json, +) +from util.metadata import ( + GITHUB_PUSH_CI_BRANCH_TARGETS, + GITHUB_REPO_OWNER, + GITHUB_REPO_NAME, + PIPELINE_ACCOUNT, + PRE_PROD_ACCOUNT, + STAGING_GITHUB_REPO_OWNER, + STAGING_GITHUB_REPO_NAME, +) from util.build_spec_loader import BuildSpecLoader class AwsLcGitHubCIStack(Stack): """Define a stack used to batch execute AWS-LC tests in GitHub.""" - def __init__(self, - scope: Construct, - id: str, - spec_file_path: str, - env: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs) -> None: + def __init__( + self, + scope: Construct, + id: str, + spec_file_path: str, + env: typing.Union[Environment, 
typing.Dict[str, typing.Any]], + **kwargs + ) -> None: super().__init__(scope, id, env=env, **kwargs) github_repo_owner = GITHUB_REPO_OWNER @@ -41,42 +60,50 @@ def __init__(self, codebuild.FilterGroup.in_event_of( codebuild.EventAction.PULL_REQUEST_CREATED, codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED), - codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH).and_branch_is( - GITHUB_PUSH_CI_BRANCH_TARGETS), + codebuild.EventAction.PULL_REQUEST_REOPENED, + ), + codebuild.FilterGroup.in_event_of( + codebuild.EventAction.PUSH + ).and_branch_is(GITHUB_PUSH_CI_BRANCH_TARGETS), ], - webhook_triggers_batch_build=True) + webhook_triggers_batch_build=True, + ) # Define a IAM role for accessing build resources log_group = logs.LogGroup(self, id="{}-public-logs".format(id)) code_build_cloudwatch_logs_policy = iam.PolicyDocument.from_json( code_build_cloudwatch_logs_policy_in_json([log_group]) ) - resource_access_role = iam.Role(scope=self, - id="{}-resource-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies={ - "code_build_cloudwatch_logs_policy": code_build_cloudwatch_logs_policy, - }) + resource_access_role = iam.Role( + scope=self, + id="{}-resource-role".format(id), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + inline_policies={ + "code_build_cloudwatch_logs_policy": code_build_cloudwatch_logs_policy, + }, + ) # Define a IAM role for this stack. 
code_build_batch_policy = iam.PolicyDocument.from_json( code_build_batch_policy_in_json([id], env) ) - metrics_policy = iam.PolicyDocument.from_json(code_build_publish_metrics_in_json(env)) + metrics_policy = iam.PolicyDocument.from_json( + code_build_publish_metrics_in_json(env) + ) - inline_policies = {"code_build_batch_policy": code_build_batch_policy, - "metrics_policy": metrics_policy, - } - role = iam.Role(scope=self, - id="{}-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies=inline_policies) + inline_policies = { + "code_build_batch_policy": code_build_batch_policy, + "metrics_policy": metrics_policy, + } + role = iam.Role( + scope=self, + id="{}-role".format(id), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + inline_policies=inline_policies, + ) logging_options = codebuild.LoggingOptions( - cloud_watch=codebuild.CloudWatchLoggingOptions( - log_group=log_group - ) + cloud_watch=codebuild.CloudWatchLoggingOptions(log_group=log_group) ) # Define CodeBuild. 
@@ -88,13 +115,24 @@ def __init__(self, role=role, timeout=Duration.minutes(180), logging=logging_options, - environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.SMALL, - privileged=False, - build_image=codebuild.LinuxBuildImage.STANDARD_4_0), - build_spec=BuildSpecLoader.load(spec_file_path, env=env)) + environment=codebuild.BuildEnvironment( + compute_type=codebuild.ComputeType.SMALL, + privileged=False, + build_image=codebuild.LinuxBuildImage.STANDARD_4_0, + ), + build_spec=BuildSpecLoader.load(spec_file_path, env=env), + ) cfn_project = project.node.default_child cfn_project.add_property_override("Visibility", "PUBLIC_READ") - cfn_project.add_property_override("ResourceAccessRole", resource_access_role.role_arn) + cfn_project.add_property_override( + "ResourceAccessRole", resource_access_role.role_arn + ) project.enable_batch_builds() - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=False, env=env) + PruneStaleGitHubBuilds( + scope=self, + id="PruneStaleGitHubBuilds", + project=project, + ec2_permissions=False, + env=env, + ) diff --git a/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py b/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py index 44aeae454c..425ea797be 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py @@ -1,14 +1,24 @@ import typing -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_s3 as s3, Environment +from aws_cdk import ( + Duration, + Stack, + aws_codebuild as codebuild, + aws_s3 as s3, + Environment, +) from constructs import Construct from util.build_spec_loader import BuildSpecLoader from util.metadata import ( GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_NAME, - GITHUB_REPO_OWNER, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME, + GITHUB_REPO_OWNER, + PRE_PROD_ACCOUNT, + STAGING_GITHUB_REPO_OWNER, + STAGING_GITHUB_REPO_NAME, ) + class AwsLcGitHubX509CIStack(Stack): def 
__init__( self, @@ -88,8 +98,7 @@ def __init__( project_name=id, source=git_hub_source, build_spec=BuildSpecLoader.load( - "cdk/codebuild/github_ci_x509_omnibus.yaml", - env + "cdk/codebuild/github_ci_x509_omnibus.yaml", env ), environment=codebuild.BuildEnvironment( build_image=codebuild.LinuxBuildImage.STANDARD_6_0, diff --git a/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py index c0f9bc32b2..08d471d043 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py @@ -2,28 +2,45 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC import typing -from aws_cdk import Duration, Size, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_ec2 as ec2, aws_efs as efs, \ - Environment +from aws_cdk import ( + Duration, + Size, + Stack, + aws_codebuild as codebuild, + aws_iam as iam, + aws_ec2 as ec2, + aws_efs as efs, + Environment, +) from constructs import Construct from cdk.components import PruneStaleGitHubBuilds -from util.ecr_util import ecr_arn -from util.iam_policies import code_build_batch_policy_in_json, \ - code_build_publish_metrics_in_json -from util.metadata import GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME, \ - PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME +from util.iam_policies import ( + code_build_batch_policy_in_json, + code_build_publish_metrics_in_json, +) +from util.metadata import ( + GITHUB_PUSH_CI_BRANCH_TARGETS, + GITHUB_REPO_OWNER, + GITHUB_REPO_NAME, + PRE_PROD_ACCOUNT, + STAGING_GITHUB_REPO_OWNER, + STAGING_GITHUB_REPO_NAME, +) from util.build_spec_loader import BuildSpecLoader class AwsLcGitHubFuzzCIStack(Stack): """Define a stack used to batch execute AWS-LC tests in GitHub.""" - def __init__(self, - scope: Construct, - id: str, - spec_file_path: str, - env: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs) -> None: + def __init__( + self, + scope: Construct, + id: str, + 
spec_file_path: str, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs + ) -> None: super().__init__(scope, id, env=env, **kwargs) github_repo_owner = GITHUB_REPO_OWNER @@ -42,27 +59,40 @@ def __init__(self, codebuild.FilterGroup.in_event_of( codebuild.EventAction.PULL_REQUEST_CREATED, codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED), - codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH).and_branch_is( - GITHUB_PUSH_CI_BRANCH_TARGETS), + codebuild.EventAction.PULL_REQUEST_REOPENED, + ), + codebuild.FilterGroup.in_event_of( + codebuild.EventAction.PUSH + ).and_branch_is(GITHUB_PUSH_CI_BRANCH_TARGETS), ], - webhook_triggers_batch_build=True) + webhook_triggers_batch_build=True, + ) # Define a IAM role for this stack. code_build_batch_policy = iam.PolicyDocument.from_json( code_build_batch_policy_in_json([id], env) ) - fuzz_policy = iam.PolicyDocument.from_json(code_build_publish_metrics_in_json(env)) - inline_policies = {"code_build_batch_policy": code_build_batch_policy, - "fuzz_policy": fuzz_policy} - role = iam.Role(scope=self, - id="{}-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies=inline_policies) + fuzz_policy = iam.PolicyDocument.from_json( + code_build_publish_metrics_in_json(env) + ) + inline_policies = { + "code_build_batch_policy": code_build_batch_policy, + "fuzz_policy": fuzz_policy, + } + role = iam.Role( + scope=self, + id="{}-role".format(id), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + inline_policies=inline_policies, + ) # Create the VPC for EFS and CodeBuild - public_subnet = ec2.SubnetConfiguration(name="PublicFuzzingSubnet", subnet_type=ec2.SubnetType.PUBLIC) - private_subnet = ec2.SubnetConfiguration(name="PrivateFuzzingSubnet", subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS) + public_subnet = ec2.SubnetConfiguration( + name="PublicFuzzingSubnet", subnet_type=ec2.SubnetType.PUBLIC + ) + 
private_subnet = ec2.SubnetConfiguration( + name="PrivateFuzzingSubnet", subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS + ) # Create a VPC with a single public and private subnet in a single AZ. This is to avoid the elastic IP limit # being used up by a bunch of idle NAT gateways @@ -70,21 +100,21 @@ def __init__(self, scope=self, id="{}-FuzzingVPC".format(id), subnet_configuration=[public_subnet, private_subnet], - max_azs=1 + max_azs=1, ) build_security_group = ec2.SecurityGroup( - scope=self, - id="{}-FuzzingSecurityGroup".format(id), - vpc=fuzz_vpc + scope=self, id="{}-FuzzingSecurityGroup".format(id), vpc=fuzz_vpc ) build_security_group.add_ingress_rule( peer=build_security_group, connection=ec2.Port.all_traffic(), - description="Allow all traffic inside security group" + description="Allow all traffic inside security group", ) - efs_subnet_selection = ec2.SubnetSelection(subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS) + efs_subnet_selection = ec2.SubnetSelection( + subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS + ) # Create the EFS to store the corpus and logs. EFS allows new filesystems to burst to 100 MB/s for the first 2 # TB of data read/written, after that the rate is limited based on the size of the filesystem. 
As of late @@ -116,12 +146,15 @@ def __init__(self, source=git_hub_source, role=role, timeout=Duration.minutes(120), - environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.LARGE, - privileged=True, - build_image=codebuild.LinuxBuildImage.STANDARD_4_0), + environment=codebuild.BuildEnvironment( + compute_type=codebuild.ComputeType.LARGE, + privileged=True, + build_image=codebuild.LinuxBuildImage.STANDARD_4_0, + ), build_spec=BuildSpecLoader.load(spec_file_path, env), vpc=fuzz_vpc, - security_groups=[build_security_group]) + security_groups=[build_security_group], + ) fuzz_codebuild.enable_batch_builds() # CDK raw overrides: https://docs.aws.amazon.com/cdk/latest/guide/cfn_layer.html#cfn_layer_raw @@ -132,11 +165,23 @@ def __init__(self, # # TODO: add this to the CDK project above when it supports EfsFileSystemLocation cfn_codebuild = fuzz_codebuild.node.default_child - cfn_codebuild.add_override("Properties.FileSystemLocations", [{ - "Identifier": "fuzzing_root", - "Location": "%s.efs.%s.amazonaws.com:/" % (fuzz_filesystem.file_system_id, env.region), - "MountPoint": "/efs_fuzzing_root", - "Type": "EFS" - }]) - - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=fuzz_codebuild, ec2_permissions=False, env=env) + cfn_codebuild.add_override( + "Properties.FileSystemLocations", + [ + { + "Identifier": "fuzzing_root", + "Location": "%s.efs.%s.amazonaws.com:/" + % (fuzz_filesystem.file_system_id, env.region), + "MountPoint": "/efs_fuzzing_root", + "Type": "EFS", + } + ], + ) + + PruneStaleGitHubBuilds( + scope=self, + id="PruneStaleGitHubBuilds", + project=fuzz_codebuild, + ec2_permissions=False, + env=env, + ) diff --git a/tests/ci/cdk/cdk/bm_framework_stack.py b/tests/ci/cdk/cdk/bm_framework_stack.py index 1b59ac159a..a83c35de9e 100644 --- a/tests/ci/cdk/cdk/bm_framework_stack.py +++ b/tests/ci/cdk/cdk/bm_framework_stack.py @@ -5,25 +5,35 @@ import boto3 from botocore.exceptions import ClientError -from aws_cdk import Duration, 
Stack, aws_ec2 as ec2, aws_codebuild as codebuild, aws_iam as iam, aws_logs as logs +from aws_cdk import ( + Duration, + Stack, + aws_ec2 as ec2, + aws_codebuild as codebuild, + aws_iam as iam, + aws_logs as logs, +) from constructs import Construct from cdk.components import PruneStaleGitHubBuilds from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME -from util.iam_policies import code_build_batch_policy_in_json, ec2_bm_framework_policies_in_json, \ - ssm_bm_framework_policies_in_json, ecr_power_user_policy_in_json +from util.iam_policies import ( + code_build_batch_policy_in_json, + ec2_bm_framework_policies_in_json, + ssm_bm_framework_policies_in_json, + ecr_power_user_policy_in_json, +) from util.build_spec_loader import BuildSpecLoader # detailed documentation can be found here: https://docs.aws.amazon.com/cdk/api/latest/docs/aws-ec2-readme.html + class BmFrameworkStack(Stack): """Define a stack used to create a CodeBuild instance on which to execute the AWS-LC benchmarking framework""" - def __init__(self, - scope: Construct, - id: str, - spec_file_path: str, - **kwargs) -> None: + def __init__( + self, scope: Construct, id: str, spec_file_path: str, **kwargs + ) -> None: super().__init__(scope, id, **kwargs) # Define some variables that will be commonly used @@ -38,24 +48,38 @@ def __init__(self, codebuild.FilterGroup.in_event_of( codebuild.EventAction.PULL_REQUEST_CREATED, codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED) + codebuild.EventAction.PULL_REQUEST_REOPENED, + ) ], - webhook_triggers_batch_build=True) + webhook_triggers_batch_build=True, + ) # Define a IAM role for this stack. 
- code_build_batch_policy = iam.PolicyDocument.from_json(code_build_batch_policy_in_json([id])) - ec2_bm_framework_policy = iam.PolicyDocument.from_json(ec2_bm_framework_policies_in_json()) - ssm_bm_framework_policy = iam.PolicyDocument.from_json(ssm_bm_framework_policies_in_json()) - codebuild_inline_policies = {"code_build_batch_policy": code_build_batch_policy, - "ec2_bm_framework_policy": ec2_bm_framework_policy, - "ssm_bm_framework_policy": ssm_bm_framework_policy} - codebuild_role = iam.Role(scope=self, - id="{}-codebuild-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies=codebuild_inline_policies, - managed_policies=[ - iam.ManagedPolicy.from_aws_managed_policy_name("CloudWatchAgentServerPolicy") - ]) + code_build_batch_policy = iam.PolicyDocument.from_json( + code_build_batch_policy_in_json([id]) + ) + ec2_bm_framework_policy = iam.PolicyDocument.from_json( + ec2_bm_framework_policies_in_json() + ) + ssm_bm_framework_policy = iam.PolicyDocument.from_json( + ssm_bm_framework_policies_in_json() + ) + codebuild_inline_policies = { + "code_build_batch_policy": code_build_batch_policy, + "ec2_bm_framework_policy": ec2_bm_framework_policy, + "ssm_bm_framework_policy": ssm_bm_framework_policy, + } + codebuild_role = iam.Role( + scope=self, + id="{}-codebuild-role".format(id), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + inline_policies=codebuild_inline_policies, + managed_policies=[ + iam.ManagedPolicy.from_aws_managed_policy_name( + "CloudWatchAgentServerPolicy" + ) + ], + ) # Define CodeBuild. 
project = codebuild.Project( @@ -65,19 +89,26 @@ def __init__(self, source=git_hub_source, role=codebuild_role, timeout=Duration.minutes(120), - environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.SMALL, - privileged=False, - build_image=codebuild.LinuxBuildImage.STANDARD_4_0), - build_spec=BuildSpecLoader.load(spec_file_path)) + environment=codebuild.BuildEnvironment( + compute_type=codebuild.ComputeType.SMALL, + privileged=False, + build_image=codebuild.LinuxBuildImage.STANDARD_4_0, + ), + build_spec=BuildSpecLoader.load(spec_file_path), + ) project.enable_batch_builds() - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=False) + PruneStaleGitHubBuilds( + scope=self, + id="PruneStaleGitHubBuilds", + project=project, + ec2_permissions=False, + ) # use boto3 to determine if a cloudwatch logs group with the name we want exists, and if it doesn't, create it - logs_client = boto3.client('logs', region_name=AWS_REGION) + logs_client = boto3.client("logs", region_name=AWS_REGION) try: logs_client.describe_log_groups(logGroupNamePrefix=CLOUDWATCH_LOGS) except ClientError: # define CloudWatch Logs groups - logs.LogGroup(self, "{}-cw-logs".format(id), - log_group_name=CLOUDWATCH_LOGS) + logs.LogGroup(self, "{}-cw-logs".format(id), log_group_name=CLOUDWATCH_LOGS) diff --git a/tests/ci/cdk/cdk/components.py b/tests/ci/cdk/cdk/components.py index a1d8a6a81b..2788e75f4b 100644 --- a/tests/ci/cdk/cdk/components.py +++ b/tests/ci/cdk/cdk/components.py @@ -1,9 +1,17 @@ import pathlib import typing -from aws_cdk import aws_codebuild as codebuild, aws_lambda as lambda_, aws_ecr_assets as ecr_assets, \ - aws_secretsmanager as sm, \ - aws_events as events, aws_events_targets as events_targets, aws_iam as iam, Duration, Environment +from aws_cdk import ( + aws_codebuild as codebuild, + aws_lambda as lambda_, + aws_ecr_assets as ecr_assets, + aws_secretsmanager as sm, + aws_events as events, + aws_events_targets as 
events_targets, + aws_iam as iam, + Duration, + Environment, +) from constructs import Construct from util.metadata import GITHUB_REPO_OWNER, GITHUB_TOKEN_SECRET_NAME @@ -11,77 +19,97 @@ class PruneStaleGitHubBuilds(Construct): def __init__( - self, - scope: Construct, - id: str, - *, - project: codebuild.IProject, - env: typing.Union[Environment, typing.Dict[str, typing.Any]], - ec2_permissions: bool + self, + scope: Construct, + id: str, + *, + project: codebuild.IProject, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], + ec2_permissions: bool ) -> None: super().__init__(scope, id) - github_token_secret = sm.Secret.from_secret_name_v2(scope=self, - id="{}-GitHubToken".format(id), - secret_name=GITHUB_TOKEN_SECRET_NAME) + github_token_secret = sm.Secret.from_secret_name_v2( + scope=self, + id="{}-GitHubToken".format(id), + secret_name=GITHUB_TOKEN_SECRET_NAME, + ) - lambda_function = lambda_.Function(scope=self, - id="LambdaFunction", - code=lambda_.Code.from_asset_image( - directory=str(pathlib.Path().joinpath("..", "lambda")), - target="purge-stale-builds", - platform=ecr_assets.Platform.LINUX_AMD64 - ), - handler=lambda_.Handler.FROM_IMAGE, - runtime=lambda_.Runtime.FROM_IMAGE, - environment={ - "CODEBUILD_PROJECT_NAME": project.project_name, - "GITHUB_REPO_OWNER": GITHUB_REPO_OWNER, - "GITHUB_TOKEN_SECRET_NAME": github_token_secret.secret_name, - "RUST_LOG": "info", - }) + lambda_function = lambda_.Function( + scope=self, + id="LambdaFunction", + code=lambda_.Code.from_asset_image( + directory=str(pathlib.Path().joinpath("..", "lambda")), + target="purge-stale-builds", + platform=ecr_assets.Platform.LINUX_AMD64, + ), + handler=lambda_.Handler.FROM_IMAGE, + runtime=lambda_.Runtime.FROM_IMAGE, + environment={ + "CODEBUILD_PROJECT_NAME": project.project_name, + "GITHUB_REPO_OWNER": GITHUB_REPO_OWNER, + "GITHUB_TOKEN_SECRET_NAME": github_token_secret.secret_name, + "RUST_LOG": "info", + }, + ) github_token_secret.grant_read(lambda_function) 
lambda_function.add_to_role_policy( - iam.PolicyStatement(effect=iam.Effect.ALLOW, - actions=[ - "codebuild:BatchGetBuildBatches", - "codebuild:ListBuildBatchesForProject", - "codebuild:StopBuildBatch" - ], - resources=[project.project_arn])) + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + "codebuild:BatchGetBuildBatches", + "codebuild:ListBuildBatchesForProject", + "codebuild:StopBuildBatch", + ], + resources=[project.project_arn], + ) + ) if ec2_permissions: lambda_function.add_to_role_policy( - iam.PolicyStatement(effect=iam.Effect.ALLOW, - actions=[ - "ec2:TerminateInstances", - ], - resources=["arn:aws:ec2:{}:{}:instance/*".format(env.region, env.account)], - conditions={ - "StringEquals": { - "ec2:ResourceTag/ec2-framework-host": "ec2-framework-host" - } - })) + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + "ec2:TerminateInstances", + ], + resources=[ + "arn:aws:ec2:{}:{}:instance/*".format(env.region, env.account) + ], + conditions={ + "StringEquals": { + "ec2:ResourceTag/ec2-framework-host": "ec2-framework-host" + } + }, + ) + ) # ec2:Describe* API actions do not support resource-level permissions. 
lambda_function.add_to_role_policy( - iam.PolicyStatement(effect=iam.Effect.ALLOW, - actions=[ - "ec2:DescribeInstances", - ], - resources=["*"])) + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + "ec2:DescribeInstances", + ], + resources=["*"], + ) + ) lambda_function.add_to_role_policy( - iam.PolicyStatement(effect=iam.Effect.ALLOW, - actions=[ - "ssm:ListDocuments", - "ssm:DeleteDocument", - ], - resources=["arn:aws:ssm:{}:{}:*".format(env.region, env.account)])) + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + "ssm:ListDocuments", + "ssm:DeleteDocument", + ], + resources=["arn:aws:ssm:{}:{}:*".format(env.region, env.account)], + ) + ) - - events.Rule(scope=self, id="PurgeEventRule", - description="Purge stale GitHub codebuild jobs and ec2 instances (once per minute)", - enabled=True, - schedule=events.Schedule.rate(Duration.minutes(1)), - targets=[events_targets.LambdaFunction(handler=lambda_function)]) + events.Rule( + scope=self, + id="PurgeEventRule", + description="Purge stale GitHub codebuild jobs and ec2 instances (once per minute)", + enabled=True, + schedule=events.Schedule.rate(Duration.minutes(1)), + targets=[events_targets.LambdaFunction(handler=lambda_function)], + ) diff --git a/tests/ci/cdk/cdk/ecr_stack.py b/tests/ci/cdk/cdk/ecr_stack.py index e6afa5cb51..2db88d919f 100644 --- a/tests/ci/cdk/cdk/ecr_stack.py +++ b/tests/ci/cdk/cdk/ecr_stack.py @@ -22,5 +22,5 @@ def __init__(self, scope: Construct, id: str, repo_name: str, **kwargs) -> None: repo.add_lifecycle_rule( description="Remove untagged images after 1 day", tag_status=ecr.TagStatus.UNTAGGED, - max_image_age=Duration.days(1) + max_image_age=Duration.days(1), ) diff --git a/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py b/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py index 631fed814c..08e694ca04 100644 --- a/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py +++ b/tests/ci/cdk/cdk/linux_docker_image_batch_build_stack.py @@ -2,7 +2,14 @@ 
# SPDX-License-Identifier: Apache-2.0 OR ISC import typing -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, aws_ec2 as ec2, Environment +from aws_cdk import ( + Duration, + Stack, + aws_codebuild as codebuild, + aws_iam as iam, + aws_ec2 as ec2, + Environment, +) from constructs import Construct from util.metadata import ( @@ -11,11 +18,11 @@ GITHUB_SOURCE_VERSION, LINUX_AARCH_ECR_REPO, LINUX_X86_ECR_REPO, - PRE_PROD_ACCOUNT, - STAGING_GITHUB_REPO_OWNER, - STAGING_GITHUB_REPO_NAME ) -from util.iam_policies import code_build_batch_policy_in_json, ecr_power_user_policy_in_json +from util.iam_policies import ( + code_build_batch_policy_in_json, + ecr_power_user_policy_in_json, +) from util.yml_loader import YmlLoader @@ -23,11 +30,12 @@ class LinuxDockerImageBatchBuildStack(Stack): """Define a temporary stack used to batch build Linux Docker images.""" def __init__( - self, - scope: Construct, - id: str, - env: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs) -> None: + self, + scope: Construct, + id: str, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs + ) -> None: super().__init__(scope, id, env=env, **kwargs) github_repo_owner = GITHUB_REPO_OWNER @@ -39,28 +47,45 @@ def __init__( repo=github_repo_name, webhook=False, branch_or_ref=GITHUB_SOURCE_VERSION, - clone_depth=1) + clone_depth=1, + ) # Define a role. 
- code_build_batch_policy = iam.PolicyDocument.from_json(code_build_batch_policy_in_json([id], env)) + code_build_batch_policy = iam.PolicyDocument.from_json( + code_build_batch_policy_in_json([id], env) + ) ecr_repo_names = [LINUX_AARCH_ECR_REPO, LINUX_X86_ECR_REPO] - ecr_power_user_policy = iam.PolicyDocument.from_json(ecr_power_user_policy_in_json(ecr_repo_names, env)) - inline_policies = {"code_build_batch_policy": code_build_batch_policy, - "ecr_power_user_policy": ecr_power_user_policy} - role = iam.Role(scope=self, - id="{}-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies=inline_policies) + ecr_power_user_policy = iam.PolicyDocument.from_json( + ecr_power_user_policy_in_json(ecr_repo_names, env) + ) + inline_policies = { + "code_build_batch_policy": code_build_batch_policy, + "ecr_power_user_policy": ecr_power_user_policy, + } + role = iam.Role( + scope=self, + id="{}-role".format(id), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + inline_policies=inline_policies, + ) # Create build spec. - build_spec_content = YmlLoader.load("./cdk/codebuild/linux_img_build_omnibus.yaml") + build_spec_content = YmlLoader.load( + "./cdk/codebuild/linux_img_build_omnibus.yaml" + ) # Define environment variables. 
environment_variables = { "AWS_ACCOUNT_ID": codebuild.BuildEnvironmentVariable(value=env.account), - "AWS_ECR_REPO_X86": codebuild.BuildEnvironmentVariable(value=LINUX_X86_ECR_REPO), - "AWS_ECR_REPO_AARCH": codebuild.BuildEnvironmentVariable(value=LINUX_AARCH_ECR_REPO), - "GITHUB_REPO_OWNER": codebuild.BuildEnvironmentVariable(value=GITHUB_REPO_OWNER), + "AWS_ECR_REPO_X86": codebuild.BuildEnvironmentVariable( + value=LINUX_X86_ECR_REPO + ), + "AWS_ECR_REPO_AARCH": codebuild.BuildEnvironmentVariable( + value=LINUX_AARCH_ECR_REPO + ), + "GITHUB_REPO_OWNER": codebuild.BuildEnvironmentVariable( + value=GITHUB_REPO_OWNER + ), } # Define VPC @@ -76,9 +101,11 @@ def __init__( environment=codebuild.BuildEnvironment( compute_type=codebuild.ComputeType.SMALL, privileged=False, - build_image=codebuild.LinuxBuildImage.STANDARD_4_0), + build_image=codebuild.LinuxBuildImage.STANDARD_4_0, + ), environment_variables=environment_variables, role=role, timeout=Duration.minutes(180), - build_spec=codebuild.BuildSpec.from_object(build_spec_content)) + build_spec=codebuild.BuildSpec.from_object(build_spec_content), + ) project.enable_batch_builds() diff --git a/tests/ci/cdk/cdk/windows_docker_image_build_stack.py b/tests/ci/cdk/cdk/windows_docker_image_build_stack.py index 558c2aeeef..ef6b48372b 100644 --- a/tests/ci/cdk/cdk/windows_docker_image_build_stack.py +++ b/tests/ci/cdk/cdk/windows_docker_image_build_stack.py @@ -5,10 +5,12 @@ from aws_cdk import ( Stack, Tags, + PhysicalName, + Environment, aws_ec2 as ec2, aws_s3 as s3, aws_iam as iam, - aws_ssm as ssm, PhysicalName, CfnOutput, CfnParameter, Environment + aws_ssm as ssm, ) from constructs import Construct @@ -31,17 +33,13 @@ class WindowsDockerImageBuildStack(Stack): """Define a temporary stack used to build Windows Docker images.""" def __init__( - self, - scope: Construct, - id: str, - env: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs) -> None: - super().__init__( - scope, - id, - env=env, - **kwargs - 
) + self, + scope: Construct, + id: str, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs + ) -> None: + super().__init__(scope, id, env=env, **kwargs) # Define SSM command document. # ecr_uri = ecr_windows_x86.ecr_repo.repository_uri @@ -132,7 +130,7 @@ def __init__( vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PUBLIC), machine_image=machine_image, user_data=setup_user_data, - instance_name="{}-instance".format(id) + instance_name="{}-instance".format(id), ) Tags.of(instance).add(WIN_EC2_TAG_KEY, WIN_EC2_TAG_VALUE) diff --git a/tests/ci/cdk/pipeline/ci_stage.py b/tests/ci/cdk/pipeline/ci_stage.py index c249cbaaf5..1a8aba875f 100644 --- a/tests/ci/cdk/pipeline/ci_stage.py +++ b/tests/ci/cdk/pipeline/ci_stage.py @@ -14,14 +14,15 @@ from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack from pipeline.codebuild_batch_step import CodeBuildBatchStep + class CiStage(Stage): def __init__( - self, - scope: Construct, - id: str, - pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], - deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs + self, + scope: Construct, + id: str, + pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs, ): super().__init__( scope, @@ -41,10 +42,12 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-linux-x86", ) - self.build_options.append(BatchBuildOptions( - project="aws-lc-ci-linux-x86", - ignore_failure=False, - )) + self.build_options.append( + BatchBuildOptions( + project="aws-lc-ci-linux-x86", + ignore_failure=False, + ) + ) arm_stack_name = "aws-lc-ci-linux-arm" arm_build_spec_file = "cdk/codebuild/github_ci_linux_arm_omnibus.yaml" @@ -55,10 +58,12 @@ def __init__( env=deploy_environment, stack_name=arm_stack_name, ) - self.build_options.append(BatchBuildOptions( - project="aws-lc-ci-linux-arm", - 
ignore_failure=False, - )) + self.build_options.append( + BatchBuildOptions( + project="aws-lc-ci-linux-arm", + ignore_failure=False, + ) + ) integration_build_spec_file = "cdk/codebuild/github_ci_integration_omnibus.yaml" self.ci_integration_stack = AwsLcGitHubCIStack( @@ -68,10 +73,12 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-integration", ) - self.build_options.append(BatchBuildOptions( - project="aws-lc-ci-integration", - ignore_failure=True, - )) + self.build_options.append( + BatchBuildOptions( + project="aws-lc-ci-integration", + ignore_failure=True, + ) + ) fuzz_build_spec_file = "cdk/codebuild/github_ci_fuzzing_omnibus.yaml" self.ci_fuzzing_stack = AwsLcGitHubFuzzCIStack( @@ -81,10 +88,12 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-fuzzing", ) - self.build_options.append(BatchBuildOptions( - project="aws-lc-ci-fuzzing", - ignore_failure=False, - )) + self.build_options.append( + BatchBuildOptions( + project="aws-lc-ci-fuzzing", + ignore_failure=False, + ) + ) analytics_build_spec_file = "cdk/codebuild/github_ci_analytics_omnibus.yaml" self.ci_analytics_stack = AwsLcGitHubAnalyticsStack( @@ -94,10 +103,12 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-analytics", ) - self.build_options.append(BatchBuildOptions( - project="aws-lc-ci-analytics", - ignore_failure=True, - )) + self.build_options.append( + BatchBuildOptions( + project="aws-lc-ci-analytics", + ignore_failure=True, + ) + ) # bm_framework_build_spec_file = "cdk/codebuild/bm_framework_omnibus.yaml" # BmFrameworkStack(app, "aws-lc-ci-bm-framework", bm_framework_build_spec_file, env=env) @@ -111,10 +122,12 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-ec2-test-framework", ) - self.build_options.append(BatchBuildOptions( - project="aws-lc-ci-ec2-test-framework", - ignore_failure=True, - )) + self.build_options.append( + BatchBuildOptions( + project="aws-lc-ci-ec2-test-framework", + ignore_failure=True, + ) + ) 
android_build_spec_file = "cdk/codebuild/github_ci_android_omnibus.yaml" self.ci_android_stack = AwsLcAndroidCIStack( @@ -124,10 +137,12 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-devicefarm-android", ) - self.build_options.append(BatchBuildOptions( - project="aws-lc-ci-devicefarm-android", - ignore_failure=False, - )) + self.build_options.append( + BatchBuildOptions( + project="aws-lc-ci-devicefarm-android", + ignore_failure=False, + ) + ) win_x86_build_spec_file = "cdk/codebuild/github_ci_windows_x86_omnibus.yaml" self.ci_windows_x86_stack = AwsLcGitHubCIStack( @@ -137,22 +152,24 @@ def __init__( env=deploy_environment, stack_name="aws-lc-ci-windows-x86", ) - self.build_options.append(BatchBuildOptions( - project="aws-lc-ci-windows-x86", - ignore_failure=False, - )) + self.build_options.append( + BatchBuildOptions( + project="aws-lc-ci-windows-x86", + ignore_failure=False, + ) + ) @property def stacks(self) -> typing.List[Stack]: return [child for child in self.node.children if isinstance(child, Stack)] def add_stage_to_pipeline( - self, - pipeline: pipelines.CodePipeline, - input: pipelines.FileSet, - role: iam.Role, - max_retry: typing.Optional[int] = 2, - env: typing.Optional[typing.Mapping[str, str]] = None, + self, + pipeline: pipelines.CodePipeline, + input: pipelines.FileSet, + role: iam.Role, + max_retry: typing.Optional[int] = 2, + env: typing.Optional[typing.Mapping[str, str]] = None, ): stack_names = [stack.stack_name for stack in self.stacks] @@ -164,15 +181,15 @@ def add_stage_to_pipeline( commands=[ "cd tests/ci/cdk/pipeline/scripts", "chmod +x check_trigger_conditions.sh", - "trigger_conditions=$(./check_trigger_conditions.sh --build-type ci --stacks \"${STACKS}\")", - "export NEED_REBUILD=$(echo $trigger_conditions | sed -n 's/.*\(NEED_REBUILD=[0-9]*\).*/\\1/p' | cut -d'=' -f2 )" + 'trigger_conditions=$(./check_trigger_conditions.sh --build-type ci --stacks "${STACKS}")', + "export NEED_REBUILD=$(echo $trigger_conditions | sed 
-n 's/.*\(NEED_REBUILD=[0-9]*\).*/\\1/p' | cut -d'=' -f2 )", ], env={ **env, "STACKS": " ".join(stack_names), }, role=role, - timeout=Duration.minutes(60) + timeout=Duration.minutes(60), ) batch_build_jobs = { @@ -186,7 +203,7 @@ def add_stage_to_pipeline( "TIMEOUT": str(max_retry * options.timeout), **options.env, } - } + }, } for options in self.build_options ] @@ -199,7 +216,7 @@ def add_stage_to_pipeline( commands=[ "cd tests/ci/cdk/pipeline/scripts", "chmod +x build_target.sh", - "./build_target.sh --build-type ci --project ${PROJECT} --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}" + "./build_target.sh --build-type ci --project ${PROJECT} --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}", ], role=role, timeout=300, @@ -213,25 +230,20 @@ def add_stage_to_pipeline( ci_run_step.add_step_dependency(prebuild_check_step) - pipeline.add_stage( - self, - post=[ - prebuild_check_step, - ci_run_step - ] - ) + pipeline.add_stage(self, post=[prebuild_check_step, ci_run_step]) + class BatchBuildOptions: def __init__( - self, - project: str, - identifier: str = None, - ignore_failure: bool = False, - timeout: int = 120, - env: typing.Optional[typing.Mapping[str, str]] = None + self, + project: str, + identifier: str = None, + ignore_failure: bool = False, + timeout: int = 120, + env: typing.Optional[typing.Mapping[str, str]] = None, ): self.project = project - self.identifier = identifier or re.sub(r'[^a-zA-Z0-9]', '_', project) + self.identifier = identifier or re.sub(r"[^a-zA-Z0-9]", "_", project) self.ignore_failure = ignore_failure self.timeout = timeout - self.env = env or {} \ No newline at end of file + self.env = env or {} diff --git a/tests/ci/cdk/pipeline/codebuild_batch_step.py b/tests/ci/cdk/pipeline/codebuild_batch_step.py index 59a4ac6f5e..ee73f3af1f 100644 --- a/tests/ci/cdk/pipeline/codebuild_batch_step.py +++ b/tests/ci/cdk/pipeline/codebuild_batch_step.py @@ -12,9 +12,10 @@ aws_codepipeline_actions as cp_actions, aws_codebuild as codebuild, aws_codepipeline as 
codepipeline, - aws_iam as iam + aws_iam as iam, ) + @jsii.implements(pipelines.ICodePipelineActionFactory) class CodeBuildBatchStep(pipelines.Step): """ @@ -31,15 +32,18 @@ class CodeBuildBatchStep(pipelines.Step): :return: A new CodeBuildBatchStep. """ - def __init__(self, - id, - input: pipelines.FileSet, - action_name: str, - commands: typing.List[str], - partial_batch_build_spec: typing.Mapping[builtins.str, typing.Any], - role: iam.Role, - timeout: int = 300, - env: typing.Optional[typing.Mapping[str, str]]=None): + + def __init__( + self, + id, + input: pipelines.FileSet, + action_name: str, + commands: typing.List[str], + partial_batch_build_spec: typing.Mapping[builtins.str, typing.Any], + role: iam.Role, + timeout: int = 300, + env: typing.Optional[typing.Mapping[str, str]] = None, + ): super().__init__(id) self._discover_referenced_outputs(env) @@ -50,31 +54,33 @@ def __init__(self, self.partial_batch_build_spec = partial_batch_build_spec self.role = role self.timeout = timeout - self.env = { - key: codebuild.BuildEnvironmentVariable(value=value) - for key, value in env.items() - } if env else {} + self.env = ( + { + key: codebuild.BuildEnvironmentVariable(value=value) + for key, value in env.items() + } + if env + else {} + ) @jsii.member(jsii_name="produceAction") def produce_action( - self, - stage: codepipeline.IStage, - options: pipelines.ProduceActionOptions, + self, + stage: codepipeline.IStage, + options: pipelines.ProduceActionOptions, ) -> pipelines.CodePipelineActionFactoryResult: batch_build_project = codebuild.PipelineProject( options.scope, self.action_name, - build_spec=codebuild.BuildSpec.from_object({ - "version": 0.2, - "batch": self.partial_batch_build_spec, - "phases": { - "build": { - "commands": self.commands - } + build_spec=codebuild.BuildSpec.from_object( + { + "version": 0.2, + "batch": self.partial_batch_build_spec, + "phases": {"build": {"commands": self.commands}}, } - }), + ), role=self.role, - 
timeout=Duration.minutes(self.timeout) + timeout=Duration.minutes(self.timeout), ) batch_build_action = cp_actions.CodeBuildAction( @@ -83,11 +89,9 @@ def produce_action( run_order=options.run_order, project=batch_build_project, execute_batch_build=True, - environment_variables=self.env + environment_variables=self.env, ) stage.add_action(batch_build_action) - return pipelines.CodePipelineActionFactoryResult( - run_orders_consumed=1 - ) \ No newline at end of file + return pipelines.CodePipelineActionFactoryResult(run_orders_consumed=1) diff --git a/tests/ci/cdk/pipeline/deploy_util.py b/tests/ci/cdk/pipeline/deploy_util.py index d283b80d51..454db10f3b 100644 --- a/tests/ci/cdk/pipeline/deploy_util.py +++ b/tests/ci/cdk/pipeline/deploy_util.py @@ -1,6 +1,7 @@ from enum import Enum + class DeployEnvironmentType(Enum): - PRE_PROD="Staging" - PROD="Prod" - DEV="Dev" \ No newline at end of file + PRE_PROD = "Staging" + PROD = "Prod" + DEV = "Dev" diff --git a/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py b/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py index 00236e4f50..4c230a55e1 100644 --- a/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py +++ b/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py @@ -14,12 +14,12 @@ class LinuxDockerImageBuildStage(Stage): def __init__( - self, - scope: Construct, - id: str, - pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], - deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs + self, + scope: Construct, + id: str, + pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs ): super().__init__( scope, @@ -35,14 +35,14 @@ def __init__( "aws-lc-ecr-linux-x86", LINUX_X86_ECR_REPO, env=deploy_environment, - stack_name="aws-lc-ecr-linux-x86" + stack_name="aws-lc-ecr-linux-x86", ) self.ecr_linux_aarch_stack = EcrStack( self, 
"aws-lc-ecr-linux-aarch", LINUX_AARCH_ECR_REPO, env=deploy_environment, - stack_name="aws-lc-ecr-linux-aarch" + stack_name="aws-lc-ecr-linux-aarch", ) # Define CodeBuild Batch job for building Docker images. @@ -50,7 +50,7 @@ def __init__( self, "aws-lc-docker-image-build-linux", env=deploy_environment, - stack_name="aws-lc-docker-image-build-linux" + stack_name="aws-lc-docker-image-build-linux", ) self.linux_docker_build_stack.add_dependency(self.ecr_linux_x86_stack) self.linux_docker_build_stack.add_dependency(self.ecr_linux_aarch_stack) @@ -63,13 +63,13 @@ def stacks(self) -> typing.List[Stack]: return [child for child in self.node.children if isinstance(child, Stack)] def add_stage_to_wave( - self, - wave: pipelines.Wave, - input: pipelines.FileSet, - role: iam.Role, - max_retry: typing.Optional[int] = 2, - additional_stacks: typing.Optional[typing.List[str]] = None, - env: typing.Optional[typing.Mapping[str, str]] = None + self, + wave: pipelines.Wave, + input: pipelines.FileSet, + role: iam.Role, + max_retry: typing.Optional[int] = 2, + additional_stacks: typing.Optional[typing.List[str]] = None, + env: typing.Optional[typing.Mapping[str, str]] = None, ): stacks = self.stacks + (additional_stacks if additional_stacks else []) stack_names = [stack.stack_name for stack in stacks] @@ -83,10 +83,10 @@ def add_stage_to_wave( commands=[ "cd tests/ci/cdk/pipeline/scripts", "chmod +x cleanup_orphaned_images.sh check_trigger_conditions.sh build_target.sh", - "./cleanup_orphaned_images.sh --repos \"${ECR_REPOS}\"", - "trigger_conditions=$(./check_trigger_conditions.sh --build-type docker --platform linux --stacks \"${STACKS}\")", + './cleanup_orphaned_images.sh --repos "${ECR_REPOS}"', + 'trigger_conditions=$(./check_trigger_conditions.sh --build-type docker --platform linux --stacks "${STACKS}")', "export NEED_REBUILD=$(echo $trigger_conditions | sed -n -e 's/.*\(NEED_REBUILD=[0-9]*\).*/\\1/p' | cut -d'=' -f2 )", - "./build_target.sh --build-type docker --platform 
linux --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}" + "./build_target.sh --build-type docker --platform linux --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}", ], env={ **env, @@ -96,15 +96,9 @@ def add_stage_to_wave( "TIMEOUT": str(timeout), }, role=role, - timeout=Duration.minutes(timeout) + timeout=Duration.minutes(timeout), ) - wave.add_stage( - self, - post=[ - docker_build_step - ] - ) + wave.add_stage(self, post=[docker_build_step]) self.need_rebuild = docker_build_step.exported_variable("NEED_REBUILD") - diff --git a/tests/ci/cdk/pipeline/pipeline_stack.py b/tests/ci/cdk/pipeline/pipeline_stack.py index fd0abc43a1..f8934e38af 100644 --- a/tests/ci/cdk/pipeline/pipeline_stack.py +++ b/tests/ci/cdk/pipeline/pipeline_stack.py @@ -10,7 +10,7 @@ aws_iam as iam, aws_events as events, aws_events_targets as targets, - aws_cloudwatch as cloudwatch + aws_cloudwatch as cloudwatch, ) from aws_cdk.pipelines import CodeBuildStep from constructs import Construct @@ -22,12 +22,13 @@ from pipeline.windows_docker_image_build_stage import WindowsDockerImageBuildStage from util.metadata import * + class AwsLcCiPipeline(Stack): def __init__( - self, - scope: Construct, - id: str, - **kwargs, + self, + scope: Construct, + id: str, + **kwargs, ) -> None: super().__init__( scope, @@ -48,14 +49,14 @@ def __init__( role_name="CrossAccountPipelineRole", assumed_by=iam.CompositePrincipal( iam.ServicePrincipal("codebuild.amazonaws.com"), - iam.ServicePrincipal("codepipeline.amazonaws.com") + iam.ServicePrincipal("codepipeline.amazonaws.com"), ), ) cross_account_role.add_to_policy( iam.PolicyStatement( effect=iam.Effect.ALLOW, - resources=['*'], + resources=["*"], actions=["codepipeline:GetPipelineExecution"], ) ) @@ -76,7 +77,7 @@ def __init__( pipeline_name="AwsLcCiPipeline", cross_account_keys=True, enable_key_rotation=True, - restart_execution_on_update=True + restart_execution_on_update=True, ) # Bucket contains artifacts from old pipeline executions @@ -99,7 +100,7 @@ def __init__( 
"LINUX_AARCH_ECR_REPO": LINUX_AARCH_ECR_REPO, "LINUX_X86_ECR_REPO": LINUX_X86_ECR_REPO, "WINDOWS_X86_ECR_REPO": WINDOWS_X86_ECR_REPO, - "IS_DEV": str(IS_DEV) + "IS_DEV": str(IS_DEV), } if DEPLOY_ACCOUNT is not None and DEPLOY_REGION is not None: @@ -115,13 +116,13 @@ def __init__( "Synth", input=source, commands=[ - "echo \"Environment variables:\"", + 'echo "Environment variables:"', "env", "npm install -g aws-cdk", "cd tests/ci", "python -m pip install -r requirements.txt", "cd cdk", - "cdk synth" + "cdk synth", ], env=cdk_env, primary_output_directory="tests/ci/cdk/cdk.out", @@ -137,7 +138,7 @@ def __init__( "StringEquals": { "iam:ResourceTag/aws-cdk:bootstrap-role": "lookup", } - } + }, ), ], ), @@ -158,56 +159,63 @@ def __init__( cross_account_role=cross_account_role, ) - #TODO: add prod env + # TODO: add prod env pipeline.build_pipeline() # Schedule pipeline to run every Tuesday 15:00 UTC or 7:00 PST events.Rule( - self, "WeeklyCodePipelineRun", + self, + "WeeklyCodePipelineRun", schedule=events.Schedule.cron( minute="0", hour="15", # weekday="TUE", #TODO: Uncomment this line. 
It's running everyday now to make sure I didn't break anything ), - targets=[ - targets.CodePipeline( - pipeline=base_pipeline - ) - ] + targets=[targets.CodePipeline(pipeline=base_pipeline)], ) def deploy_to_environment( - self, - deploy_environment_type: DeployEnvironmentType, - pipeline: pipelines.CodePipeline, - source: pipelines.CodePipelineSource, - cross_account_role: iam.Role, - codebuild_environment_variables: typing.Optional[typing.Mapping[str, str]] = None, + self, + deploy_environment_type: DeployEnvironmentType, + pipeline: pipelines.CodePipeline, + source: pipelines.CodePipelineSource, + cross_account_role: iam.Role, + codebuild_environment_variables: typing.Optional[ + typing.Mapping[str, str] + ] = None, ): - pipeline_environment = Environment(account=PIPELINE_ACCOUNT, region=PIPELINE_REGION) + pipeline_environment = Environment( + account=PIPELINE_ACCOUNT, region=PIPELINE_REGION + ) if deploy_environment_type == DeployEnvironmentType.PRE_PROD: - deploy_environment = Environment(account=PRE_PROD_ACCOUNT, region=PRE_PROD_REGION) + deploy_environment = Environment( + account=PRE_PROD_ACCOUNT, region=PRE_PROD_REGION + ) elif deploy_environment_type == DeployEnvironmentType.DEV: - deploy_environment = Environment(account=DEPLOY_ACCOUNT, region=DEPLOY_REGION) + deploy_environment = Environment( + account=DEPLOY_ACCOUNT, region=DEPLOY_REGION + ) else: deploy_environment = Environment(account=PROD_ACCOUNT, region=PROD_REGION) - codebuild_environment_variables = codebuild_environment_variables if codebuild_environment_variables else {} + codebuild_environment_variables = ( + codebuild_environment_variables if codebuild_environment_variables else {} + ) codebuild_environment_variables = { **codebuild_environment_variables, "PIPELINE_EXECUTION_ID": "#{codepipeline.PipelineExecutionId}", "DEPLOY_ACCOUNT": deploy_environment.account, - "DEPLOY_REGION": deploy_environment.region + "DEPLOY_REGION": deploy_environment.region, } cross_account_role.add_to_policy( 
iam.PolicyStatement( effect=iam.Effect.ALLOW, resources=[ - f'arn:aws:iam::{deploy_environment.account}:role/CrossAccountCodeBuildRole' + f"arn:aws:iam::{deploy_environment.account}:role/CrossAccountCodeBuildRole" ], actions=["sts:AssumeRole"], ) @@ -222,7 +230,9 @@ def deploy_to_environment( pipeline.add_stage(setup_stage) - docker_build_wave = pipeline.add_wave(f"{deploy_environment_type.value}-DockerImageBuild") + docker_build_wave = pipeline.add_wave( + f"{deploy_environment_type.value}-DockerImageBuild" + ) linux_stage = LinuxDockerImageBuildStage( self, @@ -237,7 +247,7 @@ def deploy_to_environment( role=cross_account_role, additional_stacks=setup_stage.stacks, max_retry=MAX_TEST_RETRY, - env=codebuild_environment_variables + env=codebuild_environment_variables, ) windows_stage = WindowsDockerImageBuildStage( @@ -253,7 +263,7 @@ def deploy_to_environment( role=cross_account_role, additional_stacks=setup_stage.stacks, max_retry=MAX_TEST_RETRY, - env=codebuild_environment_variables + env=codebuild_environment_variables, ) docker_build_wave.add_post( @@ -267,7 +277,7 @@ def deploy_to_environment( ], env={ **codebuild_environment_variables, - "ECR_REPOS": f"{' '.join(linux_stage.ecr_repo_names)} {' '.join(windows_stage.ecr_repo_names)}" + "ECR_REPOS": f"{' '.join(linux_stage.ecr_repo_names)} {' '.join(windows_stage.ecr_repo_names)}", }, role=cross_account_role, ) @@ -287,6 +297,6 @@ def deploy_to_environment( max_retry=MAX_TEST_RETRY, env={ **codebuild_environment_variables, - "PREVIOUS_REBUILDS": f'{linux_stage.need_rebuild} {linux_stage.need_rebuild}' + "PREVIOUS_REBUILDS": f"{linux_stage.need_rebuild} {linux_stage.need_rebuild}", }, - ) \ No newline at end of file + ) diff --git a/tests/ci/cdk/pipeline/scripts/build_target.sh b/tests/ci/cdk/pipeline/scripts/build_target.sh index 47e2c37b8a..7098c03302 100644 --- a/tests/ci/cdk/pipeline/scripts/build_target.sh +++ b/tests/ci/cdk/pipeline/scripts/build_target.sh @@ -9,7 +9,7 @@ source util.sh echo \"Environment 
variables:\" env -if [[ -z "${NEED_REBUILD+x}" || -z "${NEED_REBUILD}" || ${NEED_REBUILD} -eq 0 ]]; then +if [[ -z "${NEED_REBUILD+x}" || -z "${NEED_REBUILD}" || ${NEED_REBUILD} -eq 0 ]]; then echo "No rebuild needed" exit 0 fi @@ -23,8 +23,8 @@ function build_codebuild_ci_project() { local project=${1} if [[ -z ${project} ]]; then - echo "No project name provided." - exit 1 + echo "No project name provided." + exit 1 fi if [[ ${DEPLOY_ACCOUNT} == '351119683581' ]]; then @@ -41,13 +41,13 @@ function build_codebuild_ci_project() { echo "Waiting for CI tests for complete. This may take anywhere from 15 minutes to 1 hour" if ! codebuild_build_status_check "${TIMEOUT}"; then - echo "Tests failed, retrying ${attempt}/${MAX_RETRY}..." - if [[ ${attempt} -le ${MAX_RETRY} ]]; then - retry_batch_build - else - echo "CI tests failed" - exit 1 - fi + echo "Tests failed, retrying ${attempt}/${MAX_RETRY}..." + if [[ ${attempt} -le ${MAX_RETRY} ]]; then + retry_batch_build + else + echo "CI tests failed" + exit 1 + fi fi done @@ -64,15 +64,15 @@ function build_linux_docker_images() { attempt=$((attempt + 1)) echo "Waiting for docker images creation. Building the docker images need to take 1 hour." - # TODO(CryptoAlg-624): These image build may fail due to the Docker Hub pull limits made on 2020-11-01. + # TODO(CryptoAlg-624): These image build may fail due to the Docker Hub pull limits made on 2020-11-01. if ! codebuild_build_status_check "${TIMEOUT}"; then - echo "Build failed, retrying ${attempt}/${MAX_RETRY}..." - if [[ ${attempt} -le ${MAX_RETRY} ]]; then - retry_batch_build - else - echo "Failed to build Linux docker images" - exit 1 - fi + echo "Build failed, retrying ${attempt}/${MAX_RETRY}..." + if [[ ${attempt} -le ${MAX_RETRY} ]]; then + retry_batch_build + else + echo "Failed to build Linux docker images" + exit 1 + fi fi done @@ -93,8 +93,8 @@ function build_win_docker_images() { echo "Waiting for docker images creation. 
Building the docker images need to take 1 hour." # TODO(CryptoAlg-624): These image build may fail due to the Docker Hub pull limits made on 2020-11-01. if ! win_docker_img_build_status_check "${TIMEOUT}"; then - echo "Build failed, retrying ${attempt}/${MAX_RETRY}..." - continue + echo "Build failed, retrying ${attempt}/${MAX_RETRY}..." + continue fi echo "Successfully built Windows docker images" @@ -152,15 +152,15 @@ fi if [[ ${BUILD_TYPE} == "docker" ]]; then if [[ -z "${PLATFORM+x}" || -z "${PLATFORM}" ]]; then - echo "When building Docker images, a platform must be specified" - exit 1 + echo "When building Docker images, a platform must be specified" + exit 1 fi -# if [[ ${PLATFORM} == "linux" ]]; then -# build_linux_docker_images -# elif [[ ${PLATFORM} == "windows" ]]; then -# build_win_docker_images -# fi + if [[ ${PLATFORM} == "linux" ]]; then + build_linux_docker_images + elif [[ ${PLATFORM} == "windows" ]]; then + build_win_docker_images + fi exit 0 fi @@ -171,4 +171,4 @@ if [[ ${BUILD_TYPE} == "ci" ]]; then fi build_codebuild_ci_project "${PROJECT}" -fi \ No newline at end of file +fi diff --git a/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh b/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh index b21cecfcd8..27850bb505 100644 --- a/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh +++ b/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh @@ -18,10 +18,10 @@ export CROSS_ACCOUNT_BUILD_SESSION="pipeline-${COMMIT_HASH}" function check_pipeline_trigger_type() { trigger_type=$(aws codepipeline get-pipeline-execution \ - --pipeline-name AwsLcCiPipeline \ - --pipeline-execution-id ${PIPELINE_EXECUTION_ID} \ - --query 'pipelineExecution.trigger.triggerType' \ - --output text) + --pipeline-name AwsLcCiPipeline \ + --pipeline-execution-id ${PIPELINE_EXECUTION_ID} \ + --query 'pipelineExecution.trigger.triggerType' \ + --output text) # unblock execution for self-mutation, weekly cron job, and manual start/forced deploy if [[ 
"$trigger_type" == "StartPipelineExecution" || "$trigger_type" == "CloudWatchEvent" ]]; then @@ -50,13 +50,13 @@ function get_commit_changed_files() { function get_cfn_changeset() { for stack in ${STACKS}; do change_set_arn=$(aws cloudformation describe-stacks \ - --stack-name "${stack}" \ - --query "Stacks[0].ChangeSetId" \ - --output text) + --stack-name "${stack}" \ + --query "Stacks[0].ChangeSetId" \ + --output text) changes_count=$(aws cloudformation describe-change-set \ - --change-set-name "${change_set_arn}" \ - --stack-name "${stack}" \ - --query "Changes" | jq 'length') + --change-set-name "${change_set_arn}" \ + --stack-name "${stack}" \ + --query "Changes" | jq 'length') if [ "$changes_count" -gt 0 ]; then NEED_REBUILD=$((NEED_REBUILD + 1)) break @@ -94,7 +94,7 @@ if [[ -z "${BUILD_TYPE+x}" || -z "${BUILD_TYPE}" ]]; then exit 1 fi -if [[ -z "${STACKS+x}" || -z "${STACKS}" ]]; then +if [[ -z "${STACKS+x}" || -z "${STACKS}" ]]; then echo "No stacks provided." exit 1 fi @@ -107,8 +107,8 @@ fi if [[ ${BUILD_TYPE} == "docker" ]]; then if [[ -z "${PLATFORM+x}" || -z "${PLATFORM}" ]]; then - echo "A platform must be specified" - exit 1 + echo "A platform must be specified" + exit 1 fi check_pipeline_trigger_type @@ -121,4 +121,4 @@ elif [[ ${BUILD_TYPE} == "ci" ]]; then get_cfn_changeset fi -echo "NEED_REBUILD=$NEED_REBUILD" \ No newline at end of file +echo "NEED_REBUILD=$NEED_REBUILD" diff --git a/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh b/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh index 9e33dd5cd2..4573ae8a14 100644 --- a/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh +++ b/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh @@ -27,7 +27,7 @@ function remove_pending_images() { for tag in $tags; do if [[ "$tag" == *"_pending" ]]; then - new_tag="${tag%_pending}_latest" # Replace '_pending' with '_latest' + new_tag="${tag%_pending}_latest" # Replace '_pending' with '_latest' if echo "${tags}" | grep -q 
"${new_tag}"; then echo "Image with digest $image_digest is tagged as latest. Will only be removing pending tag..." diff --git a/tests/ci/cdk/pipeline/scripts/finalize_images.sh b/tests/ci/cdk/pipeline/scripts/finalize_images.sh index b313ee98ca..8fb6e27bb0 100644 --- a/tests/ci/cdk/pipeline/scripts/finalize_images.sh +++ b/tests/ci/cdk/pipeline/scripts/finalize_images.sh @@ -11,7 +11,7 @@ export CROSS_ACCOUNT_BUILD_SESSION="pipeline-${CODEBUILD_RESOLVED_SOURCE_VERSION function promote_pending_tags_to_latest() { local repo=${1} - + # Get the list of images with tags ending in "_pending" echo "Fetching images from repository '$repo'..." @@ -31,7 +31,7 @@ function promote_pending_tags_to_latest() { # Check if any tag ends with '_pending' for tag in $tags; do if [[ "$tag" == *"_pending" ]]; then - new_tag="${tag%_pending}_latest" # Replace '_pending' with '_latest' + new_tag="${tag%_pending}_latest" # Replace '_pending' with '_latest' if echo "${tags}" | grep -q "${new_tag}"; then echo "Image with digest $image_digest already has tag '$new_tag' - skipping tag update" diff --git a/tests/ci/cdk/pipeline/scripts/util.sh b/tests/ci/cdk/pipeline/scripts/util.sh index 6429d44389..0336e0928a 100644 --- a/tests/ci/cdk/pipeline/scripts/util.sh +++ b/tests/ci/cdk/pipeline/scripts/util.sh @@ -1,4 +1,10 @@ -if [[ -z "${PIPELINE_EXECUTION_ID+x}" || -z "${PIPELINE_EXECUTION_ID}" ]]; then +#!/usr/bin/env bash +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 OR ISC + +set -ex + +if [[ -z "${PIPELINE_EXECUTION_ID+x}" || -z "${PIPELINE_EXECUTION_ID}" ]]; then TRIGGER_TYPE="manual" else TRIGGER_TYPE="pipeline" @@ -6,8 +12,8 @@ fi function assume_role() { if [[ -z ${CROSS_ACCOUNT_BUILD_ROLE_ARN} ]]; then - echo "No role arn provided" - return 1 + echo "No role arn provided" + return 1 fi local session_name=${CROSS_ACCOUNT_BUILD_SESSION:-"build-session"} @@ -32,7 +38,7 @@ function refresh_session() { function start_codebuild_project() { local project=${1} - local commit_hash=${2:-HEAD} + local commit_hash=${2:-main} if [[ -z ${project} ]]; then echo "No project name provided." @@ -41,16 +47,16 @@ function start_codebuild_project() { # https://awscli.amazonaws.com/v2/documentation/api/latest/reference/codebuild/start-build-batch.html build_id=$(aws codebuild start-build-batch --project-name ${project} \ - --source-version ${commit_hash} \ - --environment-variables-override "name=TRIGGER_TYPE,value=${TRIGGER_TYPE},type=PLAINTEXT" \ - --query "buildBatch.id" \ - --output text) + --source-version ${commit_hash} \ + --environment-variables-override "name=TRIGGER_TYPE,value=${TRIGGER_TYPE},type=PLAINTEXT" \ + --query "buildBatch.id" \ + --output text) export BUILD_BATCH_ID=${build_id} } function retry_batch_build() { aws codebuild retry-build-batch --id "${BUILD_BATCH_ID}" \ - --retry-type RETRY_FAILED_BUILDS + --retry-type RETRY_FAILED_BUILDS } function codebuild_build_status_check() { @@ -61,8 +67,8 @@ function codebuild_build_status_check() { for i in $(seq 1 ${status_check_max}); do # https://docs.aws.amazon.com/cli/latest/reference/codebuild/batch-get-build-batches.html build_batch_status=$(aws codebuild batch-get-build-batches --ids "${BUILD_BATCH_ID}" \ - --query "buildBatches[0].buildBatchStatus" \ - --output text 2>&1) + --query "buildBatches[0].buildBatchStatus" \ + --output text 2>&1) if [[ ${build_batch_status} == "SUCCEEDED" ]]; then echo "Build ${BUILD_BATCH_ID} finished 
successfully." return 0 @@ -105,8 +111,8 @@ function start_windows_img_build() { --document-name "${WIN_DOCKER_BUILD_SSM_DOCUMENT}" \ --output-s3-bucket-name "${S3_FOR_WIN_DOCKER_IMG_BUILD}" \ --output-s3-key-prefix 'runcommand' \ - --parameters "TriggerType=[\"${TRIGGER_TYPE}\"]" \ - | jq -r '.Command.CommandId') + --parameters "TriggerType=[\"${TRIGGER_TYPE}\"]" | + jq -r '.Command.CommandId') # Export for checking command run status. export WINDOWS_DOCKER_IMG_BUILD_COMMAND_ID="${command_id}" echo "Windows ec2 is executing SSM command." @@ -146,4 +152,4 @@ function win_docker_img_build_status_check() { done echo "SSM command ${WINDOWS_DOCKER_IMG_BUILD_COMMAND_ID} takes more time than expected." return 1 -} \ No newline at end of file +} diff --git a/tests/ci/cdk/pipeline/setup_stage.py b/tests/ci/cdk/pipeline/setup_stage.py index f7276d46d7..fc660fd930 100644 --- a/tests/ci/cdk/pipeline/setup_stage.py +++ b/tests/ci/cdk/pipeline/setup_stage.py @@ -1,23 +1,24 @@ import typing -from aws_cdk import Stage, aws_codebuild as codebuild, Environment, Stack, aws_iam as iam +from aws_cdk import ( + Stage, + Environment, + Stack, + aws_iam as iam, +) from constructs import Construct -from cdk.ecr_stack import EcrStack -from cdk.linux_docker_image_batch_build_stack import LinuxDockerImageBatchBuildStack -from cdk.windows_docker_image_build_stack import WindowsDockerImageBuildStack -from util.metadata import LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO, WINDOWS_X86_ECR_REPO, PIPELINE_ACCOUNT - class SetupStage(Stage): """Define a stack of IAM role to allow cross-account deployment""" + def __init__( - self, - scope: Construct, - id: str, - pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], - deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs + self, + scope: Construct, + id: str, + pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + deploy_environment: typing.Union[Environment, 
typing.Dict[str, typing.Any]], + **kwargs, ): super().__init__( scope, @@ -32,54 +33,62 @@ def __init__( pipeline_environment=pipeline_environment, deploy_environment=deploy_environment, stack_name="aws-lc-ci-pipeline-setup", - **kwargs + **kwargs, ) @property def stacks(self): return [child for child in self.node.children if isinstance(child, Stack)] + class SetupStack(Stack): """Define a stack of IAM role to allow cross-account deployment""" + def __init__( - self, - scope: Construct, - id: str, - pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], - deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs + self, + scope: Construct, + id: str, + pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs, ) -> None: super().__init__(scope, id, env=deploy_environment, **kwargs) cross_account_role = iam.Role( self, - 'CrossAccountCodeBuildRole', - role_name='CrossAccountCodeBuildRole', - assumed_by=iam.ArnPrincipal(f'arn:aws:iam::{pipeline_environment.account}:role/CrossAccountPipelineRole'), #TODO: add a conditional to exclude this in dev env + "CrossAccountCodeBuildRole", + role_name="CrossAccountCodeBuildRole", + assumed_by=iam.ArnPrincipal( + f"arn:aws:iam::{pipeline_environment.account}:role/CrossAccountPipelineRole" + ), # TODO: add a conditional to exclude this in dev env ) # Grant access to all CodeBuild projects - cross_account_role.add_to_policy(iam.PolicyStatement( - effect=iam.Effect.ALLOW, - actions=[ - 'codebuild:*' - ], - resources=[f'arn:aws:codebuild:{deploy_environment.region}:{deploy_environment.account}:project/aws-lc-*'] - )) + cross_account_role.add_to_policy( + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=["codebuild:*"], + resources=[ + f"arn:aws:codebuild:{deploy_environment.region}:{deploy_environment.account}:project/aws-lc-*" + ], + ) + ) - 
cross_account_role.add_to_policy(iam.PolicyStatement( - effect=iam.Effect.ALLOW, - actions=[ - 'cloudformation:DescribeChangeSet', - 'cloudformation:DescribeStacks', - 'ec2:DescribeInstances', - 'ssm:DescribeInstanceInformation', - 'ssm:SendCommand', - 'ssm:ListCommands', - 'ecr:DescribeImages', - 'ecr:BatchGetImage', - 'ecr:PutImage', - 'ecr:BatchDeleteImage' - ], - resources=['*'] - )) \ No newline at end of file + cross_account_role.add_to_policy( + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + "cloudformation:DescribeChangeSet", + "cloudformation:DescribeStacks", + "ec2:DescribeInstances", + "ssm:DescribeInstanceInformation", + "ssm:SendCommand", + "ssm:ListCommands", + "ecr:DescribeImages", + "ecr:BatchGetImage", + "ecr:PutImage", + "ecr:BatchDeleteImage", + ], + resources=["*"], + ) + ) diff --git a/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py b/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py index 4e37cd469f..02036a9f19 100644 --- a/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py +++ b/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py @@ -8,17 +8,22 @@ from cdk.ecr_stack import EcrStack from cdk.windows_docker_image_build_stack import WindowsDockerImageBuildStack -from util.metadata import WINDOWS_X86_ECR_REPO, WIN_EC2_TAG_KEY, WIN_EC2_TAG_VALUE, SSM_DOCUMENT_NAME +from util.metadata import ( + WINDOWS_X86_ECR_REPO, + WIN_EC2_TAG_KEY, + WIN_EC2_TAG_VALUE, + SSM_DOCUMENT_NAME, +) class WindowsDockerImageBuildStage(Stage): def __init__( - self, - scope: Construct, - id: str, - pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], - deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs + self, + scope: Construct, + id: str, + pipeline_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + deploy_environment: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs ): super().__init__( scope, @@ -32,7 +37,7 @@ def 
__init__( "aws-lc-ecr-windows-x86", WINDOWS_X86_ECR_REPO, env=deploy_environment, - stack_name="aws-lc-ecr-windows-x86" + stack_name="aws-lc-ecr-windows-x86", ) self.windows_docker_build_stack = WindowsDockerImageBuildStack( @@ -53,13 +58,13 @@ def stacks(self) -> typing.List[Stack]: return [child for child in self.node.children if isinstance(child, Stack)] def add_stage_to_wave( - self, - wave: pipelines.Wave, - input: pipelines.FileSet, - role: iam.Role, - max_retry: typing.Optional[int] = 2, - additional_stacks: typing.Optional[typing.List[str]] = None, - env: typing.Optional[typing.Mapping[str, str]] = None + self, + wave: pipelines.Wave, + input: pipelines.FileSet, + role: iam.Role, + max_retry: typing.Optional[int] = 2, + additional_stacks: typing.Optional[typing.List[str]] = None, + env: typing.Optional[typing.Mapping[str, str]] = None, ): stacks = self.stacks + (additional_stacks if additional_stacks else []) stack_names = [stack.stack_name for stack in stacks] @@ -73,10 +78,10 @@ def add_stage_to_wave( commands=[ "cd tests/ci/cdk/pipeline/scripts", "chmod +x cleanup_orphaned_images.sh check_trigger_conditions.sh build_target.sh", - "./cleanup_orphaned_images.sh --repos \"${ECR_REPOS}\"", - "trigger_conditions=$(./check_trigger_conditions.sh --build-type docker --platform windows --stacks \"${STACKS}\")", + './cleanup_orphaned_images.sh --repos "${ECR_REPOS}"', + 'trigger_conditions=$(./check_trigger_conditions.sh --build-type docker --platform windows --stacks "${STACKS}")', "export NEED_REBUILD=$(echo $trigger_conditions | sed -n -e 's/.*\(NEED_REBUILD=[0-9]*\).*/\\1/p' | cut -d'=' -f2 )", - "./build_target.sh --build-type docker --platform windows --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}" + "./build_target.sh --build-type docker --platform windows --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}", ], env={ **env, @@ -90,14 +95,9 @@ def add_stage_to_wave( "S3_FOR_WIN_DOCKER_IMG_BUILD": self.s3_bucket_name, }, role=role, - 
timeout=Duration.minutes(timeout) + timeout=Duration.minutes(timeout), ) - wave.add_stage( - self, - post=[ - docker_build_step - ] - ) + wave.add_stage(self, post=[docker_build_step]) - self.need_rebuild = docker_build_step.exported_variable("NEED_REBUILD") \ No newline at end of file + self.need_rebuild = docker_build_step.exported_variable("NEED_REBUILD") diff --git a/tests/ci/cdk/util/build_spec_loader.py b/tests/ci/cdk/util/build_spec_loader.py index f0eb27c9d4..bd65e1e4ac 100644 --- a/tests/ci/cdk/util/build_spec_loader.py +++ b/tests/ci/cdk/util/build_spec_loader.py @@ -24,7 +24,9 @@ def load(file_path, env): # If the deployment uses team account, the change of batch BuildSpec file is loaded automatically without deployment. # else, the change will require manual deployment via CDK command. if can_autoload: - return codebuild.BuildSpec.from_source_filename("tests/ci/cdk/{}".format(file_path)) + return codebuild.BuildSpec.from_source_filename( + "tests/ci/cdk/{}".format(file_path) + ) # TODO(CryptoAlg-1276): remove below when the batch BuildSpec supports the env variable of account and region. 
placeholder_map = { PROD_ACCOUNT: env.account, @@ -34,6 +36,6 @@ def load(file_path, env): file_text = original_file.read() for key in placeholder_map.keys(): file_text = file_text.replace(key, placeholder_map[key]) - with tempfile.NamedTemporaryFile(mode='w+', delete=False) as temp_file: + with tempfile.NamedTemporaryFile(mode="w+", delete=False) as temp_file: temp_file.write(file_text) return codebuild.BuildSpec.from_asset(temp_file.name) diff --git a/tests/ci/cdk/util/devicefarm_util.py b/tests/ci/cdk/util/devicefarm_util.py index 372ecedf12..595e5e9059 100644 --- a/tests/ci/cdk/util/devicefarm_util.py +++ b/tests/ci/cdk/util/devicefarm_util.py @@ -11,4 +11,4 @@ DEVICEFARM_PROJECT = EnvUtil.get("DEVICEFARM_PROJECT", None) DEVICEFARM_DEVICE_POOL = EnvUtil.get("DEVICEFARM_DEVICE_POOL", None) ANDROID_TEST_NAME = EnvUtil.get("ANDROID_TEST_NAME", "AWS-LC Android Test") -AWS_REGION = EnvUtil.get("AWS_REGION", None) \ No newline at end of file +AWS_REGION = EnvUtil.get("AWS_REGION", None) diff --git a/tests/ci/cdk/util/ecr_util.py b/tests/ci/cdk/util/ecr_util.py index cbbd0016ca..88361a1ce3 100644 --- a/tests/ci/cdk/util/ecr_util.py +++ b/tests/ci/cdk/util/ecr_util.py @@ -1,2 +1,4 @@ def ecr_arn(ecr_repo_name, env): - return "{}.dkr.ecr.{}.amazonaws.com/{}".format(env.account, env.region, ecr_repo_name) + return "{}.dkr.ecr.{}.amazonaws.com/{}".format( + env.account, env.region, ecr_repo_name + ) diff --git a/tests/ci/cdk/util/env_util.py b/tests/ci/cdk/util/env_util.py index 3e44cdccdd..e2a7c832c9 100644 --- a/tests/ci/cdk/util/env_util.py +++ b/tests/ci/cdk/util/env_util.py @@ -23,4 +23,3 @@ def get(key, default_value: typing.Optional[str] = None): @staticmethod def get_optional(key): return os.environ.get(key) - diff --git a/tests/ci/cdk/util/iam_policies.py b/tests/ci/cdk/util/iam_policies.py index 2158d105d5..d52150bb25 100644 --- a/tests/ci/cdk/util/iam_policies.py +++ b/tests/ci/cdk/util/iam_policies.py @@ -3,7 +3,10 @@ # Copyright Amazon.com, Inc. 
or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 OR ISC -def ec2_policies_in_json(ec2_role_name, ec2_security_group_id, ec2_subnet_id, ec2_vpc_id, env): + +def ec2_policies_in_json( + ec2_role_name, ec2_security_group_id, ec2_subnet_id, ec2_vpc_id, env +): """ Define an IAM policy that gives permissions for starting, stopping, and getting details of EC2 instances and their Vpcs :return: an IAM policy statement in json. @@ -24,13 +27,22 @@ def ec2_policies_in_json(ec2_role_name, ec2_security_group_id, ec2_subnet_id, ec "arn:aws:iam::{}:role/{}".format(env.account, ec2_role_name), "arn:aws:ec2:{}:{}:instance/*".format(env.region, env.account), "arn:aws:ec2:{}::image/*".format(env.region), - "arn:aws:ec2:{}:{}:network-interface/*".format(env.region, env.account), + "arn:aws:ec2:{}:{}:network-interface/*".format( + env.region, env.account + ), "arn:aws:ec2:{}:{}:volume/*".format(env.region, env.account), - "arn:aws:ec2:{}:{}:security-group/{}".format(env.region, env.account, ec2_security_group_id), - "arn:aws:ec2:{}:{}:subnet/{}".format(env.region, env.account, ec2_subnet_id), - "arn:aws:ec2:{}:{}:vpc/{}".format(env.region, env.account, ec2_vpc_id), - ] - }] + "arn:aws:ec2:{}:{}:security-group/{}".format( + env.region, env.account, ec2_security_group_id + ), + "arn:aws:ec2:{}:{}:subnet/{}".format( + env.region, env.account, ec2_subnet_id + ), + "arn:aws:ec2:{}:{}:vpc/{}".format( + env.region, env.account, ec2_vpc_id + ), + ], + } + ], } @@ -49,16 +61,20 @@ def ssm_policies_in_json(env): "ssm:CreateDocument", "ssm:DeleteDocument", "ssm:ListCommands", - "ssm:DescribeInstanceInformation" + "ssm:DescribeInstanceInformation", ], "Resource": [ - "arn:aws:ec2:{}:{}:instance/*".format(env.region, env.account), # Needed for ssm:SendCommand + "arn:aws:ec2:{}:{}:instance/*".format( + env.region, env.account + ), # Needed for ssm:SendCommand "arn:aws:ssm:{}:{}:*".format(env.region, env.account), "arn:aws:ssm:{}:{}:document/*".format(env.region, 
env.account), - ] - }] + ], + } + ], } + def code_build_batch_policy_in_json(project_ids, env): """ Define an IAM policy statement for CodeBuild batch operation. @@ -67,7 +83,11 @@ def code_build_batch_policy_in_json(project_ids, env): """ resources = [] for project_id in project_ids: - resources.append("arn:aws:codebuild:{}:{}:project/{}*".format(env.region, env.account, project_id)) + resources.append( + "arn:aws:codebuild:{}:{}:project/{}*".format( + env.region, env.account, project_id + ) + ) return { "Version": "2012-10-17", "Statement": [ @@ -76,13 +96,14 @@ def code_build_batch_policy_in_json(project_ids, env): "Action": [ "codebuild:StartBuild", "codebuild:StopBuild", - "codebuild:RetryBuild" + "codebuild:RetryBuild", ], - "Resource": resources + "Resource": resources, } - ] + ], } + def code_build_cloudwatch_logs_policy_in_json(log_groups): """ Define an IAM policy statement for CloudWatch logs associated with CodeBuild projects. @@ -95,16 +116,11 @@ def code_build_cloudwatch_logs_policy_in_json(log_groups): return { "Version": "2012-10-17", "Statement": [ - { - "Effect": "Allow", - "Action": [ - "logs:GetLogEvents" - ], - "Resource": resources - } - ] + {"Effect": "Allow", "Action": ["logs:GetLogEvents"], "Resource": resources} + ], } + def code_build_publish_metrics_in_json(env): """ Define an IAM policy that only grants access to publish CloudWatch metrics to the current region in the same @@ -119,21 +135,15 @@ def code_build_publish_metrics_in_json(env): "Resource": "*", "Condition": { "StringEquals": { - "aws:RequestedRegion": [ - env.region - ], - "cloudwatch:namespace": [ - "AWS-LC-Fuzz", - "AWS-LC" - ] + "aws:RequestedRegion": [env.region], + "cloudwatch:namespace": ["AWS-LC-Fuzz", "AWS-LC"], } - } + }, } - ] + ], } - def s3_read_write_policy_in_json(s3_bucket_name): """ Define an IAM policy statement for reading and writing to S3 bucket. 
@@ -144,15 +154,10 @@ def s3_read_write_policy_in_json(s3_bucket_name): "Statement": [ { "Effect": "Allow", - "Action": [ - "s3:PutObject", - "s3:GetObject" - ], - "Resource": [ - "arn:aws:s3:::{}/*".format(s3_bucket_name) - ] + "Action": ["s3:PutObject", "s3:GetObject"], + "Resource": ["arn:aws:s3:::{}/*".format(s3_bucket_name)], } - ] + ], } @@ -180,10 +185,8 @@ def ecr_power_user_policy_in_json(ecr_repo_names, env): "Statement": [ { "Effect": "Allow", - "Action": [ - "ecr:GetAuthorizationToken" - ], - "Resource": "*" + "Action": ["ecr:GetAuthorizationToken"], + "Resource": "*", }, { "Effect": "Allow", @@ -202,13 +205,14 @@ def ecr_power_user_policy_in_json(ecr_repo_names, env): "ecr:InitiateLayerUpload", "ecr:UploadLayerPart", "ecr:CompleteLayerUpload", - "ecr:PutImage" + "ecr:PutImage", ], - "Resource": ecr_arns - } - ] + "Resource": ecr_arns, + }, + ], } + def device_farm_access_policy_in_json(env): """ Define an IAM policy statement for Device Farm operations. @@ -233,7 +237,7 @@ def device_farm_access_policy_in_json(env): "devicefarm:ListSuites", "devicefarm:ListTests", ], - "Resource": resources + "Resource": resources, } - ] + ], } diff --git a/tests/ci/cdk/util/metadata.py b/tests/ci/cdk/util/metadata.py index 7135f70349..4531e181dd 100644 --- a/tests/ci/cdk/util/metadata.py +++ b/tests/ci/cdk/util/metadata.py @@ -22,23 +22,35 @@ STAGING_GITHUB_REPO_OWNER = "aws" STAGING_GITHUB_REPO_NAME = "private-aws-lc-staging" -IS_DEV = EnvUtil.get("IS_DEV", "False") == "True" #TODO: change default value to true +IS_DEV = EnvUtil.get("IS_DEV", "False") == "True" # TODO: change default value to true MAX_TEST_RETRY = int(EnvUtil.get("MAX_TEST_RETRY", "2")) # Used when AWS CDK defines ECR repos. 
-LINUX_AARCH_ECR_REPO = EnvUtil.get("ECR_LINUX_AARCH_REPO_NAME", "aws-lc-docker-images-linux-aarch") -LINUX_X86_ECR_REPO = EnvUtil.get("ECR_LINUX_X86_REPO_NAME", "aws-lc-docker-images-linux-x86") -WINDOWS_X86_ECR_REPO = EnvUtil.get("ECR_WINDOWS_X86_REPO_NAME", "aws-lc-docker-images-windows-x86") +LINUX_AARCH_ECR_REPO = EnvUtil.get( + "ECR_LINUX_AARCH_REPO_NAME", "aws-lc-docker-images-linux-aarch" +) +LINUX_X86_ECR_REPO = EnvUtil.get( + "ECR_LINUX_X86_REPO_NAME", "aws-lc-docker-images-linux-x86" +) +WINDOWS_X86_ECR_REPO = EnvUtil.get( + "ECR_WINDOWS_X86_REPO_NAME", "aws-lc-docker-images-windows-x86" +) # Used when AWS CodeBuild needs to create web_hooks. GITHUB_REPO_OWNER = EnvUtil.get("GITHUB_REPO_OWNER", "aws") GITHUB_REPO_NAME = EnvUtil.get("GITHUB_REPO_NAME", "aws-lc") GITHUB_SOURCE_VERSION = EnvUtil.get("GITHUB_SOURCE_VERSION", "main") -GITHUB_TOKEN_SECRET_NAME = EnvUtil.get("GITHUB_TOKEN_SECRET_NAME", "aws-lc/ci/github/token") +GITHUB_TOKEN_SECRET_NAME = EnvUtil.get( + "GITHUB_TOKEN_SECRET_NAME", "aws-lc/ci/github/token" +) # Used when AWS CDK defines resources for Windows docker image build. 
WIN_EC2_TAG_KEY = EnvUtil.get("WIN_EC2_TAG_KEY", "aws-lc") -WIN_EC2_TAG_VALUE = EnvUtil.get("WIN_EC2_TAG_VALUE", "aws-lc-windows-docker-image-build") -SSM_DOCUMENT_NAME = EnvUtil.get("WIN_DOCKER_BUILD_SSM_DOCUMENT", "AWSLC-BuildWindowsDockerImages") - -GITHUB_PUSH_CI_BRANCH_TARGETS = r"(main|fips-\d{4}-\d{2}-\d{2}.*)" \ No newline at end of file +WIN_EC2_TAG_VALUE = EnvUtil.get( + "WIN_EC2_TAG_VALUE", "aws-lc-windows-docker-image-build" +) +SSM_DOCUMENT_NAME = EnvUtil.get( + "WIN_DOCKER_BUILD_SSM_DOCUMENT", "AWSLC-BuildWindowsDockerImages" +) + +GITHUB_PUSH_CI_BRANCH_TARGETS = r"(main|fips-\d{4}-\d{2}-\d{2}.*)" diff --git a/tests/ci/cdk/util/yml_loader.py b/tests/ci/cdk/util/yml_loader.py index da70888a6e..bdefa11766 100644 --- a/tests/ci/cdk/util/yml_loader.py +++ b/tests/ci/cdk/util/yml_loader.py @@ -12,7 +12,9 @@ class YmlLoader(object): """Responsible for loading yml file as python object.""" @staticmethod - def load(file_path, placeholder_map: typing.Optional[typing.Mapping[str, str]] = None): + def load( + file_path, placeholder_map: typing.Optional[typing.Mapping[str, str]] = None + ): """ Used to load yml file and replace some placeholders if needed. :param file_path: path to the yml file. 
From 7ec6289fb489b6f7effe83ffaec2a52d4c932dbd Mon Sep 17 00:00:00 2001 From: Nghi Ho Date: Wed, 26 Mar 2025 16:33:32 -0700 Subject: [PATCH 05/10] Fix minor bugs --- tests/ci/cdk/cdk/aws_lc_github_ci_stack.py | 1 - .../windows_docker_build_ssm_document.yaml | 2 +- tests/ci/cdk/pipeline/pipeline_stack.py | 4 ++-- tests/ci/cdk/pipeline/scripts/build_target.sh | 23 +++++++++++++++---- .../scripts/check_trigger_conditions.sh | 2 +- tests/ci/cdk/run-cdk.sh | 5 ++-- 6 files changed, 25 insertions(+), 12 deletions(-) diff --git a/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py index 7492064e1d..db47afff46 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py @@ -23,7 +23,6 @@ GITHUB_PUSH_CI_BRANCH_TARGETS, GITHUB_REPO_OWNER, GITHUB_REPO_NAME, - PIPELINE_ACCOUNT, PRE_PROD_ACCOUNT, STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME, diff --git a/tests/ci/cdk/cdk/ssm/windows_docker_build_ssm_document.yaml b/tests/ci/cdk/cdk/ssm/windows_docker_build_ssm_document.yaml index 94ac25ef36..3d2a80eff8 100644 --- a/tests/ci/cdk/cdk/ssm/windows_docker_build_ssm_document.yaml +++ b/tests/ci/cdk/cdk/ssm/windows_docker_build_ssm_document.yaml @@ -13,7 +13,7 @@ mainSteps: - $ErrorActionPreference = 'Stop' - $TRIGGER_TYPE = '{{ TriggerType }}' - "Get-ChildItem Env: | Sort-Object Name" - - rm -Recurse -Force docker-images + - if (Test-Path "docker-images") { Remove-Item -Recurse -Force "docker-images" } - mkdir docker-images - cd docker-images - Set-ExecutionPolicy Bypass -Scope Process -Force; [Net.ServicePointManager]::SecurityProtocol = [Net.ServicePointManager]::SecurityProtocol -bor [Net.SecurityProtocolType]::Tls12; $env:chocolateyUseWindowsCompression = 'true'; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1')) | Out-Null diff --git 
a/tests/ci/cdk/pipeline/pipeline_stack.py b/tests/ci/cdk/pipeline/pipeline_stack.py index f8934e38af..c02795d5e8 100644 --- a/tests/ci/cdk/pipeline/pipeline_stack.py +++ b/tests/ci/cdk/pipeline/pipeline_stack.py @@ -268,12 +268,12 @@ def deploy_to_environment( docker_build_wave.add_post( CodeBuildStep( - f"{deploy_environment_type.value}-CompleteDockerBuild", + f"{deploy_environment_type.value}-FinalizeImages", input=source, commands=[ "cd tests/ci/cdk/pipeline/scripts", "chmod +x finalize_images.sh", - # "./finalize_images.sh --repos \"${ECR_REPOS}\"", + "./finalize_images.sh --repos \"${ECR_REPOS}\"", ], env={ **codebuild_environment_variables, diff --git a/tests/ci/cdk/pipeline/scripts/build_target.sh b/tests/ci/cdk/pipeline/scripts/build_target.sh index 7098c03302..94b2c863fb 100644 --- a/tests/ci/cdk/pipeline/scripts/build_target.sh +++ b/tests/ci/cdk/pipeline/scripts/build_target.sh @@ -37,15 +37,19 @@ function build_codebuild_ci_project() { start_codebuild_project "${project}" "${source_version}" while [[ ${attempt} -le ${MAX_RETRY} ]]; do + if [[ $attempt -gt 0 ]]; then + echo "Retrying ${attempt}/${MAX_RETRY}..." + fi + attempt=$((attempt + 1)) echo "Waiting for CI tests for complete. This may take anywhere from 15 minutes to 1 hour" if ! codebuild_build_status_check "${TIMEOUT}"; then - echo "Tests failed, retrying ${attempt}/${MAX_RETRY}..." + echo "Tests failed." if [[ ${attempt} -le ${MAX_RETRY} ]]; then retry_batch_build else - echo "CI tests failed" + echo "CI tests failed." exit 1 fi fi @@ -61,12 +65,16 @@ function build_linux_docker_images() { start_codebuild_project aws-lc-docker-image-build-linux "${COMMIT_HASH}" while [[ ${attempt} -le ${MAX_RETRY} ]]; do + if [[ $attempt -gt 0 ]]; then + echo "Retrying ${attempt}/${MAX_RETRY}..." + fi + attempt=$((attempt + 1)) echo "Waiting for docker images creation. Building the docker images need to take 1 hour." 
# TODO(CryptoAlg-624): These image build may fail due to the Docker Hub pull limits made on 2020-11-01. if ! codebuild_build_status_check "${TIMEOUT}"; then - echo "Build failed, retrying ${attempt}/${MAX_RETRY}..." + echo "Build failed." if [[ ${attempt} -le ${MAX_RETRY} ]]; then retry_batch_build else @@ -83,17 +91,22 @@ function build_win_docker_images() { local attempt=0 while [[ ${attempt} -le ${MAX_RETRY} ]]; do + if [[ $attempt -gt 0 ]]; then + echo "Retrying ${attempt}/${MAX_RETRY}..." + fi + attempt=$((attempt + 1)) + echo "Executing AWS SSM commands to build Windows docker images." if ! start_windows_img_build; then - echo "Failed to start build, retrying ${attempt}/${MAX_RETRY}..." + echo "Failed to start build" continue fi echo "Waiting for docker images creation. Building the docker images need to take 1 hour." # TODO(CryptoAlg-624): These image build may fail due to the Docker Hub pull limits made on 2020-11-01. if ! win_docker_img_build_status_check "${TIMEOUT}"; then - echo "Build failed, retrying ${attempt}/${MAX_RETRY}..." + echo "Build failed" continue fi diff --git a/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh b/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh index 27850bb505..1d8cd3081a 100644 --- a/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh +++ b/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh @@ -6,7 +6,7 @@ set -exuo pipefail source util.sh -NEED_REBUILD=${NEED_REBUILD:-1} +NEED_REBUILD=${NEED_REBUILD:-0} COMMIT_HASH=${COMMIT_HASH:-$CODEBUILD_RESOLVED_SOURCE_VERSION} LINUX_DOCKER_PATH="tests/ci/docker_images/(dependencies|linux)" diff --git a/tests/ci/cdk/run-cdk.sh b/tests/ci/cdk/run-cdk.sh index 593bd0ecff..b9f5482dfe 100755 --- a/tests/ci/cdk/run-cdk.sh +++ b/tests/ci/cdk/run-cdk.sh @@ -296,8 +296,8 @@ For aws-lc continuous integration setup, this script uses aws cli to build some Options: --help Displays this help - --aws-account AWS account for CDK deploy/destroy. 
Default to '620771051181'. - --aws-region AWS region for AWS resources creation. Default to 'us-west-2'. + --deploy-account AWS account for CDK deploy/destroy. Default to '620771051181'. + --deploy-region AWS region for AWS resources creation. Default to 'us-west-2'. --github-repo-owner GitHub repository owner. Default to 'aws'. --github-source-version GitHub source version. Default to 'main'. --action Required. The value can be @@ -313,6 +313,7 @@ Options: 'synth': synthesizes and prints the CloudFormation template for the stacks. 'bootstrap': Bootstraps the CDK stack. This is needed before deployment or updating the CI. 'invoke': invoke a custom command. Provide the custom command through '--command ' + --command Custom command to invoke. Required for '--action invoke'. EOF } From 775eeedb45ff2df8d03dc500ec682fb82619ee2c Mon Sep 17 00:00:00 2001 From: Nghi Ho Date: Fri, 28 Mar 2025 13:41:06 -0700 Subject: [PATCH 06/10] Add step to auto-sync private repo --- tests/ci/cdk/pipeline/ci_stage.py | 41 ++++++++++++++++++- tests/ci/cdk/pipeline/codebuild_batch_step.py | 3 ++ tests/ci/cdk/pipeline/pipeline_stack.py | 6 ++- tests/ci/cdk/pipeline/scripts/build_target.sh | 4 +- tests/ci/cdk/pipeline/setup_stage.py | 6 +-- 5 files changed, 52 insertions(+), 8 deletions(-) diff --git a/tests/ci/cdk/pipeline/ci_stage.py b/tests/ci/cdk/pipeline/ci_stage.py index 1a8aba875f..18430ec7dd 100644 --- a/tests/ci/cdk/pipeline/ci_stage.py +++ b/tests/ci/cdk/pipeline/ci_stage.py @@ -4,7 +4,7 @@ import re import typing -from aws_cdk import Stage, Environment, Duration, pipelines, aws_iam as iam, Stack +from aws_cdk import Stage, Environment, Duration, Stack, pipelines, aws_iam as iam, aws_codebuild as codebuild from constructs import Construct from cdk.aws_lc_analytics_stack import AwsLcGitHubAnalyticsStack @@ -13,6 +13,7 @@ from cdk.aws_lc_github_ci_stack import AwsLcGitHubCIStack from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack from pipeline.codebuild_batch_step import 
CodeBuildBatchStep +from util.metadata import PRE_PROD_ACCOUNT class CiStage(Stage): @@ -173,6 +174,37 @@ def add_stage_to_pipeline( ): stack_names = [stack.stack_name for stack in self.stacks] + private_repo_sync_step=None + + if self.stacks[0].account == PRE_PROD_ACCOUNT: + private_repo_sync_step = pipelines.CodeBuildStep( + "PrivateRepoSync", + build_environment=codebuild.BuildEnvironment( + environment_variables={ + "GITHUB_PAT": codebuild.BuildEnvironmentVariable( + type=codebuild.BuildEnvironmentVariableType.SECRETS_MANAGER, + value="aws-lc/ci/github/token", + ), + } + ), + commands=[ + "env", + "curl -H \"Authorization: token ${GITHUB_PAT}\" https://api.github.com/user", + "git clone https://${GITHUB_PAT}@github.com/${STAGING_GITHUB_REPO_OWNER}/${STAGING_GITHUB_REPO_NAME}.git", + "git remote add upstream https://github.com/aws/aws-lc.git", + "git fetch upstream", + "git checkout main", + "git merge upstream/main", + # "git push origin main", + ], + env={ + "STAGING_GITHUB_REPO_OWNER": "aws", + "STAGING_GITHUB_REPO_NAME": "private-aws-lc-staging", + }, + role=role, + timeout=Duration.minutes(60), + ) + env = env or {} prebuild_check_step = pipelines.CodeBuildStep( @@ -220,6 +252,7 @@ def add_stage_to_pipeline( ], role=role, timeout=300, + project_description=f"Pipeline step AwsLcCiPipeline/{self.stage_name}/StartWait", partial_batch_build_spec=batch_build_jobs, env={ **env, @@ -230,7 +263,11 @@ def add_stage_to_pipeline( ci_run_step.add_step_dependency(prebuild_check_step) - pipeline.add_stage(self, post=[prebuild_check_step, ci_run_step]) + pipeline.add_stage( + self, + pre=[private_repo_sync_step] if private_repo_sync_step else None, + post=[prebuild_check_step, ci_run_step] + ) class BatchBuildOptions: diff --git a/tests/ci/cdk/pipeline/codebuild_batch_step.py b/tests/ci/cdk/pipeline/codebuild_batch_step.py index ee73f3af1f..ab8e15211b 100644 --- a/tests/ci/cdk/pipeline/codebuild_batch_step.py +++ b/tests/ci/cdk/pipeline/codebuild_batch_step.py @@ -42,6 
+42,7 @@ def __init__( partial_batch_build_spec: typing.Mapping[builtins.str, typing.Any], role: iam.Role, timeout: int = 300, + project_description: str = None, env: typing.Optional[typing.Mapping[str, str]] = None, ): super().__init__(id) @@ -54,6 +55,7 @@ def __init__( self.partial_batch_build_spec = partial_batch_build_spec self.role = role self.timeout = timeout + self.project_description = project_description self.env = ( { key: codebuild.BuildEnvironmentVariable(value=value) @@ -80,6 +82,7 @@ def produce_action( } ), role=self.role, + description=self.project_description, timeout=Duration.minutes(self.timeout), ) diff --git a/tests/ci/cdk/pipeline/pipeline_stack.py b/tests/ci/cdk/pipeline/pipeline_stack.py index c02795d5e8..332e096eb3 100644 --- a/tests/ci/cdk/pipeline/pipeline_stack.py +++ b/tests/ci/cdk/pipeline/pipeline_stack.py @@ -57,7 +57,11 @@ def __init__( iam.PolicyStatement( effect=iam.Effect.ALLOW, resources=["*"], - actions=["codepipeline:GetPipelineExecution"], + actions=[ + "codepipeline:GetPipelineExecution", + "secretsmanager:GetSecretValue", + "kms:Decrypt" + ], ) ) diff --git a/tests/ci/cdk/pipeline/scripts/build_target.sh b/tests/ci/cdk/pipeline/scripts/build_target.sh index 94b2c863fb..bca7d586d0 100644 --- a/tests/ci/cdk/pipeline/scripts/build_target.sh +++ b/tests/ci/cdk/pipeline/scripts/build_target.sh @@ -165,7 +165,7 @@ fi if [[ ${BUILD_TYPE} == "docker" ]]; then if [[ -z "${PLATFORM+x}" || -z "${PLATFORM}" ]]; then - echo "When building Docker images, a platform must be specified" + echo "When building Docker images, a platform must be specified." exit 1 fi @@ -179,7 +179,7 @@ fi if [[ ${BUILD_TYPE} == "ci" ]]; then if [[ -z "${PROJECT+x}" || -z "${PROJECT}" ]]; then - echo "When building CI tests, a project name must be specified" + echo "When building CI tests, a project name must be specified." 
exit 1 fi diff --git a/tests/ci/cdk/pipeline/setup_stage.py b/tests/ci/cdk/pipeline/setup_stage.py index fc660fd930..b7fb613b88 100644 --- a/tests/ci/cdk/pipeline/setup_stage.py +++ b/tests/ci/cdk/pipeline/setup_stage.py @@ -56,11 +56,11 @@ def __init__( cross_account_role = iam.Role( self, - "CrossAccountCodeBuildRole", - role_name="CrossAccountCodeBuildRole", + "CrossAccountBuildRole", + role_name="CrossAccountBuildRole", assumed_by=iam.ArnPrincipal( f"arn:aws:iam::{pipeline_environment.account}:role/CrossAccountPipelineRole" - ), # TODO: add a conditional to exclude this in dev env + ), ) # Grant access to all CodeBuild projects From 3537e22fc6e11c9654226b4573faa0eedcbc04ef Mon Sep 17 00:00:00 2001 From: Nghi Ho Date: Wed, 2 Apr 2025 11:37:52 -0700 Subject: [PATCH 07/10] Fix dev pipeline deployment bug --- tests/ci/cdk/README.md | 4 ++-- tests/ci/cdk/cdk/windows_docker_image_build_stack.py | 2 +- tests/ci/cdk/pipeline/ci_stage.py | 9 +++++---- .../ci/cdk/pipeline/windows_docker_image_build_stage.py | 2 +- tests/ci/cdk/run-cdk.sh | 4 ++-- 5 files changed, 11 insertions(+), 10 deletions(-) diff --git a/tests/ci/cdk/README.md b/tests/ci/cdk/README.md index 85eeb50b03..968f8eb02a 100644 --- a/tests/ci/cdk/README.md +++ b/tests/ci/cdk/README.md @@ -3,7 +3,7 @@ AWS-LC CI uses AWS CDK to define and deploy AWS resources (e.g. AWS CodeBuild, ECR). 
## Table of Contents -- [CI Setup](#ci-setup) +- [CDK Setup](#cdk-setup) - [Before running CDK command](#before-running-cdk-command) - [Minimal permissions](#minimal-permissions) - [Pipeline Commands](#pipeline-commands) @@ -104,7 +104,7 @@ To deploy dev pipeline to the same account as your CI: To deploy dev pipeline but pipeline is hosted in a separate account: ``` -./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --github-source-version ${GITHUB_SOURCE_VERSION} --pipeline-acount ${PIPELINE_ACCOUNT_ID} --deploy-account ${DEPLOY_ACCOUNT_ID} --action deploy-dev-pipeline +./run-cdk.sh --github-repo-owner ${GITHUB_REPO_OWNER} --github-source-version ${GITHUB_SOURCE_VERSION} --pipeline-account ${PIPELINE_ACCOUNT_ID} --deploy-account ${DEPLOY_ACCOUNT_ID} --action deploy-dev-pipeline ``` To deploy production pipeline using default parameters: diff --git a/tests/ci/cdk/cdk/windows_docker_image_build_stack.py b/tests/ci/cdk/cdk/windows_docker_image_build_stack.py index ef6b48372b..d12fe7cd42 100644 --- a/tests/ci/cdk/cdk/windows_docker_image_build_stack.py +++ b/tests/ci/cdk/cdk/windows_docker_image_build_stack.py @@ -136,5 +136,5 @@ def __init__( Tags.of(instance).add(WIN_EC2_TAG_KEY, WIN_EC2_TAG_VALUE) self.output = { - "s3_bucket_name": bucket.bucket_name, + "s3_bucket_name": bucket._generate_physical_name(), } diff --git a/tests/ci/cdk/pipeline/ci_stage.py b/tests/ci/cdk/pipeline/ci_stage.py index 18430ec7dd..a232a7c0fd 100644 --- a/tests/ci/cdk/pipeline/ci_stage.py +++ b/tests/ci/cdk/pipeline/ci_stage.py @@ -13,7 +13,8 @@ from cdk.aws_lc_github_ci_stack import AwsLcGitHubCIStack from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack from pipeline.codebuild_batch_step import CodeBuildBatchStep -from util.metadata import PRE_PROD_ACCOUNT +from util.metadata import PRE_PROD_ACCOUNT, GITHUB_TOKEN_SECRET_NAME, STAGING_GITHUB_REPO_OWNER, \ + STAGING_GITHUB_REPO_NAME class CiStage(Stage): @@ -183,7 +184,7 @@ def add_stage_to_pipeline( environment_variables={ 
"GITHUB_PAT": codebuild.BuildEnvironmentVariable( type=codebuild.BuildEnvironmentVariableType.SECRETS_MANAGER, - value="aws-lc/ci/github/token", + value=GITHUB_TOKEN_SECRET_NAME, ), } ), @@ -198,8 +199,8 @@ def add_stage_to_pipeline( # "git push origin main", ], env={ - "STAGING_GITHUB_REPO_OWNER": "aws", - "STAGING_GITHUB_REPO_NAME": "private-aws-lc-staging", + "STAGING_GITHUB_REPO_OWNER": STAGING_GITHUB_REPO_OWNER, + "STAGING_GITHUB_REPO_NAME": STAGING_GITHUB_REPO_NAME, }, role=role, timeout=Duration.minutes(60), diff --git a/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py b/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py index 02036a9f19..026735b117 100644 --- a/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py +++ b/tests/ci/cdk/pipeline/windows_docker_image_build_stage.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC import typing -from aws_cdk import Stage, Environment, Stack, Duration, aws_iam as iam, pipelines, Fn +from aws_cdk import Stage, Environment, Stack, Duration, aws_iam as iam, pipelines from aws_cdk.pipelines import CodeBuildStep from constructs import Construct diff --git a/tests/ci/cdk/run-cdk.sh b/tests/ci/cdk/run-cdk.sh index b9f5482dfe..8976ae5c0a 100755 --- a/tests/ci/cdk/run-cdk.sh +++ b/tests/ci/cdk/run-cdk.sh @@ -230,7 +230,7 @@ function deploy_production_pipeline() { } function deploy_dev_pipeline() { - if [[ -z "${DEPLOY_ACCOUNT:+x}" || -z "${PIPELINE_ACCOUNT}" ]]; then + if [[ -z "${DEPLOY_ACCOUNT:+x}" || -z "${DEPLOY_ACCOUNT}" ]]; then echo "The pipeline needs a deployment acount to know where to deploy the CI to." 
exit 1 fi @@ -241,7 +241,7 @@ function deploy_dev_pipeline() { fi if [[ -z "${PIPELINE_ACCOUNT+x}" || -z "${PIPELINE_ACCOUNT}" ]]; then - export PIPELINE_ACCOUNT=DEPLOY_ACCOUNT + export PIPELINE_ACCOUNT=${DEPLOY_ACCOUNT} fi if [[ ${PIPELINE_ACCOUNT+x} == '774305600158' ]]; then From 0a19c45e83b5b7b663f1eaa206e9d3a7fbabef61 Mon Sep 17 00:00:00 2001 From: Nghi Ho Date: Wed, 2 Apr 2025 11:51:34 -0700 Subject: [PATCH 08/10] More cleanup --- tests/ci/cdk/cdk/aws_lc_android_ci_stack.py | 82 ++++++------------- .../ci/cdk/cdk/aws_lc_github_ci_x509_stack.py | 1 - 2 files changed, 24 insertions(+), 59 deletions(-) diff --git a/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py index c191894ee6..a548a26eed 100644 --- a/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py @@ -2,28 +2,13 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC import typing -from aws_cdk import ( - Duration, - Environment, - Stack, - aws_codebuild as codebuild, - aws_iam as iam, -) +from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, Environment from constructs import Construct from cdk.components import PruneStaleGitHubBuilds -from util.iam_policies import ( - code_build_batch_policy_in_json, - device_farm_access_policy_in_json, -) -from util.metadata import ( - GITHUB_REPO_OWNER, - GITHUB_REPO_NAME, - GITHUB_PUSH_CI_BRANCH_TARGETS, - PRE_PROD_ACCOUNT, - STAGING_GITHUB_REPO_OWNER, - STAGING_GITHUB_REPO_NAME, -) +from util.iam_policies import code_build_batch_policy_in_json, device_farm_access_policy_in_json +from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME, GITHUB_PUSH_CI_BRANCH_TARGETS, PRE_PROD_ACCOUNT, \ + STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME from util.build_spec_loader import BuildSpecLoader @@ -33,14 +18,12 @@ class AwsLcAndroidCIStack(Stack): # The Device Farm resource used to in this CI spec, must be manually created. 
# TODO: Automate Device Farm creation with cdk script. - def __init__( - self, - scope: Construct, - id: str, - spec_file_path: str, - env: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs - ) -> None: + def __init__(self, + scope: Construct, + id: str, + spec_file_path: str, + env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], + **kwargs) -> None: super().__init__(scope, id, env=env, **kwargs) github_repo_owner = GITHUB_REPO_OWNER @@ -59,14 +42,11 @@ def __init__( codebuild.FilterGroup.in_event_of( codebuild.EventAction.PULL_REQUEST_CREATED, codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED, - ), - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PUSH - ).and_branch_is(GITHUB_PUSH_CI_BRANCH_TARGETS), + codebuild.EventAction.PULL_REQUEST_REOPENED), + codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH).and_branch_is( + GITHUB_PUSH_CI_BRANCH_TARGETS), ], - webhook_triggers_batch_build=True, - ) + webhook_triggers_batch_build=True) # Define a IAM role for this stack. code_build_batch_policy = iam.PolicyDocument.from_json( @@ -75,16 +55,11 @@ def __init__( device_farm_policy = iam.PolicyDocument.from_json( device_farm_access_policy_in_json(env) ) - inline_policies = { - "code_build_batch_policy": code_build_batch_policy, - "device_farm_policy": device_farm_policy, - } - role = iam.Role( - scope=self, - id="{}-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies=inline_policies, - ) + inline_policies = {"code_build_batch_policy": code_build_batch_policy, "device_farm_policy": device_farm_policy} + role = iam.Role(scope=self, + id="{}-role".format(id), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + inline_policies=inline_policies) # Define CodeBuild. 
project = codebuild.Project( @@ -94,19 +69,10 @@ def __init__( source=git_hub_source, role=role, timeout=Duration.minutes(180), - environment=codebuild.BuildEnvironment( - compute_type=codebuild.ComputeType.SMALL, - privileged=False, - build_image=codebuild.LinuxBuildImage.STANDARD_4_0, - ), - build_spec=BuildSpecLoader.load(spec_file_path, env), - ) + environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.SMALL, + privileged=False, + build_image=codebuild.LinuxBuildImage.STANDARD_4_0), + build_spec=BuildSpecLoader.load(spec_file_path, env)) project.enable_batch_builds() - PruneStaleGitHubBuilds( - scope=self, - id="PruneStaleGitHubBuilds", - project=project, - ec2_permissions=False, - env=env, - ) + PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=False, env=env) diff --git a/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py b/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py index 425ea797be..a0b8d740db 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py @@ -18,7 +18,6 @@ STAGING_GITHUB_REPO_NAME, ) - class AwsLcGitHubX509CIStack(Stack): def __init__( self, From 1d84eea800b2dc00ae6db61109c27e11d44b21bc Mon Sep 17 00:00:00 2001 From: Nghi Ho Date: Thu, 17 Apr 2025 09:17:16 -0700 Subject: [PATCH 09/10] Incorporate comments --- tests/ci/cdk/app.py | 39 +--- tests/ci/cdk/cdk/aws_lc_analytics_stack.py | 23 +- tests/ci/cdk/cdk/aws_lc_android_ci_stack.py | 102 +++++---- tests/ci/cdk/cdk/aws_lc_base_ci_stack.py | 58 ++++++ .../cdk/aws_lc_ec2_test_framework_ci_stack.py | 49 ++--- tests/ci/cdk/cdk/aws_lc_github_ci_stack.py | 34 +-- .../ci/cdk/cdk/aws_lc_github_ci_x509_stack.py | 34 +-- .../ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py | 34 +-- tests/ci/cdk/cdk/bm_framework_stack.py | 114 ---------- .../codebuild/ec2_test_framework_omnibus.yaml | 8 +- tests/ci/cdk/cdk/ecr_stack.py | 6 + .../cdk/windows_docker_image_build_stack.py | 7 +- 
tests/ci/cdk/pipeline/ci_stage.py | 197 +++--------------- tests/ci/cdk/pipeline/ci_util.py | 99 +++++++++ tests/ci/cdk/pipeline/codebuild_batch_step.py | 2 - tests/ci/cdk/pipeline/deploy_util.py | 7 - .../linux_docker_image_build_stage.py | 1 - tests/ci/cdk/pipeline/pipeline_stack.py | 19 +- tests/ci/cdk/pipeline/scripts/build_target.sh | 83 ++++---- .../scripts/check_trigger_conditions.sh | 10 +- .../scripts/cleanup_orphaned_images.sh | 4 +- .../cdk/pipeline/scripts/finalize_images.sh | 4 +- tests/ci/cdk/pipeline/scripts/util.sh | 4 +- tests/ci/cdk/run-cdk.sh | 19 +- tests/ci/cdk/util/metadata.py | 4 + tests/ci/docker_images/linux-aarch/common.sh | 6 +- tests/ci/docker_images/linux-x86/common.sh | 6 +- .../ci/docker_images/windows/push_images.ps1 | 10 +- 28 files changed, 388 insertions(+), 595 deletions(-) create mode 100644 tests/ci/cdk/cdk/aws_lc_base_ci_stack.py delete mode 100644 tests/ci/cdk/cdk/bm_framework_stack.py create mode 100644 tests/ci/cdk/pipeline/ci_util.py delete mode 100644 tests/ci/cdk/pipeline/deploy_util.py diff --git a/tests/ci/cdk/app.py b/tests/ci/cdk/app.py index efd6f52997..6f198ceb9c 100644 --- a/tests/ci/cdk/app.py +++ b/tests/ci/cdk/app.py @@ -5,16 +5,10 @@ from aws_cdk import Environment, App -# from cdk.bm_framework_stack import BmFrameworkStack -from cdk.aws_lc_analytics_stack import AwsLcGitHubAnalyticsStack -from cdk.aws_lc_android_ci_stack import AwsLcAndroidCIStack -from cdk.aws_lc_github_ci_stack import AwsLcGitHubCIStack -from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack -from cdk.aws_lc_ec2_test_framework_ci_stack import AwsLcEC2TestingCIStack from cdk.linux_docker_image_batch_build_stack import LinuxDockerImageBatchBuildStack +from pipeline.ci_util import add_ci_stacks from pipeline.pipeline_stack import AwsLcCiPipeline from cdk.windows_docker_image_build_stack import WindowsDockerImageBuildStack -from cdk.aws_lc_github_ci_x509_stack import AwsLcGitHubX509CIStack from cdk.ecr_stack import EcrStack from 
util.metadata import ( LINUX_X86_ECR_REPO, @@ -35,7 +29,7 @@ env=Environment(account=PIPELINE_ACCOUNT, region=PIPELINE_REGION), ) -if DEPLOY_ACCOUNT is not None and DEPLOY_REGION is not None: +if DEPLOY_ACCOUNT and DEPLOY_REGION: # Initialize env. env = Environment(account=DEPLOY_ACCOUNT, region=DEPLOY_REGION) @@ -52,33 +46,6 @@ # Windows Docker images are created by running commands in Windows EC2 instance. WindowsDockerImageBuildStack(app, "aws-lc-docker-image-build-windows", env=env) - # Define CodeBuild Batch job for testing code. - x86_build_spec_file = "cdk/codebuild/github_ci_linux_x86_omnibus.yaml" - AwsLcGitHubCIStack(app, "aws-lc-ci-linux-x86", x86_build_spec_file, env=env) - arm_build_spec_file = "cdk/codebuild/github_ci_linux_arm_omnibus.yaml" - AwsLcGitHubCIStack(app, "aws-lc-ci-linux-arm", arm_build_spec_file, env=env) - integration_build_spec_file = "cdk/codebuild/github_ci_integration_omnibus.yaml" - AwsLcGitHubCIStack( - app, "aws-lc-ci-integration", integration_build_spec_file, env=env - ) - win_x86_build_spec_file = "cdk/codebuild/github_ci_windows_x86_omnibus.yaml" - AwsLcGitHubCIStack(app, "aws-lc-ci-windows-x86", win_x86_build_spec_file, env=env) - fuzz_build_spec_file = "cdk/codebuild/github_ci_fuzzing_omnibus.yaml" - AwsLcGitHubFuzzCIStack(app, "aws-lc-ci-fuzzing", fuzz_build_spec_file, env=env) - analytics_build_spec_file = "cdk/codebuild/github_ci_analytics_omnibus.yaml" - AwsLcGitHubAnalyticsStack( - app, "aws-lc-ci-analytics", analytics_build_spec_file, env=env - ) - # bm_framework_build_spec_file = "cdk/codebuild/bm_framework_omnibus.yaml" - # BmFrameworkStack(app, "aws-lc-ci-bm-framework", bm_framework_build_spec_file, env=env) - ec2_test_framework_build_spec_file = "cdk/codebuild/ec2_test_framework_omnibus.yaml" - AwsLcEC2TestingCIStack( - app, "aws-lc-ci-ec2-test-framework", ec2_test_framework_build_spec_file, env=env - ) - android_build_spec_file = "cdk/codebuild/github_ci_android_omnibus.yaml" - AwsLcAndroidCIStack( - app, 
"aws-lc-ci-devicefarm-android", android_build_spec_file, env=env - ) - AwsLcGitHubX509CIStack(app, "aws-lc-ci-x509", env=env) + add_ci_stacks(app, env=env) app.synth() diff --git a/tests/ci/cdk/cdk/aws_lc_analytics_stack.py b/tests/ci/cdk/cdk/aws_lc_analytics_stack.py index 4691835fd5..3340197ebb 100644 --- a/tests/ci/cdk/cdk/aws_lc_analytics_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_analytics_stack.py @@ -13,6 +13,7 @@ ) from constructs import Construct +from cdk.aws_lc_base_ci_stack import AwsLcBaseCiStack from cdk.components import PruneStaleGitHubBuilds from util.iam_policies import code_build_publish_metrics_in_json from util.metadata import ( @@ -25,7 +26,7 @@ from util.build_spec_loader import BuildSpecLoader -class AwsLcGitHubAnalyticsStack(Stack): +class AwsLcGitHubAnalyticsStack(AwsLcBaseCiStack): """Define a stack used to batch execute AWS-LC tests in GitHub.""" def __init__( @@ -36,20 +37,12 @@ def __init__( env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs ) -> None: - super().__init__(scope, id, env=env, **kwargs) + super().__init__(scope, id, env=env, timeout=120, **kwargs) - # Define CodeBuild resource. - github_repo_owner = GITHUB_REPO_OWNER - github_repo_name = GITHUB_REPO_NAME - - if env.account == PRE_PROD_ACCOUNT: - github_repo_owner = STAGING_GITHUB_REPO_OWNER - github_repo_name = STAGING_GITHUB_REPO_NAME - - # Define CodeBuild resource. - git_hub_source = codebuild.Source.git_hub( - owner=github_repo_owner, - repo=github_repo_name, + # Override default CodeBuild resource. 
+ self.git_hub_source = codebuild.Source.git_hub( + owner=self.github_repo_owner, + repo=self.github_repo_name, webhook=True, webhook_filters=[ codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH) @@ -77,7 +70,7 @@ def __init__( scope=self, id="AnalyticsCodeBuild", project_name=id, - source=git_hub_source, + source=self.git_hub_source, role=role, timeout=Duration.minutes(120), environment=codebuild.BuildEnvironment( diff --git a/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py index a548a26eed..9db467bfbd 100644 --- a/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_android_ci_stack.py @@ -2,51 +2,47 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC import typing -from aws_cdk import Duration, Stack, aws_codebuild as codebuild, aws_iam as iam, Environment +from aws_cdk import ( + Duration, + Stack, + aws_codebuild as codebuild, + aws_iam as iam, + Environment, +) from constructs import Construct +from cdk.aws_lc_base_ci_stack import AwsLcBaseCiStack from cdk.components import PruneStaleGitHubBuilds -from util.iam_policies import code_build_batch_policy_in_json, device_farm_access_policy_in_json -from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME, GITHUB_PUSH_CI_BRANCH_TARGETS, PRE_PROD_ACCOUNT, \ - STAGING_GITHUB_REPO_OWNER, STAGING_GITHUB_REPO_NAME +from util.iam_policies import ( + code_build_batch_policy_in_json, + device_farm_access_policy_in_json, +) +from util.metadata import ( + GITHUB_REPO_OWNER, + GITHUB_REPO_NAME, + GITHUB_PUSH_CI_BRANCH_TARGETS, + PRE_PROD_ACCOUNT, + STAGING_GITHUB_REPO_OWNER, + STAGING_GITHUB_REPO_NAME, +) from util.build_spec_loader import BuildSpecLoader -class AwsLcAndroidCIStack(Stack): +class AwsLcAndroidCIStack(AwsLcBaseCiStack): """Define a stack used to batch execute AWS-LC tests in GitHub.""" # The Device Farm resource used to in this CI spec, must be manually created. # TODO: Automate Device Farm creation with cdk script. 
- def __init__(self, - scope: Construct, - id: str, - spec_file_path: str, - env: typing.Optional[typing.Union[Environment, typing.Dict[str, typing.Any]]], - **kwargs) -> None: - super().__init__(scope, id, env=env, **kwargs) - - github_repo_owner = GITHUB_REPO_OWNER - github_repo_name = GITHUB_REPO_NAME - - if env.account == PRE_PROD_ACCOUNT: - github_repo_owner = STAGING_GITHUB_REPO_OWNER - github_repo_name = STAGING_GITHUB_REPO_NAME - - # Define CodeBuild resource. - git_hub_source = codebuild.Source.git_hub( - owner=github_repo_owner, - repo=github_repo_name, - webhook=True, - webhook_filters=[ - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PULL_REQUEST_CREATED, - codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED), - codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH).and_branch_is( - GITHUB_PUSH_CI_BRANCH_TARGETS), - ], - webhook_triggers_batch_build=True) + def __init__( + self, + scope: Construct, + id: str, + spec_file_path: str, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], + **kwargs + ) -> None: + super().__init__(scope, id, env=env, timeout=180, **kwargs) # Define a IAM role for this stack. code_build_batch_policy = iam.PolicyDocument.from_json( @@ -55,24 +51,38 @@ def __init__(self, device_farm_policy = iam.PolicyDocument.from_json( device_farm_access_policy_in_json(env) ) - inline_policies = {"code_build_batch_policy": code_build_batch_policy, "device_farm_policy": device_farm_policy} - role = iam.Role(scope=self, - id="{}-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies=inline_policies) + inline_policies = { + "code_build_batch_policy": code_build_batch_policy, + "device_farm_policy": device_farm_policy, + } + role = iam.Role( + scope=self, + id="{}-role".format(id), + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + inline_policies=inline_policies, + ) # Define CodeBuild. 
project = codebuild.Project( scope=self, id=id, project_name=id, - source=git_hub_source, + source=self.git_hub_source, role=role, - timeout=Duration.minutes(180), - environment=codebuild.BuildEnvironment(compute_type=codebuild.ComputeType.SMALL, - privileged=False, - build_image=codebuild.LinuxBuildImage.STANDARD_4_0), - build_spec=BuildSpecLoader.load(spec_file_path, env)) + timeout=Duration.minutes(self.timeout), + environment=codebuild.BuildEnvironment( + compute_type=codebuild.ComputeType.SMALL, + privileged=False, + build_image=codebuild.LinuxBuildImage.STANDARD_4_0, + ), + build_spec=BuildSpecLoader.load(spec_file_path, env), + ) project.enable_batch_builds() - PruneStaleGitHubBuilds(scope=self, id="PruneStaleGitHubBuilds", project=project, ec2_permissions=False, env=env) + PruneStaleGitHubBuilds( + scope=self, + id="PruneStaleGitHubBuilds", + project=project, + ec2_permissions=False, + env=env, + ) diff --git a/tests/ci/cdk/cdk/aws_lc_base_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_base_ci_stack.py new file mode 100644 index 0000000000..15d9564edd --- /dev/null +++ b/tests/ci/cdk/cdk/aws_lc_base_ci_stack.py @@ -0,0 +1,58 @@ +import typing + +from aws_cdk import aws_codebuild as codebuild, Environment, Stack +from constructs import Construct + +from cdk.components import PruneStaleGitHubBuilds +from util.metadata import ( + STAGING_GITHUB_REPO_OWNER, + STAGING_GITHUB_REPO_NAME, + PRE_PROD_ACCOUNT, + GITHUB_REPO_OWNER, + GITHUB_REPO_NAME, + GITHUB_PUSH_CI_BRANCH_TARGETS, +) + + +class AwsLcBaseCiStack(Stack): + def __init__( + self, + scope: Construct, + id: str, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], + ignore_failure: typing.Optional[bool] = False, + timeout: typing.Optional[int] = 60, + **kwargs + ) -> None: + super().__init__(scope, id, env=env, **kwargs) + self.ignore_failure = ignore_failure + self.timeout = timeout + self.env = env + + self.github_repo_owner = ( + STAGING_GITHUB_REPO_OWNER + if (env.account == PRE_PROD_ACCOUNT) + 
else GITHUB_REPO_OWNER + ) + self.github_repo_name = ( + STAGING_GITHUB_REPO_NAME + if (env.account == PRE_PROD_ACCOUNT) + else GITHUB_REPO_NAME + ) + + self.git_hub_source = codebuild.Source.git_hub( + owner=self.github_repo_owner, + repo=self.github_repo_name, + webhook=True, + webhook_filters=[ + codebuild.FilterGroup.in_event_of( + codebuild.EventAction.PULL_REQUEST_CREATED, + codebuild.EventAction.PULL_REQUEST_UPDATED, + codebuild.EventAction.PULL_REQUEST_REOPENED, + ), + codebuild.FilterGroup.in_event_of( + codebuild.EventAction.PUSH + ).and_branch_is(GITHUB_PUSH_CI_BRANCH_TARGETS), + ], + webhook_triggers_batch_build=True, + ) diff --git a/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py index 5cb35275e3..c0b61869c7 100644 --- a/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_ec2_test_framework_ci_stack.py @@ -21,6 +21,7 @@ ) from constructs import Construct +from cdk.aws_lc_base_ci_stack import AwsLcBaseCiStack from cdk.components import PruneStaleGitHubBuilds from util.metadata import ( GITHUB_PUSH_CI_BRANCH_TARGETS, @@ -44,7 +45,7 @@ # detailed documentation can be found here: https://docs.aws.amazon.com/cdk/api/latest/docs/aws-ec2-readme.html -class AwsLcEC2TestingCIStack(Stack): +class AwsLcEC2TestingCIStack(AwsLcBaseCiStack): """Define a stack used to create a CodeBuild instance on which to execute the AWS-LC m1 ci ec2 instance""" def __init__( @@ -55,32 +56,7 @@ def __init__( env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs ) -> None: - super().__init__(scope, id, env=env, **kwargs) - - github_repo_owner = GITHUB_REPO_OWNER - github_repo_name = GITHUB_REPO_NAME - - if env.account == PRE_PROD_ACCOUNT: - github_repo_owner = STAGING_GITHUB_REPO_OWNER - github_repo_name = STAGING_GITHUB_REPO_NAME - - # Define CodeBuild resource. 
- git_hub_source = codebuild.Source.git_hub( - owner=github_repo_owner, - repo=github_repo_name, - webhook=True, - webhook_filters=[ - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PULL_REQUEST_CREATED, - codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED, - ), - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PUSH - ).and_branch_is(GITHUB_PUSH_CI_BRANCH_TARGETS), - ], - webhook_triggers_batch_build=True, - ) + super().__init__(scope, id, env=env, timeout=120, **kwargs) # S3 bucket for testing internal fixes. s3_read_write_policy = iam.PolicyDocument.from_json( @@ -124,10 +100,13 @@ def __init__( ) # create security group with default rules - # security_group = ec2.SecurityGroup(self, id="{}-ec2-sg".format(id), - # allow_all_outbound=True, - # vpc=vpc, - # security_group_name='codebuild_ec2_sg') + security_group = ec2.SecurityGroup( + self, + id="{}-ec2-sg".format(id), + allow_all_outbound=True, + vpc=vpc, + security_group_name="codebuild_ec2_sg", + ) # Define a IAM role for this stack. 
code_build_batch_policy = iam.PolicyDocument.from_json( @@ -136,7 +115,7 @@ def __init__( ec2_policy = iam.PolicyDocument.from_json( ec2_policies_in_json( ec2_role.role_name, - vpc.vpc_default_security_group, + security_group.security_group_id, selected_subnets.subnets[0].subnet_id, vpc.vpc_id, env, @@ -165,9 +144,9 @@ def __init__( scope=self, id=id, project_name=id, - source=git_hub_source, + source=self.git_hub_source, role=codebuild_role, - timeout=Duration.minutes(120), + timeout=Duration.minutes(self.timeout), environment=codebuild.BuildEnvironment( compute_type=codebuild.ComputeType.SMALL, privileged=False, @@ -176,7 +155,7 @@ def __init__( build_spec=BuildSpecLoader.load(spec_file_path, env), environment_variables={ "EC2_SECURITY_GROUP_ID": codebuild.BuildEnvironmentVariable( - value=vpc.vpc_default_security_group + value=security_group.security_group_id ), "EC2_SUBNET_ID": codebuild.BuildEnvironmentVariable( value=selected_subnets.subnets[0].subnet_id diff --git a/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py index db47afff46..474b84a89b 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_ci_stack.py @@ -13,6 +13,7 @@ ) from constructs import Construct +from cdk.aws_lc_base_ci_stack import AwsLcBaseCiStack from cdk.components import PruneStaleGitHubBuilds from util.iam_policies import ( code_build_batch_policy_in_json, @@ -30,7 +31,7 @@ from util.build_spec_loader import BuildSpecLoader -class AwsLcGitHubCIStack(Stack): +class AwsLcGitHubCIStack(AwsLcBaseCiStack): """Define a stack used to batch execute AWS-LC tests in GitHub.""" def __init__( @@ -41,32 +42,7 @@ def __init__( env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs ) -> None: - super().__init__(scope, id, env=env, **kwargs) - - github_repo_owner = GITHUB_REPO_OWNER - github_repo_name = GITHUB_REPO_NAME - - if env.account == PRE_PROD_ACCOUNT: - github_repo_owner = STAGING_GITHUB_REPO_OWNER - 
github_repo_name = STAGING_GITHUB_REPO_NAME - - # Define CodeBuild resource. - git_hub_source = codebuild.Source.git_hub( - owner=github_repo_owner, - repo=github_repo_name, - webhook=True, - webhook_filters=[ - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PULL_REQUEST_CREATED, - codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED, - ), - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PUSH - ).and_branch_is(GITHUB_PUSH_CI_BRANCH_TARGETS), - ], - webhook_triggers_batch_build=True, - ) + super().__init__(scope, id, env=env, timeout=180, **kwargs) # Define a IAM role for accessing build resources log_group = logs.LogGroup(self, id="{}-public-logs".format(id)) @@ -110,9 +86,9 @@ def __init__( scope=self, id=id, project_name=id, - source=git_hub_source, + source=self.git_hub_source, role=role, - timeout=Duration.minutes(180), + timeout=Duration.minutes(self.timeout), logging=logging_options, environment=codebuild.BuildEnvironment( compute_type=codebuild.ComputeType.SMALL, diff --git a/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py b/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py index a0b8d740db..11a59d3d6e 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_ci_x509_stack.py @@ -8,6 +8,8 @@ Environment, ) from constructs import Construct + +from cdk.aws_lc_base_ci_stack import AwsLcBaseCiStack from util.build_spec_loader import BuildSpecLoader from util.metadata import ( GITHUB_PUSH_CI_BRANCH_TARGETS, @@ -18,7 +20,8 @@ STAGING_GITHUB_REPO_NAME, ) -class AwsLcGitHubX509CIStack(Stack): + +class AwsLcGitHubX509CIStack(AwsLcBaseCiStack): def __init__( self, scope: Construct, @@ -28,31 +31,6 @@ def __init__( ) -> None: super().__init__(scope, id, env=env, **kwargs) - github_repo_owner = GITHUB_REPO_OWNER - github_repo_name = GITHUB_REPO_NAME - - if env.account == PRE_PROD_ACCOUNT: - github_repo_owner = STAGING_GITHUB_REPO_OWNER - github_repo_name = 
STAGING_GITHUB_REPO_NAME - - # Define CodeBuild resource. - git_hub_source = codebuild.Source.git_hub( - owner=github_repo_owner, - repo=github_repo_name, - webhook=True, - webhook_filters=[ - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PULL_REQUEST_CREATED, - codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED, - ), - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PUSH - ).and_branch_is(GITHUB_PUSH_CI_BRANCH_TARGETS), - ], - webhook_triggers_batch_build=True, - ) - self.reports_bucket = s3.Bucket( self, "aws-lc-x509-reports", @@ -91,11 +69,11 @@ def __init__( noncurrent_version_expiration=Duration.days(1), ) - self.codebuild_project = codebuild.Project( + self.project = codebuild.Project( self, id, project_name=id, - source=git_hub_source, + source=self.git_hub_source, build_spec=BuildSpecLoader.load( "cdk/codebuild/github_ci_x509_omnibus.yaml", env ), diff --git a/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py b/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py index 08d471d043..7587f4404d 100644 --- a/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py +++ b/tests/ci/cdk/cdk/aws_lc_github_fuzz_ci_stack.py @@ -14,6 +14,7 @@ ) from constructs import Construct +from cdk.aws_lc_base_ci_stack import AwsLcBaseCiStack from cdk.components import PruneStaleGitHubBuilds from util.iam_policies import ( code_build_batch_policy_in_json, @@ -30,7 +31,7 @@ from util.build_spec_loader import BuildSpecLoader -class AwsLcGitHubFuzzCIStack(Stack): +class AwsLcGitHubFuzzCIStack(AwsLcBaseCiStack): """Define a stack used to batch execute AWS-LC tests in GitHub.""" def __init__( @@ -41,32 +42,7 @@ def __init__( env: typing.Union[Environment, typing.Dict[str, typing.Any]], **kwargs ) -> None: - super().__init__(scope, id, env=env, **kwargs) - - github_repo_owner = GITHUB_REPO_OWNER - github_repo_name = GITHUB_REPO_NAME - - if env.account == PRE_PROD_ACCOUNT: - github_repo_owner = STAGING_GITHUB_REPO_OWNER - github_repo_name = 
STAGING_GITHUB_REPO_NAME - - # Define CodeBuild resource. - git_hub_source = codebuild.Source.git_hub( - owner=github_repo_owner, - repo=github_repo_name, - webhook=True, - webhook_filters=[ - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PULL_REQUEST_CREATED, - codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED, - ), - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PUSH - ).and_branch_is(GITHUB_PUSH_CI_BRANCH_TARGETS), - ], - webhook_triggers_batch_build=True, - ) + super().__init__(scope, id, env=env, timeout=120, **kwargs) # Define a IAM role for this stack. code_build_batch_policy = iam.PolicyDocument.from_json( @@ -143,9 +119,9 @@ def __init__( scope=self, id="FuzzingCodeBuild", project_name=id, - source=git_hub_source, + source=self.git_hub_source, role=role, - timeout=Duration.minutes(120), + timeout=Duration.minutes(self.timeout), environment=codebuild.BuildEnvironment( compute_type=codebuild.ComputeType.LARGE, privileged=True, diff --git a/tests/ci/cdk/cdk/bm_framework_stack.py b/tests/ci/cdk/cdk/bm_framework_stack.py deleted file mode 100644 index a83c35de9e..0000000000 --- a/tests/ci/cdk/cdk/bm_framework_stack.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-# SPDX-License-Identifier: Apache-2.0 OR ISC - -import subprocess -import boto3 - -from botocore.exceptions import ClientError -from aws_cdk import ( - Duration, - Stack, - aws_ec2 as ec2, - aws_codebuild as codebuild, - aws_iam as iam, - aws_logs as logs, -) -from constructs import Construct - -from cdk.components import PruneStaleGitHubBuilds -from util.metadata import GITHUB_REPO_OWNER, GITHUB_REPO_NAME -from util.iam_policies import ( - code_build_batch_policy_in_json, - ec2_bm_framework_policies_in_json, - ssm_bm_framework_policies_in_json, - ecr_power_user_policy_in_json, -) -from util.build_spec_loader import BuildSpecLoader - -# detailed documentation can be found here: https://docs.aws.amazon.com/cdk/api/latest/docs/aws-ec2-readme.html - - -class BmFrameworkStack(Stack): - """Define a stack used to create a CodeBuild instance on which to execute the AWS-LC benchmarking framework""" - - def __init__( - self, scope: Construct, id: str, spec_file_path: str, **kwargs - ) -> None: - super().__init__(scope, id, **kwargs) - - # Define some variables that will be commonly used - CLOUDWATCH_LOGS = "{}-{}-cw-logs".format(AWS_ACCOUNT, id) - - # Define CodeBuild resource. - git_hub_source = codebuild.Source.git_hub( - owner=GITHUB_REPO_OWNER, - repo=GITHUB_REPO_NAME, - webhook=True, - webhook_filters=[ - codebuild.FilterGroup.in_event_of( - codebuild.EventAction.PULL_REQUEST_CREATED, - codebuild.EventAction.PULL_REQUEST_UPDATED, - codebuild.EventAction.PULL_REQUEST_REOPENED, - ) - ], - webhook_triggers_batch_build=True, - ) - - # Define a IAM role for this stack. 
- code_build_batch_policy = iam.PolicyDocument.from_json( - code_build_batch_policy_in_json([id]) - ) - ec2_bm_framework_policy = iam.PolicyDocument.from_json( - ec2_bm_framework_policies_in_json() - ) - ssm_bm_framework_policy = iam.PolicyDocument.from_json( - ssm_bm_framework_policies_in_json() - ) - codebuild_inline_policies = { - "code_build_batch_policy": code_build_batch_policy, - "ec2_bm_framework_policy": ec2_bm_framework_policy, - "ssm_bm_framework_policy": ssm_bm_framework_policy, - } - codebuild_role = iam.Role( - scope=self, - id="{}-codebuild-role".format(id), - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - inline_policies=codebuild_inline_policies, - managed_policies=[ - iam.ManagedPolicy.from_aws_managed_policy_name( - "CloudWatchAgentServerPolicy" - ) - ], - ) - - # Define CodeBuild. - project = codebuild.Project( - scope=self, - id=id, - project_name=id, - source=git_hub_source, - role=codebuild_role, - timeout=Duration.minutes(120), - environment=codebuild.BuildEnvironment( - compute_type=codebuild.ComputeType.SMALL, - privileged=False, - build_image=codebuild.LinuxBuildImage.STANDARD_4_0, - ), - build_spec=BuildSpecLoader.load(spec_file_path), - ) - project.enable_batch_builds() - - PruneStaleGitHubBuilds( - scope=self, - id="PruneStaleGitHubBuilds", - project=project, - ec2_permissions=False, - ) - - # use boto3 to determine if a cloudwatch logs group with the name we want exists, and if it doesn't, create it - logs_client = boto3.client("logs", region_name=AWS_REGION) - try: - logs_client.describe_log_groups(logGroupNamePrefix=CLOUDWATCH_LOGS) - except ClientError: - # define CloudWatch Logs groups - logs.LogGroup(self, "{}-cw-logs".format(id), log_group_name=CLOUDWATCH_LOGS) diff --git a/tests/ci/cdk/cdk/codebuild/ec2_test_framework_omnibus.yaml b/tests/ci/cdk/cdk/codebuild/ec2_test_framework_omnibus.yaml index 355c2742e0..69b8ea8292 100644 --- a/tests/ci/cdk/cdk/codebuild/ec2_test_framework_omnibus.yaml +++ 
b/tests/ci/cdk/cdk/codebuild/ec2_test_framework_omnibus.yaml @@ -13,7 +13,7 @@ batch: type: LINUX_CONTAINER privileged-mode: false compute-type: BUILD_GENERAL1_SMALL - image: 183295444613.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest + image: 620771051181.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest variables: EC2_AMI: "ami-0c29a2c5cf69b5a9c" EC2_INSTANCE_TYPE: "c6g.2xlarge" @@ -26,7 +26,7 @@ batch: type: LINUX_CONTAINER privileged-mode: false compute-type: BUILD_GENERAL1_SMALL - image: 183295444613.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest + image: 620771051181.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest variables: EC2_AMI: "ami-0c29a2c5cf69b5a9c" EC2_INSTANCE_TYPE: "c6g.4xlarge" @@ -40,7 +40,7 @@ batch: type: LINUX_CONTAINER privileged-mode: false compute-type: BUILD_GENERAL1_SMALL - image: 183295444613.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest + image: 620771051181.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest variables: EC2_AMI: "ami-0c29a2c5cf69b5a9c" EC2_INSTANCE_TYPE: "r8g.2xlarge" @@ -53,7 +53,7 @@ batch: type: LINUX_CONTAINER privileged-mode: false compute-type: BUILD_GENERAL1_SMALL - image: 183295444613.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest + image: 620771051181.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest variables: EC2_AMI: "ami-0c29a2c5cf69b5a9c" EC2_INSTANCE_TYPE: "r8g.2xlarge" diff --git a/tests/ci/cdk/cdk/ecr_stack.py b/tests/ci/cdk/cdk/ecr_stack.py index 2db88d919f..61cedad148 100644 --- a/tests/ci/cdk/cdk/ecr_stack.py +++ b/tests/ci/cdk/cdk/ecr_stack.py @@ -14,6 
+14,12 @@ def __init__(self, scope: Construct, id: str, repo_name: str, **kwargs) -> None: repo = ecr.Repository(scope=self, id=id, repository_name=repo_name) repo.grant_pull_push(iam.ServicePrincipal("codebuild.amazonaws.com")) repo.grant_pull(iam.ArnPrincipal("arn:aws:iam::222961743098:role/scrutini-ecr")) + repo.add_lifecycle_rule( + description="Retain latest images", + tag_pattern_list=["*_latest"], + max_image_age=Duration.days(7300), + ) + repo.add_lifecycle_rule( description="Expire images older than 1 month", max_image_age=Duration.days(30), diff --git a/tests/ci/cdk/cdk/windows_docker_image_build_stack.py b/tests/ci/cdk/cdk/windows_docker_image_build_stack.py index d12fe7cd42..236c2036b6 100644 --- a/tests/ci/cdk/cdk/windows_docker_image_build_stack.py +++ b/tests/ci/cdk/cdk/windows_docker_image_build_stack.py @@ -25,6 +25,7 @@ WIN_EC2_TAG_VALUE, SSM_DOCUMENT_NAME, GITHUB_SOURCE_VERSION, + S3_FOR_WIN_DOCKER_IMG_BUILD, ) from util.yml_loader import YmlLoader @@ -37,7 +38,7 @@ def __init__( scope: Construct, id: str, env: typing.Union[Environment, typing.Dict[str, typing.Any]], - **kwargs + **kwargs, ) -> None: super().__init__(scope, id, env=env, **kwargs) @@ -70,7 +71,7 @@ def __init__( bucket = s3.Bucket( scope=self, id="{}-s3".format(id), - bucket_name=PhysicalName.GENERATE_IF_NEEDED, + bucket_name=f"{env.account}-{S3_FOR_WIN_DOCKER_IMG_BUILD}", block_public_access=s3.BlockPublicAccess.BLOCK_ALL, ) @@ -136,5 +137,5 @@ def __init__( Tags.of(instance).add(WIN_EC2_TAG_KEY, WIN_EC2_TAG_VALUE) self.output = { - "s3_bucket_name": bucket._generate_physical_name(), + "s3_bucket_name": f"{env.account}-{S3_FOR_WIN_DOCKER_IMG_BUILD}", } diff --git a/tests/ci/cdk/pipeline/ci_stage.py b/tests/ci/cdk/pipeline/ci_stage.py index a232a7c0fd..089c6ef186 100644 --- a/tests/ci/cdk/pipeline/ci_stage.py +++ b/tests/ci/cdk/pipeline/ci_stage.py @@ -4,17 +4,25 @@ import re import typing -from aws_cdk import Stage, Environment, Duration, Stack, pipelines, aws_iam as iam, 
aws_codebuild as codebuild +from aws_cdk import ( + Stage, + Environment, + Duration, + pipelines, + aws_iam as iam, + aws_codebuild as codebuild, +) from constructs import Construct -from cdk.aws_lc_analytics_stack import AwsLcGitHubAnalyticsStack -from cdk.aws_lc_android_ci_stack import AwsLcAndroidCIStack -from cdk.aws_lc_ec2_test_framework_ci_stack import AwsLcEC2TestingCIStack -from cdk.aws_lc_github_ci_stack import AwsLcGitHubCIStack -from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack +from cdk.aws_lc_base_ci_stack import AwsLcBaseCiStack +from pipeline.ci_util import add_ci_stacks from pipeline.codebuild_batch_step import CodeBuildBatchStep -from util.metadata import PRE_PROD_ACCOUNT, GITHUB_TOKEN_SECRET_NAME, STAGING_GITHUB_REPO_OWNER, \ - STAGING_GITHUB_REPO_NAME +from util.metadata import ( + PRE_PROD_ACCOUNT, + GITHUB_TOKEN_SECRET_NAME, + STAGING_GITHUB_REPO_OWNER, + STAGING_GITHUB_REPO_NAME, +) class CiStage(Stage): @@ -33,137 +41,14 @@ def __init__( **kwargs, ) - self.build_options = [] - - # Define CodeBuild Batch job for testing code. 
- x86_build_spec_file = "cdk/codebuild/github_ci_linux_x86_omnibus.yaml" - self.ci_linux_x86_stack = AwsLcGitHubCIStack( - self, - "aws-lc-ci-linux-x86", - x86_build_spec_file, - env=deploy_environment, - stack_name="aws-lc-ci-linux-x86", - ) - self.build_options.append( - BatchBuildOptions( - project="aws-lc-ci-linux-x86", - ignore_failure=False, - ) - ) - - arm_stack_name = "aws-lc-ci-linux-arm" - arm_build_spec_file = "cdk/codebuild/github_ci_linux_arm_omnibus.yaml" - self.ci_linux_aarch_stack = AwsLcGitHubCIStack( - self, - arm_stack_name, - arm_build_spec_file, - env=deploy_environment, - stack_name=arm_stack_name, - ) - self.build_options.append( - BatchBuildOptions( - project="aws-lc-ci-linux-arm", - ignore_failure=False, - ) - ) - - integration_build_spec_file = "cdk/codebuild/github_ci_integration_omnibus.yaml" - self.ci_integration_stack = AwsLcGitHubCIStack( - self, - "aws-lc-ci-integration", - integration_build_spec_file, - env=deploy_environment, - stack_name="aws-lc-ci-integration", - ) - self.build_options.append( - BatchBuildOptions( - project="aws-lc-ci-integration", - ignore_failure=True, - ) - ) - - fuzz_build_spec_file = "cdk/codebuild/github_ci_fuzzing_omnibus.yaml" - self.ci_fuzzing_stack = AwsLcGitHubFuzzCIStack( - self, - "aws-lc-ci-fuzzing", - fuzz_build_spec_file, - env=deploy_environment, - stack_name="aws-lc-ci-fuzzing", - ) - self.build_options.append( - BatchBuildOptions( - project="aws-lc-ci-fuzzing", - ignore_failure=False, - ) - ) - - analytics_build_spec_file = "cdk/codebuild/github_ci_analytics_omnibus.yaml" - self.ci_analytics_stack = AwsLcGitHubAnalyticsStack( - self, - "aws-lc-ci-analytics", - analytics_build_spec_file, - env=deploy_environment, - stack_name="aws-lc-ci-analytics", - ) - self.build_options.append( - BatchBuildOptions( - project="aws-lc-ci-analytics", - ignore_failure=True, - ) - ) - - # bm_framework_build_spec_file = "cdk/codebuild/bm_framework_omnibus.yaml" - # BmFrameworkStack(app, "aws-lc-ci-bm-framework", 
bm_framework_build_spec_file, env=env) - ec2_test_framework_build_spec_file = ( - "cdk/codebuild/ec2_test_framework_omnibus.yaml" - ) - self.ci_ec2_test_framework_stack = AwsLcEC2TestingCIStack( - self, - "aws-lc-ci-ec2-test-framework", - ec2_test_framework_build_spec_file, - env=deploy_environment, - stack_name="aws-lc-ci-ec2-test-framework", - ) - self.build_options.append( - BatchBuildOptions( - project="aws-lc-ci-ec2-test-framework", - ignore_failure=True, - ) - ) - - android_build_spec_file = "cdk/codebuild/github_ci_android_omnibus.yaml" - self.ci_android_stack = AwsLcAndroidCIStack( - self, - "aws-lc-ci-devicefarm-android", - android_build_spec_file, - env=deploy_environment, - stack_name="aws-lc-ci-devicefarm-android", - ) - self.build_options.append( - BatchBuildOptions( - project="aws-lc-ci-devicefarm-android", - ignore_failure=False, - ) - ) - - win_x86_build_spec_file = "cdk/codebuild/github_ci_windows_x86_omnibus.yaml" - self.ci_windows_x86_stack = AwsLcGitHubCIStack( - self, - "aws-lc-ci-windows-x86", - win_x86_build_spec_file, - env=deploy_environment, - stack_name="aws-lc-ci-windows-x86", - ) - self.build_options.append( - BatchBuildOptions( - project="aws-lc-ci-windows-x86", - ignore_failure=False, - ) - ) + # Add CodeBuild Batch job for testing code. 
+ add_ci_stacks(self, env=deploy_environment) @property - def stacks(self) -> typing.List[Stack]: - return [child for child in self.node.children if isinstance(child, Stack)] + def stacks(self) -> typing.List[AwsLcBaseCiStack]: + return [ + child for child in self.node.children if isinstance(child, AwsLcBaseCiStack) + ] def add_stage_to_pipeline( self, @@ -175,14 +60,14 @@ def add_stage_to_pipeline( ): stack_names = [stack.stack_name for stack in self.stacks] - private_repo_sync_step=None + private_repo_sync_step = None if self.stacks[0].account == PRE_PROD_ACCOUNT: private_repo_sync_step = pipelines.CodeBuildStep( "PrivateRepoSync", build_environment=codebuild.BuildEnvironment( environment_variables={ - "GITHUB_PAT": codebuild.BuildEnvironmentVariable( + "GITHUB_PAT": codebuild.BuildEnvironmentVariable( type=codebuild.BuildEnvironmentVariableType.SECRETS_MANAGER, value=GITHUB_TOKEN_SECRET_NAME, ), @@ -190,7 +75,7 @@ def add_stage_to_pipeline( ), commands=[ "env", - "curl -H \"Authorization: token ${GITHUB_PAT}\" https://api.github.com/user", + 'curl -H "Authorization: token ${GITHUB_PAT}" https://api.github.com/user', "git clone https://${GITHUB_PAT}@github.com/${STAGING_GITHUB_REPO_OWNER}/${STAGING_GITHUB_REPO_NAME}.git", "git remote add upstream https://github.com/aws/aws-lc.git", "git fetch upstream", @@ -225,20 +110,20 @@ def add_stage_to_pipeline( timeout=Duration.minutes(60), ) + batch_timeout = max([stack.timeout for stack in self.stacks]) * (max_retry + 1) batch_build_jobs = { "build-list": [ { - "identifier": options.identifier, - "ignore-failure": options.ignore_failure, + "identifier": re.sub(r"[^a-zA-Z0-9]", "_", stack.stack_name), + "ignore-failure": stack.ignore_failure, "env": { "variables": { - "PROJECT": options.project, - "TIMEOUT": str(max_retry * options.timeout), - **options.env, + "PROJECT": stack.stack_name, + "TIMEOUT": batch_timeout, } }, } - for options in self.build_options + for stack in self.stacks ] } @@ -252,7 +137,7 @@ def 
add_stage_to_pipeline( "./build_target.sh --build-type ci --project ${PROJECT} --max-retry ${MAX_RETRY} --timeout ${TIMEOUT}", ], role=role, - timeout=300, + timeout=batch_timeout, project_description=f"Pipeline step AwsLcCiPipeline/{self.stage_name}/StartWait", partial_batch_build_spec=batch_build_jobs, env={ @@ -267,21 +152,5 @@ def add_stage_to_pipeline( pipeline.add_stage( self, pre=[private_repo_sync_step] if private_repo_sync_step else None, - post=[prebuild_check_step, ci_run_step] + post=[prebuild_check_step, ci_run_step], ) - - -class BatchBuildOptions: - def __init__( - self, - project: str, - identifier: str = None, - ignore_failure: bool = False, - timeout: int = 120, - env: typing.Optional[typing.Mapping[str, str]] = None, - ): - self.project = project - self.identifier = identifier or re.sub(r"[^a-zA-Z0-9]", "_", project) - self.ignore_failure = ignore_failure - self.timeout = timeout - self.env = env or {} diff --git a/tests/ci/cdk/pipeline/ci_util.py b/tests/ci/cdk/pipeline/ci_util.py new file mode 100644 index 0000000000..55337efc93 --- /dev/null +++ b/tests/ci/cdk/pipeline/ci_util.py @@ -0,0 +1,99 @@ +import typing + +from aws_cdk import Environment +from constructs import Construct + +from cdk.aws_lc_analytics_stack import AwsLcGitHubAnalyticsStack +from cdk.aws_lc_android_ci_stack import AwsLcAndroidCIStack +from cdk.aws_lc_ec2_test_framework_ci_stack import AwsLcEC2TestingCIStack +from cdk.aws_lc_github_ci_stack import AwsLcGitHubCIStack +from cdk.aws_lc_github_fuzz_ci_stack import AwsLcGitHubFuzzCIStack + + +# Define CodeBuild Batch jobs for testing code. 
+def add_ci_stacks( + scope: Construct, + env: typing.Union[Environment, typing.Dict[str, typing.Any]], +): + # define customized settings to run CodeBuild jobs from CodePipeline + build_options = [] + + x86_build_spec_file = "cdk/codebuild/github_ci_linux_x86_omnibus.yaml" + AwsLcGitHubCIStack( + scope, + "aws-lc-ci-linux-x86", + x86_build_spec_file, + env=env, + ignore_failure=False, + stack_name="aws-lc-ci-linux-x86", + ) + + arm_build_spec_file = "cdk/codebuild/github_ci_linux_arm_omnibus.yaml" + AwsLcGitHubCIStack( + scope, + "aws-lc-ci-linux-arm", + arm_build_spec_file, + env=env, + ignore_failure=False, + stack_name="aws-lc-ci-linux-arm", + ) + + integration_build_spec_file = "cdk/codebuild/github_ci_integration_omnibus.yaml" + AwsLcGitHubCIStack( + scope, + "aws-lc-ci-integration", + integration_build_spec_file, + env=env, + ignore_failure=True, + stack_name="aws-lc-ci-integration", + ) + + fuzz_build_spec_file = "cdk/codebuild/github_ci_fuzzing_omnibus.yaml" + AwsLcGitHubFuzzCIStack( + scope, + "aws-lc-ci-fuzzing", + fuzz_build_spec_file, + env=env, + ignore_failure=False, + stack_name="aws-lc-ci-fuzzing", + ) + + analytics_build_spec_file = "cdk/codebuild/github_ci_analytics_omnibus.yaml" + AwsLcGitHubAnalyticsStack( + scope, + "aws-lc-ci-analytics", + analytics_build_spec_file, + env=env, + ignore_failure=True, + stack_name="aws-lc-ci-analytics", + ) + + ec2_test_framework_build_spec_file = "cdk/codebuild/ec2_test_framework_omnibus.yaml" + AwsLcEC2TestingCIStack( + scope, + "aws-lc-ci-ec2-test-framework", + ec2_test_framework_build_spec_file, + env=env, + ignore_failure=True, + stack_name="aws-lc-ci-ec2-test-framework", + ) + + android_build_spec_file = "cdk/codebuild/github_ci_android_omnibus.yaml" + AwsLcAndroidCIStack( + scope, + "aws-lc-ci-devicefarm-android", + android_build_spec_file, + env=env, + ignore_failure=False, + stack_name="aws-lc-ci-devicefarm-android", + ) + + win_x86_build_spec_file = "cdk/codebuild/github_ci_windows_x86_omnibus.yaml" + 
AwsLcGitHubCIStack( + scope, + "aws-lc-ci-windows-x86", + win_x86_build_spec_file, + env=env, + ignore_failure=False, + stack_name="aws-lc-ci-windows-x86", + ) diff --git a/tests/ci/cdk/pipeline/codebuild_batch_step.py b/tests/ci/cdk/pipeline/codebuild_batch_step.py index ab8e15211b..b7cd6afaf8 100644 --- a/tests/ci/cdk/pipeline/codebuild_batch_step.py +++ b/tests/ci/cdk/pipeline/codebuild_batch_step.py @@ -3,7 +3,6 @@ import builtins import re import typing -from typing import Mapping import jsii from aws_cdk import ( @@ -65,7 +64,6 @@ def __init__( else {} ) - @jsii.member(jsii_name="produceAction") def produce_action( self, stage: codepipeline.IStage, diff --git a/tests/ci/cdk/pipeline/deploy_util.py b/tests/ci/cdk/pipeline/deploy_util.py deleted file mode 100644 index 454db10f3b..0000000000 --- a/tests/ci/cdk/pipeline/deploy_util.py +++ /dev/null @@ -1,7 +0,0 @@ -from enum import Enum - - -class DeployEnvironmentType(Enum): - PRE_PROD = "Staging" - PROD = "Prod" - DEV = "Dev" diff --git a/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py b/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py index 4c230a55e1..984639eaa8 100644 --- a/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py +++ b/tests/ci/cdk/pipeline/linux_docker_image_build_stage.py @@ -8,7 +8,6 @@ from cdk.ecr_stack import EcrStack from cdk.linux_docker_image_batch_build_stack import LinuxDockerImageBatchBuildStack -from pipeline.deploy_util import DeployEnvironmentType from util.metadata import LINUX_X86_ECR_REPO, LINUX_AARCH_ECR_REPO diff --git a/tests/ci/cdk/pipeline/pipeline_stack.py b/tests/ci/cdk/pipeline/pipeline_stack.py index 332e096eb3..24201cdd3a 100644 --- a/tests/ci/cdk/pipeline/pipeline_stack.py +++ b/tests/ci/cdk/pipeline/pipeline_stack.py @@ -1,6 +1,7 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 OR ISC import typing +from enum import Enum from aws_cdk import Stack, Environment, Duration from aws_cdk import ( @@ -10,19 +11,24 @@ aws_iam as iam, aws_events as events, aws_events_targets as targets, - aws_cloudwatch as cloudwatch, + aws_codebuild as codebuild, ) from aws_cdk.pipelines import CodeBuildStep from constructs import Construct from pipeline.ci_stage import CiStage -from pipeline.deploy_util import DeployEnvironmentType from pipeline.linux_docker_image_build_stage import LinuxDockerImageBuildStage from pipeline.setup_stage import SetupStage from pipeline.windows_docker_image_build_stage import WindowsDockerImageBuildStage from util.metadata import * +class DeployEnvironmentType(Enum): + PRE_PROD = "Staging" + PROD = "Prod" + DEV = "Dev" + + class AwsLcCiPipeline(Stack): def __init__( self, @@ -60,7 +66,7 @@ def __init__( actions=[ "codepipeline:GetPipelineExecution", "secretsmanager:GetSecretValue", - "kms:Decrypt" + "kms:Decrypt", ], ) ) @@ -133,6 +139,9 @@ def __init__( ), self_mutation=True, code_build_defaults=pipelines.CodeBuildOptions( + build_environment=codebuild.BuildEnvironment( + compute_type=codebuild.ComputeType.MEDIUM, + ), role_policy=[ iam.PolicyStatement( effect=iam.Effect.ALLOW, @@ -219,7 +228,7 @@ def deploy_to_environment( iam.PolicyStatement( effect=iam.Effect.ALLOW, resources=[ - f"arn:aws:iam::{deploy_environment.account}:role/CrossAccountCodeBuildRole" + f"arn:aws:iam::{deploy_environment.account}:role/CrossAccountBuildRole" ], actions=["sts:AssumeRole"], ) @@ -277,7 +286,7 @@ def deploy_to_environment( commands=[ "cd tests/ci/cdk/pipeline/scripts", "chmod +x finalize_images.sh", - "./finalize_images.sh --repos \"${ECR_REPOS}\"", + './finalize_images.sh --repos "${ECR_REPOS}"', ], env={ **codebuild_environment_variables, diff --git a/tests/ci/cdk/pipeline/scripts/build_target.sh b/tests/ci/cdk/pipeline/scripts/build_target.sh index bca7d586d0..83ba25fb0b 100644 --- 
a/tests/ci/cdk/pipeline/scripts/build_target.sh +++ b/tests/ci/cdk/pipeline/scripts/build_target.sh @@ -9,13 +9,13 @@ source util.sh echo \"Environment variables:\" env -if [[ -z "${NEED_REBUILD+x}" || -z "${NEED_REBUILD}" || ${NEED_REBUILD} -eq 0 ]]; then +if [[ -z "${NEED_REBUILD:+x}" || ${NEED_REBUILD} -eq 0 ]]; then echo "No rebuild needed" exit 0 fi export COMMIT_HASH=${COMMIT_HASH:-$CODEBUILD_RESOLVED_SOURCE_VERSION} -export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountCodeBuildRole" +export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountBuildRole" export CROSS_ACCOUNT_BUILD_SESSION="pipeline-${COMMIT_HASH}" function build_codebuild_ci_project() { @@ -33,70 +33,59 @@ function build_codebuild_ci_project() { source_version=${COMMIT_HASH} fi - echo "Starting CI tests in ${project}" - start_codebuild_project "${project}" "${source_version}" - while [[ ${attempt} -le ${MAX_RETRY} ]]; do - if [[ $attempt -gt 0 ]]; then - echo "Retrying ${attempt}/${MAX_RETRY}..." - fi - - attempt=$((attempt + 1)) - - echo "Waiting for CI tests for complete. This may take anywhere from 15 minutes to 1 hour" - if ! codebuild_build_status_check "${TIMEOUT}"; then - echo "Tests failed." - if [[ ${attempt} -le ${MAX_RETRY} ]]; then - retry_batch_build + if [[ ${attempt} -eq 0 ]]; then + echo "Starting CI tests in ${project}" + start_codebuild_project "${project}" "${source_version}" else - echo "CI tests failed." - exit 1 + echo "Retrying ${attempt}/${MAX_RETRY}..." + retry_batch_build + fi + + echo "Waiting for docker images creation. Building the docker images need to take 1 hour." + # TODO(CryptoAlg-624): These image build may fail due to the Docker Hub pull limits made on 2020-11-01. + if codebuild_build_status_check "${TIMEOUT}"; then + echo "All tests completed successfully" + exit 0 fi - fi - done - echo "All tests completed successfully" + attempt=$((attempt + 1)) + done + + echo "CI tests failed." 
+ exit 1 } function build_linux_docker_images() { local attempt=0 - echo "Activating AWS CodeBuild to build Linux aarch & x86 docker images." - start_codebuild_project aws-lc-docker-image-build-linux "${COMMIT_HASH}" - while [[ ${attempt} -le ${MAX_RETRY} ]]; do - if [[ $attempt -gt 0 ]]; then + if [[ ${attempt} -eq 0 ]]; then + echo "Activating AWS CodeBuild to build Linux aarch & x86 docker images." + start_codebuild_project aws-lc-docker-image-build-linux "${COMMIT_HASH}" + else echo "Retrying ${attempt}/${MAX_RETRY}..." + retry_batch_build fi - attempt=$((attempt + 1)) - echo "Waiting for docker images creation. Building the docker images need to take 1 hour." # TODO(CryptoAlg-624): These image build may fail due to the Docker Hub pull limits made on 2020-11-01. - if ! codebuild_build_status_check "${TIMEOUT}"; then - echo "Build failed." - if [[ ${attempt} -le ${MAX_RETRY} ]]; then - retry_batch_build - else - echo "Failed to build Linux docker images" - exit 1 - fi + if codebuild_build_status_check "${TIMEOUT}"; then + echo "Successfully built Linux docker images" + exit 0 fi + + attempt=$((attempt + 1)) done - echo "Successfully built Linux docker images" + echo "Failed to build Linux docker images" + exit 1 } function build_win_docker_images() { local attempt=0 while [[ ${attempt} -le ${MAX_RETRY} ]]; do - if [[ $attempt -gt 0 ]]; then - echo "Retrying ${attempt}/${MAX_RETRY}..." - fi - - attempt=$((attempt + 1)) - echo "Executing AWS SSM commands to build Windows docker images." if ! start_windows_img_build; then echo "Failed to start build" @@ -107,6 +96,8 @@ function build_win_docker_images() { # TODO(CryptoAlg-624): These image build may fail due to the Docker Hub pull limits made on 2020-11-01. if ! win_docker_img_build_status_check "${TIMEOUT}"; then echo "Build failed" + attempt=$((attempt + 1)) + echo "Retrying ${attempt}/${MAX_RETRY}..." 
continue fi @@ -149,7 +140,7 @@ while [[ $# -gt 0 ]]; do done MAX_RETRY=${MAX_RETRY:-0} -TIMEOUT=${TIMEOUT:-180} # 3 hours +TIMEOUT=${TIMEOUT:-180} # 3 hours F if [[ -z ${BUILD_TYPE} ]]; then echo "No build type provided." @@ -158,13 +149,13 @@ fi assume_role -if [[ -z "${BUILD_TYPE+x}" || -z "${BUILD_TYPE}" ]]; then +if [[ -z "${BUILD_TYPE:+x}" ]]; then echo "No build type provided." exit 1 fi if [[ ${BUILD_TYPE} == "docker" ]]; then - if [[ -z "${PLATFORM+x}" || -z "${PLATFORM}" ]]; then + if [[ -z "${PLATFORM:+x}" ]]; then echo "When building Docker images, a platform must be specified." exit 1 fi @@ -178,7 +169,7 @@ if [[ ${BUILD_TYPE} == "docker" ]]; then fi if [[ ${BUILD_TYPE} == "ci" ]]; then - if [[ -z "${PROJECT+x}" || -z "${PROJECT}" ]]; then + if [[ -z "${PROJECT:+x}" ]]; then echo "When building CI tests, a project name must be specified." exit 1 fi diff --git a/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh b/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh index 1d8cd3081a..4fb9114bb6 100644 --- a/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh +++ b/tests/ci/cdk/pipeline/scripts/check_trigger_conditions.sh @@ -13,7 +13,7 @@ LINUX_DOCKER_PATH="tests/ci/docker_images/(dependencies|linux)" WINDOWS_DOCKER_PATH="tests/ci/docker_images/windows" PIPELINE_PATH="tests/ci/cdk/pipeline" -export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountCodeBuildRole" +export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountBuildRole" export CROSS_ACCOUNT_BUILD_SESSION="pipeline-${COMMIT_HASH}" function check_pipeline_trigger_type() { @@ -89,24 +89,24 @@ while [[ $# -gt 0 ]]; do shift done -if [[ -z "${BUILD_TYPE+x}" || -z "${BUILD_TYPE}" ]]; then +if [[ -z "${BUILD_TYPE:+x}" ]]; then echo "No build type provided." exit 1 fi -if [[ -z "${STACKS+x}" || -z "${STACKS}" ]]; then +if [[ -z "${STACKS:+x}" ]]; then echo "No stacks provided." 
exit 1 fi -if [[ -n "${PREVIOUS_REBUILDS+x}" && -n "${PREVIOUS_REBUILDS}" ]]; then +if [[ -n "${PREVIOUS_REBUILDS:-}" ]]; then for previous_rebuild in ${PREVIOUS_REBUILDS}; do NEED_REBUILD=$((NEED_REBUILD + previous_rebuild)) done fi if [[ ${BUILD_TYPE} == "docker" ]]; then - if [[ -z "${PLATFORM+x}" || -z "${PLATFORM}" ]]; then + if [[ -z "${PLATFORM:+x}" ]]; then echo "A platform must be specified" exit 1 fi diff --git a/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh b/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh index 4573ae8a14..db1528a61e 100644 --- a/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh +++ b/tests/ci/cdk/pipeline/scripts/cleanup_orphaned_images.sh @@ -6,7 +6,7 @@ set -exuo pipefail source util.sh -export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountCodeBuildRole" +export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountBuildRole" export CROSS_ACCOUNT_BUILD_SESSION="pipeline-${CODEBUILD_RESOLVED_SOURCE_VERSION}" function remove_pending_images() { @@ -67,7 +67,7 @@ while [[ $# -gt 0 ]]; do shift done -if [[ -z "${REPOS+x}" || -z "${REPOS}" ]]; then +if [[ -z "${REPOS:+x}" ]]; then echo "No build type provided." 
exit 1 fi diff --git a/tests/ci/cdk/pipeline/scripts/finalize_images.sh b/tests/ci/cdk/pipeline/scripts/finalize_images.sh index 8fb6e27bb0..cca1466d97 100644 --- a/tests/ci/cdk/pipeline/scripts/finalize_images.sh +++ b/tests/ci/cdk/pipeline/scripts/finalize_images.sh @@ -6,7 +6,7 @@ set -exuo pipefail source util.sh -export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountCodeBuildRole" +export CROSS_ACCOUNT_BUILD_ROLE_ARN="arn:aws:iam::${DEPLOY_ACCOUNT}:role/CrossAccountBuildRole" export CROSS_ACCOUNT_BUILD_SESSION="pipeline-${CODEBUILD_RESOLVED_SOURCE_VERSION}" function promote_pending_tags_to_latest() { @@ -76,7 +76,7 @@ while [[ $# -gt 0 ]]; do shift done -if [[ -z "${REPOS+x}" || -z "${REPOS}" ]]; then +if [[ -z "${REPOS:+x}" ]]; then echo "No build type provided." exit 1 fi diff --git a/tests/ci/cdk/pipeline/scripts/util.sh b/tests/ci/cdk/pipeline/scripts/util.sh index 0336e0928a..51ce499091 100644 --- a/tests/ci/cdk/pipeline/scripts/util.sh +++ b/tests/ci/cdk/pipeline/scripts/util.sh @@ -4,7 +4,7 @@ set -ex -if [[ -z "${PIPELINE_EXECUTION_ID+x}" || -z "${PIPELINE_EXECUTION_ID}" ]]; then +if [[ -z "${PIPELINE_EXECUTION_ID:+x}" ]]; then TRIGGER_TYPE="manual" else TRIGGER_TYPE="pipeline" @@ -28,7 +28,7 @@ function refresh_session() { unset AWS_SECRET_ACCESS_KEY unset AWS_SESSION_TOKEN - if [[ -z "${PIPELINE_EXECUTION_ID+x}" || -z "${PIPELINE_EXECUTION_ID}" ]]; then + if [[ -z "${PIPELINE_EXECUTION_ID:+x}" ]]; then echo "Security token expired. 
Please monitor build progress on the console" exit 1 fi diff --git a/tests/ci/cdk/run-cdk.sh b/tests/ci/cdk/run-cdk.sh index 8976ae5c0a..2ee517c7d2 100755 --- a/tests/ci/cdk/run-cdk.sh +++ b/tests/ci/cdk/run-cdk.sh @@ -230,7 +230,7 @@ function deploy_production_pipeline() { } function deploy_dev_pipeline() { - if [[ -z "${DEPLOY_ACCOUNT:+x}" || -z "${DEPLOY_ACCOUNT}" ]]; then + if [[ -z "${DEPLOY_ACCOUNT:+x}" ]]; then echo "The pipeline needs a deployment acount to know where to deploy the CI to." exit 1 fi @@ -240,11 +240,11 @@ function deploy_dev_pipeline() { exit 1 fi - if [[ -z "${PIPELINE_ACCOUNT+x}" || -z "${PIPELINE_ACCOUNT}" ]]; then + if [[ -z "${PIPELINE_ACCOUNT:+x}" ]]; then export PIPELINE_ACCOUNT=${DEPLOY_ACCOUNT} fi - if [[ ${PIPELINE_ACCOUNT+x} == '774305600158' ]]; then + if [[ ${PIPELINE_ACCOUNT} == '774305600158' ]]; then echo "Cannot deploy. The production pipeline is hosted with the same name in this pipeline account." exit 1 fi @@ -319,17 +319,17 @@ EOF function export_global_variables() { # If these variables are not set or empty, defaults are export. - if [[ -z "${DEPLOY_ACCOUNT+x}" || -z "${DEPLOY_ACCOUNT}" ]]; then + if [[ -z "${DEPLOY_ACCOUNT:+x}" ]]; then export DEPLOY_ACCOUNT='620771051181' fi - if [[ -z "${DEPLOY_REGION+x}" || -z "${DEPLOY_REGION}" ]]; then + if [[ -z "${DEPLOY_REGION:+x}" ]]; then export DEPLOY_REGION='us-west-2' export AWS_DEFAULT_REGION="${DEPLOY_REGION}" fi - if [[ -z "${GITHUB_REPO_OWNER+x}" || -z "${GITHUB_REPO_OWNER}" ]]; then + if [[ -z "${GITHUB_REPO_OWNER:+x}" ]]; then export GITHUB_REPO_OWNER='aws' fi - if [[ -z "${GITHUB_SOURCE_VERSION+x}" || -z "${GITHUB_SOURCE_VERSION}" ]]; then + if [[ -z "${GITHUB_SOURCE_VERSION:+x}" ]]; then export GITHUB_SOURCE_VERSION='main' fi # Other variables for managing resources. 
@@ -342,6 +342,7 @@ function export_global_variables() { export WIN_EC2_TAG_KEY='aws-lc' export WIN_EC2_TAG_VALUE='aws-lc-windows-docker-image-build' export WIN_DOCKER_BUILD_SSM_DOCUMENT='AWSLC-BuildWindowsDockerImages' + export S3_FOR_WIN_DOCKER_IMG_BUILD='aws-lc-windows-docker-image-build-s3' export MAX_TEST_RETRY=2 export IMG_BUILD_STATUS='unknown' # 620771051181 and 351119683581 is AWS-LC team AWS account. @@ -402,7 +403,7 @@ function main() { done # Make sure action is set. - if [[ -z "${ACTION+x}" || -z "${ACTION}" ]]; then + if [[ -z "${ACTION:+x}" ]]; then echo "${ACTION} is required input." exit 1 fi @@ -451,7 +452,7 @@ function main() { cdk bootstrap ;; invoke) - if [[ -z "${COMMAND+x}" || -z "${COMMAND}" ]]; then + if [[ -z "${COMMAND:+x}" ]]; then echo "--action invoke requires a command." exit 1 fi diff --git a/tests/ci/cdk/util/metadata.py b/tests/ci/cdk/util/metadata.py index 4531e181dd..a17d0f31c0 100644 --- a/tests/ci/cdk/util/metadata.py +++ b/tests/ci/cdk/util/metadata.py @@ -53,4 +53,8 @@ "WIN_DOCKER_BUILD_SSM_DOCUMENT", "AWSLC-BuildWindowsDockerImages" ) +S3_FOR_WIN_DOCKER_IMG_BUILD = EnvUtil.get( + "S3_FOR_WIN_DOCKER_IMG_BUILD", "aws-lc-windows-docker-image-build-s3" +) + GITHUB_PUSH_CI_BRANCH_TARGETS = r"(main|fips-\d{4}-\d{2}-\d{2}.*)" diff --git a/tests/ci/docker_images/linux-aarch/common.sh b/tests/ci/docker_images/linux-aarch/common.sh index dc7fb407d7..b3d123c22a 100755 --- a/tests/ci/docker_images/linux-aarch/common.sh +++ b/tests/ci/docker_images/linux-aarch/common.sh @@ -5,9 +5,9 @@ set -ex if [[ -n "${TRIGGER_TYPE:+x}" && "${TRIGGER_TYPE}" == "pipeline" ]]; then - TAG="pending" + TAG_SUFFIX="pending" else - TAG="latest" + TAG_SUFFIX="latest" fi function validate_input() { @@ -26,7 +26,7 @@ function tag_and_push_img() { target="${2}" validate_input 'target' "${target}" img_push_date=$(date +%Y-%m-%d) - docker_img_with_tag="${target}_${TAG}" + docker_img_with_tag="${target}_${TAG_SUFFIX}" 
docker_img_with_date="${target}_${img_push_date}" docker tag "${source}" "${docker_img_with_tag}" docker tag "${source}" "${docker_img_with_date}" diff --git a/tests/ci/docker_images/linux-x86/common.sh b/tests/ci/docker_images/linux-x86/common.sh index dc7fb407d7..b3d123c22a 100755 --- a/tests/ci/docker_images/linux-x86/common.sh +++ b/tests/ci/docker_images/linux-x86/common.sh @@ -5,9 +5,9 @@ set -ex if [[ -n "${TRIGGER_TYPE:+x}" && "${TRIGGER_TYPE}" == "pipeline" ]]; then - TAG="pending" + TAG_SUFFIX="pending" else - TAG="latest" + TAG_SUFFIX="latest" fi function validate_input() { @@ -26,7 +26,7 @@ function tag_and_push_img() { target="${2}" validate_input 'target' "${target}" img_push_date=$(date +%Y-%m-%d) - docker_img_with_tag="${target}_${TAG}" + docker_img_with_tag="${target}_${TAG_SUFFIX}" docker_img_with_date="${target}_${img_push_date}" docker tag "${source}" "${docker_img_with_tag}" docker tag "${source}" "${docker_img_with_date}" diff --git a/tests/ci/docker_images/windows/push_images.ps1 b/tests/ci/docker_images/windows/push_images.ps1 index ae57eb977a..09481efaef 100644 --- a/tests/ci/docker_images/windows/push_images.ps1 +++ b/tests/ci/docker_images/windows/push_images.ps1 @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 OR ISC $ECS_REPO=$args[0] -$TAG = if (-not [string]::IsNullOrEmpty($TRIGGER_TYPE) -and $TRIGGER_TYPE -eq "pipeline") { +$TAG_SUFFIX = if (-not [string]::IsNullOrEmpty($TRIGGER_TYPE) -and $TRIGGER_TYPE -eq "pipeline") { "pending" } else { "latest" @@ -15,12 +15,12 @@ if ($args[0] -eq $null) { Write-Host "$ECS_REPO" -docker tag vs2015 ${ECS_REPO}:vs2015_${TAG} +docker tag vs2015 ${ECS_REPO}:vs2015_${TAG_SUFFIX} docker tag vs2015 ${ECS_REPO}:vs2015-$(Get-Date -UFormat %Y-%m-%d-%H) -docker push ${ECS_REPO}:vs2015_${TAG} +docker push ${ECS_REPO}:vs2015_${TAG_SUFFIX} docker push ${ECS_REPO}:vs2015-$(Get-Date -UFormat %Y-%m-%d-%H) -docker tag vs2017 ${ECS_REPO}:vs2017_${TAG} +docker tag vs2017 ${ECS_REPO}:vs2017_${TAG_SUFFIX} docker 
tag vs2017 ${ECS_REPO}:vs2017-$(Get-Date -UFormat %Y-%m-%d-%H) -docker push ${ECS_REPO}:vs2017_${TAG} +docker push ${ECS_REPO}:vs2017_${TAG_SUFFIX} docker push ${ECS_REPO}:vs2017-$(Get-Date -UFormat %Y-%m-%d-%H) From cf7cdeb9db9fea21844c6187041d37fe67e1b50b Mon Sep 17 00:00:00 2001 From: Nghi Ho Date: Mon, 21 Apr 2025 11:55:23 -0700 Subject: [PATCH 10/10] Remove stale benchmark scripts --- tests/ci/build_run_benchmarks.sh | 108 ------------ .../cdk/codebuild/bm_framework_omnibus.yaml | 16 -- .../cdk/ssm/bm_framework_ssm_document.yaml | 45 ----- .../codebuild/linux-x86/run_bm_framework.yml | 13 -- tests/ci/run_bm_framework.sh | 154 ------------------ 5 files changed, 336 deletions(-) delete mode 100755 tests/ci/build_run_benchmarks.sh delete mode 100644 tests/ci/cdk/cdk/codebuild/bm_framework_omnibus.yaml delete mode 100644 tests/ci/cdk/cdk/ssm/bm_framework_ssm_document.yaml delete mode 100644 tests/ci/codebuild/linux-x86/run_bm_framework.yml delete mode 100755 tests/ci/run_bm_framework.sh diff --git a/tests/ci/build_run_benchmarks.sh b/tests/ci/build_run_benchmarks.sh deleted file mode 100755 index c005f605e0..0000000000 --- a/tests/ci/build_run_benchmarks.sh +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env bash -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 OR ISC - -set -x - -# set default value of directory name -if [ -z "${PR_FOLDER_NAME}" ]; then export PR_FOLDER_NAME=aws-lc; fi - -# Get AWS_ACCOUNT_ID -if [ -z "${AWS_ACCOUNT_ID}" ]; then AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text); fi - -AWSLC_PR_ROOT=$(pwd) - -cd .. - -# run this from the bm_framework root directory! 
-AWSLC_PR_ROOT=$(pwd)/"${PR_FOLDER_NAME}" -AWSLC_PROD_ROOT=$(pwd)/aws-lc-prod - -source ${AWSLC_PR_ROOT}/tests/ci/common_posix_setup.sh - -# clone the various repositories we need (we already have aws-lc-pr since we need it to run this script) -git clone https://github.com/aws/aws-lc.git aws-lc-prod - -# build AWSLC pr -mkdir -p "${PR_FOLDER_NAME}"/build -${CMAKE_COMMAND} -B"${PR_FOLDER_NAME}"/build -H"${PR_FOLDER_NAME}" -GNinja -DCMAKE_BUILD_TYPE=Release \ - -DBUILD_TESTING=OFF -ninja -C "${PR_FOLDER_NAME}"/build - -# build FIPS compliant version of AWSLC pr -mkdir -p "${PR_FOLDER_NAME}"/fips_build -${CMAKE_COMMAND} -B"${PR_FOLDER_NAME}"/fips_build -H"${PR_FOLDER_NAME}" -GNinja -DFIPS=1 -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=TRUE -ninja -C "${PR_FOLDER_NAME}"/fips_build - -# build AWSLC prod -mkdir -p aws-lc-prod/build -${CMAKE_COMMAND} -Baws-lc-prod/build -Haws-lc-prod -GNinja -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -ninja -C aws-lc-prod/build - -#build FIPS compliant version of AWSLC prod -mkdir -p aws-lc-prod/fips_build -${CMAKE_COMMAND} -Baws-lc-prod/fips_build -Haws-lc-prod -GNinja -DFIPS=1 -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=TRUE -ninja -C aws-lc-prod/fips_build - -./"${PR_FOLDER_NAME}"/build/tool/bssl speed -timeout 1 -json > aws-lc-pr_bm.json -./"${PR_FOLDER_NAME}"/fips_build/tool/bssl speed -timeout 1 -json > aws-lc-pr_fips_bm.json - -./aws-lc-prod/build/tool/bssl speed -timeout 1 -json > aws-lc-prod_bm.json -./aws-lc-prod/fips_build/tool/bssl speed -timeout 1 -json > aws-lc-prod_fips_bm.json - - -./"${PR_FOLDER_NAME}"/build/tool/bssl speed -filter trusttoken -timeout 1 -json > aws-lc-pr_tt_bm.json -./"${PR_FOLDER_NAME}"/fips_build/tool/bssl speed -filter trusttoken -timeout 1 -json > aws-lc-pr_tt_fips_bm.json -./aws-lc-prod/build/tool/bssl speed -filter trusttoken -timeout 1 -json > aws-lc-prod_tt_bm.json -./aws-lc-prod/fips_build/tool/bssl speed -filter trusttoken -timeout 1 -json > aws-lc-prod_tt_fips_bm.json - -# convert 
results from .json to .csv -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/convert_json_to_csv.py aws-lc-pr_bm.json -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/convert_json_to_csv.py aws-lc-pr_fips_bm.json -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/convert_json_to_csv.py aws-lc-prod_bm.json -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/convert_json_to_csv.py aws-lc-prod_fips_bm.json - -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/convert_json_to_csv.py aws-lc-pr_tt_bm.json -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/convert_json_to_csv.py aws-lc-pr_tt_fips_bm.json -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/convert_json_to_csv.py aws-lc-prod_tt_bm.json -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/convert_json_to_csv.py aws-lc-prod_tt_fips_bm.json - -# once we have csvs, we want to update the main benchmark results files with the sequential trusttoken results -# files will be updated in place -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/update_results.py aws-lc-pr_bm.csv aws-lc-pr_tt_bm.csv python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/update_results.py aws-lc-pr_fips_bm.csv aws-lc-pr_tt_fips_bm.csv python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/update_results.py aws-lc-prod_bm.csv aws-lc-prod_tt_bm.csv python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/update_results.py aws-lc-prod_fips_bm.csv aws-lc-prod_tt_fips_bm.csv - -# check for regressions! -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/compare_results.py aws-lc-prod_bm.csv aws-lc-pr_bm.csv prod_vs_pr.csv -prod_vs_pr_code="$?" -python3 "${PR_FOLDER_NAME}"/tests/ci/benchmark_framework/compare_results.py aws-lc-prod_fips_bm.csv aws-lc-pr_fips_bm.csv prod_vs_pr_fips.csv -prod_vs_pr_fips_code="$?" 
- -# upload results to s3 -aws s3 cp aws-lc-pr_bm.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-pr-bucket/${CODEBUILD_SOURCE_VERSION}/aws-lc-pr_bm.csv" -aws s3 cp aws-lc-pr_fips_bm.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-pr-bucket/${CODEBUILD_SOURCE_VERSION}/aws-lc-pr_fips_bm.csv" -aws s3 cp aws-lc-prod_bm.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-prod-bucket/${CODEBUILD_SOURCE_VERSION}/aws-lc-prod_bm.csv" -aws s3 cp aws-lc-prod_fips_bm.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-prod-bucket/${CODEBUILD_SOURCE_VERSION}/aws-lc-prod_fips_bm.csv" - -# upload results to lastest folders in s3 -aws s3 mv aws-lc-pr_bm.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-pr-bucket/latest-${CODEBUILD_WEBHOOK_TRIGGER}/aws-lc-pr_bm.csv" -aws s3 mv aws-lc-pr_fips_bm.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-pr-bucket/latest-${CODEBUILD_WEBHOOK_TRIGGER}/aws-lc-pr_fips_bm.csv" -aws s3 mv aws-lc-prod_bm.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-prod-bucket/latest/aws-lc-prod_bm.csv" -aws s3 mv aws-lc-prod_fips_bm.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-prod-bucket/latest/aws-lc-prod_fips_bm.csv" - -# if any of the results gave an exit code of 5, there's a performance regression -# we only want to actually fail the vote if we've detected a regression in the pr version of aws-lc and tip of main of aws-lc (for fips and non-fips) -exit_fail=false -if [ "${prod_vs_pr_code}" != 0 ]; then - aws s3 cp prod_vs_pr.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-pr-bucket/${CODEBUILD_SOURCE_VERSION}/prod_vs_pr.csv" - aws s3 mv prod_vs_pr.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-pr-bucket/latest-${CODEBUILD_WEBHOOK_TRIGGER}/prod_vs_pr.csv" - exit_fail=true -fi -if [ "${prod_vs_pr_fips_code}" != 0 ]; then - aws s3 cp prod_vs_pr_fips.csv s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-pr-bucket/${CODEBUILD_SOURCE_VERSION}/prod_vs_pr_fips.csv" - aws s3 mv prod_vs_pr_fips.csv 
s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-pr-bucket/latest-${CODEBUILD_WEBHOOK_TRIGGER}/prod_vs_pr_fips.csv" - exit_fail=true -fi - -if [ "${exit_fail}" = true ]; then - exit 1 -fi diff --git a/tests/ci/cdk/cdk/codebuild/bm_framework_omnibus.yaml b/tests/ci/cdk/cdk/codebuild/bm_framework_omnibus.yaml deleted file mode 100644 index ea1aea37c0..0000000000 --- a/tests/ci/cdk/cdk/codebuild/bm_framework_omnibus.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 OR ISC - -version: 0.2 - -# Doc for batch https://docs.aws.amazon.com/codebuild/latest/userguide/batch-build-buildspec.html#build-spec.batch.build-list -batch: - build-list: - - - identifier: ubuntu2004_bm_framework - buildspec: ./tests/ci/codebuild/linux-x86/run_bm_framework.yml - env: - type: LINUX_CONTAINER - privileged-mode: true - compute-type: BUILD_GENERAL1_LARGE - image: 620771051181.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-linux-x86:ubuntu-20.04_clang-7x-bm-framework_latest diff --git a/tests/ci/cdk/cdk/ssm/bm_framework_ssm_document.yaml b/tests/ci/cdk/cdk/ssm/bm_framework_ssm_document.yaml deleted file mode 100644 index 6264088f24..0000000000 --- a/tests/ci/cdk/cdk/ssm/bm_framework_ssm_document.yaml +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-# SPDX-License-Identifier: Apache-2.0 OR ISC - ---- -schemaVersion: '2.2' -description: aws-lc:bmFrameworkEc2Benchmark -mainSteps: - - action: aws:runShellScript - name: runShellScript - inputs: - timeoutSeconds: '7200' - runCommand: - - sudo -i - - export DEBIAN_FRONTEND=noninteractive - - export CPU_TYPE=$(dpkg --print-architecture) - # if we want to disable CPU features, pass in the {OPENSSL_ia32cap} value - - if [ {OPENSSL_ia32cap} ]; then export OPENSSL_ia32cap={OPENSSL_ia32cap}; fi - - echo "${OPENSSL_ia32cap}" - # if we have a cpu type of x86, we want linux-x86 - - if [ "${CPU_TYPE}" = amd64 ]; then export CPU_ARCH=linux-x86; export AWS_CLI_PREFIX=x86_; sudo sh -c "echo 1 > /sys/devices/system/cpu/intel_pstate/no_turbo"; fi - # if we have a cpu type of arm, we want linux-aarch - - if [ "${CPU_TYPE}" = arm64 ]; then export CPU_ARCH=linux-aarch; export AWS_CLI_PREFIX=aarch; fi - # install aws-cli - - apt-get -y install unzip - - curl "https://awscli.amazonaws.com/awscli-exe-linux-${AWS_CLI_PREFIX}64.zip" -o "awscliv2.zip" - - unzip awscliv2.zip - - ./aws/install - # create bm_framework directory and checkout aws-lc - - mkdir bm_framework - - cd bm_framework - - git clone {GITHUB_REPO} aws-lc-pr - - cd aws-lc-pr - - git checkout {COMMIT_ID} - - cd ../ - # install docker if its not already installed - - chmod +x aws-lc-pr/tests/ci/benchmark_framework/install_docker.sh - - ./aws-lc-pr/tests/ci/benchmark_framework/install_docker.sh - # log into docker and get needed docker image from ecr - - export ECR_REPO="{AWS_ACCOUNT_ID}.dkr.ecr.us-west-2.amazonaws.com/aws-lc-docker-images-${CPU_ARCH}" - - docker login -u AWS -p $(aws ecr get-login-password) https://"${ECR_REPO}" - - docker pull "${ECR_REPO}:ubuntu-20.04_clang-7x-bm-framework_latest" - - # start the container and run the bm script - - exec_docker="docker run --env PR_FOLDER_NAME=aws-lc-pr --env OPENSSL_ia32cap=${OPENSSL_ia32cap} --env AWS_ACCOUNT_ID={AWS_ACCOUNT_ID} --env PR_NUM={PR_NUM} --env 
COMMIT_ID={COMMIT_ID} --env CPU_TYPE=${CPU_TYPE} --env NOHW_TYPE={NOHW_TYPE} -v `pwd`:`pwd` -w `pwd` ${ECR_REPO}:ubuntu-20.04_clang-7x-bm-framework_latest" - - chmod +x aws-lc-pr/tests/ci/build_run_benchmarks.sh - - $exec_docker ./aws-lc-pr/tests/ci/build_run_benchmarks.sh \ No newline at end of file diff --git a/tests/ci/codebuild/linux-x86/run_bm_framework.yml b/tests/ci/codebuild/linux-x86/run_bm_framework.yml deleted file mode 100644 index 04e14054bb..0000000000 --- a/tests/ci/codebuild/linux-x86/run_bm_framework.yml +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 OR ISC - -version: 0.2 - -env: - variables: - GOPROXY: https://proxy.golang.org,direct - -phases: - build: - commands: - - ./tests/ci/build_run_benchmarks.sh diff --git a/tests/ci/run_bm_framework.sh b/tests/ci/run_bm_framework.sh deleted file mode 100755 index a2c56cbdeb..0000000000 --- a/tests/ci/run_bm_framework.sh +++ /dev/null @@ -1,154 +0,0 @@ -#!/usr/bin/env bash -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 OR ISC - -set -exo pipefail - -# Please run from project root folder! 
-# You'll want to set the codebuild env variables set if running locally -source tests/ci/common_ssm_setup.sh - -# cleanup code -cleanup() { - set +e - # kill ec2 instances after we're done w/ them - for id in ${instance_ids};do - aws ec2 terminate-instances --instance-ids "${id}" - done - - # delete the various documents that we created - for name in ${ssm_document_names};do - aws ssm delete-document --name "${name}" - done -} - -# we wanna run the cleanup code on exit -trap cleanup EXIT - -# print some information for reference -echo GitHub PR Number: "${CODEBUILD_WEBHOOK_TRIGGER}" -echo GitHub Commit Version: "${CODEBUILD_SOURCE_VERSION}" -AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) -echo AWS Account ID: "${AWS_ACCOUNT_ID}" -echo GitHub Repo Link: "${CODEBUILD_SOURCE_REPO_URL}" - -# get information for ec2 instances -vpc_id="$(aws ec2 describe-vpcs --filter Name=tag:Name,Values=aws-lc-ci-bm-framework/aws-lc-ci-bm-framework-ec2-vpc --query Vpcs[*].VpcId --output text)" -sg_id="$(aws ec2 describe-security-groups --filter Name=vpc-id,Values="${vpc_id}" --filter Name=group-name,Values=bm_framework_ec2_sg --query SecurityGroups[*].GroupId --output text)" -subnet_id="$(aws ec2 describe-subnets --filter Name=vpc-id,Values="${vpc_id}" --filter Name=state,Values=available --filter Name=tag:Name,Values=aws-lc-ci-bm-framework/aws-lc-ci-bm-framework-ec2-vpc/PrivateSubnet1 --query Subnets[*].SubnetId --output text)" - -#$1 is nohw type, $2 is OPENSSL_ia32cap value -generate_ssm_document_file() { - # use sed to replace placeholder values inside preexisting document - sed -e "s,{AWS_ACCOUNT_ID},${AWS_ACCOUNT_ID},g" \ - -e "s,{PR_NUM},${CODEBUILD_WEBHOOK_TRIGGER},g" \ - -e "s,{COMMIT_ID},${CODEBUILD_SOURCE_VERSION},g" \ - -e "s,{GITHUB_REPO},${CODEBUILD_SOURCE_REPO_URL},g" \ - -e "s,{OPENSSL_ia32cap},$2,g" \ - -e "s,{NOHW_TYPE},$1,g" \ - tests/ci/cdk/cdk/ssm/bm_framework_ssm_document.yaml \ - 
>tests/ci/cdk/cdk/ssm/bm_framework_"$1"_ssm_document.yaml -} - -# create the ssm documents that will be used for the various ssm commands -generate_ssm_document_file "" "" -generate_ssm_document_file "nosha" "~0x100000000" -generate_ssm_document_file "noavx" "~0x1000000000000000:0xC0010020" - -#$1 for ami, $2 for instance-type, echos the instance id so we can capture the output -create_ec2_instances() { - local instance_id - instance_id="$(aws ec2 run-instances --image-id "$1" --count 1 \ - --instance-type "$2" --security-group-ids "${sg_id}" --subnet-id "${subnet_id}" \ - --block-device-mappings 'DeviceName="/dev/sda1",Ebs={DeleteOnTermination=True,VolumeSize=200}' \ - --tag-specifications 'ResourceType="instance",Tags=[{Key="aws-lc",Value="aws-lc-ci-bm-framework-ec2-x86-instance"}]' \ - --iam-instance-profile Name=aws-lc-ci-bm-framework-ec2-profile \ - --placement 'AvailabilityZone=us-west-2a' \ - --query Instances[*].InstanceId --output text)" - echo "${instance_id}" -} - -# create ec2 instances for x86 and arm -x86_id=$(create_ec2_instances "ami-01773ce53581acf22" "c5.metal") -arm_id=$(create_ec2_instances "ami-018e246d8c0f39ae5" "c6g.metal") -x86_nosha_id=$(create_ec2_instances "ami-01773ce53581acf22" "m5.metal") -x86_noavx_id=$(create_ec2_instances "ami-01773ce53581acf22" "c5.metal") -instance_ids="${x86_id} ${arm_id} ${x86_nosha_id} ${x86_noavx_id}" - -# if any of the ids are blank, ec2 creation failed -if [[ -z "${x86_id}" ]] || [[ -z "${arm_id}" ]] || [[ -z "${x86_nosha_id}" ]] || [[ -z "${x86_noavx_id}" ]]; then - exit 1 -fi - -# Give a few minutes for the ec2 instances to be ready -sleep 60 - -for i in {1..30}; do - ready=true - for id in ${instance_ids}; do - status=$(aws ssm describe-instance-information --filter Key="InstanceIds",Values="${id}" \ - --query InstanceInformationList[*].PingStatus --output text) - if [ "${status}" != Online ]; then - ready=false - fi - done - if [ "${ready}" = true ]; then - break - fi - echo "Wait for instances to be 
able to run the SSM commands" - - # if we've hit the 30 minute mark and still aren't ready, then something has gone wrong - if [ "${i}" = 30 ]; then exit 1; fi - sleep 60 -done - -# Create, and run ssm command for arm & x86 -ssm_doc_name=$(create_ssm_document "bm_framework_") -nosha_ssm_doc_name=$(create_ssm_document "bm_framework_nosha") -noavx_ssm_doc_name=$(create_ssm_document "bm_framework_noavx") -ssm_document_names="${ssm_doc_name} ${nosha_ssm_doc_name} ${noavx_ssm_doc_name}" - -# delete contents of 'latest' folders before uploading anything new to them -aws s3 rm s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-pr-bucket/latest-${CODEBUILD_WEBHOOK_TRIGGER}" --recursive -aws s3 rm s3://"${AWS_ACCOUNT_ID}-aws-lc-ci-bm-framework-prod-bucket/latest" --recursive - -cloudwatch_group_name="aws-lc-ci-bm-framework-cw-logs" -x86_ssm_command_id=$(run_ssm_command "${ssm_doc_name}" "${x86_id}" "${cloudwatch_group_name}") -arm_ssm_command_id=$(run_ssm_command "${ssm_doc_name}" "${arm_id}" "${cloudwatch_group_name}") -nosha_ssm_command_id=$(run_ssm_command "${nosha_ssm_doc_name}" "${x86_nosha_id}" "${cloudwatch_group_name}") -noavx_ssm_command_id=$(run_ssm_command "${noavx_ssm_doc_name}" "${x86_noavx_id}" "${cloudwatch_group_name}") -ssm_command_ids="${x86_ssm_command_id} ${arm_ssm_command_id} ${nosha_ssm_command_id} ${noavx_ssm_command_id}" - -# Give some time for the commands to run -for i in {1..30}; do - echo "${i}: Continue to wait 3 min for SSM commands to finish." 
- sleep 180 - done=true - success=true - # for each command, check its status - for id in ${ssm_command_ids}; do - ssm_command_status="$(aws ssm list-commands --command-id "${id}" --query Commands[*].Status --output text)" - ssm_target_count="$(aws ssm list-commands --command-id "${id}" --query Commands[*].TargetCount --output text)" - ssm_completed_count="$(aws ssm list-commands --command-id "${id}" --query Commands[*].CompletedCount --output text)" - if [[ ${ssm_command_status} == 'Success' && ${ssm_completed_count} == "${ssm_target_count}" ]]; then - echo "SSM command ${id} finished successfully." - elif [[ ${ssm_command_status} == 'Failed' && ${ssm_completed_count} == "${ssm_target_count}" ]]; then - echo "SSM command ${id} failed." - success=false - else - done=false - fi - done - - # if after the loop finish and done is still true, then we're done - if [ "${done}" = true ]; then - echo "All SSM commands have finished." - - # if success is still true here, then none of the commands failed - if [ "${success}" != true ]; then - echo "An SSM command failed!" - exit 1 - fi - break - fi -done