From d0e15ccaca22c5e05b9186aa1a241e744d67c96a Mon Sep 17 00:00:00 2001 From: Niranjan Jayakar Date: Thu, 14 Oct 2021 12:38:53 +0530 Subject: [PATCH 01/37] fix(lambda): docker image function fails when insightsVersion is specified (#16781) Lambda insights is configured as a set of layers that are then auto-added to the lambda function when the `insightsVersion` property is specified. However, Lambda functions deployed as container images cannot contain layers. Instead, the user is expected to bring a container image with the Insights agent pre-installed. Update the CDK code, so that the layer is not added for Lambda functions that use container images. fixes #16642 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/aws-lambda/lib/function.ts | 19 ++++++--- .../aws-lambda/test/lambda-insights.test.ts | 40 +++++++++++++++++++ 2 files changed, 54 insertions(+), 5 deletions(-) diff --git a/packages/@aws-cdk/aws-lambda/lib/function.ts b/packages/@aws-cdk/aws-lambda/lib/function.ts index 7505cdf463a1b..fffd20decd8d9 100644 --- a/packages/@aws-cdk/aws-lambda/lib/function.ts +++ b/packages/@aws-cdk/aws-lambda/lib/function.ts @@ -220,6 +220,10 @@ export interface FunctionOptions extends EventInvokeConfigOptions { * Specify the version of CloudWatch Lambda insights to use for monitoring * @see https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Lambda-Insights.html * + * When used with `DockerImageFunction` or `DockerImageCode`, the Docker image should have + * the Lambda insights agent installed. + * @see https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Lambda-Insights-Getting-Started-docker.html + * * @default - No Lambda Insights */ readonly insightsVersion?: LambdaInsightsVersion; @@ -782,9 +786,7 @@ export class Function extends FunctionBase { } // Configure Lambda insights - if (props.insightsVersion !== undefined) { - this.configureLambdaInsights(props.insightsVersion); - } + this.configureLambdaInsights(props); } /** @@ -912,8 +914,15 @@ Environment variables can be marked for removal when used in Lambda@Edge by sett * * https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Lambda-Insights-extension-versions.html */ - private configureLambdaInsights(insightsVersion: LambdaInsightsVersion): void { - this.addLayers(LayerVersion.fromLayerVersionArn(this, 'LambdaInsightsLayer', insightsVersion.layerVersionArn)); + private configureLambdaInsights(props: FunctionProps): void { + if (props.insightsVersion === undefined) { + return; + } + if (props.runtime !== Runtime.FROM_IMAGE) { + // Layers cannot be added to Lambda container images. The image should have the insights agent installed. 
+ // See https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Lambda-Insights-Getting-Started-docker.html + this.addLayers(LayerVersion.fromLayerVersionArn(this, 'LambdaInsightsLayer', props.insightsVersion.layerVersionArn)); + } this.role?.addManagedPolicy(iam.ManagedPolicy.fromAwsManagedPolicyName('CloudWatchLambdaInsightsExecutionRolePolicy')); } diff --git a/packages/@aws-cdk/aws-lambda/test/lambda-insights.test.ts b/packages/@aws-cdk/aws-lambda/test/lambda-insights.test.ts index 990c0c7fb7405..762df158da6f4 100644 --- a/packages/@aws-cdk/aws-lambda/test/lambda-insights.test.ts +++ b/packages/@aws-cdk/aws-lambda/test/lambda-insights.test.ts @@ -1,5 +1,6 @@ import '@aws-cdk/assert-internal/jest'; import { MatchStyle } from '@aws-cdk/assert-internal'; +import * as ecr from '@aws-cdk/aws-ecr'; import * as cdk from '@aws-cdk/core'; import * as lambda from '../lib'; @@ -313,4 +314,43 @@ describe('lambda-insights', () => { // On synthesis it should not throw an error expect(() => app.synth()).not.toThrow(); }); + + test('insights layer is skipped for container images and the role is updated', () => { + const stack = new cdk.Stack(); + new lambda.DockerImageFunction(stack, 'MyFunction', { + code: lambda.DockerImageCode.fromEcr(ecr.Repository.fromRepositoryArn(stack, 'MyRepo', + 'arn:aws:ecr:us-east-1:0123456789:repository/MyRepo')), + insightsVersion: lambda.LambdaInsightsVersion.VERSION_1_0_98_0, + }); + + expect(stack).toCountResources('AWS::Lambda::LayerVersion', 0); + + expect(stack).toHaveResourceLike('AWS::IAM::Role', { + 'AssumeRolePolicyDocument': { + 'Statement': [ + { + 'Action': 'sts:AssumeRole', + 'Principal': { + 'Service': 'lambda.amazonaws.com', + }, + }, + ], + }, + 'ManagedPolicyArns': [ + { }, + { + 'Fn::Join': [ + '', + [ + 'arn:', + { + 'Ref': 'AWS::Partition', + }, + ':iam::aws:policy/CloudWatchLambdaInsightsExecutionRolePolicy', + ], + ], + }, + ], + }); + }); }); From c6db91eee2cb658ce347c7ac6d6e3c95bc5977dc Mon Sep 17 00:00:00 2001 From: Niranjan Jayakar Date: Thu, 14 Oct 2021 13:28:08 +0530 Subject: [PATCH 02/37] fix(apigatewayv2): unable to retrieve domain url for default stage (#16854) The `defaultStage` prop in `HttpApi` returns `IHttpStage`. The `domainUrl` getter was previously added only to `HttpStage`. Elevate this to the `IHttpStage` level so it's available from the `HttpApi`. closes #16638 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../aws-apigatewayv2/lib/http/stage.ts | 13 +++++++--- .../aws-apigatewayv2/test/http/api.test.ts | 24 ++++++++++++++++++- 2 files changed, 33 insertions(+), 4 deletions(-) diff --git a/packages/@aws-cdk/aws-apigatewayv2/lib/http/stage.ts b/packages/@aws-cdk/aws-apigatewayv2/lib/http/stage.ts index 709f207a04435..ca40349975ec1 100644 --- a/packages/@aws-cdk/aws-apigatewayv2/lib/http/stage.ts +++ b/packages/@aws-cdk/aws-apigatewayv2/lib/http/stage.ts @@ -18,6 +18,11 @@ export interface IHttpStage extends IStage { */ readonly api: IHttpApi; + /** + * The custom domain URL to this stage + */ + readonly domainUrl: string; + /** * Metric for the number of client-side errors captured in a given period. 
* @@ -96,6 +101,7 @@ export interface HttpStageAttributes extends StageAttributes { } abstract class HttpStageBase extends StageBase implements IHttpStage { + public abstract readonly domainUrl: string; public abstract readonly api: IHttpApi; public metricClientError(props?: MetricOptions): Metric { @@ -140,6 +146,10 @@ export class HttpStage extends HttpStageBase { get url(): string { throw new Error('url is not available for imported stages.'); } + + get domainUrl(): string { + throw new Error('domainUrl is not available for imported stages.'); + } } return new Import(scope, id); } @@ -177,9 +187,6 @@ export class HttpStage extends HttpStageBase { return `https://${this.api.apiId}.execute-api.${s.region}.${s.urlSuffix}/${urlPath}`; } - /** - * The custom domain URL to this stage - */ public get domainUrl(): string { if (!this._apiMapping) { throw new Error('domainUrl is not available when no API mapping is associated with the Stage'); diff --git a/packages/@aws-cdk/aws-apigatewayv2/test/http/api.test.ts b/packages/@aws-cdk/aws-apigatewayv2/test/http/api.test.ts index 5b7f1052bfe35..200933eefbca9 100644 --- a/packages/@aws-cdk/aws-apigatewayv2/test/http/api.test.ts +++ b/packages/@aws-cdk/aws-apigatewayv2/test/http/api.test.ts @@ -1,9 +1,10 @@ import { Match, Template } from '@aws-cdk/assertions'; +import { Certificate } from '@aws-cdk/aws-certificatemanager'; import { Metric } from '@aws-cdk/aws-cloudwatch'; import * as ec2 from '@aws-cdk/aws-ec2'; import { Duration, Stack } from '@aws-cdk/core'; import { - CorsHttpMethod, + CorsHttpMethod, DomainName, HttpApi, HttpAuthorizer, HttpIntegrationType, HttpMethod, HttpRouteAuthorizerBindOptions, HttpRouteAuthorizerConfig, HttpRouteIntegrationBindOptions, HttpRouteIntegrationConfig, IHttpRouteAuthorizer, IHttpRouteIntegration, HttpNoneAuthorizer, PayloadFormatVersion, } from '../../lib'; @@ -374,6 +375,27 @@ describe('HttpApi', () => { expect(() => api.apiEndpoint).toThrow(/apiEndpoint is not configured/); }); + test('domainUrl can be retrieved for default stage', () => { + const stack = new Stack(); + const dn = new DomainName(stack, 'DN', { + domainName: 'example.com', + certificate: Certificate.fromCertificateArn(stack, 'cert', 'arn:aws:acm:us-east-1:111111111111:certificate'), + }); + + const api = new HttpApi(stack, 'Api', { + createDefaultStage: true, + defaultDomainMapping: { + domainName: dn, + }, + }); + + expect(stack.resolve(api.defaultStage?.domainUrl)).toEqual({ + 'Fn::Join': ['', [ + 'https://', { Ref: 'DNFDC76583' }, '/', + ]], + }); + }); + describe('default authorization settings', () => { test('can add default authorizer', () => { From d5c4f9a08e9ec2ad4c36ea99c0ada9da952d7e58 Mon Sep 17 00:00:00 2001 From: Madeline Kusters <80541297+madeline-k@users.noreply.github.com> Date: Thu, 14 Oct 2021 02:55:30 -0700 Subject: [PATCH 03/37] chore(individual-pkg-gen): set the alpha version in alpha module's package.json files if present (#16965) This change was already approved and merged in: https://github.com/aws/aws-cdk/pull/16322. It somehow got removed from the `v2-main` branch since originally being merged. This PR is just a cherry-pick of the original commit. --- This change sets the `version` key in each alpha module's `package.json` file to the alphaVersion that was created in this PR: https://github.com/aws/aws-cdk/pull/16043/ And, also sets the version of each dependency on another alpha module to the same version. 
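As a rough illustration of the intended effect (a hypothetical sketch with made-up names, not code from this PR): the resolved alpha version is stamped onto the alpha package itself, and any dependency on another alpha package is pinned to that same version.

```ts
interface PackageJson {
  name: string;
  version: string;
  dependencies: Record<string, string>;
}

// Hypothetical sketch of the behaviour described above (not the actual transform):
// stamp the resolved alpha version onto the package, and pin every dependency
// that is itself an alpha package to that same version.
function applyAlphaVersion(
  pkg: PackageJson,
  alphaVersion: string | undefined,
  alphaPackageNames: Set<string>,
): PackageJson {
  const version = alphaVersion ?? pkg.version;
  const dependencies: Record<string, string> = {};
  for (const [name, range] of Object.entries(pkg.dependencies)) {
    dependencies[name] = alphaPackageNames.has(name) ? version : range;
  }
  return { ...pkg, version, dependencies };
}
```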
Depends on: #16321 Part of: #15591 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- tools/@aws-cdk/individual-pkg-gen/transform-packages.ts | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tools/@aws-cdk/individual-pkg-gen/transform-packages.ts b/tools/@aws-cdk/individual-pkg-gen/transform-packages.ts index ece10f8262a86..a5cbcb556ea1b 100644 --- a/tools/@aws-cdk/individual-pkg-gen/transform-packages.ts +++ b/tools/@aws-cdk/individual-pkg-gen/transform-packages.ts @@ -3,6 +3,8 @@ import * as awsCdkMigration from 'aws-cdk-migration'; import * as fs from 'fs-extra'; // eslint-disable-next-line @typescript-eslint/no-require-imports const lerna_project = require('@lerna/project'); +// eslint-disable-next-line @typescript-eslint/no-require-imports +const ver = require('../../../scripts/resolve-version'); /** * @aws-cdk/ scoped packages that may be present in devDependencies and need to @@ -128,6 +130,9 @@ function transformPackageJson(pkg: any, source: string, destination: string, alp const pkgUnscopedName = `${pkg.name.substring('@aws-cdk/'.length)}`; packageJson.name += '-alpha'; + if (ver.alphaVersion) { + packageJson.version = ver.alphaVersion; + } packageJson.repository.directory = `packages/individual-packages/${pkgUnscopedName}`; // All individual packages are public by default on v1, and private by default on v2. @@ -201,7 +206,7 @@ function transformPackageJsonDependencies(packageJson: any, pkg: any, alphaPacka break; default: if (alphaPackages[dependency]) { - alphaDependencies[alphaPackages[dependency]] = pkg.version; + alphaDependencies[alphaPackages[dependency]] = packageJson.version; } else if (v1BundledDependencies.indexOf(dependency) !== -1) { // ...other than third-party dependencies, which are in bundledDependencies bundledDependencies[dependency] = packageJson.dependencies[dependency]; @@ -221,7 +226,7 @@ function transformPackageJsonDependencies(packageJson: any, pkg: any, alphaPacka break; default: if (alphaPackages[v1DevDependency]) { - alphaDevDependencies[alphaPackages[v1DevDependency]] = pkg.version; + alphaDevDependencies[alphaPackages[v1DevDependency]] = packageJson.version; } else if (!v1DevDependency.startsWith('@aws-cdk/') || isRequiredTool(v1DevDependency)) { devDependencies[v1DevDependency] = packageJson.devDependencies[v1DevDependency]; } From 00a8063f1446fbd2f588b7d740c5ba4f915f7f6c Mon Sep 17 00:00:00 2001 From: kaizen3031593 <36202692+kaizen3031593@users.noreply.github.com> Date: Thu, 14 Oct 2021 12:17:48 -0400 Subject: [PATCH 04/37] docs(pipelines): make examples compile (#16963) Draft PR because I have not compiled the snippets in `ORIGINAL_API.md`. 
---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/pipelines/ORIGINAL_API.md | 98 +++--- packages/@aws-cdk/pipelines/README.md | 310 ++++++++++++------ .../pipelines/lib/blueprint/shell-step.ts | 8 +- .../lib/codepipeline/codepipeline-source.ts | 9 +- .../pipelines/lib/main/pipeline-base.ts | 6 +- .../pipelines/rosetta/default.ts-fixture | 29 ++ 6 files changed, 304 insertions(+), 156 deletions(-) create mode 100644 packages/@aws-cdk/pipelines/rosetta/default.ts-fixture diff --git a/packages/@aws-cdk/pipelines/ORIGINAL_API.md b/packages/@aws-cdk/pipelines/ORIGINAL_API.md index 73ac108d9c67f..447e39bb09bc2 100644 --- a/packages/@aws-cdk/pipelines/ORIGINAL_API.md +++ b/packages/@aws-cdk/pipelines/ORIGINAL_API.md @@ -41,7 +41,9 @@ all commands necessary to do a full CDK build and synth, so do include installing dependencies and running the CDK CLI. For example, the old API: ```ts -SimpleSynthAction.standardNpmSynth({ +const sourceArtifact = new codepipeline.Artifact(); +const cloudAssemblyArtifact = new codepipeline.Artifact(); +pipelines.SimpleSynthAction.standardNpmSynth({ sourceArtifact, cloudAssemblyArtifact, @@ -54,8 +56,10 @@ SimpleSynthAction.standardNpmSynth({ Becomes: ```ts -new ShellStep('Synth', { - input: /* source */, +new pipelines.ShellStep('Synth', { + input: pipelines.CodePipelineSource.connection('my-org/my-app', 'main', { + connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console * });', + }), commands: [ 'npm ci', 'npm run build', @@ -71,7 +75,7 @@ You can use any of the factory functions on `CodePipelineSource`. For example, for a GitHub source, the following old API: ```ts -sourceAction: new codepipeline_actions.GitHubSourceAction({ +sourceAction: new cpactions.GitHubSourceAction({ actionName: 'GitHub', output: sourceArtifact, // Replace these with your actual GitHub project name @@ -84,8 +88,8 @@ sourceAction: new codepipeline_actions.GitHubSourceAction({ Translates into: ```ts -input: CodePipelineSource.gitHub('OWNER/REPO', 'main', { - authentication: SecretValue.secretsManager('GITHUB_TOKEN_NAME'), +input: pipelines.CodePipelineSource.gitHub('OWNER/REPO', 'main', { + authentication: cdk.SecretValue.secretsManager('GITHUB_TOKEN_NAME'), }), ``` @@ -111,8 +115,9 @@ putting manual approvals in `pre` steps, and automated approvals in `post` steps For example, specifying a manual approval on a stage deployment in old API: ```ts +declare const pipeline: pipelines.CdkPipeline; const stage = pipeline.addApplicationStage(...); -stage.addAction(new ManualApprovalAction({ +stage.addAction(new pipelines.ManualApprovalAction({ actionName: 'ManualApproval', runOrder: testingStage.nextSequentialRunOrder(), })); @@ -121,9 +126,10 @@ stage.addAction(new ManualApprovalAction({ Becomes: ```ts -pipeline.addStage(..., { +const stage = new MyApplicationStage(this, 'MyApplication'); +pipeline.addStage(stage, { pre: [ - new ManualApprovalStep('ManualApproval'), + new pipelines.ManualApprovalStep('ManualApproval'), ], }); ``` @@ -139,7 +145,7 @@ For example, specifying an automated approval after a stage is deployed in the f ```ts const stage = pipeline.addApplicationStage(...); -stage.addActions(new ShellScriptAction({ +stage.addActions(new pipelines.ShellScriptAction({ actionName: 'MyValidation', commands: ['curl -Ssf $VAR'], useOutputs: { @@ -153,10 +159,10 @@ stage.addActions(new ShellScriptAction({ 
Becomes: ```ts -const stage = new MyStage(...); +const stage = new MyApplicationStage(this, 'MyApplication'); pipeline.addStage(stage, { post: [ - new CodeBuildStep('MyValidation', { + new pipelines.CodeBuildStep('MyValidation', { commands: ['curl -Ssf $VAR'], envFromCfnOutput: { VAR: stage.cfnOutput, @@ -174,7 +180,19 @@ customizations (like `buildEnvironment`). #### Change set approvals In the old API, there were two properties that were used to add actions to the pipeline -in between the `CreateChangeSet` and `ExecuteChangeSet` actions: `manualApprovals` and `extraRunOrderSpace`. These are not supported in the new API. +in between the `CreateChangeSet` and `ExecuteChangeSet` actions: `manualApprovals` and `extraRunOrderSpace`. +This can be achieved in the modern API via the `stackSteps` property, which allows steps to be added +at the stack level: + +```ts +const stage = new MyApplicationStage(this, 'MyApplication'); +pipeline.addStage(stage, { + stackSteps: [{ + stack: stage.stack1, + changeSet: [new pipelines.ManualApprovalStep('ChangeSet Approval')], + }], +}); +``` ### Custom CodePipeline Actions @@ -190,7 +208,6 @@ artifacts: ```ts import { Construct, Stage, Stack, StackProps, StageProps } from '@aws-cdk/core'; -import { CdkPipeline } from '@aws-cdk/pipelines'; import * as codepipeline from '@aws-cdk/aws-codepipeline'; /** @@ -203,20 +220,20 @@ class MyPipelineStack extends Stack { const sourceArtifact = new codepipeline.Artifact(); const cloudAssemblyArtifact = new codepipeline.Artifact(); - const pipeline = new CdkPipeline(this, 'Pipeline', { + const pipeline = new pipelines.CdkPipeline(this, 'Pipeline', { cloudAssemblyArtifact, - sourceAction: new codepipeline_actions.GitHubSourceAction({ + sourceAction: new cpactions.GitHubSourceAction({ actionName: 'GitHub', output: sourceArtifact, - oauthToken: SecretValue.secretsManager('GITHUB_TOKEN_NAME'), + oauthToken: cdk.SecretValue.secretsManager('GITHUB_TOKEN_NAME'), // Replace these with your actual GitHub project name owner: 'OWNER', repo: 'REPO', branch: 'main', // default: 'master' }), - synthAction: SimpleSynthAction.standardNpmSynth({ + synthAction: pipelines.SimpleSynthAction.standardNpmSynth({ sourceArtifact, cloudAssemblyArtifact, @@ -274,21 +291,21 @@ class MyPipelineStack extends Stack { const sourceArtifact = new codepipeline.Artifact(); const cloudAssemblyArtifact = new codepipeline.Artifact(); - const pipeline = new CdkPipeline(this, 'Pipeline', { + const pipeline = new pipelines.CdkPipeline(this, 'Pipeline', { pipelineName: 'MyAppPipeline', cloudAssemblyArtifact, - sourceAction: new codepipeline_actions.GitHubSourceAction({ + sourceAction: new cpactions.GitHubSourceAction({ actionName: 'GitHub', output: sourceArtifact, - oauthToken: SecretValue.secretsManager('GITHUB_TOKEN_NAME'), + oauthToken: cdk.SecretValue.secretsManager('GITHUB_TOKEN_NAME'), // Replace these with your actual GitHub project name owner: 'OWNER', repo: 'REPO', branch: 'main', // default: 'master' }), - synthAction: SimpleSynthAction.standardNpmSynth({ + synthAction: pipelines.SimpleSynthAction.standardNpmSynth({ sourceArtifact, cloudAssemblyArtifact, @@ -316,7 +333,7 @@ If you prefer more control over the underlying CodePipeline object, you can create one yourself, including custom Source and Build stages: ```ts -const codePipeline = new cp.Pipeline(pipelineStack, 'CodePipeline', { +const codePipeline = new codepipeline.Pipeline(pipelineStack, 'CodePipeline', { stages: [ { stageName: 'CustomSource', @@ -330,7 +347,7 @@ const codePipeline = new 
cp.Pipeline(pipelineStack, 'CodePipeline', { }); const app = new App(); -const cdkPipeline = new CdkPipeline(app, 'CdkPipeline', { +const cdkPipeline = new pipelines.CdkPipeline(app, 'CdkPipeline', { codePipeline, cloudAssemblyArtifact, }); @@ -360,9 +377,9 @@ using these, the source repository does not need to have a `buildspec.yml`. An e of using `SimpleSynthAction` to run a Maven build followed by a CDK synth: ```ts -const pipeline = new CdkPipeline(this, 'Pipeline', { +const pipeline = new pipelines.CdkPipeline(this, 'Pipeline', { // ... - synthAction: new SimpleSynthAction({ + synthAction: new pipelines.SimpleSynthAction({ sourceArtifact, cloudAssemblyArtifact, installCommands: ['npm install -g aws-cdk'], @@ -396,16 +413,16 @@ from the CA repo instead of NPM. class MyPipelineStack extends Stack { constructor(scope: Construct, id: string, props?: StackProps) { ... - const pipeline = new CdkPipeline(this, 'Pipeline', { + const pipeline = new pipelines.CdkPipeline(this, 'Pipeline', { ... - synthAction: SimpleSynthAction.standardNpmSynth({ + synthAction: pipelines.SimpleSynthAction.standardNpmSynth({ sourceArtifact, cloudAssemblyArtifact, // Use this to customize and a permissions required for the build // and synth rolePolicyStatements: [ - new PolicyStatement({ + new iam.PolicyStatement({ actions: ['codeartifact:*', 'sts:GetServiceBearerToken'], resources: ['arn:codeartifact:repo:arn'], }), @@ -477,7 +494,7 @@ const testingStage = pipeline.addApplicationStage(new MyApplication(this, 'Testi // Add a action -- in this case, a Manual Approval action // (for illustration purposes: testingStage.addManualApprovalAction() is a // convenience shorthand that does the same) -testingStage.addAction(new ManualApprovalAction({ +testingStage.addAction(new pipelines.ManualApprovalAction({ actionName: 'ManualApproval', runOrder: testingStage.nextSequentialRunOrder(), })); @@ -522,7 +539,7 @@ In its simplest form, adding validation actions looks like this: ```ts const stage = pipeline.addApplicationStage(new MyApplication(/* ... */)); -stage.addActions(new ShellScriptAction({ +stage.addActions(new pipelines.ShellScriptAction({ actionName: 'MyValidation', commands: ['curl -Ssf https://my.webservice.com/'], // Optionally specify a VPC if, for example, the service is deployed with a private load balancer @@ -563,7 +580,7 @@ const lbApp = new MyLbApplication(this, 'MyApp', { env: { /* ... */ } }); const stage = pipeline.addApplicationStage(lbApp); -stage.addActions(new ShellScriptAction({ +stage.addActions(new pipelines.ShellScriptAction({ // ... useOutputs: { // When the test is executed, this will make $URL contain the @@ -594,7 +611,7 @@ two ways. Either pass additional policy statements in the `rolePolicyStatements` property: ```ts -new ShellScriptAction({ +new pipelines.ShellScriptAction({ // ... rolePolicyStatements: [ new iam.PolicyStatement({ @@ -608,7 +625,7 @@ new ShellScriptAction({ The Action can also be used as a Grantable after having been added to a Pipeline: ```ts -const action = new ShellScriptAction({ /* ... */ }); +const action = new pipelines.ShellScriptAction({ /* ... */ }); pipeline.addStage('Test').addActions(action); bucket.grantRead(action); @@ -623,11 +640,11 @@ if they are executable shell scripts themselves). Pass the `sourceArtifact`: ```ts const sourceArtifact = new codepipeline.Artifact(); -const pipeline = new CdkPipeline(this, 'Pipeline', { +const pipeline = new pipelines.CdkPipeline(this, 'Pipeline', { // ... 
}); -const validationAction = new ShellScriptAction({ +const validationAction = new pipelines.ShellScriptAction({ actionName: 'TestUsingSourceArtifact', additionalArtifacts: [sourceArtifact], @@ -651,8 +668,8 @@ in the `ShellScriptAction`'s `additionalArtifacts`: const cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); const integTestsArtifact = new codepipeline.Artifact('IntegTests'); -const pipeline = new CdkPipeline(this, 'Pipeline', { - synthAction: SimpleSynthAction.standardNpmSynth({ +const pipeline = new pipelines.CdkPipeline(this, 'Pipeline', { + synthAction: pipelines.SimpleSynthAction.standardNpmSynth({ sourceArtifact, cloudAssemblyArtifact, buildCommands: ['npm run build'], @@ -666,7 +683,7 @@ const pipeline = new CdkPipeline(this, 'Pipeline', { // ... }); -const validationAction = new ShellScriptAction({ +const validationAction = new pipelines.ShellScriptAction({ actionName: 'TestUsingBuildArtifact', additionalArtifacts: [integTestsArtifact], // 'test.js' was produced from 'test/test.ts' during the synth step @@ -715,12 +732,11 @@ create an SNS Topic, subscribe your own email address, and pass it in via ```ts import * as sns from '@aws-cdk/aws-sns'; import * as subscriptions from '@aws-cdk/aws-sns-subscriptions'; -import * as pipelines from '@aws-cdk/pipelines'; const topic = new sns.Topic(this, 'SecurityChangesTopic'); topic.addSubscription(new subscriptions.EmailSubscription('test@email.com')); -const pipeline = new CdkPipeline(app, 'Pipeline', { /* ... */ }); +const pipeline = new pipelines.CdkPipeline(app, 'Pipeline', { /* ... */ }); const stage = pipeline.addApplicationStage(new MyApplication(this, 'PreProd'), { confirmBroadeningPermissions: true, securityNotificationTopic: topic, diff --git a/packages/@aws-cdk/pipelines/README.md b/packages/@aws-cdk/pipelines/README.md index 3013140344bef..bd6525542920b 100644 --- a/packages/@aws-cdk/pipelines/README.md +++ b/packages/@aws-cdk/pipelines/README.md @@ -43,14 +43,31 @@ CodePipeline engine, define a `CodePipeline` construct. The following example creates a CodePipeline that deploys an application from GitHub: ```ts -/** The stacks for our app are defined in my-stacks.ts. The internals of these +/** The stacks for our app are minimally defined here. The internals of these * stacks aren't important, except that DatabaseStack exposes an attribute * "table" for a database table it defines, and ComputeStack accepts a reference * to this table in its properties. 
*/ -import { DatabaseStack, ComputeStack } from '../lib/my-stacks'; -import { Construct, Stage, Stack, StackProps, StageProps } from '@aws-cdk/core'; -import { CodePipeline, CodePipelineSource, ShellStep } from '@aws-cdk/pipelines'; +class DatabaseStack extends Stack { + public readonly table: dynamodb.Table; + + constructor(scope: Construct, id: string) { + super(scope, id); + this.table = new dynamodb.Table(this, 'Table', { + partitionKey: { name: 'id', type: dynamodb.AttributeType.STRING } + }); + } +} + +interface ComputeProps { + readonly table: dynamodb.Table; +} + +class ComputeStack extends Stack { + constructor(scope: Construct, id: string, props: ComputeProps) { + super(scope, id); + } +} /** * Stack to hold the pipeline @@ -59,11 +76,11 @@ class MyPipelineStack extends Stack { constructor(scope: Construct, id: string, props?: StackProps) { super(scope, id, props); - const pipeline = new CodePipeline(this, 'Pipeline', { - synth: new ShellStep('Synth', { + const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { + synth: new pipelines.ShellStep('Synth', { // Use a connection created using the AWS console to authenticate to GitHub // Other sources are available. - input: CodePipelineSource.connection('my-org/my-app', 'main', { + input: pipelines.CodePipelineSource.connection('my-org/my-app', 'main', { connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console * });', }), commands: [ @@ -81,7 +98,7 @@ class MyPipelineStack extends Stack { env: { account: '123456789012', region: 'eu-west-1', - } + }, })); } } @@ -106,7 +123,7 @@ class MyApplication extends Stage { } // In your main file -new MyPipelineStack(app, 'PipelineStack', { +new MyPipelineStack(this, 'PipelineStack', { env: { account: '123456789012', region: 'eu-west-1', @@ -172,15 +189,25 @@ off temporarily, by passing `selfMutation: false` property, example: ```ts // Modern API -const pipeline = new CodePipeline(this, 'Pipeline', { +const modernPipeline = new pipelines.CodePipeline(this, 'Pipeline', { selfMutation: false, - ... + synth: new pipelines.ShellStep('Synth', { + input: pipelines.CodePipelineSource.connection('my-org/my-app', 'main', { + connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console * });', + }), + commands: [ + 'npm ci', + 'npm run build', + 'npx cdk synth', + ], + }), }); // Original API -const pipeline = new CdkPipeline(this, 'Pipeline', { +const cloudAssemblyArtifact = new codepipeline.Artifact(); +const originalPipeline = new pipelines.CdkPipeline(this, 'Pipeline', { selfMutating: false, - ... + cloudAssemblyArtifact, }); ``` @@ -204,10 +231,10 @@ commands required will depend on the programming language you are using. 
For a typical NPM-based project, the synth will look like this: ```ts -const source = /* the repository source */; +declare const source: pipelines.IFileSetProducer; // the repository source -const pipeline = new CodePipeline(this, 'Pipeline', { - synth: new ShellStep('Synth', { +const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { + synth: new pipelines.ShellStep('Synth', { input: source, commands: [ 'npm ci', @@ -224,8 +251,10 @@ CDK project lives in a subdirectory, be sure to adjust the `primaryOutputDirectory` to match: ```ts -const pipeline = new CodePipeline(this, 'Pipeline', { - synth: new ShellStep('Synth', { +declare const source: pipelines.IFileSetProducer; // the repository source + +const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { + synth: new pipelines.ShellStep('Synth', { input: source, commands: [ 'cd mysubdir', @@ -254,8 +283,10 @@ look like in a number of different situations. For Yarn, the install commands are different: ```ts -const pipeline = new CodePipeline(this, 'Pipeline', { - synth: new ShellStep('Synth', { +declare const source: pipelines.IFileSetProducer; // the repository source + +const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { + synth: new pipelines.ShellStep('Synth', { input: source, commands: [ 'yarn install --frozen-lockfile', @@ -270,8 +301,10 @@ For Python projects, remember to install the CDK CLI globally (as there is no `package.json` to automatically install it for you): ```ts -const pipeline = new CodePipeline(this, 'Pipeline', { - synth: new ShellStep('Synth', { +declare const source: pipelines.IFileSetProducer; // the repository source + +const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { + synth: new pipelines.ShellStep('Synth', { input: source, commands: [ 'pip install -r requirements.txt', @@ -288,8 +321,10 @@ and the Maven compilation step is automatically executed for you as you run `cdk synth`: ```ts -const pipeline = new CodePipeline(this, 'Pipeline', { - synth: new ShellStep('Synth', { +declare const source: pipelines.IFileSetProducer; // the repository source + +const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { + synth: new pipelines.ShellStep('Synth', { input: source, commands: [ 'npm install -g aws-cdk', @@ -314,7 +349,7 @@ You will first use the AWS Console to authenticate to the source control provider, and then use the connection ARN in your pipeline definition: ```ts -CodePipelineSource.connection('org/repo', 'branch', { +pipelines.CodePipelineSource.connection('org/repo', 'branch', { connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', }); ``` @@ -328,9 +363,9 @@ you can change the name. The token should have the **repo** and **admin:repo_hoo scopes. 
```ts -CodePipelineSource.gitHub('org/repo', 'branch', { +pipelines.CodePipelineSource.gitHub('org/repo', 'branch', { // This is optional - authentication: SecretValue.secretsManager('my-token'), + authentication: cdk.SecretValue.secretsManager('my-token'), }); ``` @@ -341,8 +376,8 @@ that the CodeCommit repository and then use `CodePipelineSource.codeCommit` to reference it: ```ts -const repository = codecommit.fromRepositoryName(this, 'Repository', 'my-repository'); -CodePipelineSource.codeCommit(repository); +const repository = codecommit.Repository.fromRepositoryName(this, 'Repository', 'my-repository'); +pipelines.CodePipelineSource.codeCommit(repository, 'main'); ``` ##### S3 @@ -352,7 +387,7 @@ triggered every time the file in S3 is changed: ```ts const bucket = s3.Bucket.fromBucketName(this, 'Bucket', 'my-bucket'); -CodePipelineSource.s3(bucket, 'my/source.zip'); +pipelines.CodePipelineSource.s3(bucket, 'my/source.zip'); ``` #### Additional inputs @@ -363,17 +398,17 @@ output file set can be used as an input, such as a `CodePipelineSource`, but also other `ShellStep`: ```ts -const prebuild = new ShellStep('Prebuild', { - input: CodePipelineSource.gitHub('myorg/repo1'), +const prebuild = new pipelines.ShellStep('Prebuild', { + input: pipelines.CodePipelineSource.gitHub('myorg/repo1', 'main'), primaryOutputDirectory: './build', commands: ['./build.sh'], }); -const pipeline = new CodePipeline(this, 'Pipeline', { - synth: new ShellStep('Synth', { - input: CodePipelineSource.gitHub('myorg/repo2'), +const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { + synth: new pipelines.ShellStep('Synth', { + input: pipelines.CodePipelineSource.gitHub('myorg/repo2', 'main'), additionalInputs: { - 'subdir': CodePipelineSource.gitHub('myorg/repo3'), + 'subdir': pipelines.CodePipelineSource.gitHub('myorg/repo3', 'main'), '../siblingdir': prebuild, }, @@ -389,6 +424,7 @@ more CDK `Stages` which will be deployed to their target environments. To do so, call `pipeline.addStage()` on the Stage object: ```ts +declare const pipeline: pipelines.CodePipeline; // Do this as many times as necessary with any account and region // Account and region may different from the pipeline's. pipeline.addStage(new MyApplicationStage(this, 'Prod', { @@ -421,6 +457,7 @@ deployed in sequence. For example, the following will deploy two copies of your application to `eu-west-1` and `eu-central-1` in parallel: ```ts +declare const pipeline: pipelines.CodePipeline; const europeWave = pipeline.addWave('Europe'); europeWave.addStage(new MyApplicationStage(this, 'Ireland', { env: { region: 'eu-west-1' } @@ -445,9 +482,19 @@ KMS key. Example: ```ts -const pipeline = new CodePipeline(this, 'Pipeline', { +const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { // Encrypt artifacts, required for cross-account deployments crossAccountKeys: true, + synth: new pipelines.ShellStep('Synth', { + input: pipelines.CodePipelineSource.connection('my-org/my-app', 'main', { + connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console * });', + }), + commands: [ + 'npm ci', + 'npm run build', + 'npx cdk synth', + ], + }), }); ``` @@ -464,19 +511,20 @@ a manual approval in the form of a `ManualApprovalStep` added to the pipeline. B pass in order to promote from the `PreProd` to the `Prod` environment: ```ts -const preprod = new MyApplicationStage(this, 'PreProd', { ... }); -const prod = new MyApplicationStage(this, 'Prod', { ... 
}); +declare const pipeline: pipelines.CodePipeline; +const preprod = new MyApplicationStage(this, 'PreProd'); +const prod = new MyApplicationStage(this, 'Prod'); pipeline.addStage(preprod, { post: [ - new ShellStep('Validate Endpoint', { + new pipelines.ShellStep('Validate Endpoint', { commands: ['curl -Ssf https://my.webservice.com/'], }), ], }); pipeline.addStage(prod, { pre: [ - new ManualApprovalStep('PromoteToProd'), + new pipelines.ManualApprovalStep('PromoteToProd'), ], }); ``` @@ -484,15 +532,29 @@ pipeline.addStage(prod, { You can also specify steps to be executed at the stack level. To achieve this, you can specify the stack and step via the `stackSteps` property: ```ts +class MyStacksStage extends Stage { + public readonly stack1: Stack; + public readonly stack2: Stack; + + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + this.stack1 = new Stack(this, 'stack1'); + this.stack2 = new Stack(this, 'stack2'); + } +} + +declare const pipeline: pipelines.CodePipeline; +const prod = new MyStacksStage(this, 'Prod'); + pipeline.addStage(prod, { stackSteps: [{ stack: prod.stack1, - pre: [new ManualApprovalStep('Pre-Stack Check')], // Executed before stack is prepared - changeSet: [new ManualApprovalStep('ChangeSet Approval')], // Executed after stack is prepared but before the stack is deployed - post: [new ManualApprovalStep('Post-Deploy Check')], // Executed after staack is deployed + pre: [new pipelines.ManualApprovalStep('Pre-Stack Check')], // Executed before stack is prepared + changeSet: [new pipelines.ManualApprovalStep('ChangeSet Approval')], // Executed after stack is prepared but before the stack is deployed + post: [new pipelines.ManualApprovalStep('Post-Deploy Check')], // Executed after staack is deployed }, { stack: prod.stack2, - post: [new ManualApprovalStep('Post-Deploy Check')], // Executed after staack is deployed + post: [new pipelines.ManualApprovalStep('Post-Deploy Check')], // Executed after staack is deployed }], }); ``` @@ -507,21 +569,26 @@ To use Stack Outputs, expose the `CfnOutput` object you're interested in, and pass it to `envFromCfnOutputs` of the `ShellStep`: ```ts -class MyApplicationStage extends Stage { +class MyOutputStage extends Stage { public readonly loadBalancerAddress: CfnOutput; - // ... + + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + this.loadBalancerAddress = new CfnOutput(this, 'Output', {value: 'value'}); + } } -const lbApp = new MyApplicationStage(this, 'MyApp', { /* ... */ }); +const lbApp = new MyOutputStage(this, 'MyApp'); +declare const pipeline: pipelines.CodePipeline; pipeline.addStage(lbApp, { post: [ - new ShellStep('HitEndpoint', { + new pipelines.ShellStep('HitEndpoint', { envFromCfnOutputs: { // Make the load balancer address available as $URL inside the commands URL: lbApp.loadBalancerAddress, }, commands: ['curl -Ssf $URL'], - }); + }), ], }); ``` @@ -539,12 +606,13 @@ Here's an example that captures an additional output directory in the synth step and runs tests from there: ```ts -const synth = new ShellStep('Synth', { /* ... */ }); -const pipeline = new CodePipeline(this, 'Pipeline', { synth }); +declare const synth: pipelines.ShellStep; +const stage = new MyApplicationStage(this, 'MyApplication'); +const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { synth }); -pipeline.addStage(/* ... 
*/, { +pipeline.addStage(stage, { post: [ - new ShellStep('Approve', { + new pipelines.ShellStep('Approve', { // Use the contents of the 'integ' directory from the synth step as the input input: synth.addOutputDirectory('integ'), commands: ['cd integ && ./run.sh'], @@ -562,7 +630,9 @@ generated, use a `CodeBuildStep` instead of a `ShellStep`. This class has a numb of properties that allow you to customize various aspects of the projects: ```ts -new CodeBuildStep('Synth', { +declare const vpc: ec2.Vpc; +declare const mySecurityGroup: ec2.SecurityGroup; +new pipelines.CodeBuildStep('Synth', { // ...standard ShellStep props... commands: [/* ... */], env: { /* ... */ }, @@ -602,8 +672,20 @@ or just for the synth, asset publishing, and self-mutation projects by passing ` `assetPublishingCodeBuildDefaults`, or `selfMutationCodeBuildDefaults`: ```ts -new CodePipeline(this, 'Pipeline', { - // ... +declare const vpc: ec2.Vpc; +declare const mySecurityGroup: ec2.SecurityGroup; +new pipelines.CodePipeline(this, 'Pipeline', { + // Standard CodePipeline properties + synth: new pipelines.ShellStep('Synth', { + input: pipelines.CodePipelineSource.connection('my-org/my-app', 'main', { + connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console * });', + }), + commands: [ + 'npm ci', + 'npm run build', + 'npx cdk synth', + ], + }), // Defaults for all CodeBuild projects codeBuildDefaults: { @@ -644,15 +726,19 @@ doesn't have a matching class yet, you can define your own step class that exten Here's an example that adds a Jenkins step: ```ts -class MyJenkinsStep extends Step implements ICodePipelineActionFactory { - constructor(private readonly provider: codepipeline_actions.JenkinsProvider, private readonly input: FileSet) { +class MyJenkinsStep extends pipelines.Step implements pipelines.ICodePipelineActionFactory { + constructor( + private readonly provider: cpactions.JenkinsProvider, + private readonly input: pipelines.FileSet, + ) { + super('MyJenkinsStep'); } - public produceAction(stage: codepipeline.IStage, options: ProduceActionOptions): CodePipelineActionFactoryResult { + public produceAction(stage: codepipeline.IStage, options: pipelines.ProduceActionOptions): pipelines.CodePipelineActionFactoryResult { // This is where you control what type of Action gets added to the // CodePipeline - stage.addAction(new codepipeline_actions.JenkinsAction({ + stage.addAction(new cpactions.JenkinsAction({ // Copy 'actionName' and 'runOrder' from the options actionName: options.actionName, runOrder: options.runOrder, @@ -700,8 +786,13 @@ stacks the pipeline is deploying), for example by the use of `LinuxBuildImage.fr you need to pass `dockerEnabledForSelfMutation: true` to the pipeline. For example: ```ts -const pipeline = new CodePipeline(this, 'Pipeline', { - // ... 
+const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { + synth: new pipelines.ShellStep('Synth', { + input: pipelines.CodePipelineSource.connection('my-org/my-app', 'main', { + connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console * });', + }), + commands: ['npm ci','npm run build','npx cdk synth'], + }), // Turn this on because the pipeline uses Docker image assets dockerEnabledForSelfMutation: true, @@ -709,16 +800,16 @@ const pipeline = new CodePipeline(this, 'Pipeline', { pipeline.addWave('MyWave', { post: [ - new CodeBuildStep('RunApproval', { + new pipelines.CodeBuildStep('RunApproval', { commands: ['command-from-image'], buildEnvironment: { // The user of a Docker image asset in the pipeline requires turning on // 'dockerEnabledForSelfMutation'. - buildImage: LinuxBuildImage.fromAsset(this, 'Image', { + buildImage: codebuild.LinuxBuildImage.fromAsset(this, 'Image', { directory: './docker-image', - }) + }), }, - }) + }), ], }); ``` @@ -734,8 +825,13 @@ if you add a construct like `@aws-cdk/aws-lambda-nodejs`), you need to pass `dockerEnabledForSynth: true` to the pipeline. For example: ```ts -const pipeline = new CodePipeline(this, 'Pipeline', { - // ... +const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { + synth: new pipelines.ShellStep('Synth', { + input: pipelines.CodePipelineSource.connection('my-org/my-app', 'main', { + connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console * });', + }), + commands: ['npm ci','npm run build','npx cdk synth'], + }), // Turn this on because the application uses bundled file assets dockerEnabledForSynth: true, @@ -756,16 +852,21 @@ different environment (e.g., ECR repo) or to avoid throttling (e.g., DockerHub). ```ts const dockerHubSecret = secretsmanager.Secret.fromSecretCompleteArn(this, 'DHSecret', 'arn:aws:...'); const customRegSecret = secretsmanager.Secret.fromSecretCompleteArn(this, 'CRSecret', 'arn:aws:...'); -const repo1 = ecr.Repository.fromRepositoryArn(stack, 'Repo', 'arn:aws:ecr:eu-west-1:0123456789012:repository/Repo1'); -const repo2 = ecr.Repository.fromRepositoryArn(stack, 'Repo', 'arn:aws:ecr:eu-west-1:0123456789012:repository/Repo2'); +const repo1 = ecr.Repository.fromRepositoryArn(this, 'Repo', 'arn:aws:ecr:eu-west-1:0123456789012:repository/Repo1'); +const repo2 = ecr.Repository.fromRepositoryArn(this, 'Repo', 'arn:aws:ecr:eu-west-1:0123456789012:repository/Repo2'); -const pipeline = new CodePipeline(this, 'Pipeline', { +const pipeline = new pipelines.CodePipeline(this, 'Pipeline', { dockerCredentials: [ - DockerCredential.dockerHub(dockerHubSecret), - DockerCredential.customRegistry('dockerregistry.example.com', customRegSecret), - DockerCredential.ecr([repo1, repo2]); + pipelines.DockerCredential.dockerHub(dockerHubSecret), + pipelines.DockerCredential.customRegistry('dockerregistry.example.com', customRegSecret), + pipelines.DockerCredential.ecr([repo1, repo2]), ], - // ... 
+ synth: new pipelines.ShellStep('Synth', { + input: pipelines.CodePipelineSource.connection('my-org/my-app', 'main', { + connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console * });', + }), + commands: ['npm ci','npm run build','npx cdk synth'], + }), }); ``` @@ -784,7 +885,7 @@ the **Synth**, **Self-Update**, and **Asset Publishing** actions within the ```ts const dockerHubSecret = secretsmanager.Secret.fromSecretCompleteArn(this, 'DHSecret', 'arn:aws:...'); // Only the image asset publishing actions will be granted read access to the secret. -const creds = DockerCredential.dockerHub(dockerHubSecret, { usages: [DockerCredentialUsage.ASSET_PUBLISHING] }); +const creds = pipelines.DockerCredential.dockerHub(dockerHubSecret, { usages: [pipelines.DockerCredentialUsage.ASSET_PUBLISHING] }); ``` ## CDK Environment Bootstrapping @@ -953,9 +1054,11 @@ give the synth CodeBuild execution role permissions to assume the bootstrapped lookup roles. As an example, doing so would look like this: ```ts -new CodePipeline(this, 'Pipeline', { - synth: new CodeBuildStep('Synth', { - input: // ...input... +new pipelines.CodePipeline(this, 'Pipeline', { + synth: new pipelines.CodeBuildStep('Synth', { + input: pipelines.CodePipelineSource.connection('my-org/my-app', 'main', { + connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console * });', + }), commands: [ // Commands to load cdk.context.json from somewhere here '...', @@ -1034,10 +1137,11 @@ Pipeline You can insert the security check by using a `ConfirmPermissionsBroadening` step: ```ts +declare const pipeline: pipelines.CodePipeline; const stage = new MyApplicationStage(this, 'MyApplication'); pipeline.addStage(stage, { pre: [ - new ConfirmPermissionsBroadening('Check', { stage }), + new pipelines.ConfirmPermissionsBroadening('Check', { stage }), ], }); ``` @@ -1047,17 +1151,14 @@ create an SNS Topic, subscribe your own email address, and pass it in as as the `notificationTopic` property: ```ts -import * as sns from '@aws-cdk/aws-sns'; -import * as subscriptions from '@aws-cdk/aws-sns-subscriptions'; -import * as pipelines from '@aws-cdk/pipelines'; - +declare const pipeline: pipelines.CodePipeline; const topic = new sns.Topic(this, 'SecurityChangesTopic'); topic.addSubscription(new subscriptions.EmailSubscription('test@email.com')); const stage = new MyApplicationStage(this, 'MyApplication'); pipeline.addStage(stage, { pre: [ - new ConfirmPermissionsBroadening('Check', { + new pipelines.ConfirmPermissionsBroadening('Check', { stage, notificationTopic: topic, }), @@ -1162,19 +1263,19 @@ that bundles asset using tools run via Docker, like `aws-lambda-nodejs`, `aws-la Make sure you set the `privileged` environment variable to `true` in the synth definition: -```typescript - const pipeline = new CdkPipeline(this, 'MyPipeline', { - ... 
- - synthAction: SimpleSynthAction.standardNpmSynth({ - sourceArtifact: ..., - cloudAssemblyArtifact: ..., - - environment: { - privileged: true, - }, - }), - }); +```ts +const sourceArtifact = new codepipeline.Artifact(); +const cloudAssemblyArtifact = new codepipeline.Artifact(); +const pipeline = new pipelines.CdkPipeline(this, 'MyPipeline', { + cloudAssemblyArtifact, + synthAction: pipelines.SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + environment: { + privileged: true, + }, + }), +}); ``` After turning on `privilegedMode: true`, you will need to do a one-time manual cdk deploy of your @@ -1201,10 +1302,11 @@ This happens because the pipeline is not self-mutating and, as a consequence, th build projects get out-of-sync with the generated templates. To fix this, make sure the `selfMutating` property is set to `true`: -```typescript -const pipeline = new CdkPipeline(this, 'MyPipeline', { +```ts +const cloudAssemblyArtifact = new codepipeline.Artifact(); +const pipeline = new pipelines.CdkPipeline(this, 'MyPipeline', { selfMutating: true, - ... + cloudAssemblyArtifact, }); ``` @@ -1240,9 +1342,9 @@ $ env CDK_NEW_BOOTSTRAP=1 npx cdk bootstrap \ See https://docs.aws.amazon.com/cdk/latest/guide/bootstrapping.html for more info. ```ts -new MyStack(this, 'MyStack', { +new Stack(this, 'MyStack', { // Update this qualifier to match the one used above. - synthesizer: new DefaultStackSynthesizer({ + synthesizer: new cdk.DefaultStackSynthesizer({ qualifier: 'randchars1234', }), }); diff --git a/packages/@aws-cdk/pipelines/lib/blueprint/shell-step.ts b/packages/@aws-cdk/pipelines/lib/blueprint/shell-step.ts index 75c1883d92419..1f03105c78ee9 100644 --- a/packages/@aws-cdk/pipelines/lib/blueprint/shell-step.ts +++ b/packages/@aws-cdk/pipelines/lib/blueprint/shell-step.ts @@ -65,11 +65,11 @@ export interface ShellStepProps { * following configuration: * * ```ts - * const script = new ShellStep('MainScript', { - * // ... - * input: MyEngineSource.gitHub('org/source1'), + * const script = new pipelines.ShellStep('MainScript', { + * commands: ['npm ci','npm run build','npx cdk synth'], + * input: pipelines.CodePipelineSource.gitHub('org/source1', 'main'), * additionalInputs: { - * '../siblingdir': MyEngineSource.gitHub('org/source2'), + * '../siblingdir': pipelines.CodePipelineSource.gitHub('org/source2', 'main'), * } * }); * ``` diff --git a/packages/@aws-cdk/pipelines/lib/codepipeline/codepipeline-source.ts b/packages/@aws-cdk/pipelines/lib/codepipeline/codepipeline-source.ts index b6d10b03f2f67..382bed08028ff 100644 --- a/packages/@aws-cdk/pipelines/lib/codepipeline/codepipeline-source.ts +++ b/packages/@aws-cdk/pipelines/lib/codepipeline/codepipeline-source.ts @@ -26,7 +26,7 @@ export abstract class CodePipelineSource extends Step implements ICodePipelineAc * Pass in the owner and repository in a single string, like this: * * ```ts - * CodePipelineSource.gitHub('owner/repo', 'main'); + * pipelines.CodePipelineSource.gitHub('owner/repo', 'main'); * ``` * * Authentication will be done by a secret called `github-token` in AWS @@ -51,8 +51,8 @@ export abstract class CodePipelineSource extends Step implements ICodePipelineAc * Example: * * ```ts - * const bucket: IBucket = ... 
- * CodePipelineSource.s3(bucket, { + * declare const bucket: s3.Bucket; + * pipelines.CodePipelineSource.s3(bucket, { * key: 'path/to/file.zip', * }); * ``` @@ -74,7 +74,7 @@ export abstract class CodePipelineSource extends Step implements ICodePipelineAc * Example: * * ```ts - * CodePipelineSource.connection('owner/repo', 'main', { + * pipelines.CodePipelineSource.connection('owner/repo', 'main', { * connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console * }); * ``` @@ -131,7 +131,6 @@ export interface GitHubSourceOptions { * * ```ts * const oauth = cdk.SecretValue.secretsManager('my-github-token'); - * new GitHubSource(this, 'GitHubSource', { authentication: oauth, ... }); * ``` * * The GitHub Personal Access Token should have these scopes: diff --git a/packages/@aws-cdk/pipelines/lib/main/pipeline-base.ts b/packages/@aws-cdk/pipelines/lib/main/pipeline-base.ts index d69c2a6c89ab1..2f90df9de6f1a 100644 --- a/packages/@aws-cdk/pipelines/lib/main/pipeline-base.ts +++ b/packages/@aws-cdk/pipelines/lib/main/pipeline-base.ts @@ -95,9 +95,11 @@ export abstract class PipelineBase extends CoreConstruct { * Example: * * ```ts + * declare const pipeline: pipelines.CodePipeline; + * * const wave = pipeline.addWave('MyWave'); - * wave.addStage(new MyStage('Stage1', ...)); - * wave.addStage(new MyStage('Stage2', ...)); + * wave.addStage(new MyApplicationStage(this, 'Stage1')); + * wave.addStage(new MyApplicationStage(this, 'Stage2')); * ``` */ public addWave(id: string, options?: WaveOptions) { diff --git a/packages/@aws-cdk/pipelines/rosetta/default.ts-fixture b/packages/@aws-cdk/pipelines/rosetta/default.ts-fixture new file mode 100644 index 0000000000000..61a973840f007 --- /dev/null +++ b/packages/@aws-cdk/pipelines/rosetta/default.ts-fixture @@ -0,0 +1,29 @@ +// Fixture with packages imported, but nothing else +import { Construct, CfnOutput, Stage, Stack, StackProps, StageProps } from '@aws-cdk/core'; +import cdk = require('@aws-cdk/core'); +import codepipeline = require('@aws-cdk/aws-codepipeline'); +import cpactions = require('@aws-cdk/aws-codepipeline-actions'); +import codebuild = require('@aws-cdk/aws-codebuild'); +import codecommit = require('@aws-cdk/aws-codecommit'); +import dynamodb = require('@aws-cdk/aws-dynamodb'); +import ecr = require('@aws-cdk/aws-ecr'); +import ec2 = require('@aws-cdk/aws-ec2'); +import iam = require('@aws-cdk/aws-iam'); +import pipelines = require('@aws-cdk/pipelines'); +import secretsmanager = require('@aws-cdk/aws-secretsmanager'); +import sns = require('@aws-cdk/aws-sns'); +import subscriptions = require('@aws-cdk/aws-sns-subscriptions'); +import s3 = require('@aws-cdk/aws-s3'); + +class MyApplicationStage extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + } +} + +class Fixture extends Stack { + constructor(scope: Construct, id: string) { + super(scope, id); + /// here + } +} From 7db5c8cdafe7b9b22b6b40cb25ed8bd1946301f4 Mon Sep 17 00:00:00 2001 From: Julian Michel Date: Thu, 14 Oct 2021 20:11:44 +0200 Subject: [PATCH 05/37] feat(msk): add Kafka version 2.8.1 (#16881) Add support for Apache Kafka version 2.8.1 in Amazon MSK. 
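For context, opting into the new version from CDK code looks like the README example touched by this commit (a sketch, not new API surface; the cluster name is a placeholder and `vpc` is assumed to be an existing `ec2.Vpc`):

```ts
import * as ec2 from '@aws-cdk/aws-ec2';
import * as msk from '@aws-cdk/aws-msk';

declare const vpc: ec2.Vpc;

const cluster = new msk.Cluster(this, 'Cluster', {
  clusterName: 'my-cluster', // placeholder name
  kafkaVersion: msk.KafkaVersion.V2_8_1,
  vpc,
});
```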
Announcement: https://aws.amazon.com/about-aws/whats-new/2021/10/amazon-msk-support-apache-kafka-version-2-8-1/ ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/aws-msk/README.md | 2 +- .../@aws-cdk/aws-msk/lib/cluster-version.ts | 5 +++ .../aws-msk/test/integ.cluster.expected.json | 32 +++++++++---------- .../@aws-cdk/aws-msk/test/integ.cluster.ts | 2 +- 4 files changed, 23 insertions(+), 18 deletions(-) diff --git a/packages/@aws-cdk/aws-msk/README.md b/packages/@aws-cdk/aws-msk/README.md index ce86c6a477c45..664ec4f66c973 100644 --- a/packages/@aws-cdk/aws-msk/README.md +++ b/packages/@aws-cdk/aws-msk/README.md @@ -29,7 +29,7 @@ The following example creates an MSK Cluster. import * as msk from '@aws-cdk/aws-msk'; const cluster = new Cluster(this, 'Cluster', { - kafkaVersion: msk.KafkaVersion.V2_6_1, + kafkaVersion: msk.KafkaVersion.V2_8_1, vpc, }); ``` diff --git a/packages/@aws-cdk/aws-msk/lib/cluster-version.ts b/packages/@aws-cdk/aws-msk/lib/cluster-version.ts index 0d3b511aae59e..b04c154b76054 100644 --- a/packages/@aws-cdk/aws-msk/lib/cluster-version.ts +++ b/packages/@aws-cdk/aws-msk/lib/cluster-version.ts @@ -47,6 +47,11 @@ export class KafkaVersion { */ public static readonly V2_8_0 = KafkaVersion.of('2.8.0'); + /** + * Kafka version 2.8.1 + */ + public static readonly V2_8_1 = KafkaVersion.of('2.8.1'); + /** * Custom cluster version * @param version custom version number diff --git a/packages/@aws-cdk/aws-msk/test/integ.cluster.expected.json b/packages/@aws-cdk/aws-msk/test/integ.cluster.expected.json index 9a0d0db6e2325..2b523706bd3e2 100644 --- a/packages/@aws-cdk/aws-msk/test/integ.cluster.expected.json +++ b/packages/@aws-cdk/aws-msk/test/integ.cluster.expected.json @@ -95,15 +95,15 @@ "VPCPublicSubnet1NATGatewayE0556630": { "Type": "AWS::EC2::NatGateway", "Properties": { + "SubnetId": { + "Ref": "VPCPublicSubnet1SubnetB4246D30" + }, "AllocationId": { "Fn::GetAtt": [ "VPCPublicSubnet1EIP6AD938E8", "AllocationId" ] }, - "SubnetId": { - "Ref": "VPCPublicSubnet1SubnetB4246D30" - }, "Tags": [ { "Key": "Name", @@ -192,15 +192,15 @@ "VPCPublicSubnet2NATGateway3C070193": { "Type": "AWS::EC2::NatGateway", "Properties": { + "SubnetId": { + "Ref": "VPCPublicSubnet2Subnet74179F39" + }, "AllocationId": { "Fn::GetAtt": [ "VPCPublicSubnet2EIP4947BC00", "AllocationId" ] }, - "SubnetId": { - "Ref": "VPCPublicSubnet2Subnet74179F39" - }, "Tags": [ { "Key": "Name", @@ -399,7 +399,7 @@ } }, "ClusterName": "integ-test", - "KafkaVersion": "2.6.1", + "KafkaVersion": "2.8.1", "NumberOfBrokerNodes": 2, "EncryptionInfo": { "EncryptionInTransit": { @@ -524,7 +524,7 @@ "Properties": { "Code": { "S3Bucket": { - "Ref": "AssetParameters5c61041c12314e1ad8e67a0107fa3733382a206a78cdc1576fffa7e93caca5b4S3BucketB17E5ABD" + "Ref": "AssetParameters1c4eb88f5a8270f387281dcff6e3493840634113c4d57044f4aff74e3ef94c2dS3Bucket4C71F166" }, "S3Key": { "Fn::Join": [ @@ -537,7 +537,7 @@ "Fn::Split": [ "||", { - "Ref": "AssetParameters5c61041c12314e1ad8e67a0107fa3733382a206a78cdc1576fffa7e93caca5b4S3VersionKey77778F6A" + "Ref": "AssetParameters1c4eb88f5a8270f387281dcff6e3493840634113c4d57044f4aff74e3ef94c2dS3VersionKey0124EFC4" } ] } @@ -550,7 +550,7 @@ "Fn::Split": [ "||", { - "Ref": "AssetParameters5c61041c12314e1ad8e67a0107fa3733382a206a78cdc1576fffa7e93caca5b4S3VersionKey77778F6A" + "Ref": "AssetParameters1c4eb88f5a8270f387281dcff6e3493840634113c4d57044f4aff74e3ef94c2dS3VersionKey0124EFC4" } ] } @@ -576,17 +576,17 @@ } }, 
"Parameters": { - "AssetParameters5c61041c12314e1ad8e67a0107fa3733382a206a78cdc1576fffa7e93caca5b4S3BucketB17E5ABD": { + "AssetParameters1c4eb88f5a8270f387281dcff6e3493840634113c4d57044f4aff74e3ef94c2dS3Bucket4C71F166": { "Type": "String", - "Description": "S3 bucket for asset \"5c61041c12314e1ad8e67a0107fa3733382a206a78cdc1576fffa7e93caca5b4\"" + "Description": "S3 bucket for asset \"1c4eb88f5a8270f387281dcff6e3493840634113c4d57044f4aff74e3ef94c2d\"" }, - "AssetParameters5c61041c12314e1ad8e67a0107fa3733382a206a78cdc1576fffa7e93caca5b4S3VersionKey77778F6A": { + "AssetParameters1c4eb88f5a8270f387281dcff6e3493840634113c4d57044f4aff74e3ef94c2dS3VersionKey0124EFC4": { "Type": "String", - "Description": "S3 key for asset version \"5c61041c12314e1ad8e67a0107fa3733382a206a78cdc1576fffa7e93caca5b4\"" + "Description": "S3 key for asset version \"1c4eb88f5a8270f387281dcff6e3493840634113c4d57044f4aff74e3ef94c2d\"" }, - "AssetParameters5c61041c12314e1ad8e67a0107fa3733382a206a78cdc1576fffa7e93caca5b4ArtifactHash580E429C": { + "AssetParameters1c4eb88f5a8270f387281dcff6e3493840634113c4d57044f4aff74e3ef94c2dArtifactHash6350D824": { "Type": "String", - "Description": "Artifact hash for asset \"5c61041c12314e1ad8e67a0107fa3733382a206a78cdc1576fffa7e93caca5b4\"" + "Description": "Artifact hash for asset \"1c4eb88f5a8270f387281dcff6e3493840634113c4d57044f4aff74e3ef94c2d\"" } }, "Outputs": { diff --git a/packages/@aws-cdk/aws-msk/test/integ.cluster.ts b/packages/@aws-cdk/aws-msk/test/integ.cluster.ts index c05fa496d7210..06ad0893d20d7 100644 --- a/packages/@aws-cdk/aws-msk/test/integ.cluster.ts +++ b/packages/@aws-cdk/aws-msk/test/integ.cluster.ts @@ -10,7 +10,7 @@ const vpc = new ec2.Vpc(stack, 'VPC', { maxAzs: 2 }); const cluster = new msk.Cluster(stack, 'Cluster', { clusterName: 'integ-test', - kafkaVersion: msk.KafkaVersion.V2_6_1, + kafkaVersion: msk.KafkaVersion.V2_8_1, vpc, removalPolicy: cdk.RemovalPolicy.DESTROY, }); From 55fbc866ef0195fdfc722206e4d69a1f4469cd40 Mon Sep 17 00:00:00 2001 From: Martin Date: Fri, 15 Oct 2021 06:24:56 +1100 Subject: [PATCH 06/37] feat(codebuild): add support for small ARM machine type (#16635) closes #16633 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../@aws-cdk/aws-codebuild/lib/project.ts | 3 +- .../aws-codebuild/test/codebuild.test.ts | 43 ++++++++++++++----- 2 files changed, 34 insertions(+), 12 deletions(-) diff --git a/packages/@aws-cdk/aws-codebuild/lib/project.ts b/packages/@aws-cdk/aws-codebuild/lib/project.ts index 18dbda2cbcd3a..186c58f84138f 100644 --- a/packages/@aws-cdk/aws-codebuild/lib/project.ts +++ b/packages/@aws-cdk/aws-codebuild/lib/project.ts @@ -1656,8 +1656,9 @@ class ArmBuildImage implements IBuildImage { public validate(buildEnvironment: BuildEnvironment): string[] { const ret = []; if (buildEnvironment.computeType && + buildEnvironment.computeType !== ComputeType.SMALL && buildEnvironment.computeType !== ComputeType.LARGE) { - ret.push(`ARM images only support ComputeType '${ComputeType.LARGE}' - ` + + ret.push(`ARM images only support ComputeTypes '${ComputeType.SMALL}' and '${ComputeType.LARGE}' - ` + `'${buildEnvironment.computeType}' was given`); } return ret; diff --git a/packages/@aws-cdk/aws-codebuild/test/codebuild.test.ts b/packages/@aws-cdk/aws-codebuild/test/codebuild.test.ts index cc8f9309bcb44..f5bb3eacda8d1 100644 --- a/packages/@aws-cdk/aws-codebuild/test/codebuild.test.ts +++ b/packages/@aws-cdk/aws-codebuild/test/codebuild.test.ts @@ -1590,17 
+1590,21 @@ describe('ARM image', () => { }); }); - test('cannot be used in conjunction with ComputeType SMALL', () => { + test('can be used with ComputeType SMALL', () => { const stack = new cdk.Stack(); + new codebuild.PipelineProject(stack, 'Project', { + environment: { + computeType: codebuild.ComputeType.SMALL, + buildImage: codebuild.LinuxBuildImage.AMAZON_LINUX_2_ARM, + }, + }); - expect(() => { - new codebuild.PipelineProject(stack, 'Project', { - environment: { - buildImage: codebuild.LinuxBuildImage.AMAZON_LINUX_2_ARM, - computeType: codebuild.ComputeType.SMALL, - }, - }); - }).toThrow(/ARM images only support ComputeType 'BUILD_GENERAL1_LARGE' - 'BUILD_GENERAL1_SMALL' was given/); + expect(stack).toHaveResourceLike('AWS::CodeBuild::Project', { + 'Environment': { + 'Type': 'ARM_CONTAINER', + 'ComputeType': 'BUILD_GENERAL1_SMALL', + }, + }); }); test('cannot be used in conjunction with ComputeType MEDIUM', () => { @@ -1613,7 +1617,24 @@ describe('ARM image', () => { computeType: codebuild.ComputeType.MEDIUM, }, }); - }).toThrow(/ARM images only support ComputeType 'BUILD_GENERAL1_LARGE' - 'BUILD_GENERAL1_MEDIUM' was given/); + }).toThrow(/ARM images only support ComputeTypes 'BUILD_GENERAL1_SMALL' and 'BUILD_GENERAL1_LARGE' - 'BUILD_GENERAL1_MEDIUM' was given/); + }); + + test('can be used with ComputeType LARGE', () => { + const stack = new cdk.Stack(); + new codebuild.PipelineProject(stack, 'Project', { + environment: { + computeType: codebuild.ComputeType.LARGE, + buildImage: codebuild.LinuxBuildImage.AMAZON_LINUX_2_ARM, + }, + }); + + expect(stack).toHaveResourceLike('AWS::CodeBuild::Project', { + 'Environment': { + 'Type': 'ARM_CONTAINER', + 'ComputeType': 'BUILD_GENERAL1_LARGE', + }, + }); }); test('cannot be used in conjunction with ComputeType X2_LARGE', () => { @@ -1626,7 +1647,7 @@ describe('ARM image', () => { computeType: codebuild.ComputeType.X2_LARGE, }, }); - }).toThrow(/ARM images only support ComputeType 'BUILD_GENERAL1_LARGE' - 'BUILD_GENERAL1_2XLARGE' was given/); + }).toThrow(/ARM images only support ComputeTypes 'BUILD_GENERAL1_SMALL' and 'BUILD_GENERAL1_LARGE' - 'BUILD_GENERAL1_2XLARGE' was given/); }); }); }); From fbb49fec0e70dbec119bdb98562d68ccc2f0385d Mon Sep 17 00:00:00 2001 From: Niranjan Jayakar Date: Fri, 15 Oct 2021 13:56:54 +0530 Subject: [PATCH 07/37] chore(cfnspec): improve error meesaging when spec validation fails (#16991) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Print the node of the CFN spec where validation fails, so the erroneous part of the spec can be located. Currently, when a Property in a PropertyType has two "Type" fields specified, the message looks like below. Only the resource type being validated is printed. 
``` FAIL test/filtered-specification.test.js (8.522 s) filteredSpecification("AWS::Synthetics::Canary") expect(received).toEqual(expected) - Expected - 1 + Received + 3 - Array [] + Array [ + "Type", + ] ``` With this change, the message will look like - ``` FAIL test/filtered-specification.test.js (8.522 s) filteredSpecification("AWS::Synthetics::Canary") › PropertyType AWS::Synthetics::Canary.ArtifactConfig › Property S3Encryption expect(received).toEqual(expected) - Expected - 1 + Received + 3 - Array [] + Array [ + "Type", + ] ``` ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../test/filtered-specification.test.ts | 2 +- .../@aws-cdk/cfnspec/test/spec-validators.ts | 217 +++++++++--------- 2 files changed, 114 insertions(+), 105 deletions(-) diff --git a/packages/@aws-cdk/cfnspec/test/filtered-specification.test.ts b/packages/@aws-cdk/cfnspec/test/filtered-specification.test.ts index a4bd359517da1..e9f18346699ea 100644 --- a/packages/@aws-cdk/cfnspec/test/filtered-specification.test.ts +++ b/packages/@aws-cdk/cfnspec/test/filtered-specification.test.ts @@ -14,7 +14,7 @@ test('filteredSpecification(s => s.startsWith("AWS::S3::")', () => { }); for (const name of resourceTypes().sort()) { - test(`filteredSpecification(${JSON.stringify(name)})`, () => { + describe(`filteredSpecification(${JSON.stringify(name)})`, () => { const filteredSpec = filteredSpecification(name); expect(filteredSpec).not.toEqual(specification); expect(filteredSpec.ResourceTypes).not.toEqual({}); diff --git a/packages/@aws-cdk/cfnspec/test/spec-validators.ts b/packages/@aws-cdk/cfnspec/test/spec-validators.ts index 0c49c6bfa1a03..c8993885332fb 100644 --- a/packages/@aws-cdk/cfnspec/test/spec-validators.ts +++ b/packages/@aws-cdk/cfnspec/test/spec-validators.ts @@ -1,3 +1,4 @@ +/* eslint-disable jest/no-export */ import * as schema from '../lib/schema'; export function validateSpecification(specification: schema.Specification) { @@ -7,26 +8,30 @@ export function validateSpecification(specification: schema.Specification) { function validateResourceTypes(specification: schema.Specification) { for (const typeName of Object.keys(specification.ResourceTypes)) { - expect(typeName).toBeTruthy(); - const type = specification.ResourceTypes[typeName]; - expect(type.Documentation).not.toBeNull(); - if (type.ScrutinyType) { - expect(schema.isResourceScrutinyType(type.ScrutinyType)).toBeTruthy(); - } - if (type.Properties) { validateProperties(typeName, type.Properties, specification); } - if (type.Attributes) { validateAttributes(typeName, type.Attributes, specification); } + describe(typeName, () => { + expect(typeName).toBeTruthy(); + const type = specification.ResourceTypes[typeName]; + expect(type.Documentation).not.toBeNull(); + if (type.ScrutinyType) { + expect(schema.isResourceScrutinyType(type.ScrutinyType)).toBeTruthy(); + } + if (type.Properties) { validateProperties(typeName, type.Properties, specification); } + if (type.Attributes) { validateAttributes(typeName, type.Attributes, specification); } + }); } } function validatePropertyTypes(specification: schema.Specification) { for (const typeName of Object.keys(specification.PropertyTypes)) { - expect(typeName).toBeTruthy(); - const type = specification.PropertyTypes[typeName]; - if (schema.isRecordType(type)) { - validateProperties(typeName, type.Properties, specification); - } else { - validateProperties(typeName, { '': type }, specification); - } + describe(`PropertyType ${typeName}`, () 
=> { + expect(typeName).toBeTruthy(); + const type = specification.PropertyTypes[typeName]; + if (schema.isRecordType(type)) { + validateProperties(typeName, type.Properties, specification); + } else { + validateProperties(typeName, { '': type }, specification); + } + }); } } @@ -37,85 +42,87 @@ function validateProperties( const expectedKeys = ['Documentation', 'Required', 'UpdateType', 'ScrutinyType']; for (const name of Object.keys(properties)) { - const property = properties[name]; - expect(property.Documentation).not.toEqual(''); - expect(!property.UpdateType || schema.isUpdateType(property.UpdateType)).toBeTruthy(); - if (property.ScrutinyType !== undefined) { - expect(schema.isPropertyScrutinyType(property.ScrutinyType)).toBeTruthy(); - } + test(`Property ${name}`, () => { + const property = properties[name]; + expect(property.Documentation).not.toEqual(''); + expect(!property.UpdateType || schema.isUpdateType(property.UpdateType)).toBeTruthy(); + if (property.ScrutinyType !== undefined) { + expect(schema.isPropertyScrutinyType(property.ScrutinyType)).toBeTruthy(); + } + + if (schema.isPrimitiveProperty(property)) { + expect(schema.isPrimitiveType(property.PrimitiveType)).toBeTruthy(); + expectedKeys.push('PrimitiveType'); - if (schema.isPrimitiveProperty(property)) { - expect(schema.isPrimitiveType(property.PrimitiveType)).toBeTruthy(); - expectedKeys.push('PrimitiveType'); + } else if (schema.isPrimitiveListProperty(property)) { + expectedKeys.push('Type', 'DuplicatesAllowed', 'PrimitiveItemType'); + expect(schema.isPrimitiveType(property.PrimitiveItemType)).toBeTruthy(); - } else if (schema.isPrimitiveListProperty(property)) { - expectedKeys.push('Type', 'DuplicatesAllowed', 'PrimitiveItemType'); - expect(schema.isPrimitiveType(property.PrimitiveItemType)).toBeTruthy(); + } else if (schema.isPrimitiveMapProperty(property)) { + expectedKeys.push('Type', 'DuplicatesAllowed', 'PrimitiveItemType', 'Type'); + expect(schema.isPrimitiveType(property.PrimitiveItemType)).toBeTruthy(); + expect(property.DuplicatesAllowed).toBeFalsy(); - } else if (schema.isPrimitiveMapProperty(property)) { - expectedKeys.push('Type', 'DuplicatesAllowed', 'PrimitiveItemType', 'Type'); - expect(schema.isPrimitiveType(property.PrimitiveItemType)).toBeTruthy(); - expect(property.DuplicatesAllowed).toBeFalsy(); + } else if (schema.isComplexListProperty(property)) { + expectedKeys.push('Type', 'DuplicatesAllowed', 'ItemType', 'Type'); + expect(property.ItemType).toBeTruthy(); + if (property.ItemType !== 'Tag') { + const fqn = `${typeName.split('.')[0]}.${property.ItemType}`; + const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; + expect(resolvedType).toBeTruthy(); + } - } else if (schema.isComplexListProperty(property)) { - expectedKeys.push('Type', 'DuplicatesAllowed', 'ItemType', 'Type'); - expect(property.ItemType).toBeTruthy(); - if (property.ItemType !== 'Tag') { + } else if (schema.isMapOfStructsProperty(property)) { + expectedKeys.push('Type', 'DuplicatesAllowed', 'ItemType', 'Type'); + expect(property.ItemType).toBeTruthy(); const fqn = `${typeName.split('.')[0]}.${property.ItemType}`; const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; expect(resolvedType).toBeTruthy(); - } + expect(property.DuplicatesAllowed).toBeFalsy(); + + } else if (schema.isMapOfListsOfPrimitivesProperty(property)) { + expectedKeys.push('Type', 'DuplicatesAllowed', 'ItemType', 'PrimitiveItemItemType', 'Type'); + 
expect(schema.isPrimitiveType(property.PrimitiveItemItemType)).toBeTruthy(); + expect(property.DuplicatesAllowed).toBeFalsy(); + + } else if (schema.isComplexProperty(property)) { + expectedKeys.push('Type'); + expect(property.Type).toBeTruthy(); + const fqn = `${typeName.split('.')[0]}.${property.Type}`; + const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; + expect(resolvedType).toBeTruthy(); - } else if (schema.isMapOfStructsProperty(property)) { - expectedKeys.push('Type', 'DuplicatesAllowed', 'ItemType', 'Type'); - expect(property.ItemType).toBeTruthy(); - const fqn = `${typeName.split('.')[0]}.${property.ItemType}`; - const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; - expect(resolvedType).toBeTruthy(); - expect(property.DuplicatesAllowed).toBeFalsy(); - - } else if (schema.isMapOfListsOfPrimitivesProperty(property)) { - expectedKeys.push('Type', 'DuplicatesAllowed', 'ItemType', 'PrimitiveItemItemType', 'Type'); - expect(schema.isPrimitiveType(property.PrimitiveItemItemType)).toBeTruthy(); - expect(property.DuplicatesAllowed).toBeFalsy(); - - } else if (schema.isComplexProperty(property)) { - expectedKeys.push('Type'); - expect(property.Type).toBeTruthy(); - const fqn = `${typeName.split('.')[0]}.${property.Type}`; - const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; - expect(resolvedType).toBeTruthy(); - - } else if (schema.isUnionProperty(property)) { - expectedKeys.push('PrimitiveTypes', 'PrimitiveItemTypes', 'ItemTypes', 'Types'); - if (property.PrimitiveTypes) { - for (const type of property.PrimitiveTypes) { - expect(schema.isPrimitiveType(type)).toBeTruthy(); + } else if (schema.isUnionProperty(property)) { + expectedKeys.push('PrimitiveTypes', 'PrimitiveItemTypes', 'ItemTypes', 'Types'); + if (property.PrimitiveTypes) { + for (const type of property.PrimitiveTypes) { + expect(schema.isPrimitiveType(type)).toBeTruthy(); + } } - } - if (property.ItemTypes) { - for (const type of property.ItemTypes) { - const fqn = `${typeName.split('.')[0]}.${type}`; - const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; - expect(resolvedType).toBeTruthy(); + if (property.ItemTypes) { + for (const type of property.ItemTypes) { + const fqn = `${typeName.split('.')[0]}.${type}`; + const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; + expect(resolvedType).toBeTruthy(); + } } - } - if (property.Types) { - for (const type of property.Types) { - const fqn = `${typeName.split('.')[0]}.${type}`; - const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; - expect(resolvedType).toBeTruthy(); + if (property.Types) { + for (const type of property.Types) { + const fqn = `${typeName.split('.')[0]}.${type}`; + const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; + expect(resolvedType).toBeTruthy(); + } } - } - } else { - // eslint-disable-next-line no-console - console.error(`${typeName}.Properties.${name} does not declare a type.` + - `Property definition is: ${JSON.stringify(property, undefined, 2)}`); - expect(false).toBeTruthy(); - } + } else { + // eslint-disable-next-line no-console + console.error(`${typeName}.Properties.${name} does not declare a type.` + + `Property definition is: ${JSON.stringify(property, undefined, 2)}`); + expect(false).toBeTruthy(); + } - expect(without(Object.keys(property), expectedKeys)).toEqual([]); + expect(without(Object.keys(property), 
expectedKeys)).toEqual([]); + }); } } @@ -124,29 +131,31 @@ function validateAttributes( attributes: { [name: string]: schema.Attribute }, specification: schema.Specification) { for (const name of Object.keys(attributes)) { - const attribute = attributes[name]; - expect(('Type' in attribute)).not.toEqual(('PrimitiveType' in attribute)); - if (schema.isPrimitiveAttribute(attribute)) { - expect(schema.isListAttribute(attribute)).toBeFalsy(); - expect(schema.isPrimitiveType(attribute.PrimitiveType)).toBeTruthy(); - expect(('PrimitiveItemType' in attribute)).toBeFalsy(); - expect(('ItemType' in attribute)).toBeFalsy(); - } else if (schema.isPrimitiveListAttribute(attribute)) { - expect(schema.isComplexListAttribute(attribute)).toBeFalsy(); - expect(schema.isPrimitiveType(attribute.PrimitiveItemType)).toBeTruthy(); - expect(('ItemType' in attribute)).toBeFalsy(); - } else if (schema.isComplexListAttribute(attribute)) { - expect(attribute.ItemType).toBeTruthy(); - const fqn = `${typeName.split('.')[0]}.${attribute.ItemType}`; - const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; - expect(resolvedType).toBeTruthy(); - expect(('PrimitiveItemType' in attribute)).toBeFalsy(); - } else if (schema.isPrimitiveMapAttribute(attribute)) { - expect(schema.isPrimitiveType(attribute.PrimitiveItemType)).toBeTruthy(); - expect(('ItemType' in attribute)).toBeFalsy(); - } else { - expect(false).toBeTruthy(); // `${typeName}.Attributes.${name} has a valid type`); - } + test(`Attribute ${name}`, () => { + const attribute = attributes[name]; + expect(('Type' in attribute)).not.toEqual(('PrimitiveType' in attribute)); + if (schema.isPrimitiveAttribute(attribute)) { + expect(schema.isListAttribute(attribute)).toBeFalsy(); + expect(schema.isPrimitiveType(attribute.PrimitiveType)).toBeTruthy(); + expect(('PrimitiveItemType' in attribute)).toBeFalsy(); + expect(('ItemType' in attribute)).toBeFalsy(); + } else if (schema.isPrimitiveListAttribute(attribute)) { + expect(schema.isComplexListAttribute(attribute)).toBeFalsy(); + expect(schema.isPrimitiveType(attribute.PrimitiveItemType)).toBeTruthy(); + expect(('ItemType' in attribute)).toBeFalsy(); + } else if (schema.isComplexListAttribute(attribute)) { + expect(attribute.ItemType).toBeTruthy(); + const fqn = `${typeName.split('.')[0]}.${attribute.ItemType}`; + const resolvedType = specification.PropertyTypes && specification.PropertyTypes[fqn]; + expect(resolvedType).toBeTruthy(); + expect(('PrimitiveItemType' in attribute)).toBeFalsy(); + } else if (schema.isPrimitiveMapAttribute(attribute)) { + expect(schema.isPrimitiveType(attribute.PrimitiveItemType)).toBeTruthy(); + expect(('ItemType' in attribute)).toBeFalsy(); + } else { + expect(false).toBeTruthy(); // `${typeName}.Attributes.${name} has a valid type`); + } + }); } } From dc6f7433f01b9bc2c8206fb03d72ab8404fe4f6a Mon Sep 17 00:00:00 2001 From: Mark Robinson Date: Fri, 15 Oct 2021 19:59:43 +0100 Subject: [PATCH 08/37] fix(ecs): imported services don't have account & region set correctly (#16997) This is a fix for the region issue raised by #11199 allowing multi regional ecs deployments fixes #11199 supersedes #15944, merged master and added tests ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/aws-ecs/lib/base/from-service-attributes.ts | 4 +++- packages/@aws-cdk/aws-ecs/test/ec2/ec2-service.test.ts | 2 ++ .../@aws-cdk/aws-ecs/test/fargate/fargate-service.test.ts | 2 ++ 3 files changed, 7 
insertions(+), 1 deletion(-) diff --git a/packages/@aws-cdk/aws-ecs/lib/base/from-service-attributes.ts b/packages/@aws-cdk/aws-ecs/lib/base/from-service-attributes.ts index 80ec042626153..cfaff590b3eb1 100644 --- a/packages/@aws-cdk/aws-ecs/lib/base/from-service-attributes.ts +++ b/packages/@aws-cdk/aws-ecs/lib/base/from-service-attributes.ts @@ -54,5 +54,7 @@ export function fromServiceAtrributes(scope: Construct, id: string, attrs: Servi public readonly serviceName = name; public readonly cluster = attrs.cluster; } - return new Import(scope, id); + return new Import(scope, id, { + environmentFromArn: arn, + }); } \ No newline at end of file diff --git a/packages/@aws-cdk/aws-ecs/test/ec2/ec2-service.test.ts b/packages/@aws-cdk/aws-ecs/test/ec2/ec2-service.test.ts index 48da04142ff3c..21d4292346974 100644 --- a/packages/@aws-cdk/aws-ecs/test/ec2/ec2-service.test.ts +++ b/packages/@aws-cdk/aws-ecs/test/ec2/ec2-service.test.ts @@ -3292,6 +3292,8 @@ describe('ec2 service', () => { expect(service.serviceArn).toEqual('arn:aws:ecs:us-west-2:123456789012:service/my-http-service'); expect(service.serviceName).toEqual('my-http-service'); + expect(service.env.account).toEqual('123456789012'); + expect(service.env.region).toEqual('us-west-2'); }); diff --git a/packages/@aws-cdk/aws-ecs/test/fargate/fargate-service.test.ts b/packages/@aws-cdk/aws-ecs/test/fargate/fargate-service.test.ts index ef4ceae4ccedc..71866a116443f 100644 --- a/packages/@aws-cdk/aws-ecs/test/fargate/fargate-service.test.ts +++ b/packages/@aws-cdk/aws-ecs/test/fargate/fargate-service.test.ts @@ -2124,6 +2124,8 @@ describe('fargate service', () => { expect(service.serviceArn).toEqual('arn:aws:ecs:us-west-2:123456789012:service/my-http-service'); expect(service.serviceName).toEqual('my-http-service'); + expect(service.env.account).toEqual('123456789012'); + expect(service.env.region).toEqual('us-west-2'); }); From 254601f477a4da309e81f5384140427f1b958bfd Mon Sep 17 00:00:00 2001 From: Shreepad Patil Date: Fri, 15 Oct 2021 16:36:11 -0400 Subject: [PATCH 09/37] feat(dynamodb): add option to skip waiting for global replication to finish (#16983) Motivation - On large tables, replication takes long time to complete. CloudFormation has a hard timeout of 1 hour on the Custom Resources, to bypass this, we want to have the replication continue in background based on a property. Fixes #16611 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../aws-dynamodb/lib/replica-handler/index.ts | 3 +- packages/@aws-cdk/aws-dynamodb/lib/table.ts | 21 +++++++- .../aws-dynamodb/test/dynamodb.test.ts | 54 +++++++++++++++++++ 3 files changed, 75 insertions(+), 3 deletions(-) diff --git a/packages/@aws-cdk/aws-dynamodb/lib/replica-handler/index.ts b/packages/@aws-cdk/aws-dynamodb/lib/replica-handler/index.ts index 1554dcc84004d..38109b1a4a614 100644 --- a/packages/@aws-cdk/aws-dynamodb/lib/replica-handler/index.ts +++ b/packages/@aws-cdk/aws-dynamodb/lib/replica-handler/index.ts @@ -49,12 +49,13 @@ export async function isCompleteHandler(event: IsCompleteRequest): Promise r.RegionName === event.ResourceProperties.Region); const replicaActive = !!(regionReplica?.ReplicaStatus === 'ACTIVE'); + const skipReplicationCompletedWait = event.ResourceProperties.SkipReplicationCompletedWait ?? 
false; switch (event.RequestType) { case 'Create': case 'Update': // Complete when replica is reported as ACTIVE - return { IsComplete: tableActive && replicaActive }; + return { IsComplete: tableActive && (replicaActive || skipReplicationCompletedWait) }; case 'Delete': // Complete when replica is gone return { IsComplete: tableActive && regionReplica === undefined }; diff --git a/packages/@aws-cdk/aws-dynamodb/lib/table.ts b/packages/@aws-cdk/aws-dynamodb/lib/table.ts index a2b1aae5ee2eb..10a7472be3c0d 100644 --- a/packages/@aws-cdk/aws-dynamodb/lib/table.ts +++ b/packages/@aws-cdk/aws-dynamodb/lib/table.ts @@ -231,6 +231,22 @@ export interface TableOptions extends SchemaOptions { */ readonly replicationTimeout?: Duration; + /** + * Indicates whether CloudFormation stack waits for replication to finish. + * If set to false, the CloudFormation resource will mark the resource as + * created and replication will be completed asynchronously. This property is + * ignored if replicationRegions property is not set. + * + * DO NOT UNSET this property if adding/removing multiple replicationRegions + * in one deployment, as CloudFormation only supports one region replication + * at a time. CDK overcomes this limitation by waiting for replication to + * finish before starting new replicationRegion. + * + * @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dynamodb-globaltable.html#cfn-dynamodb-globaltable-replicas + * @default true + */ + readonly waitForReplicationToFinish?: boolean; + /** * Whether CloudWatch contributor insights is enabled. * @@ -1152,7 +1168,7 @@ export class Table extends TableBase { } if (props.replicationRegions && props.replicationRegions.length > 0) { - this.createReplicaTables(props.replicationRegions, props.replicationTimeout); + this.createReplicaTables(props.replicationRegions, props.replicationTimeout, props.waitForReplicationToFinish); } } @@ -1494,7 +1510,7 @@ export class Table extends TableBase { * * @param regions regions where to create tables */ - private createReplicaTables(regions: string[], timeout?: Duration) { + private createReplicaTables(regions: string[], timeout?: Duration, waitForReplicationToFinish?: boolean) { const stack = Stack.of(this); if (!Token.isUnresolved(stack.region) && regions.includes(stack.region)) { @@ -1524,6 +1540,7 @@ export class Table extends TableBase { properties: { TableName: this.tableName, Region: region, + SkipReplicationCompletedWait: waitForReplicationToFinish === undefined ? 
undefined : !waitForReplicationToFinish, }, }); currentRegion.node.addDependency( diff --git a/packages/@aws-cdk/aws-dynamodb/test/dynamodb.test.ts b/packages/@aws-cdk/aws-dynamodb/test/dynamodb.test.ts index 56ab48f19f501..a095bff84ca25 100644 --- a/packages/@aws-cdk/aws-dynamodb/test/dynamodb.test.ts +++ b/packages/@aws-cdk/aws-dynamodb/test/dynamodb.test.ts @@ -2438,6 +2438,60 @@ describe('global', () => { }); }); + test('create replicas without waiting to finish replication', () => { + // GIVEN + const stack = new Stack(); + + // WHEN + new Table(stack, 'Table', { + partitionKey: { + name: 'id', + type: AttributeType.STRING, + }, + replicationRegions: [ + 'eu-west-2', + 'eu-central-1', + ], + waitForReplicationToFinish: false, + }); + + // THEN + expect(stack).toHaveResource('Custom::DynamoDBReplica', { + Properties: { + TableName: { + Ref: 'TableCD117FA1', + }, + Region: 'eu-west-2', + SkipReplicationCompletedWait: true, + }, + Condition: 'TableStackRegionNotEqualseuwest2A03859E7', + }, ResourcePart.CompleteDefinition); + + expect(stack).toHaveResource('Custom::DynamoDBReplica', { + Properties: { + TableName: { + Ref: 'TableCD117FA1', + }, + Region: 'eu-central-1', + SkipReplicationCompletedWait: true, + }, + Condition: 'TableStackRegionNotEqualseucentral199D46FC0', + }, ResourcePart.CompleteDefinition); + + expect(SynthUtils.toCloudFormation(stack).Conditions).toEqual({ + TableStackRegionNotEqualseuwest2A03859E7: { + 'Fn::Not': [ + { 'Fn::Equals': ['eu-west-2', { Ref: 'AWS::Region' }] }, + ], + }, + TableStackRegionNotEqualseucentral199D46FC0: { + 'Fn::Not': [ + { 'Fn::Equals': ['eu-central-1', { Ref: 'AWS::Region' }] }, + ], + }, + }); + }); + test('grantReadData', () => { const stack = new Stack(); const table = new Table(stack, 'Table', { From b6cb3821df5280e9d8222551c50ac196d00ee0fc Mon Sep 17 00:00:00 2001 From: kaizen3031593 <36202692+kaizen3031593@users.noreply.github.com> Date: Fri, 15 Oct 2021 17:28:15 -0400 Subject: [PATCH 10/37] docs(codepipeline-actions): make examples compile (#16984) ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../aws-codepipeline-actions/README.md | 202 ++++++++++++------ .../rosetta/default.ts-fixture | 17 ++ ...ambda-deployed-through-codepipeline.lit.ts | 4 +- 3 files changed, 152 insertions(+), 71 deletions(-) create mode 100644 packages/@aws-cdk/aws-codepipeline-actions/rosetta/default.ts-fixture diff --git a/packages/@aws-cdk/aws-codepipeline-actions/README.md b/packages/@aws-cdk/aws-codepipeline-actions/README.md index e228c0669c81a..05860cfe20233 100644 --- a/packages/@aws-cdk/aws-codepipeline-actions/README.md +++ b/packages/@aws-cdk/aws-codepipeline-actions/README.md @@ -11,7 +11,7 @@ This package contains Actions that can be used in a CodePipeline. -```ts +```ts nofixture import * as codepipeline from '@aws-cdk/aws-codepipeline'; import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; ``` @@ -23,10 +23,8 @@ import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; To use a CodeCommit Repository in a CodePipeline: ```ts -import * as codecommit from '@aws-cdk/aws-codecommit'; - const repo = new codecommit.Repository(this, 'Repo', { - // ... + repositoryName: 'MyRepo', }); const pipeline = new codepipeline.Pipeline(this, 'MyPipeline', { @@ -49,6 +47,7 @@ You can specify the role object in eventRole property. 
```ts const eventRole = iam.Role.fromRoleArn(this, 'Event-role', 'roleArn'); +declare const repo: codecommit.Repository; const sourceAction = new codepipeline_actions.CodeCommitSourceAction({ actionName: 'CodeCommit', repository: repo, @@ -61,6 +60,8 @@ If you want to clone the entire CodeCommit repository (only available for CodeBu you can set the `codeBuildCloneOutput` property to `true`: ```ts +declare const project: codebuild.PipelineProject; +declare const repo: codecommit.Repository; const sourceOutput = new codepipeline.Artifact(); const sourceAction = new codepipeline_actions.CodeCommitSourceAction({ actionName: 'CodeCommit', @@ -80,15 +81,22 @@ const buildAction = new codepipeline_actions.CodeBuildAction({ The CodeCommit source action emits variables: ```ts +declare const project: codebuild.PipelineProject; +declare const repo: codecommit.Repository; +const sourceOutput = new codepipeline.Artifact(); const sourceAction = new codepipeline_actions.CodeCommitSourceAction({ - // ... + actionName: 'CodeCommit', + repository: repo, + output: sourceOutput, variablesNamespace: 'MyNamespace', // optional - by default, a name will be generated for you }); // later: new codepipeline_actions.CodeBuildAction({ - // ... + actionName: 'CodeBuild', + project, + input: sourceOutput, environmentVariables: { COMMIT_ID: { value: sourceAction.variables.commitId, @@ -107,20 +115,21 @@ If you want to use a GitHub repository as the source, you must create: with the value of the **GitHub Access Token**. Pick whatever name you want (for example `my-github-token`). This token can be stored either as Plaintext or as a Secret key/value. If you stored the token as Plaintext, - set `cdk.SecretValue.secretsManager('my-github-token')` as the value of `oauthToken`. + set `SecretValue.secretsManager('my-github-token')` as the value of `oauthToken`. If you stored it as a Secret key/value, - you must set `cdk.SecretValue.secretsManager('my-github-token', { jsonField : 'my-github-token' })` as the value of `oauthToken`. + you must set `SecretValue.secretsManager('my-github-token', { jsonField : 'my-github-token' })` as the value of `oauthToken`. To use GitHub as the source of a CodePipeline: ```ts // Read the secret from Secrets Manager +const pipeline = new codepipeline.Pipeline(this, 'MyPipeline'); const sourceOutput = new codepipeline.Artifact(); const sourceAction = new codepipeline_actions.GitHubSourceAction({ actionName: 'GitHub_Source', owner: 'awslabs', repo: 'aws-cdk', - oauthToken: cdk.SecretValue.secretsManager('my-github-token'), + oauthToken: SecretValue.secretsManager('my-github-token'), output: sourceOutput, branch: 'develop', // default: 'master' }); @@ -133,15 +142,24 @@ pipeline.addStage({ The GitHub source action emits variables: ```ts +declare const sourceOutput: codepipeline.Artifact; +declare const project: codebuild.PipelineProject; + const sourceAction = new codepipeline_actions.GitHubSourceAction({ - // ... + actionName: 'Github_Source', + output: sourceOutput, + owner: 'my-owner', + repo: 'my-repo', + oauthToken: SecretValue.secretsManager('my-github-token'), variablesNamespace: 'MyNamespace', // optional - by default, a name will be generated for you }); // later: new codepipeline_actions.CodeBuildAction({ - // ... 
+ actionName: 'CodeBuild', + project, + input: sourceOutput, environmentVariables: { COMMIT_URL: { value: sourceAction.variables.commitUrl, @@ -185,8 +203,6 @@ You can also use the `CodeStarConnectionsSourceAction` to connect to GitHub, in To use an S3 Bucket as a source in CodePipeline: ```ts -import * as s3 from '@aws-cdk/aws-s3'; - const sourceBucket = new s3.Bucket(this, 'MyBucket', { versioned: true, // a Bucket used as a source in CodePipeline must be versioned }); @@ -229,6 +245,8 @@ You can do it through the CDK: ```ts import * as cloudtrail from '@aws-cdk/aws-cloudtrail'; +declare const sourceBucket: s3.Bucket; +const sourceOutput = new codepipeline.Artifact(); const key = 'some/key.zip'; const trail = new cloudtrail.Trail(this, 'CloudTrail'); trail.addS3EventSelector([{ @@ -249,15 +267,23 @@ const sourceAction = new codepipeline_actions.S3SourceAction({ The S3 source action emits variables: ```ts +const key = 'some/key.zip'; +declare const sourceBucket: s3.Bucket; +const sourceOutput = new codepipeline.Artifact(); const sourceAction = new codepipeline_actions.S3SourceAction({ - // ... + actionName: 'S3Source', + bucketKey: key, + bucket: sourceBucket, + output: sourceOutput, variablesNamespace: 'MyNamespace', // optional - by default, a name will be generated for you }); // later: - +declare const project: codebuild.PipelineProject; new codepipeline_actions.CodeBuildAction({ - // ... + actionName: 'CodeBuild', + project, + input: sourceOutput, environmentVariables: { VERSION_ID: { value: sourceAction.variables.versionId, @@ -273,6 +299,7 @@ To use an ECR Repository as a source in a Pipeline: ```ts import * as ecr from '@aws-cdk/aws-ecr'; +declare const ecrRepository: ecr.Repository; const pipeline = new codepipeline.Pipeline(this, 'MyPipeline'); const sourceOutput = new codepipeline.Artifact(); const sourceAction = new codepipeline_actions.EcrSourceAction({ @@ -290,15 +317,23 @@ pipeline.addStage({ The ECR source action emits variables: ```ts +import * as ecr from '@aws-cdk/aws-ecr'; + +const sourceOutput = new codepipeline.Artifact(); +declare const ecrRepository: ecr.Repository; const sourceAction = new codepipeline_actions.EcrSourceAction({ - // ... + actionName: 'Source', + output: sourceOutput, + repository: ecrRepository, variablesNamespace: 'MyNamespace', // optional - by default, a name will be generated for you }); // later: - +declare const project: codebuild.PipelineProject; new codepipeline_actions.CodeBuildAction({ - // ... + actionName: 'CodeBuild', + project, + input: sourceOutput, environmentVariables: { IMAGE_URI: { value: sourceAction.variables.imageUri, @@ -314,9 +349,7 @@ new codepipeline_actions.CodeBuildAction({ Example of a CodeBuild Project used in a Pipeline, alongside CodeCommit: ```ts -import * as codebuild from '@aws-cdk/aws-codebuild'; -import * as codecommit from '@aws-cdk/aws-codecommit'; - +declare const project: codebuild.PipelineProject; const repository = new codecommit.Repository(this, 'MyRepository', { repositoryName: 'MyRepository', }); @@ -356,6 +389,8 @@ if you want a `Test` Action instead, override the `type` property: ```ts +declare const project: codebuild.PipelineProject; +const sourceOutput = new codepipeline.Artifact(); const testAction = new codepipeline_actions.CodeBuildAction({ actionName: 'IntegrationTest', project, @@ -373,12 +408,14 @@ properties of the `Project` class, you need to use the `extraInputs` and Actions. 
Example: ```ts +declare const repository1: codecommit.Repository; const sourceOutput1 = new codepipeline.Artifact(); const sourceAction1 = new codepipeline_actions.CodeCommitSourceAction({ actionName: 'Source1', repository: repository1, output: sourceOutput1, }); +declare const repository2: codecommit.Repository; const sourceOutput2 = new codepipeline.Artifact('source2'); const sourceAction2 = new codepipeline_actions.CodeCommitSourceAction({ actionName: 'Source2', @@ -386,6 +423,7 @@ const sourceAction2 = new codepipeline_actions.CodeCommitSourceAction({ output: sourceOutput2, }); +declare const project: codebuild.PipelineProject; const buildAction = new codepipeline_actions.CodeBuildAction({ actionName: 'Build', project, @@ -447,6 +485,7 @@ in the 'exported-variables' subsection of the 'env' section. Example: ```ts +const sourceOutput = new codepipeline.Artifact(); const buildAction = new codepipeline_actions.CodeBuildAction({ actionName: 'Build1', input: sourceOutput, @@ -469,9 +508,11 @@ const buildAction = new codepipeline_actions.CodeBuildAction({ }); // later: - +declare const project: codebuild.PipelineProject; new codepipeline_actions.CodeBuildAction({ - // ... + actionName: 'CodeBuild', + project, + input: sourceOutput, environmentVariables: { MyVar: { value: buildAction.variable('MY_VAR'), @@ -498,7 +539,7 @@ or outside the CDK (in the CodePipeline AWS Console, for example), you can import it: ```ts -const jenkinsProvider = codepipeline_actions.JenkinsProvider.import(this, 'JenkinsProvider', { +const jenkinsProvider = codepipeline_actions.JenkinsProvider.fromJenkinsProviderAttributes(this, 'JenkinsProvider', { providerName: 'MyJenkinsProvider', serverUrl: 'http://my-jenkins.com:8080', version: '2', // optional, default: '1' @@ -514,6 +555,7 @@ With a `JenkinsProvider`, we can create a Jenkins Action: ```ts +declare const jenkinsProvider: codepipeline_actions.JenkinsProvider; const buildAction = new codepipeline_actions.JenkinsAction({ actionName: 'JenkinsBuild', jenkinsProvider: jenkinsProvider, @@ -566,8 +608,12 @@ If you want to update stacks in a different account, pass the `account` property when creating the action: ```ts +const sourceOutput = new codepipeline.Artifact(); new codepipeline_actions.CloudFormationCreateUpdateStackAction({ - // ... + actionName: 'CloudFormationCreateUpdate', + stackName: 'MyStackName', + adminPermissions: true, + templatePath: sourceOutput.atPath('template.yaml'), account: '123456789012', }); ``` @@ -584,15 +630,20 @@ and the action will operate in the same account the role belongs to: import { PhysicalName } from '@aws-cdk/core'; // in stack for account 123456789012... +declare const otherAccountStack: Stack; const actionRole = new iam.Role(otherAccountStack, 'ActionRole', { - assumedBy: new iam.AccountPrincipal(pipelineAccount), + assumedBy: new iam.AccountPrincipal('123456789012'), // the role has to have a physical name set roleName: PhysicalName.GENERATE_IF_NEEDED, }); // in the pipeline stack... +const sourceOutput = new codepipeline.Artifact(); new codepipeline_actions.CloudFormationCreateUpdateStackAction({ - // ... 
+ actionName: 'CloudFormationCreateUpdate', + stackName: 'MyStackName', + adminPermissions: true, + templatePath: sourceOutput.atPath('template.yaml'), role: actionRole, // this action will be cross-account as well }); ``` @@ -604,14 +655,13 @@ new codepipeline_actions.CloudFormationCreateUpdateStackAction({ To use CodeDeploy for EC2/on-premise deployments in a Pipeline: ```ts -import * as codedeploy from '@aws-cdk/aws-codedeploy'; - const pipeline = new codepipeline.Pipeline(this, 'MyPipeline', { pipelineName: 'MyPipeline', }); // add the source and build Stages to the Pipeline... - +const buildOutput = new codepipeline.Artifact(); +declare const deploymentGroup: codedeploy.ServerDeploymentGroup; const deployAction = new codepipeline_actions.CodeDeployServerDeployAction({ actionName: 'CodeDeploy', input: buildOutput, @@ -629,19 +679,19 @@ To use CodeDeploy for blue-green Lambda deployments in a Pipeline: ```ts const lambdaCode = lambda.Code.fromCfnParameters(); -const func = new lambda.Function(lambdaStack, 'Lambda', { +const func = new lambda.Function(this, 'Lambda', { code: lambdaCode, handler: 'index.handler', runtime: lambda.Runtime.NODEJS_12_X, }); // used to make sure each CDK synthesis produces a different Version const version = func.addVersion('NewVersion'); -const alias = new lambda.Alias(lambdaStack, 'LambdaAlias', { +const alias = new lambda.Alias(this, 'LambdaAlias', { aliasName: 'Prod', version, }); -new codedeploy.LambdaDeploymentGroup(lambdaStack, 'DeploymentGroup', { +new codedeploy.LambdaDeploymentGroup(this, 'DeploymentGroup', { alias, deploymentConfig: codedeploy.LambdaDeploymentConfig.LINEAR_10PERCENT_EVERY_1MINUTE, }); @@ -658,6 +708,11 @@ CodePipeline can deploy an ECS service. The deploy Action receives one input Artifact which contains the [image definition file]: ```ts +import * as ecs from '@aws-cdk/aws-ecs'; + +declare const service: ecs.FargateService; +const pipeline = new codepipeline.Pipeline(this, 'MyPipeline'); +const buildOutput = new codepipeline.Artifact(); const deployStage = pipeline.addStage({ stageName: 'Deploy', actions: [ @@ -672,7 +727,7 @@ const deployStage = pipeline.addStage({ // use the `imageFile` property, // and leave out the `input` property imageFile: buildOutput.atPath('imageDef.json'), - deploymentTimeout: cdk.Duration.minutes(60), // optional, default is 60 minutes + deploymentTimeout: Duration.minutes(60), // optional, default is 60 minutes }), ], }); @@ -697,12 +752,12 @@ Here's an example: To use an S3 Bucket as a deployment target in CodePipeline: ```ts -const targetBucket = new s3.Bucket(this, 'MyBucket', {}); +const sourceOutput = new codepipeline.Artifact(); +const targetBucket = new s3.Bucket(this, 'MyBucket'); const pipeline = new codepipeline.Pipeline(this, 'MyPipeline'); const deployAction = new codepipeline_actions.S3DeployAction({ actionName: 'S3Deploy', - stage: deployStage, bucket: targetBucket, input: sourceOutput, }); @@ -720,9 +775,8 @@ and use the AWS CLI to invalidate the cache: ```ts // Create a Cloudfront Web Distribution -const distribution = new cloudfront.Distribution(this, `Distribution`, { - // ... 
-}); +import * as cloudfront from '@aws-cdk/aws-cloudfront'; +declare const distribution: cloudfront.Distribution; // Create the build project that will invalidate the cache const invalidateBuildProject = new codebuild.PipelineProject(this, `InvalidateProject`, { @@ -752,19 +806,21 @@ invalidateBuildProject.addToRolePolicy(new iam.PolicyStatement({ })); // Create the pipeline (here only the S3 deploy and Invalidate cache build) +const deployBucket = new s3.Bucket(this, 'DeployBucket'); +const deployInput = new codepipeline.Artifact(); new codepipeline.Pipeline(this, 'Pipeline', { stages: [ // ... { stageName: 'Deploy', actions: [ - new codepipelineActions.S3DeployAction({ + new codepipeline_actions.S3DeployAction({ actionName: 'S3Deploy', bucket: deployBucket, input: deployInput, runOrder: 1, }), - new codepipelineActions.CodeBuildAction({ + new codepipeline_actions.CodeBuildAction({ actionName: 'InvalidateCache', project: invalidateBuildProject, input: deployInput, @@ -782,11 +838,12 @@ You can deploy to Alexa using CodePipeline with the following Action: ```ts // Read the secrets from ParameterStore -const clientId = cdk.SecretValue.secretsManager('AlexaClientId'); -const clientSecret = cdk.SecretValue.secretsManager('AlexaClientSecret'); -const refreshToken = cdk.SecretValue.secretsManager('AlexaRefreshToken'); +const clientId = SecretValue.secretsManager('AlexaClientId'); +const clientSecret = SecretValue.secretsManager('AlexaClientSecret'); +const refreshToken = SecretValue.secretsManager('AlexaRefreshToken'); // Add deploy action +const sourceOutput = new codepipeline.Artifact(); new codepipeline_actions.AlexaSkillDeployAction({ actionName: 'DeploySkill', runOrder: 1, @@ -801,20 +858,22 @@ new codepipeline_actions.AlexaSkillDeployAction({ If you need manifest overrides you can specify them as `parameterOverridesArtifact` in the action: ```ts -import * as cloudformation from '@aws-cdk/aws-cloudformation'; - // Deploy some CFN change set and store output const executeOutput = new codepipeline.Artifact('CloudFormation'); const executeChangeSetAction = new codepipeline_actions.CloudFormationExecuteChangeSetAction({ actionName: 'ExecuteChangesTest', runOrder: 2, - stackName, - changeSetName, + stackName: 'MyStack', + changeSetName: 'MyChangeSet', outputFileName: 'overrides.json', output: executeOutput, }); // Provide CFN output as manifest overrides +const clientId = SecretValue.secretsManager('AlexaClientId'); +const clientSecret = SecretValue.secretsManager('AlexaClientSecret'); +const refreshToken = SecretValue.secretsManager('AlexaRefreshToken'); +const sourceOutput = new codepipeline.Artifact(); new codepipeline_actions.AlexaSkillDeployAction({ actionName: 'DeploySkill', runOrder: 1, @@ -832,11 +891,11 @@ new codepipeline_actions.AlexaSkillDeployAction({ You can deploy a CloudFormation template to an existing Service Catalog product with the following Action: ```ts +const cdkBuildOutput = new codepipeline.Artifact(); const serviceCatalogDeployAction = new codepipeline_actions.ServiceCatalogDeployActionBeta1({ actionName: 'ServiceCatalogDeploy', templatePath: cdkBuildOutput.atPath("Sample.template.json"), productVersionName: "Version - " + Date.now.toString, - productType: "CLOUD_FORMATION_TEMPLATE", productVersionDescription: "This is a version from the pipeline with a new description.", productId: "prod-XXXXXXXX", }); @@ -849,6 +908,10 @@ const serviceCatalogDeployAction = new codepipeline_actions.ServiceCatalogDeploy This package contains an Action that stops the Pipeline until 
someone manually clicks the approve button: ```ts +import * as sns from '@aws-cdk/aws-sns'; + +const pipeline = new codepipeline.Pipeline(this, 'MyPipeline'); +const approveStage = pipeline.addStage({ stageName: 'Approve' }); const manualApprovalAction = new codepipeline_actions.ManualApprovalAction({ actionName: 'Approve', notificationTopic: new sns.Topic(this, 'Topic'), // optional @@ -871,12 +934,14 @@ If you want to grant a principal permissions to approve the changes, you can invoke the method `grantManualApproval` passing it a `IGrantable`: ```ts +const pipeline = new codepipeline.Pipeline(this, 'MyPipeline'); +const approveStage = pipeline.addStage({ stageName: 'Approve' }); const manualApprovalAction = new codepipeline_actions.ManualApprovalAction({ actionName: 'Approve', }); approveStage.addAction(manualApprovalAction); -const role = iam.Role.fromRoleArn(this, 'Admin', Arn.format({ service: 'iam', resource: 'role', resourceName: 'Admin' }, stack)); +const role = iam.Role.fromRoleArn(this, 'Admin', Arn.format({ service: 'iam', resource: 'role', resourceName: 'Admin' }, this)); manualApprovalAction.grantManualApproval(role); ``` @@ -885,8 +950,7 @@ manualApprovalAction.grantManualApproval(role); This module contains an Action that allows you to invoke a Lambda function in a Pipeline: ```ts -import * as lambda from '@aws-cdk/aws-lambda'; - +declare const fn: lambda.Function; const pipeline = new codepipeline.Pipeline(this, 'MyPipeline'); const lambdaAction = new codepipeline_actions.LambdaInvokeAction({ actionName: 'Lambda', @@ -902,7 +966,9 @@ The Lambda Action can have up to 5 inputs, and up to 5 outputs: ```ts - +declare const fn: lambda.Function; +const sourceOutput = new codepipeline.Artifact(); +const buildOutput = new codepipeline.Artifact(); const lambdaAction = new codepipeline_actions.LambdaInvokeAction({ actionName: 'Lambda', inputs: [ @@ -913,7 +979,7 @@ const lambdaAction = new codepipeline_actions.LambdaInvokeAction({ new codepipeline.Artifact('Out1'), new codepipeline.Artifact('Out2'), ], - lambda: fn + lambda: fn, }); ``` @@ -943,8 +1009,6 @@ API with the `outputVariables` property filled with the map of variables Example: ```ts -import * as lambda from '@aws-cdk/aws-lambda'; - const lambdaInvokeAction = new codepipeline_actions.LambdaInvokeAction({ actionName: 'Lambda', lambda: new lambda.Function(this, 'Func', { @@ -968,9 +1032,12 @@ const lambdaInvokeAction = new codepipeline_actions.LambdaInvokeAction({ }); // later: - +declare const project: codebuild.PipelineProject; +const sourceOutput = new codepipeline.Artifact(); new codepipeline_actions.CodeBuildAction({ - // ... + actionName: 'CodeBuild', + project, + input: sourceOutput, environmentVariables: { MyVar: { value: lambdaInvokeAction.variable('MY_VAR'), @@ -987,14 +1054,13 @@ on how to write a Lambda function invoked from CodePipeline. 
This module contains an Action that allows you to invoke a Step Function in a Pipeline: ```ts -import * as stepfunction from '@aws-cdk/aws-stepfunctions'; - +import * as stepfunctions from '@aws-cdk/aws-stepfunctions'; const pipeline = new codepipeline.Pipeline(this, 'MyPipeline'); -const startState = new stepfunction.Pass(stack, 'StartState'); -const simpleStateMachine = new stepfunction.StateMachine(stack, 'SimpleStateMachine', { +const startState = new stepfunctions.Pass(this, 'StartState'); +const simpleStateMachine = new stepfunctions.StateMachine(this, 'SimpleStateMachine', { definition: startState, }); -const stepFunctionAction = new codepipeline_actions.StepFunctionsInvokeAction({ +const stepFunctionAction = new codepipeline_actions.StepFunctionInvokeAction({ actionName: 'Invoke', stateMachine: simpleStateMachine, stateMachineInput: codepipeline_actions.StateMachineInput.literal({ IsHelloWorldExample: true }), @@ -1009,15 +1075,15 @@ The `StateMachineInput` can be created with one of 2 static factory methods: `literal`, which takes an arbitrary map as its only argument, or `filePath`: ```ts -import * as stepfunction from '@aws-cdk/aws-stepfunctions'; +import * as stepfunctions from '@aws-cdk/aws-stepfunctions'; const pipeline = new codepipeline.Pipeline(this, 'MyPipeline'); const inputArtifact = new codepipeline.Artifact(); -const startState = new stepfunction.Pass(stack, 'StartState'); -const simpleStateMachine = new stepfunction.StateMachine(stack, 'SimpleStateMachine', { +const startState = new stepfunctions.Pass(this, 'StartState'); +const simpleStateMachine = new stepfunctions.StateMachine(this, 'SimpleStateMachine', { definition: startState, }); -const stepFunctionAction = new codepipeline_actions.StepFunctionsInvokeAction({ +const stepFunctionAction = new codepipeline_actions.StepFunctionInvokeAction({ actionName: 'Invoke', stateMachine: simpleStateMachine, stateMachineInput: codepipeline_actions.StateMachineInput.filePath(inputArtifact.atPath('assets/input.json')), diff --git a/packages/@aws-cdk/aws-codepipeline-actions/rosetta/default.ts-fixture b/packages/@aws-cdk/aws-codepipeline-actions/rosetta/default.ts-fixture new file mode 100644 index 0000000000000..aba6086a1683e --- /dev/null +++ b/packages/@aws-cdk/aws-codepipeline-actions/rosetta/default.ts-fixture @@ -0,0 +1,17 @@ +// Fixture with packages imported, but nothing else +import { Arn, Construct, Duration, SecretValue, Stack } from '@aws-cdk/core'; +import codebuild = require('@aws-cdk/aws-codebuild'); +import codedeploy = require('@aws-cdk/aws-codedeploy'); +import codepipeline = require('@aws-cdk/aws-codepipeline'); +import codepipeline_actions = require('@aws-cdk/aws-codepipeline-actions'); +import codecommit = require('@aws-cdk/aws-codecommit'); +import iam = require('@aws-cdk/aws-iam'); +import lambda = require('@aws-cdk/aws-lambda'); +import s3 = require('@aws-cdk/aws-s3'); + +class Fixture extends Stack { + constructor(scope: Construct, id: string) { + super(scope, id); + /// here + } +} diff --git a/packages/@aws-cdk/aws-codepipeline-actions/test/integ.lambda-deployed-through-codepipeline.lit.ts b/packages/@aws-cdk/aws-codepipeline-actions/test/integ.lambda-deployed-through-codepipeline.lit.ts index e631dd6b10c1b..3f9a337333915 100644 --- a/packages/@aws-cdk/aws-codepipeline-actions/test/integ.lambda-deployed-through-codepipeline.lit.ts +++ b/packages/@aws-cdk/aws-codepipeline-actions/test/integ.lambda-deployed-through-codepipeline.lit.ts @@ -124,9 +124,7 @@ pipeline.addStage({ templatePath: 
cdkBuildOutput.atPath('LambdaStack.template.yaml'), stackName: 'LambdaStackDeployedName', adminPermissions: true, - parameterOverrides: { - ...lambdaCode.assign(lambdaBuildOutput.s3Location), - }, + parameterOverrides: lambdaCode.assign(lambdaBuildOutput.s3Location), extraInputs: [ lambdaBuildOutput, ], From 72102c750bfd6564cd51c1a5d8abc79b1ba1d3ce Mon Sep 17 00:00:00 2001 From: Ayush Goyal Date: Sun, 17 Oct 2021 20:32:03 +0530 Subject: [PATCH 11/37] feat(eks): configure serviceIpv4Cidr on the cluster (#16957) Refs: 1. https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eks-cluster-kubernetesnetworkconfig.html 2. https://docs.aws.amazon.com/eks/latest/APIReference/API_KubernetesNetworkConfigRequest.html#AmazonEKS-Type-KubernetesNetworkConfigRequest-serviceIpv4Cidr Notes: 1. Currently I have not updated the integ tests since the deployed takes a lot of time and it requires inferentia service limit increase. Do you think this change needs an integ tests updating (tried it out locally and it succeeded till auto-scaling)? 2. Couldn't find a good place in the Readme to add this feature. Would really help if we could come up with a good explanation and place for the same. Closes #16541 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../@aws-cdk/aws-eks/lib/cluster-resource.ts | 2 ++ packages/@aws-cdk/aws-eks/lib/cluster.ts | 12 ++++++++++ .../@aws-cdk/aws-eks/test/cluster.test.ts | 22 +++++++++++++++++++ 3 files changed, 36 insertions(+) diff --git a/packages/@aws-cdk/aws-eks/lib/cluster-resource.ts b/packages/@aws-cdk/aws-eks/lib/cluster-resource.ts index 662f4e345a24c..88f3cd0138344 100644 --- a/packages/@aws-cdk/aws-eks/lib/cluster-resource.ts +++ b/packages/@aws-cdk/aws-eks/lib/cluster-resource.ts @@ -16,6 +16,7 @@ export interface ClusterResourceProps { readonly resourcesVpcConfig: CfnCluster.ResourcesVpcConfigProperty; readonly roleArn: string; readonly encryptionConfig?: Array; + readonly kubernetesNetworkConfig?: CfnCluster.KubernetesNetworkConfigProperty; readonly name: string; readonly version?: string; readonly endpointPrivateAccess: boolean; @@ -78,6 +79,7 @@ export class ClusterResource extends CoreConstruct { version: props.version, roleArn: props.roleArn, encryptionConfig: props.encryptionConfig, + kubernetesNetworkConfig: props.kubernetesNetworkConfig, resourcesVpcConfig: { subnetIds: (props.resourcesVpcConfig as CfnCluster.ResourcesVpcConfigProperty).subnetIds, securityGroupIds: (props.resourcesVpcConfig as CfnCluster.ResourcesVpcConfigProperty).securityGroupIds, diff --git a/packages/@aws-cdk/aws-eks/lib/cluster.ts b/packages/@aws-cdk/aws-eks/lib/cluster.ts index 2db0438537f97..76c59f740f6f2 100644 --- a/packages/@aws-cdk/aws-eks/lib/cluster.ts +++ b/packages/@aws-cdk/aws-eks/lib/cluster.ts @@ -540,6 +540,15 @@ export interface ClusterOptions extends CommonClusterOptions { * using AWS-Managed encryption keys. */ readonly secretsEncryptionKey?: kms.IKey; + + /** + * The CIDR block to assign Kubernetes service IP addresses from. 
+ * + * @default - Kubernetes assigns addresses from either the + * 10.100.0.0/16 or 172.20.0.0/16 CIDR blocks + * @see https://docs.aws.amazon.com/eks/latest/APIReference/API_KubernetesNetworkConfigRequest.html#AmazonEKS-Type-KubernetesNetworkConfigRequest-serviceIpv4Cidr + */ + readonly serviceIpv4Cidr?: string; } /** @@ -1223,6 +1232,9 @@ export class Cluster extends ClusterBase { resources: ['secrets'], }], } : {}), + kubernetesNetworkConfig: props.serviceIpv4Cidr ? { + serviceIpv4Cidr: props.serviceIpv4Cidr, + } : undefined, endpointPrivateAccess: this.endpointAccess._config.privateAccess, endpointPublicAccess: this.endpointAccess._config.publicAccess, publicAccessCidrs: this.endpointAccess._config.publicCidrs, diff --git a/packages/@aws-cdk/aws-eks/test/cluster.test.ts b/packages/@aws-cdk/aws-eks/test/cluster.test.ts index 14e2bfad0746b..295092509ffd2 100644 --- a/packages/@aws-cdk/aws-eks/test/cluster.test.ts +++ b/packages/@aws-cdk/aws-eks/test/cluster.test.ts @@ -2888,4 +2888,26 @@ describe('cluster', () => { expect(providerNestedStackTemplate?.Resources?.Handler886CB40B?.Properties?.MemorySize).toEqual(4096); }); + + test('create a cluster using custom kubernetes network config', () => { + // GIVEN + const { stack } = testFixture(); + const customCidr = '172.16.0.0/12'; + + // WHEN + new eks.Cluster(stack, 'Cluster', { + version: CLUSTER_VERSION, + serviceIpv4Cidr: customCidr, + }); + + // THEN + expect(stack).toHaveResourceLike('Custom::AWSCDK-EKS-Cluster', { + Config: { + kubernetesNetworkConfig: { + serviceIpv4Cidr: customCidr, + }, + }, + }); + + }); }); From 999e99924804d8ee2c2962fa7ed86023b3f7d590 Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Mon, 18 Oct 2021 11:10:24 +0200 Subject: [PATCH 12/37] chore(pipelines): deprecate legacy API (#17034) The presence of the legacy API for CDK Pipelines confuses people who didn't read the README or the blog post, but go from a copy/pasted example they found on the internet and then proceed to explore the API. Clearly deprecate the legacy classes to avoid confusion. 
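For reference, a minimal sketch of the modern API that the new deprecation notices point users toward (this sketch is illustrative only and is not part of this change; the repository string, branch name, and `MyApplicationStage` are placeholders):

```ts
import * as pipelines from '@aws-cdk/pipelines';

// Modern replacement for the legacy CdkPipeline / SimpleSynthAction pair,
// shown here inside a Stack (`this`), mirroring the README fixtures.
const pipeline = new pipelines.CodePipeline(this, 'Pipeline', {
  synth: new pipelines.ShellStep('Synth', {
    // 'owner/repo' and 'main' are placeholder values.
    input: pipelines.CodePipelineSource.gitHub('owner/repo', 'main'),
    commands: ['npm ci', 'npm run build', 'npx cdk synth'],
  }),
});

// MyApplicationStage stands in for a user-defined Stage subclass.
pipeline.addStage(new MyApplicationStage(this, 'Prod'));
```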
---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/cx-api/lib/features.ts | 6 +++--- .../legacy/actions/deploy-cdk-stack-action.ts | 10 ++++++++++ .../legacy/actions/publish-assets-action.ts | 4 ++++ .../legacy/actions/update-pipeline-action.ts | 4 ++++ .../@aws-cdk/pipelines/lib/legacy/pipeline.ts | 4 ++++ .../@aws-cdk/pipelines/lib/legacy/stage.ts | 18 ++++++++++++++++++ .../lib/legacy/synths/simple-synth-action.ts | 12 ++++++++++++ .../legacy/validation/shell-script-action.ts | 4 ++++ 8 files changed, 59 insertions(+), 3 deletions(-) diff --git a/packages/@aws-cdk/cx-api/lib/features.ts b/packages/@aws-cdk/cx-api/lib/features.ts index 554c41929651d..986be7f4e136b 100644 --- a/packages/@aws-cdk/cx-api/lib/features.ts +++ b/packages/@aws-cdk/cx-api/lib/features.ts @@ -175,9 +175,9 @@ export const CLOUDFRONT_DEFAULT_SECURITY_POLICY_TLS_V1_2_2021 = '@aws-cdk/aws-cl */ export const FUTURE_FLAGS: { [key: string]: any } = { [APIGATEWAY_USAGEPLANKEY_ORDERINSENSITIVE_ID]: true, - [ENABLE_STACK_NAME_DUPLICATES_CONTEXT]: 'true', - [ENABLE_DIFF_NO_FAIL_CONTEXT]: 'true', - [STACK_RELATIVE_EXPORTS_CONTEXT]: 'true', + [ENABLE_STACK_NAME_DUPLICATES_CONTEXT]: true, + [ENABLE_DIFF_NO_FAIL_CONTEXT]: true, + [STACK_RELATIVE_EXPORTS_CONTEXT]: true, [DOCKER_IGNORE_SUPPORT]: true, [SECRETS_MANAGER_PARSE_OWNED_SECRET_NAME]: true, [KMS_DEFAULT_KEY_POLICIES]: true, diff --git a/packages/@aws-cdk/pipelines/lib/legacy/actions/deploy-cdk-stack-action.ts b/packages/@aws-cdk/pipelines/lib/legacy/actions/deploy-cdk-stack-action.ts index af6b7821a308d..1fc92da472ea1 100644 --- a/packages/@aws-cdk/pipelines/lib/legacy/actions/deploy-cdk-stack-action.ts +++ b/packages/@aws-cdk/pipelines/lib/legacy/actions/deploy-cdk-stack-action.ts @@ -16,6 +16,8 @@ import { Construct as CoreConstruct } from '@aws-cdk/core'; /** * Customization options for a DeployCdkStackAction + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface DeployCdkStackActionOptions { /** @@ -68,6 +70,8 @@ export interface DeployCdkStackActionOptions { /** * Properties for a DeployCdkStackAction + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface DeployCdkStackActionProps extends DeployCdkStackActionOptions { /** @@ -129,6 +133,8 @@ export interface DeployCdkStackActionProps extends DeployCdkStackActionOptions { /** * Options for the 'fromStackArtifact' operation + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface CdkStackActionFromArtifactOptions extends DeployCdkStackActionOptions { /** @@ -147,6 +153,8 @@ export interface CdkStackActionFromArtifactOptions extends DeployCdkStackActionO * * You do not need to instantiate this action yourself -- it will automatically * be added by the pipeline when you add stack artifacts or entire stages. + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export class DeployCdkStackAction implements codepipeline.IAction { /** @@ -317,6 +325,8 @@ function roleFromPlaceholderArn(scope: Construct, region: string | undefined, /** * Options for CdkDeployAction.fromStackArtifact + * + * @deprecated This class is part of the old API. 
Use the API based on the `CodePipeline` class instead */ export interface FromStackArtifactOptions { /** diff --git a/packages/@aws-cdk/pipelines/lib/legacy/actions/publish-assets-action.ts b/packages/@aws-cdk/pipelines/lib/legacy/actions/publish-assets-action.ts index 1c112aec9afbe..055744cb971c2 100644 --- a/packages/@aws-cdk/pipelines/lib/legacy/actions/publish-assets-action.ts +++ b/packages/@aws-cdk/pipelines/lib/legacy/actions/publish-assets-action.ts @@ -17,6 +17,8 @@ import { Construct as CoreConstruct } from '@aws-cdk/core'; /** * Props for a PublishAssetsAction + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface PublishAssetsActionProps { /** @@ -111,6 +113,8 @@ export interface PublishAssetsActionProps { * * You do not need to instantiate this action -- it will automatically * be added by the pipeline when you add stacks that use assets. + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export class PublishAssetsAction extends CoreConstruct implements codepipeline.IAction { private readonly action: codepipeline.IAction; diff --git a/packages/@aws-cdk/pipelines/lib/legacy/actions/update-pipeline-action.ts b/packages/@aws-cdk/pipelines/lib/legacy/actions/update-pipeline-action.ts index bc50e80c0aa56..da4278dd6faf2 100644 --- a/packages/@aws-cdk/pipelines/lib/legacy/actions/update-pipeline-action.ts +++ b/packages/@aws-cdk/pipelines/lib/legacy/actions/update-pipeline-action.ts @@ -14,6 +14,8 @@ import { Construct as CoreConstruct } from '@aws-cdk/core'; /** * Props for the UpdatePipelineAction + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface UpdatePipelineActionProps { /** @@ -79,6 +81,8 @@ export interface UpdatePipelineActionProps { * * You do not need to instantiate this action -- it will automatically * be added by the pipeline. + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export class UpdatePipelineAction extends CoreConstruct implements codepipeline.IAction { private readonly action: codepipeline.IAction; diff --git a/packages/@aws-cdk/pipelines/lib/legacy/pipeline.ts b/packages/@aws-cdk/pipelines/lib/legacy/pipeline.ts index 60f158f298fce..f4910d1916771 100644 --- a/packages/@aws-cdk/pipelines/lib/legacy/pipeline.ts +++ b/packages/@aws-cdk/pipelines/lib/legacy/pipeline.ts @@ -21,6 +21,8 @@ import { Construct as CoreConstruct } from '@aws-cdk/core'; const CODE_BUILD_LENGTH_LIMIT = 100; /** * Properties for a CdkPipeline + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface CdkPipelineProps { /** @@ -206,6 +208,8 @@ export interface CdkPipelineProps { * - Asset publishing. * - Keeping the pipeline up-to-date as the CDK apps change. * - Using stack outputs later on in the pipeline. + * + * @deprecated This class is part of the old API. 
Use the API based on the `CodePipeline` class instead */ export class CdkPipeline extends CoreConstruct { private readonly _pipeline: codepipeline.Pipeline; diff --git a/packages/@aws-cdk/pipelines/lib/legacy/stage.ts b/packages/@aws-cdk/pipelines/lib/legacy/stage.ts index bfb997e908196..c054e9a0592fb 100644 --- a/packages/@aws-cdk/pipelines/lib/legacy/stage.ts +++ b/packages/@aws-cdk/pipelines/lib/legacy/stage.ts @@ -20,6 +20,8 @@ import { Construct as CoreConstruct } from '@aws-cdk/core'; /** * Construction properties for a CdkStage + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface CdkStageProps { /** @@ -70,6 +72,8 @@ export interface CdkStageProps { * * You don't need to instantiate this class directly. Use * `cdkPipeline.addStage()` instead. + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export class CdkStage extends CoreConstruct { private _nextSequentialRunOrder = 1; // Must start at 1 eh @@ -396,6 +400,8 @@ export class CdkStage extends CoreConstruct { /** * Additional options for adding a stack deployment + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface AddStackOptions { /** @@ -415,6 +421,8 @@ export interface AddStackOptions { /** * A single output of a Stack + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export class StackOutput { /** @@ -448,6 +456,8 @@ function isAssetManifest(s: cxapi.CloudArtifact): s is cxapi.AssetManifestArtifa /** * Features that the Stage needs from its environment + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface IStageHost { /** @@ -463,6 +473,8 @@ export interface IStageHost { /** * Instructions to publish certain assets + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface AssetPublishingCommand { /** @@ -493,6 +505,8 @@ export interface AssetPublishingCommand { /** * Base options for a pipelines stage + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface BaseStageOptions { /** @@ -522,6 +536,8 @@ export interface BaseStageOptions { /** * Options for adding an application stage to a pipeline + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface AddStageOptions extends BaseStageOptions { /** @@ -546,6 +562,8 @@ export interface AddStageOptions extends BaseStageOptions { /** * Options for addManualApproval + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface AddManualApprovalOptions { /** diff --git a/packages/@aws-cdk/pipelines/lib/legacy/synths/simple-synth-action.ts b/packages/@aws-cdk/pipelines/lib/legacy/synths/simple-synth-action.ts index 1fd0b96bb2db6..226a75d2ed23c 100644 --- a/packages/@aws-cdk/pipelines/lib/legacy/synths/simple-synth-action.ts +++ b/packages/@aws-cdk/pipelines/lib/legacy/synths/simple-synth-action.ts @@ -19,6 +19,8 @@ import { Construct } from '@aws-cdk/core'; /** * Configuration options for a SimpleSynth + * + * @deprecated This class is part of the old API. 
Use the API based on the `CodePipeline` class instead */ export interface SimpleSynthOptions { /** @@ -128,6 +130,8 @@ export interface SimpleSynthOptions { /** * Construction props for SimpleSynthAction + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface SimpleSynthActionProps extends SimpleSynthOptions { /** @@ -192,6 +196,8 @@ export interface SimpleSynthActionProps extends SimpleSynthOptions { /** * Specification of an additional artifact to generate + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface AdditionalArtifact { /** @@ -207,6 +213,8 @@ export interface AdditionalArtifact { /** * A standard synth with a generated buildspec + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export class SimpleSynthAction implements codepipeline.IAction, iam.IGrantable { @@ -479,6 +487,8 @@ export class SimpleSynthAction implements codepipeline.IAction, iam.IGrantable { /** * Options for a convention-based synth using NPM + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface StandardNpmSynthOptions extends SimpleSynthOptions { /** @@ -520,6 +530,8 @@ export interface StandardNpmSynthOptions extends SimpleSynthOptions { /** * Options for a convention-based synth using Yarn + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface StandardYarnSynthOptions extends SimpleSynthOptions { /** diff --git a/packages/@aws-cdk/pipelines/lib/legacy/validation/shell-script-action.ts b/packages/@aws-cdk/pipelines/lib/legacy/validation/shell-script-action.ts index 436f099d3c67d..78f223919cafd 100644 --- a/packages/@aws-cdk/pipelines/lib/legacy/validation/shell-script-action.ts +++ b/packages/@aws-cdk/pipelines/lib/legacy/validation/shell-script-action.ts @@ -13,6 +13,8 @@ import { Construct } from '@aws-cdk/core'; /** * Properties for ShellScriptAction + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export interface ShellScriptActionProps { /** @@ -122,6 +124,8 @@ export interface ShellScriptActionProps { /** * Validate a revision using shell commands + * + * @deprecated This class is part of the old API. Use the API based on the `CodePipeline` class instead */ export class ShellScriptAction implements codepipeline.IAction, iam.IGrantable { private _project?: codebuild.IProject; From 69ac520452b219bf242f2fbb4740f6b1b8b8790f Mon Sep 17 00:00:00 2001 From: TakaakiFuruse Date: Mon, 18 Oct 2021 23:44:39 +0900 Subject: [PATCH 13/37] feat(stepfunctions-tasks): add `enableNetworkIsolation` property to `SageMakerCreateTrainingJobProps` (#16792) Closes #16779. This PR adds an option of `EnableNetworkIsolation` to `SageMakerCreateTrainingJob` class and enables a user to use AWS Marketplace. [Doc](https://docs.aws.amazon.com/sagemaker/latest/dg/mkt-algo-model-internet-free.html) says a user must turn the option to True when they attempt to use a machine learning model from AWS Marketplace. 
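For illustration, a minimal sketch of a training job task with the new property enabled (the job name, algorithm name, and bucket prefixes below are placeholders, not part of this change):

```ts
import * as s3 from '@aws-cdk/aws-s3';
import * as tasks from '@aws-cdk/aws-stepfunctions-tasks';
import { Stack } from '@aws-cdk/core';

declare const stack: Stack;
declare const bucket: s3.IBucket;

// Network isolation is required when the algorithm or model comes from AWS Marketplace.
new tasks.SageMakerCreateTrainingJob(stack, 'TrainTask', {
  trainingJobName: 'my-training-job', // placeholder
  enableNetworkIsolation: true,
  algorithmSpecification: {
    algorithmName: 'my-marketplace-algorithm', // placeholder
  },
  inputDataConfig: [{
    channelName: 'train',
    dataSource: {
      s3DataSource: {
        s3Location: tasks.S3Location.fromBucket(bucket, 'data/'),
      },
    },
  }],
  outputDataConfig: {
    s3OutputLocation: tasks.S3Location.fromBucket(bucket, 'output/'),
  },
});
```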
*By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/aws-stepfunctions-tasks/README.md | 4 ++++ .../lib/sagemaker/create-training-job.ts | 8 ++++++++ .../test/sagemaker/create-training-job.test.ts | 2 ++ 3 files changed, 14 insertions(+) diff --git a/packages/@aws-cdk/aws-stepfunctions-tasks/README.md b/packages/@aws-cdk/aws-stepfunctions-tasks/README.md index d33d3c7ff801e..a0f47ee7519c1 100644 --- a/packages/@aws-cdk/aws-stepfunctions-tasks/README.md +++ b/packages/@aws-cdk/aws-stepfunctions-tasks/README.md @@ -969,6 +969,10 @@ disable this behavior. Step Functions supports [AWS SageMaker](https://docs.aws.amazon.com/step-functions/latest/dg/connect-sagemaker.html) through the service integration pattern. +If your training job or model uses resources from AWS Marketplace, +[network isolation is required](https://docs.aws.amazon.com/sagemaker/latest/dg/mkt-algo-model-internet-free.html). +To do so, set the `enableNetworkIsolation` property to `true` for `SageMakerCreateModel` or `SageMakerCreateTrainingJob`. + ### Create Training Job You can call the [`CreateTrainingJob`](https://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateTrainingJob.html) API from a `Task` state. diff --git a/packages/@aws-cdk/aws-stepfunctions-tasks/lib/sagemaker/create-training-job.ts b/packages/@aws-cdk/aws-stepfunctions-tasks/lib/sagemaker/create-training-job.ts index 0aa9954e859d5..64680dc357747 100644 --- a/packages/@aws-cdk/aws-stepfunctions-tasks/lib/sagemaker/create-training-job.ts +++ b/packages/@aws-cdk/aws-stepfunctions-tasks/lib/sagemaker/create-training-job.ts @@ -32,6 +32,13 @@ export interface SageMakerCreateTrainingJobProps extends sfn.TaskStateBaseProps */ readonly algorithmSpecification: AlgorithmSpecification; + /** + * Isolates the training container. No inbound or outbound network calls can be made to or from the training container. + * + * @default false + */ + readonly enableNetworkIsolation?: boolean; + /** * Algorithm-specific parameters that influence the quality of the model. Set hyperparameters before you start the learning process. 
* For a list of hyperparameters provided by Amazon SageMaker @@ -217,6 +224,7 @@ export class SageMakerCreateTrainingJob extends sfn.TaskStateBase implements iam private renderParameters(): { [key: string]: any } { return { TrainingJobName: this.props.trainingJobName, + EnableNetworkIsolation: this.props.enableNetworkIsolation, RoleArn: this._role!.roleArn, ...this.renderAlgorithmSpecification(this.algorithmSpecification), ...this.renderInputDataConfig(this.inputDataConfig), diff --git a/packages/@aws-cdk/aws-stepfunctions-tasks/test/sagemaker/create-training-job.test.ts b/packages/@aws-cdk/aws-stepfunctions-tasks/test/sagemaker/create-training-job.test.ts index 59f5a1bb36229..e16d20f94cfeb 100644 --- a/packages/@aws-cdk/aws-stepfunctions-tasks/test/sagemaker/create-training-job.test.ts +++ b/packages/@aws-cdk/aws-stepfunctions-tasks/test/sagemaker/create-training-job.test.ts @@ -142,6 +142,7 @@ test('create complex training job', () => { }, ], }, + enableNetworkIsolation: true, hyperparameters: { lr: '0.1', }, @@ -221,6 +222,7 @@ test('create complex training job', () => { { Name: 'mymetric', Regex: 'regex_pattern' }, ], }, + EnableNetworkIsolation: true, HyperParameters: { lr: '0.1', }, From 30f45847b21e57011f3b1dddd713d16d5f4aad30 Mon Sep 17 00:00:00 2001 From: Grzegorz Kozub Date: Mon, 18 Oct 2021 18:25:51 +0200 Subject: [PATCH 14/37] docs(pipelines): fix typo in `CdkPipelineProps` (#16920) Removed duplicate word in documentation. ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/pipelines/lib/legacy/pipeline.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/@aws-cdk/pipelines/lib/legacy/pipeline.ts b/packages/@aws-cdk/pipelines/lib/legacy/pipeline.ts index f4910d1916771..ef0d1d209619b 100644 --- a/packages/@aws-cdk/pipelines/lib/legacy/pipeline.ts +++ b/packages/@aws-cdk/pipelines/lib/legacy/pipeline.ts @@ -51,7 +51,7 @@ export interface CdkPipelineProps { * You can choose to not pass this value, in which case a new CodePipeline is * created with default settings. * - * If you pass an existing CodePipeline, it should should have been created + * If you pass an existing CodePipeline, it should have been created * with `restartExecutionOnUpdate: true`. * * [disable-awslint:ref-via-interface] From d0a27c15d66c00aef9288d514498d68e8f0d886a Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Mon, 18 Oct 2021 19:26:10 +0200 Subject: [PATCH 15/37] chore(v2): minimum supported node version is now v14 (#17036) This is the current active LTS version and is a good minimum requirement. Re-introduction of #11964. Fixes #16438 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .yarnrc | 1 + tools/@aws-cdk/pkglint/lib/rules.ts | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.yarnrc b/.yarnrc index ac05beb3696fd..019f345f39305 100644 --- a/.yarnrc +++ b/.yarnrc @@ -7,3 +7,4 @@ # be forced to registry.yarnpkg.com. 
# https://github.com/npm/cli/issues/3783 registry "https://registry.npmjs.org" +ignore-engines true # the 'engines' key for 'aws-cdk-lib' has specifies node14 as min while v1 will remain at node10 diff --git a/tools/@aws-cdk/pkglint/lib/rules.ts b/tools/@aws-cdk/pkglint/lib/rules.ts index f991f7b0a2441..e18fcb1086fdb 100644 --- a/tools/@aws-cdk/pkglint/lib/rules.ts +++ b/tools/@aws-cdk/pkglint/lib/rules.ts @@ -1129,7 +1129,11 @@ export class MustHaveNodeEnginesDeclaration extends ValidationRule { public readonly name = 'package-info/engines'; public validate(pkg: PackageJson): void { - expectJSON(this.name, pkg, 'engines.node', '>= 10.13.0 <13 || >=13.7.0'); + if (cdkMajorVersion() === 2) { + expectJSON(this.name, pkg, 'engines.node', '>= 14.15.0'); + } else { + expectJSON(this.name, pkg, 'engines.node', '>= 10.13.0 <13 || >=13.7.0'); + } } } From 83cf9b8770880ba5ec448bb7389606025543a692 Mon Sep 17 00:00:00 2001 From: Tom Keller <1083460+kellertk@users.noreply.github.com> Date: Mon, 18 Oct 2021 16:49:43 -0700 Subject: [PATCH 16/37] chore: minor GitHub action changes (#17022) * chore: run issue cleanup action more frequently This changes the issue cleanup action to run 6 times per day (once every four hours) insted of only once per day. * chore: create action documentation Documents each of the GitHub actions in the repo and their owners --- .github/workflows/README.md | 47 ++++++++++++++++++++++++ .github/workflows/close-stale-issues.yml | 2 +- 2 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/README.md diff --git a/.github/workflows/README.md b/.github/workflows/README.md new file mode 100644 index 0000000000000..9d54ed7236ef7 --- /dev/null +++ b/.github/workflows/README.md @@ -0,0 +1,47 @@ +# AWS CDK GitHub Actions + +These workflows and actions are configured in the AWS CDK GitHub repository. + +## Pull Request Triggered + +### Auto Approve +[auto-approve.yml](auto-approve.yml): Approves merging PRs with the +`pr/auto-approve` label. +Owner: Core CDK team + +### PR Linter +[pr-linter.yml](pr-linter.yml): Runs `tools/@aws-cdk-prlint` on each PR to +check for correctness. +Owner: Core CDK team + +### v2-main PR automation +[v2-pull-request.yml](v2-pull-request.yml): Runs `pkglint` on merge forward PRs +and commits the results. +Owner: Core CDK team + +### Label Assigner +[issue-label-assign.yml](issue-label-assign.yml): Github action for automatically adding labels and/or setting assignees when an Issue or PR is opened or edited based on user-defined Area +Owner: CDK support team + +## Issue Triggered + +### Closed Issue Message +[closed-issue-message.yml](closed-issue-message.yml): Adds a reminder message +to issues that are closed. +Owner: CDK support team + +### Label Assigner +[issue-label-assign.yml](issue-label-assign.yml): Github action for automatically adding labels and/or setting assignees when an Issue or PR is opened or edited based on user-defined Area +Owner: CDK support team + +## Scheduled Actions + +### Issue Lifecycle Handling +[close-stale-issues.yml](close-stale-issues.yml): Handles labeling issues and +PRs with `closing-soon`, `response-requested`, etc. +Owner: CDK support team + +### Yarn Upgrader +[yarn-upgrade.yml](yarn-upgrade.yml): Upgrades yarn dependencies and creates a +patch file for downloading. 
+Owner: Core CDK team \ No newline at end of file diff --git a/.github/workflows/close-stale-issues.yml b/.github/workflows/close-stale-issues.yml index 487a095e0c372..ee427df90ef79 100644 --- a/.github/workflows/close-stale-issues.yml +++ b/.github/workflows/close-stale-issues.yml @@ -4,7 +4,7 @@ name: "Close Stale Issues" on: workflow_dispatch: schedule: - - cron: "0 6 * * *" + - cron: "0 */4 * * *" jobs: cleanup: From ef7e20df08b4321f210bfc050afa42d7b4901931 Mon Sep 17 00:00:00 2001 From: Jericho Tolentino <68654047+jericht@users.noreply.github.com> Date: Tue, 19 Oct 2021 04:07:27 -0500 Subject: [PATCH 17/37] feat(aws-autoscaling): add flag and aspect to require imdsv2 (#16052) Partially fixes: https://github.com/aws/aws-cdk/issues/5137 Related PR: https://github.com/aws/aws-cdk/pull/16051 **Note:** I have some concerns about duplicated code between this and the above linked PR. Please see that PR for more details. ### Changes Adds an aspect that can enable/disable IMDSv1 on AutoScalingGroups ### Testing Added unit tests ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/aws-autoscaling/README.md | 26 ++++++ .../aws-autoscaling/lib/aspects/index.ts | 1 + .../lib/aspects/require-imdsv2-aspect.ts | 38 +++++++++ .../aws-autoscaling/lib/auto-scaling-group.ts | 13 +++ .../@aws-cdk/aws-autoscaling/lib/index.ts | 1 + .../aspects/require-imdsv2-aspect.test.ts | 79 +++++++++++++++++++ .../test/auto-scaling-group.test.ts | 21 +++++ 7 files changed, 179 insertions(+) create mode 100644 packages/@aws-cdk/aws-autoscaling/lib/aspects/index.ts create mode 100644 packages/@aws-cdk/aws-autoscaling/lib/aspects/require-imdsv2-aspect.ts create mode 100644 packages/@aws-cdk/aws-autoscaling/test/aspects/require-imdsv2-aspect.test.ts diff --git a/packages/@aws-cdk/aws-autoscaling/README.md b/packages/@aws-cdk/aws-autoscaling/README.md index 67e55eee91a9f..75aa4f66807e2 100644 --- a/packages/@aws-cdk/aws-autoscaling/README.md +++ b/packages/@aws-cdk/aws-autoscaling/README.md @@ -378,6 +378,32 @@ new autoscaling.AutoScalingGroup(stack, 'ASG', { }); ``` +## Configuring Instance Metadata Service (IMDS) + +### Toggling IMDSv1 + +You can configure [EC2 Instance Metadata Service](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html) options to either +allow both IMDSv1 and IMDSv2 or enforce IMDSv2 when interacting with the IMDS. + +To do this for a single `AutoScalingGroup`, you can use set the `requireImdsv2` property. +The example below demonstrates IMDSv2 being required on a single `AutoScalingGroup`: + +```ts +new autoscaling.AutoScalingGroup(stack, 'ASG', { + requireImdsv2: true, + // ... +}); +``` + +You can also use `AutoScalingGroupRequireImdsv2Aspect` to apply the operation to multiple AutoScalingGroups. 
+The example below demonstrates the `AutoScalingGroupRequireImdsv2Aspect` being used to require IMDSv2 for all AutoScalingGroups in a stack: + +```ts +const aspect = new autoscaling.AutoScalingGroupRequireImdsv2Aspect(); + +Aspects.of(stack).add(aspect); +``` + ## Future work * [ ] CloudWatch Events (impossible to add currently as the AutoScalingGroup ARN is diff --git a/packages/@aws-cdk/aws-autoscaling/lib/aspects/index.ts b/packages/@aws-cdk/aws-autoscaling/lib/aspects/index.ts new file mode 100644 index 0000000000000..31fc534776144 --- /dev/null +++ b/packages/@aws-cdk/aws-autoscaling/lib/aspects/index.ts @@ -0,0 +1 @@ +export * from './require-imdsv2-aspect'; \ No newline at end of file diff --git a/packages/@aws-cdk/aws-autoscaling/lib/aspects/require-imdsv2-aspect.ts b/packages/@aws-cdk/aws-autoscaling/lib/aspects/require-imdsv2-aspect.ts new file mode 100644 index 0000000000000..e399dce585d79 --- /dev/null +++ b/packages/@aws-cdk/aws-autoscaling/lib/aspects/require-imdsv2-aspect.ts @@ -0,0 +1,38 @@ +import * as cdk from '@aws-cdk/core'; +import { AutoScalingGroup } from '../auto-scaling-group'; +import { CfnLaunchConfiguration } from '../autoscaling.generated'; + +/** + * Aspect that makes IMDSv2 required on instances deployed by AutoScalingGroups. + */ +export class AutoScalingGroupRequireImdsv2Aspect implements cdk.IAspect { + constructor() { + } + + public visit(node: cdk.IConstruct): void { + if (!(node instanceof AutoScalingGroup)) { + return; + } + + const launchConfig = node.node.tryFindChild('LaunchConfig') as CfnLaunchConfiguration; + if (cdk.isResolvableObject(launchConfig.metadataOptions)) { + this.warn(node, 'CfnLaunchConfiguration.MetadataOptions field is a CDK token.'); + return; + } + + launchConfig.metadataOptions = { + ...launchConfig.metadataOptions, + httpTokens: 'required', + }; + } + + /** + * Adds a warning annotation to a node. + * + * @param node The scope to add the warning to. + * @param message The warning message. + */ + protected warn(node: cdk.IConstruct, message: string) { + cdk.Annotations.of(node).addWarning(`${AutoScalingGroupRequireImdsv2Aspect.name} failed on node ${node.node.id}: ${message}`); + } +} diff --git a/packages/@aws-cdk/aws-autoscaling/lib/auto-scaling-group.ts b/packages/@aws-cdk/aws-autoscaling/lib/auto-scaling-group.ts index 027034249c4dd..45fd06c478dfc 100644 --- a/packages/@aws-cdk/aws-autoscaling/lib/auto-scaling-group.ts +++ b/packages/@aws-cdk/aws-autoscaling/lib/auto-scaling-group.ts @@ -7,6 +7,7 @@ import * as sns from '@aws-cdk/aws-sns'; import { Annotations, + Aspects, Aws, CfnAutoScalingRollingUpdate, CfnCreationPolicy, CfnUpdatePolicy, Duration, Fn, IResource, Lazy, PhysicalName, Resource, Stack, Tags, @@ -14,6 +15,7 @@ import { Tokenization, withResolved, } from '@aws-cdk/core'; import { Construct } from 'constructs'; +import { AutoScalingGroupRequireImdsv2Aspect } from './aspects'; import { CfnAutoScalingGroup, CfnAutoScalingGroupProps, CfnLaunchConfiguration } from './autoscaling.generated'; import { BasicLifecycleHookProps, LifecycleHook } from './lifecycle-hook'; import { BasicScheduledActionProps, ScheduledAction } from './scheduled-action'; @@ -384,6 +386,13 @@ export interface AutoScalingGroupProps extends CommonAutoScalingGroupProps { * @default - default options */ readonly initOptions?: ApplyCloudFormationInitOptions; + + /** + * Whether IMDSv2 should be required on launched instances. 
+ * + * @default - false + */ + readonly requireImdsv2?: boolean; } /** @@ -1065,6 +1074,10 @@ export class AutoScalingGroup extends AutoScalingGroupBase implements } this.spotPrice = props.spotPrice; + + if (props.requireImdsv2) { + Aspects.of(this).add(new AutoScalingGroupRequireImdsv2Aspect()); + } } /** diff --git a/packages/@aws-cdk/aws-autoscaling/lib/index.ts b/packages/@aws-cdk/aws-autoscaling/lib/index.ts index 69fede92e300b..186d1a3058fae 100644 --- a/packages/@aws-cdk/aws-autoscaling/lib/index.ts +++ b/packages/@aws-cdk/aws-autoscaling/lib/index.ts @@ -1,3 +1,4 @@ +export * from './aspects'; export * from './auto-scaling-group'; export * from './schedule'; export * from './lifecycle-hook'; diff --git a/packages/@aws-cdk/aws-autoscaling/test/aspects/require-imdsv2-aspect.test.ts b/packages/@aws-cdk/aws-autoscaling/test/aspects/require-imdsv2-aspect.test.ts new file mode 100644 index 0000000000000..22a58f097a98b --- /dev/null +++ b/packages/@aws-cdk/aws-autoscaling/test/aspects/require-imdsv2-aspect.test.ts @@ -0,0 +1,79 @@ +import { + expect as expectCDK, + haveResourceLike, +} from '@aws-cdk/assert-internal'; +import '@aws-cdk/assert-internal/jest'; +import * as ec2 from '@aws-cdk/aws-ec2'; +import * as cdk from '@aws-cdk/core'; +import { + AutoScalingGroup, + AutoScalingGroupRequireImdsv2Aspect, + CfnLaunchConfiguration, +} from '../../lib'; + +describe('AutoScalingGroupRequireImdsv2Aspect', () => { + let app: cdk.App; + let stack: cdk.Stack; + let vpc: ec2.Vpc; + + beforeEach(() => { + app = new cdk.App(); + stack = new cdk.Stack(app, 'Stack'); + vpc = new ec2.Vpc(stack, 'Vpc'); + }); + + test('warns when metadataOptions is a token', () => { + // GIVEN + const asg = new AutoScalingGroup(stack, 'AutoScalingGroup', { + vpc, + instanceType: new ec2.InstanceType('t2.micro'), + machineImage: ec2.MachineImage.latestAmazonLinux(), + }); + const launchConfig = asg.node.tryFindChild('LaunchConfig') as CfnLaunchConfiguration; + launchConfig.metadataOptions = fakeToken(); + const aspect = new AutoScalingGroupRequireImdsv2Aspect(); + + // WHEN + cdk.Aspects.of(stack).add(aspect); + + // THEN + expectCDK(stack).notTo(haveResourceLike('AWS::AutoScaling::LaunchConfiguration', { + MetadataOptions: { + HttpTokens: 'required', + }, + })); + expect(asg.node.metadataEntry).toContainEqual({ + data: expect.stringContaining('CfnLaunchConfiguration.MetadataOptions field is a CDK token.'), + type: 'aws:cdk:warning', + trace: undefined, + }); + }); + + test('requires IMDSv2', () => { + // GIVEN + new AutoScalingGroup(stack, 'AutoScalingGroup', { + vpc, + instanceType: new ec2.InstanceType('t2.micro'), + machineImage: ec2.MachineImage.latestAmazonLinux(), + }); + const aspect = new AutoScalingGroupRequireImdsv2Aspect(); + + // WHEN + cdk.Aspects.of(stack).add(aspect); + + // THEN + expectCDK(stack).to(haveResourceLike('AWS::AutoScaling::LaunchConfiguration', { + MetadataOptions: { + HttpTokens: 'required', + }, + })); + }); +}); + +function fakeToken(): cdk.IResolvable { + return { + creationStack: [], + resolve: (_c) => {}, + toString: () => '', + }; +} diff --git a/packages/@aws-cdk/aws-autoscaling/test/auto-scaling-group.test.ts b/packages/@aws-cdk/aws-autoscaling/test/auto-scaling-group.test.ts index 64795593e8ec4..d74860638fd30 100644 --- a/packages/@aws-cdk/aws-autoscaling/test/auto-scaling-group.test.ts +++ b/packages/@aws-cdk/aws-autoscaling/test/auto-scaling-group.test.ts @@ -1364,6 +1364,27 @@ describe('auto scaling group', () => { }); + + test('requires imdsv2', () => { + // GIVEN + const 
stack = new cdk.Stack(); + const vpc = mockVpc(stack); + + // WHEN + new autoscaling.AutoScalingGroup(stack, 'MyASG', { + vpc, + instanceType: new ec2.InstanceType('t2.micro'), + machineImage: ec2.MachineImage.latestAmazonLinux(), + requireImdsv2: true, + }); + + // THEN + expect(stack).toHaveResourceLike('AWS::AutoScaling::LaunchConfiguration', { + MetadataOptions: { + HttpTokens: 'required', + }, + }); + }); }); function mockVpc(stack: cdk.Stack) { From 86f2714613f06aaf2bcee27da2f66066c8e863d0 Mon Sep 17 00:00:00 2001 From: Adam Ruka Date: Tue, 19 Oct 2021 03:01:47 -0700 Subject: [PATCH 18/37] fix(cfn-diff): correctly handle Date strings in diff (#16591) Turns out, `parseFloat()` in JavaScript is even crazier than we thought, and returns nonsense like `2021` for a string containing a Date like `'2021-10-25'`. For that reason, add an explicit check that the string parsed looks like a number before calling `parseFloat()`. Fixes #16444 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../cloudformation-diff/lib/diff-template.ts | 8 ++- .../cloudformation-diff/lib/diff/util.ts | 18 ++----- .../test/diff-template.test.ts | 51 +++++++------------ 3 files changed, 27 insertions(+), 50 deletions(-) diff --git a/packages/@aws-cdk/cloudformation-diff/lib/diff-template.ts b/packages/@aws-cdk/cloudformation-diff/lib/diff-template.ts index b3f282802b675..6e06e56f90af1 100644 --- a/packages/@aws-cdk/cloudformation-diff/lib/diff-template.ts +++ b/packages/@aws-cdk/cloudformation-diff/lib/diff-template.ts @@ -99,13 +99,17 @@ function calculateTemplateDiff(currentTemplate: { [key: string]: any }, newTempl for (const key of unionOf(Object.keys(currentTemplate), Object.keys(newTemplate)).sort()) { const oldValue = currentTemplate[key]; const newValue = newTemplate[key]; - if (deepEqual(oldValue, newValue)) { continue; } + if (deepEqual(oldValue, newValue)) { + continue; + } const handler: DiffHandler = DIFF_HANDLERS[key] || ((_diff, oldV, newV) => unknown[key] = impl.diffUnknown(oldV, newV)); handler(differences, oldValue, newValue); } - if (Object.keys(unknown).length > 0) { differences.unknown = new types.DifferenceCollection(unknown); } + if (Object.keys(unknown).length > 0) { + differences.unknown = new types.DifferenceCollection(unknown); + } return new types.TemplateDiff(differences); } diff --git a/packages/@aws-cdk/cloudformation-diff/lib/diff/util.ts b/packages/@aws-cdk/cloudformation-diff/lib/diff/util.ts index 59c8606be0a35..1cbd4b1a111d7 100644 --- a/packages/@aws-cdk/cloudformation-diff/lib/diff/util.ts +++ b/packages/@aws-cdk/cloudformation-diff/lib/diff/util.ts @@ -138,20 +138,10 @@ export function unionOf(lv: string[] | Set, rv: string[] | Set): * A parseFloat implementation that does the right thing for * strings like '0.0.0' * (for which JavaScript's parseFloat() returns 0). + * We return NaN for all of these strings that do not represent numbers, + * and so comparing them fails, + * and doesn't short-circuit the diff logic. */ function safeParseFloat(str: string): number { - const ret = parseFloat(str); - const nonNumericRegex = /\d*\.\d+\./; - if (ret === 0) { - // if the str is exactly '0', that's OK; - // but parseFloat() also returns 0 for things like '0.0'; - // in this case, return NaN, so we'll fall back to string comparison - return str === '0' ? 
ret : NaN; - } else if (nonNumericRegex.test(str)) { - // if the str contains non-numeric characters, - // return NaN, so we'll fall back to string comparison - return NaN; - } else { - return ret; - } + return Number(str); } diff --git a/packages/@aws-cdk/cloudformation-diff/test/diff-template.test.ts b/packages/@aws-cdk/cloudformation-diff/test/diff-template.test.ts index 9241d0e8e28eb..d43ec99808d31 100644 --- a/packages/@aws-cdk/cloudformation-diff/test/diff-template.test.ts +++ b/packages/@aws-cdk/cloudformation-diff/test/diff-template.test.ts @@ -582,70 +582,57 @@ test('when a property changes including equivalent DependsOn', () => { expect(differences.resources.differenceCount).toBe(1); }); -test('when a property with a number-like format changes', () => { - const bucketName = 'ShineyBucketName'; - const tagChanges = { - '0.31.1-prod': '0.31.2-prod', - '8.0.5.5.4-identifier': '8.0.5.5.5-identifier', - '1.1.1.1': '1.1.2.2', - '1.2.3': '1.2.4', - '2.2.2.2': '2.2.3.2', - '3.3.3.3': '3.4.3.3', - }; - const oldTags = Object.keys(tagChanges); - const newTags = Object.values(tagChanges); +test.each([ + ['0.31.1-prod', '0.31.2-prod'], + ['8.0.5.5.4-identifier', '8.0.5.5.5-identifier'], + ['1.1.1.1', '1.1.1.2'], + ['1.2.3', '1.2.4'], + ['2.2.2.2', '2.2.3.2'], + ['3.3.3.3', '3.4.3.3'], + ['2021-10-23T06:07:08.000Z', '2021-10-23T09:10:11.123Z'], +])("reports a change when a string property with a number-like format changes from '%s' to '%s'", (oldValue, newValue) => { + // GIVEN const currentTemplate = { Resources: { - QueueResource: { - Type: 'AWS::SQS::Queue', - }, BucketResource: { Type: 'AWS::S3::Bucket', Properties: { - BucketName: bucketName, - Tags: oldTags, + Tags: [oldValue], }, }, }, }; const newTemplate = { Resources: { - QueueResource: { - Type: 'AWS::SQS::Queue', - }, BucketResource: { Type: 'AWS::S3::Bucket', Properties: { - BucketName: bucketName, - Tags: newTags, + Tags: [newValue], }, }, }, }; - + // WHEN const differences = diffTemplate(currentTemplate, newTemplate); + + // THEN expect(differences.differenceCount).toBe(1); expect(differences.resources.differenceCount).toBe(1); const difference = differences.resources.changes.BucketResource; expect(difference).not.toBeUndefined(); expect(difference?.oldResourceType).toEqual('AWS::S3::Bucket'); expect(difference?.propertyUpdates).toEqual({ - Tags: { oldValue: oldTags, newValue: newTags, changeImpact: ResourceImpact.WILL_UPDATE, isDifferent: true }, + Tags: { oldValue: [oldValue], newValue: [newValue], changeImpact: ResourceImpact.WILL_UPDATE, isDifferent: true }, }); }); test('when a property with a number-like format doesn\'t change', () => { - const bucketName = 'ShineyBucketName'; const tags = ['0.31.1-prod', '8.0.5.5.4-identifier', '1.1.1.1', '1.2.3']; const currentTemplate = { Resources: { - QueueResource: { - Type: 'AWS::SQS::Queue', - }, BucketResource: { Type: 'AWS::S3::Bucket', Properties: { - BucketName: bucketName, Tags: tags, }, }, @@ -653,13 +640,9 @@ test('when a property with a number-like format doesn\'t change', () => { }; const newTemplate = { Resources: { - QueueResource: { - Type: 'AWS::SQS::Queue', - }, BucketResource: { Type: 'AWS::S3::Bucket', Properties: { - BucketName: bucketName, Tags: tags, }, }, @@ -671,4 +654,4 @@ test('when a property with a number-like format doesn\'t change', () => { expect(differences.resources.differenceCount).toBe(0); const difference = differences.resources.changes.BucketResource; expect(difference).toBeUndefined(); -}); \ No newline at end of file +}); From 
605027fc471b007ee63cd3abdcb0fd09f81c58a5 Mon Sep 17 00:00:00 2001 From: Pat Myron Date: Tue, 19 Oct 2021 03:56:35 -0700 Subject: [PATCH 19/37] chore(region-info): cn-northwest-1 (Ningxia) ROUTE_53_BUCKET_WEBSITE_ZONE_ID (#17024) https://docs.aws.amazon.com/general/latest/gr/s3.html#s3_region, https://github.com/hashicorp/terraform-provider-aws/pull/21337 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../aws-route53-targets/test/bucket-website-target.test.ts | 4 ++-- packages/@aws-cdk/region-info/build-tools/fact-tables.ts | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/@aws-cdk/aws-route53-targets/test/bucket-website-target.test.ts b/packages/@aws-cdk/aws-route53-targets/test/bucket-website-target.test.ts index eebb53b2d488a..ff1938e66447b 100644 --- a/packages/@aws-cdk/aws-route53-targets/test/bucket-website-target.test.ts +++ b/packages/@aws-cdk/aws-route53-targets/test/bucket-website-target.test.ts @@ -74,10 +74,10 @@ test('throws if region agnostic', () => { }).toThrow(/Cannot use an S3 record alias in region-agnostic stacks/); }); -test('throws if bucket website hosting is unavailable (cn-northwest-1)', () => { +test('throws if bucket website hosting is unavailable (cn-north-1)', () => { // GIVEN const app = new App(); - const stack = new Stack(app, 'test', { env: { region: 'cn-northwest-1' } }); + const stack = new Stack(app, 'test', { env: { region: 'cn-north-1' } }); const bucketWebsite = new s3.Bucket(stack, 'Bucket'); diff --git a/packages/@aws-cdk/region-info/build-tools/fact-tables.ts b/packages/@aws-cdk/region-info/build-tools/fact-tables.ts index 28d61f7fd9387..3ce1d5de6b55b 100644 --- a/packages/@aws-cdk/region-info/build-tools/fact-tables.ts +++ b/packages/@aws-cdk/region-info/build-tools/fact-tables.ts @@ -43,7 +43,7 @@ export const AWS_CDK_METADATA = new Set([ /** * The hosted zone Id if using an alias record in Route53. 
* - * @see https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_website_region_endpoints + * @see https://docs.aws.amazon.com/general/latest/gr/s3.html#s3_region */ export const ROUTE_53_BUCKET_WEBSITE_ZONE_IDS: { [region: string]: string } = { 'af-south-1': 'Z11KHD8FBVPUYU', @@ -55,6 +55,7 @@ export const ROUTE_53_BUCKET_WEBSITE_ZONE_IDS: { [region: string]: string } = { 'ap-southeast-1': 'Z3O0J2DXBE1FTB', 'ap-southeast-2': 'Z1WCIGYICN2BYD', 'ca-central-1': 'Z1QDHH18159H29', + 'cn-northwest-1': 'Z282HJ1KT0DH03', 'eu-central-1': 'Z21DNDUVLTQW6Q', 'eu-north-1': 'Z3BAZG2TWCNX0D', 'eu-south-1': 'Z3IXVV8C73GIO3', From b5f5182123928f66c7eab4465f1b75bce0328be8 Mon Sep 17 00:00:00 2001 From: Pat Myron Date: Tue, 19 Oct 2021 04:50:27 -0700 Subject: [PATCH 20/37] chore(region-info): cn-north-1/cn-northwest-1 (China) APPMESH_ECR_ACCOUNTS (#17025) https://docs.amazonaws.cn/app-mesh/latest/userguide/envoy.html https://aws.amazon.com/about-aws/whats-new/2021/09/aws-app-mesh-aws-china-regions/ https://github.com/aws/aws-app-mesh-roadmap/issues/1 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/region-info/build-tools/fact-tables.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/@aws-cdk/region-info/build-tools/fact-tables.ts b/packages/@aws-cdk/region-info/build-tools/fact-tables.ts index 3ce1d5de6b55b..0e1f923dc2a02 100644 --- a/packages/@aws-cdk/region-info/build-tools/fact-tables.ts +++ b/packages/@aws-cdk/region-info/build-tools/fact-tables.ts @@ -136,6 +136,7 @@ export const DLC_REPOSITORY_ACCOUNTS: { [region: string]: string } = { }; // https://docs.aws.amazon.com/app-mesh/latest/userguide/envoy.html +// https://docs.amazonaws.cn/app-mesh/latest/userguide/envoy.html export const APPMESH_ECR_ACCOUNTS: { [region: string]: string } = { 'af-south-1': '924023996002', 'ap-east-1': '856666278305', @@ -146,6 +147,8 @@ export const APPMESH_ECR_ACCOUNTS: { [region: string]: string } = { 'ap-southeast-1': '840364872350', 'ap-southeast-2': '840364872350', 'ca-central-1': '840364872350', + 'cn-north-1': '919366029133', + 'cn-northwest-1': '919830735681', 'eu-central-1': '840364872350', 'eu-north-1': '840364872350', 'eu-south-1': '422531588944', From d1cee62b5b9960860e1ff59dc095ba52d3f9de31 Mon Sep 17 00:00:00 2001 From: kaylanm <1063516+kaylanm@users.noreply.github.com> Date: Tue, 19 Oct 2021 08:43:33 -0400 Subject: [PATCH 21/37] chore: fix typo in 'subnet' (#17046) Fix typographical errors in the spelling of 'subnet' in the ec2 & eks modules. 
---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/aws-ec2/test/vpc.test.ts | 2 +- packages/@aws-cdk/aws-eks/lib/cluster.ts | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/@aws-cdk/aws-ec2/test/vpc.test.ts b/packages/@aws-cdk/aws-ec2/test/vpc.test.ts index ca375dc7fe321..90942056865c7 100644 --- a/packages/@aws-cdk/aws-ec2/test/vpc.test.ts +++ b/packages/@aws-cdk/aws-ec2/test/vpc.test.ts @@ -361,7 +361,7 @@ describe('vpc', () => { } }); - test('with custom subents and natGateways = 2 there should be only two NATGW', () => { + test('with custom subnets and natGateways = 2 there should be only two NATGW', () => { const stack = getTestStack(); new Vpc(stack, 'TheVPC', { cidr: '10.0.0.0/21', diff --git a/packages/@aws-cdk/aws-eks/lib/cluster.ts b/packages/@aws-cdk/aws-eks/lib/cluster.ts index 76c59f740f6f2..c1d00a9dcd767 100644 --- a/packages/@aws-cdk/aws-eks/lib/cluster.ts +++ b/packages/@aws-cdk/aws-eks/lib/cluster.ts @@ -1191,7 +1191,7 @@ export class Cluster extends ClusterBase { this.onEventLayer = props.onEventLayer; this.kubectlMemory = props.kubectlMemory; - const privateSubents = this.selectPrivateSubnets().slice(0, 16); + const privateSubnets = this.selectPrivateSubnets().slice(0, 16); const publicAccessDisabled = !this.endpointAccess._config.publicAccess; const publicAccessRestricted = !publicAccessDisabled && this.endpointAccess._config.publicCidrs @@ -1199,19 +1199,19 @@ export class Cluster extends ClusterBase { // validate endpoint access configuration - if (privateSubents.length === 0 && publicAccessDisabled) { + if (privateSubnets.length === 0 && publicAccessDisabled) { // no private subnets and no public access at all, no good. throw new Error('Vpc must contain private subnets when public endpoint access is disabled'); } - if (privateSubents.length === 0 && publicAccessRestricted) { - // no private subents and public access is restricted, no good. + if (privateSubnets.length === 0 && publicAccessRestricted) { + // no private subnets and public access is restricted, no good. throw new Error('Vpc must contain private subnets when public endpoint access is restricted'); } const placeClusterHandlerInVpc = props.placeClusterHandlerInVpc ?? false; - if (placeClusterHandlerInVpc && privateSubents.length === 0) { + if (placeClusterHandlerInVpc && privateSubnets.length === 0) { throw new Error('Cannot place cluster handler in the VPC since no private subnets could be selected'); } @@ -1240,11 +1240,11 @@ export class Cluster extends ClusterBase { publicAccessCidrs: this.endpointAccess._config.publicCidrs, secretsEncryptionKey: props.secretsEncryptionKey, vpc: this.vpc, - subnets: placeClusterHandlerInVpc ? privateSubents : undefined, + subnets: placeClusterHandlerInVpc ? privateSubnets : undefined, onEventLayer: this.onEventLayer, }); - if (this.endpointAccess._config.privateAccess && privateSubents.length !== 0) { + if (this.endpointAccess._config.privateAccess && privateSubnets.length !== 0) { // when private access is enabled and the vpc has private subnets, lets connect // the provider to the vpc so that it will work even when restricting public access. @@ -1254,7 +1254,7 @@ export class Cluster extends ClusterBase { throw new Error('Private endpoint access requires the VPC to have DNS support and DNS hostnames enabled. 
Use `enableDnsHostnames: true` and `enableDnsSupport: true` when creating the VPC.'); } - this.kubectlPrivateSubnets = privateSubents; + this.kubectlPrivateSubnets = privateSubnets; // the vpc must exist in order to properly delete the cluster (since we run `kubectl delete`). // this ensures that. From 707fa003a458039878a1ae5173b6665a84c1170b Mon Sep 17 00:00:00 2001 From: Cory Hall <43035978+corymhall@users.noreply.github.com> Date: Tue, 19 Oct 2021 09:35:20 -0400 Subject: [PATCH 22/37] fix(events): PhysicalName.GENERATE_IF_NEEDED does not work for EventBus (#17008) fixes an issue where the generate when needed marker was not being passed through to the physicalName fix #14337 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/aws-events/lib/event-bus.ts | 4 ++- .../aws-events/test/event-bus.test.ts | 30 ++++++++++++++++++- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/packages/@aws-cdk/aws-events/lib/event-bus.ts b/packages/@aws-cdk/aws-events/lib/event-bus.ts index df0859b3d3259..bdddbc9cceb61 100644 --- a/packages/@aws-cdk/aws-events/lib/event-bus.ts +++ b/packages/@aws-cdk/aws-events/lib/event-bus.ts @@ -276,6 +276,8 @@ export class EventBus extends EventBusBase { ); } return { eventBusName: eventSourceName, eventSourceName }; + } else { + return { eventBusName: props.eventBusName }; } } return { eventBusName: defaultEventBusName }; @@ -311,7 +313,7 @@ export class EventBus extends EventBusBase { super(scope, id, { physicalName: eventBusName }); const eventBus = new CfnEventBus(this, 'Resource', { - name: eventBusName, + name: this.physicalName, eventSourceName, }); diff --git a/packages/@aws-cdk/aws-events/test/event-bus.test.ts b/packages/@aws-cdk/aws-events/test/event-bus.test.ts index b4384255ea7b4..e50f0b24db771 100644 --- a/packages/@aws-cdk/aws-events/test/event-bus.test.ts +++ b/packages/@aws-cdk/aws-events/test/event-bus.test.ts @@ -1,6 +1,6 @@ import '@aws-cdk/assert-internal/jest'; import * as iam from '@aws-cdk/aws-iam'; -import { Aws, CfnResource, Stack, Arn } from '@aws-cdk/core'; +import { Aws, CfnResource, Stack, Arn, App, PhysicalName, CfnOutput } from '@aws-cdk/core'; import { EventBus } from '../lib'; describe('event bus', () => { @@ -515,4 +515,32 @@ describe('event bus', () => { }); + test('cross account event bus uses generated physical name', () => { + // GIVEN + const app = new App(); + const stack1 = new Stack(app, 'Stack1', { + env: { + account: '11111111111', + region: 'us-east-1', + }, + }); + const stack2 = new Stack(app, 'Stack2', { + env: { + account: '22222222222', + region: 'us-east-1', + }, + }); + + // WHEN + const bus1 = new EventBus(stack1, 'Bus', { + eventBusName: PhysicalName.GENERATE_IF_NEEDED, + }); + + new CfnOutput(stack2, 'BusName', { value: bus1.eventBusName }); + + // THEN + expect(stack1).toHaveResource('AWS::Events::EventBus', { + Name: 'stack1stack1busca19bdf8ab2e51b62a5a', + }); + }); }); From b3c00c026deda7be50bd68dbdba516185ec14e9f Mon Sep 17 00:00:00 2001 From: Nick Lynch Date: Tue, 19 Oct 2021 15:27:48 +0100 Subject: [PATCH 23/37] chore(codepipeline-actions): remove merge conflict marker (#17054) This causes a rather weird effect in the docs for this class: https://docs.aws.amazon.com/cdk/api/latest/docs/@aws-cdk_aws-codepipeline-actions.ServiceCatalogDeployActionBeta1.html ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- 
.../lib/servicecatalog/deploy-action-beta1.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/packages/@aws-cdk/aws-codepipeline-actions/lib/servicecatalog/deploy-action-beta1.ts b/packages/@aws-cdk/aws-codepipeline-actions/lib/servicecatalog/deploy-action-beta1.ts index 34b055c14735b..302b82b2a8725 100644 --- a/packages/@aws-cdk/aws-codepipeline-actions/lib/servicecatalog/deploy-action-beta1.ts +++ b/packages/@aws-cdk/aws-codepipeline-actions/lib/servicecatalog/deploy-action-beta1.ts @@ -35,12 +35,8 @@ export interface ServiceCatalogDeployActionBeta1Props extends codepipeline.Commo /** * CodePipeline action to connect to an existing ServiceCatalog product. -<<<<<<< HEAD:packages/@aws-cdk/aws-codepipeline-actions/lib/servicecatalog/deploy-action.ts * * **Note**: this class is still experimental, and may have breaking changes in the future! - * -======= ->>>>>>> master:packages/@aws-cdk/aws-codepipeline-actions/lib/servicecatalog/deploy-action-beta1.ts */ export class ServiceCatalogDeployActionBeta1 extends Action { private readonly templatePath: string; From 7fda90318e18b3a5d126b040e35a0146634d5f2d Mon Sep 17 00:00:00 2001 From: Ayush Goyal Date: Tue, 19 Oct 2021 20:51:50 +0530 Subject: [PATCH 24/37] feat(events): Add DLQ support for SQS target (#16916) feat(events-targets): Add DLQ support for SQS target closes #16417 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../@aws-cdk/aws-events-targets/README.md | 2 +- .../@aws-cdk/aws-events-targets/lib/sqs.ts | 9 ++- .../integ.sqs-event-rule-target.expected.json | 52 ++++++++++++++ .../test/sqs/integ.sqs-event-rule-target.ts | 6 +- .../aws-events-targets/test/sqs/sqs.test.ts | 72 +++++++++++++++++++ 5 files changed, 137 insertions(+), 4 deletions(-) diff --git a/packages/@aws-cdk/aws-events-targets/README.md b/packages/@aws-cdk/aws-events-targets/README.md index 994c24ab8a0a5..a5c8fde1d9d98 100644 --- a/packages/@aws-cdk/aws-events-targets/README.md +++ b/packages/@aws-cdk/aws-events-targets/README.md @@ -35,7 +35,7 @@ EventBridge. ## Event retry policy and using dead-letter queues -The Codebuild, CodePipeline, Lambda, StepFunctions and LogGroup targets support attaching a [dead letter queue and setting retry policies](https://docs.aws.amazon.com/eventbridge/latest/userguide/rule-dlq.html). See the [lambda example](#invoke-a-lambda-function). +The Codebuild, CodePipeline, Lambda, StepFunctions, LogGroup and SQSQueue targets support attaching a [dead letter queue and setting retry policies](https://docs.aws.amazon.com/eventbridge/latest/userguide/rule-dlq.html). See the [lambda example](#invoke-a-lambda-function). Use [escape hatches](https://docs.aws.amazon.com/cdk/latest/guide/cfn_layer.html) for the other target types. 
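As an illustration, a minimal sketch of attaching a dead-letter queue and retry policy to an SQS target, mirroring the props exercised in the tests added below (queue and rule names are placeholders):

```ts
import * as events from '@aws-cdk/aws-events';
import * as targets from '@aws-cdk/aws-events-targets';
import * as sqs from '@aws-cdk/aws-sqs';
import { Duration, Stack } from '@aws-cdk/core';

declare const stack: Stack;

const queue = new sqs.Queue(stack, 'TargetQueue');
const deadLetterQueue = new sqs.Queue(stack, 'DeadLetterQueue');

const rule = new events.Rule(stack, 'Rule', {
  schedule: events.Schedule.rate(Duration.hours(1)),
});

// Undeliverable events are sent to the dead-letter queue once the retries are exhausted.
rule.addTarget(new targets.SqsQueue(queue, {
  deadLetterQueue,
  maxEventAge: Duration.hours(2),
  retryAttempts: 2,
}));
```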
## Invoke a Lambda function diff --git a/packages/@aws-cdk/aws-events-targets/lib/sqs.ts b/packages/@aws-cdk/aws-events-targets/lib/sqs.ts index 43fb9b8ed15d0..465f0355516cf 100644 --- a/packages/@aws-cdk/aws-events-targets/lib/sqs.ts +++ b/packages/@aws-cdk/aws-events-targets/lib/sqs.ts @@ -1,11 +1,12 @@ import * as events from '@aws-cdk/aws-events'; import * as iam from '@aws-cdk/aws-iam'; import * as sqs from '@aws-cdk/aws-sqs'; +import { addToDeadLetterQueueResourcePolicy, TargetBaseProps, bindBaseTargetConfig } from './util'; /** * Customize the SQS Queue Event Target */ -export interface SqsQueueProps { +export interface SqsQueueProps extends TargetBaseProps { /** * Message Group ID for messages sent to this queue @@ -24,7 +25,6 @@ export interface SqsQueueProps { * @default the entire EventBridge event */ readonly message?: events.RuleTargetInput; - } /** @@ -62,7 +62,12 @@ export class SqsQueue implements events.IRuleTarget { // deduplicated automatically this.queue.grantSendMessages(new iam.ServicePrincipal('events.amazonaws.com', principalOpts)); + if (this.props.deadLetterQueue) { + addToDeadLetterQueueResourcePolicy(rule, this.props.deadLetterQueue); + } + return { + ...bindBaseTargetConfig(this.props), arn: this.queue.queueArn, input: this.props.message, targetResource: this.queue, diff --git a/packages/@aws-cdk/aws-events-targets/test/sqs/integ.sqs-event-rule-target.expected.json b/packages/@aws-cdk/aws-events-targets/test/sqs/integ.sqs-event-rule-target.expected.json index eb2a7dd26ef5f..f35a7a93b9e42 100644 --- a/packages/@aws-cdk/aws-events-targets/test/sqs/integ.sqs-event-rule-target.expected.json +++ b/packages/@aws-cdk/aws-events-targets/test/sqs/integ.sqs-event-rule-target.expected.json @@ -61,6 +61,14 @@ "Arn" ] }, + "DeadLetterConfig": { + "Arn": { + "Fn::GetAtt": [ + "MyDeadLetterQueueD997968A", + "Arn" + ] + } + }, "Id": "Target0" } ] @@ -110,6 +118,50 @@ } ] } + }, + "MyDeadLetterQueueD997968A": { + "Type": "AWS::SQS::Queue", + "UpdateReplacePolicy": "Delete", + "DeletionPolicy": "Delete" + }, + "MyDeadLetterQueuePolicyCC35D52C": { + "Type": "AWS::SQS::QueuePolicy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": "sqs:SendMessage", + "Condition": { + "ArnEquals": { + "aws:SourceArn": { + "Fn::GetAtt": [ + "MyRuleA44AB831", + "Arn" + ] + } + } + }, + "Effect": "Allow", + "Principal": { + "Service": "events.amazonaws.com" + }, + "Resource": { + "Fn::GetAtt": [ + "MyDeadLetterQueueD997968A", + "Arn" + ] + }, + "Sid": "AllowEventRuleawscdksqseventtargetMyRule0027A8F4" + } + ], + "Version": "2012-10-17" + }, + "Queues": [ + { + "Ref": "MyDeadLetterQueueD997968A" + } + ] + } } } } \ No newline at end of file diff --git a/packages/@aws-cdk/aws-events-targets/test/sqs/integ.sqs-event-rule-target.ts b/packages/@aws-cdk/aws-events-targets/test/sqs/integ.sqs-event-rule-target.ts index b2b8fb334bff6..f2375dd7c2a37 100644 --- a/packages/@aws-cdk/aws-events-targets/test/sqs/integ.sqs-event-rule-target.ts +++ b/packages/@aws-cdk/aws-events-targets/test/sqs/integ.sqs-event-rule-target.ts @@ -24,6 +24,10 @@ const queue = new sqs.Queue(stack, 'MyQueue', { encryptionMasterKey: key, }); -event.addTarget(new targets.SqsQueue(queue)); +const deadLetterQueue = new sqs.Queue(stack, 'MyDeadLetterQueue'); + +event.addTarget(new targets.SqsQueue(queue, { + deadLetterQueue, +})); app.synth(); diff --git a/packages/@aws-cdk/aws-events-targets/test/sqs/sqs.test.ts b/packages/@aws-cdk/aws-events-targets/test/sqs/sqs.test.ts index 8893a8820f37e..ad2a5296714e6 100644 --- 
a/packages/@aws-cdk/aws-events-targets/test/sqs/sqs.test.ts +++ b/packages/@aws-cdk/aws-events-targets/test/sqs/sqs.test.ts @@ -180,3 +180,75 @@ test('fifo queues are synthesized correctly', () => { ], })); }); + +test('dead letter queue is configured correctly', () => { + const stack = new Stack(); + const queue = new sqs.Queue(stack, 'MyQueue', { fifo: true }); + const deadLetterQueue = new sqs.Queue(stack, 'MyDeadLetterQueue'); + const rule = new events.Rule(stack, 'MyRule', { + schedule: events.Schedule.rate(Duration.hours(1)), + }); + + // WHEN + rule.addTarget(new targets.SqsQueue(queue, { + deadLetterQueue, + })); + + cdkExpect(stack).to(haveResource('AWS::Events::Rule', { + ScheduleExpression: 'rate(1 hour)', + State: 'ENABLED', + Targets: [ + { + Arn: { + 'Fn::GetAtt': [ + 'MyQueueE6CA6235', + 'Arn', + ], + }, + Id: 'Target0', + DeadLetterConfig: { + Arn: { + 'Fn::GetAtt': [ + 'MyDeadLetterQueueD997968A', + 'Arn', + ], + }, + }, + }, + ], + })); +}); + +test('specifying retry policy', () => { + const stack = new Stack(); + const queue = new sqs.Queue(stack, 'MyQueue', { fifo: true }); + const rule = new events.Rule(stack, 'MyRule', { + schedule: events.Schedule.rate(Duration.hours(1)), + }); + + // WHEN + rule.addTarget(new targets.SqsQueue(queue, { + retryAttempts: 2, + maxEventAge: Duration.hours(2), + })); + + cdkExpect(stack).to(haveResource('AWS::Events::Rule', { + ScheduleExpression: 'rate(1 hour)', + State: 'ENABLED', + Targets: [ + { + Arn: { + 'Fn::GetAtt': [ + 'MyQueueE6CA6235', + 'Arn', + ], + }, + Id: 'Target0', + RetryPolicy: { + MaximumEventAgeInSeconds: 7200, + MaximumRetryAttempts: 2, + }, + }, + ], + })); +}); From f681f29714254df1b72e05acb3a8d50f7a834e51 Mon Sep 17 00:00:00 2001 From: kaizen3031593 <36202692+kaizen3031593@users.noreply.github.com> Date: Tue, 19 Oct 2021 13:47:42 -0400 Subject: [PATCH 25/37] docs(appsync): make examples compile (#17045) ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/aws-appsync/README.md | 260 ++++++++++-------- .../@aws-cdk/aws-appsync/lib/graphqlapi.ts | 2 +- .../aws-appsync/rosetta/default.ts-fixture | 15 + .../rosetta/with-objects.ts-fixture | 49 ++++ 4 files changed, 205 insertions(+), 121 deletions(-) create mode 100644 packages/@aws-cdk/aws-appsync/rosetta/default.ts-fixture create mode 100644 packages/@aws-cdk/aws-appsync/rosetta/with-objects.ts-fixture diff --git a/packages/@aws-cdk/aws-appsync/README.md b/packages/@aws-cdk/aws-appsync/README.md index a191b51a86483..8e78c92bd4b7b 100644 --- a/packages/@aws-cdk/aws-appsync/README.md +++ b/packages/@aws-cdk/aws-appsync/README.md @@ -24,6 +24,10 @@ The `@aws-cdk/aws-appsync` package contains constructs for building flexible APIs that use GraphQL. 
+```ts nofixture +import * as appsync from '@aws-cdk/aws-appsync'; +``` + ## Example ### DynamoDB @@ -52,24 +56,21 @@ type Mutation { CDK stack file `app-stack.ts`: ```ts -import * as appsync from '@aws-cdk/aws-appsync'; -import * as db from '@aws-cdk/aws-dynamodb'; - -const api = new appsync.GraphqlApi(stack, 'Api', { +const api = new appsync.GraphqlApi(this, 'Api', { name: 'demo', - schema: appsync.Schema.fromAsset(join(__dirname, 'schema.graphql')), + schema: appsync.Schema.fromAsset(path.join(__dirname, 'schema.graphql')), authorizationConfig: { defaultAuthorization: { - authorizationType: appsync.AuthorizationType.IAM + authorizationType: appsync.AuthorizationType.IAM, }, }, xrayEnabled: true, }); -const demoTable = new db.Table(stack, 'DemoTable', { +const demoTable = new dynamodb.Table(this, 'DemoTable', { partitionKey: { name: 'id', - type: db.AttributeType.STRING, + type: dynamodb.AttributeType.STRING, }, }); @@ -89,7 +90,7 @@ demoDS.createResolver({ fieldName: 'addDemo', requestMappingTemplate: appsync.MappingTemplate.dynamoDbPutItem( appsync.PrimaryKey.partition('id').auto(), - appsync.Values.projecting('input') + appsync.Values.projecting('input'), ), responseMappingTemplate: appsync.MappingTemplate.dynamoDbResultItem(), }); @@ -103,15 +104,15 @@ against the Data API with GraphQL queries, mutations, and subscriptions. ```ts // Create username and password secret for DB Cluster -const secret = new rds.DatabaseSecret(stack, 'AuroraSecret', { +const secret = new rds.DatabaseSecret(this, 'AuroraSecret', { username: 'clusteradmin', }); // The VPC to place the cluster in -const vpc = new ec2.Vpc(stack, 'AuroraVpc'); +const vpc = new ec2.Vpc(this, 'AuroraVpc'); // Create the serverless cluster, provide all values needed to customise the database. -const cluster = new rds.ServerlessCluster(stack, 'AuroraCluster', { +const cluster = new rds.ServerlessCluster(this, 'AuroraCluster', { engine: rds.DatabaseClusterEngine.AURORA_MYSQL, vpc, credentials: { username: 'clusteradmin' }, @@ -120,13 +121,14 @@ const cluster = new rds.ServerlessCluster(stack, 'AuroraCluster', { }); // Build a data source for AppSync to access the database. +declare const api: appsync.GraphqlApi; const rdsDS = api.addRdsDataSource('rds', cluster, secret, 'demos'); // Set up a resolver for an RDS query. 
rdsDS.createResolver({ typeName: 'Query', fieldName: 'getDemosRds', - requestMappingTemplate: MappingTemplate.fromString(` + requestMappingTemplate: appsync.MappingTemplate.fromString(` { "version": "2018-05-29", "statements": [ @@ -134,7 +136,7 @@ rdsDS.createResolver({ ] } `), - responseMappingTemplate: MappingTemplate.fromString(` + responseMappingTemplate: appsync.MappingTemplate.fromString(` $utils.toJson($utils.rds.toJsonObject($ctx.result)[0]) `), }); @@ -143,7 +145,7 @@ rdsDS.createResolver({ rdsDS.createResolver({ typeName: 'Mutation', fieldName: 'addDemoRds', - requestMappingTemplate: MappingTemplate.fromString(` + requestMappingTemplate: appsync.MappingTemplate.fromString(` { "version": "2018-05-29", "statements": [ @@ -156,7 +158,7 @@ rdsDS.createResolver({ } } `), - responseMappingTemplate: MappingTemplate.fromString(` + responseMappingTemplate: appsync.MappingTemplate.fromString(` $utils.toJson($utils.rds.toJsonObject($ctx.result)[1][0]) `), }); @@ -212,11 +214,9 @@ GraphQL response mapping template `response.vtl`: CDK stack file `app-stack.ts`: ```ts -import * as appsync from '@aws-cdk/aws-appsync'; - -const api = new appsync.GraphqlApi(scope, 'api', { +const api = new appsync.GraphqlApi(this, 'api', { name: 'api', - schema: appsync.Schema.fromFile(join(__dirname, 'schema.graphql')), + schema: appsync.Schema.fromAsset(path.join(__dirname, 'schema.graphql')), }); const httpDs = api.addHttpDataSource( @@ -227,7 +227,7 @@ const httpDs = api.addHttpDataSource( description: 'from appsync to StepFunctions Workflow', authorizationConfig: { signingRegion: 'us-east-1', - signingServiceName: 'states' + signingServiceName: 'states', } } ); @@ -235,8 +235,8 @@ const httpDs = api.addHttpDataSource( httpDs.createResolver({ typeName: 'Mutation', fieldName: 'callStepFunction', - requestMappingTemplate: MappingTemplate.fromFile('request.vtl'), - responseMappingTemplate: MappingTemplate.fromFile('response.vtl') + requestMappingTemplate: appsync.MappingTemplate.fromFile('request.vtl'), + responseMappingTemplate: appsync.MappingTemplate.fromFile('response.vtl'), }); ``` @@ -247,16 +247,19 @@ through your AWS account. You can use AppSync resolvers to perform GraphQL opera such as queries, mutations, and subscriptions. ```ts -const user = new User(stack, 'User'); -const domain = new es.Domain(stack, 'Domain', { +import * as es from '@aws-cdk/aws-elasticsearch'; + +const user = new iam.User(this, 'User'); +const domain = new es.Domain(this, 'Domain', { version: es.ElasticsearchVersion.V7_1, - removalPolicy: cdk.RemovalPolicy.DESTROY, + removalPolicy: RemovalPolicy.DESTROY, fineGrainedAccessControl: { masterUserArn: user.userArn }, encryptionAtRest: { enabled: true }, nodeToNodeEncryption: true, enforceHttps: true, }); +declare const api: appsync.GraphqlApi; const ds = api.addElasticsearchDataSource('ds', domain); ds.createResolver({ @@ -293,23 +296,23 @@ When declaring your GraphQL Api, CDK defaults to a code-first approach if the `schema` property is not configured. ```ts -const api = new appsync.GraphqlApi(stack, 'api', { name: 'myApi' }); +const api = new appsync.GraphqlApi(this, 'api', { name: 'myApi' }); ``` CDK will declare a `Schema` class that will give your Api access functions to -define your schema code-first: `addType`, `addObjectType`, `addToSchema`, etc. +define your schema code-first: `addType`, `addToSchema`, etc. You can also declare your `Schema` class outside of your CDK stack, to define your schema externally. 
```ts const schema = new appsync.Schema(); -schema.addObjectType('demo', { +schema.addType(new appsync.ObjectType('demo', { definition: { id: appsync.GraphqlType.id() }, -}); -const api = new appsync.GraphqlApi(stack, 'api', { +})); +const api = new appsync.GraphqlApi(this, 'api', { name: 'myApi', - schema + schema, }); ``` @@ -321,9 +324,9 @@ You can define your GraphQL Schema from a file on disk. For convenience, use the `appsync.Schema.fromAsset` to specify the file representing your schema. ```ts -const api = appsync.GraphqlApi(stack, 'api', { +const api = new appsync.GraphqlApi(this, 'api', { name: 'myApi', - schema: appsync.Schema.fromAsset(join(__dirname, 'schema.graphl')), + schema: appsync.Schema.fromAsset(path.join(__dirname, 'schema.graphl')), }); ``` @@ -334,9 +337,11 @@ another stack into your CDK app. Utilizing the `fromXxx` function, you have the ability to add data sources and resolvers through a `IGraphqlApi` interface. ```ts -const importedApi = appsync.GraphqlApi.fromGraphqlApiAttributes(stack, 'IApi', { +declare const api: appsync.GraphqlApi; +declare const table: dynamodb.Table; +const importedApi = appsync.GraphqlApi.fromGraphqlApiAttributes(this, 'IApi', { graphqlApiId: api.apiId, - graphqlArn: api.arn, + graphqlApiArn: api.arn, }); importedApi.addDynamoDbDataSource('TableDataSource', table); ``` @@ -362,9 +367,10 @@ authorization mode to finish defining your authorization. For example, this is a with AWS Lambda Authorization. ```ts -authFunction = new lambda.Function(stack, 'auth-function', {}); +import * as lambda from '@aws-cdk/aws-lambda'; +declare const authFunction: lambda.Function; -new appsync.GraphqlApi(stack, 'api', { +new appsync.GraphqlApi(this, 'api', { name: 'api', schema: appsync.Schema.fromAsset(path.join(__dirname, 'appsync.test.graphql')), authorizationConfig: { @@ -390,7 +396,7 @@ for `IAM` authorized access you would configure the following. In `schema.graphql`: -```ts +```gql type Mutation { updateExample(...): ... @aws_iam @@ -401,18 +407,18 @@ In `IAM`: ```json { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Action": [ - "appsync:GraphQL" - ], - "Resource": [ - "arn:aws:appsync:REGION:ACCOUNT_ID:apis/GRAPHQL_ID/types/Mutation/fields/updateExample" - ] - } - ] + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "appsync:GraphQL" + ], + "Resource": [ + "arn:aws:appsync:REGION:ACCOUNT_ID:apis/GRAPHQL_ID/types/Mutation/fields/updateExample" + ] + } + ] } ``` @@ -423,14 +429,12 @@ To make this easier, CDK provides `grant` API. Use the `grant` function for more granular authorization. ```ts -const role = new iam.Role(stack, 'Role', { +const role = new iam.Role(this, 'Role', { assumedBy: new iam.ServicePrincipal('lambda.amazonaws.com'), }); -const api = new appsync.GraphqlApi(stack, 'API', { - definition -}); +declare const api: appsync.GraphqlApi; -api.grant(role, appsync.IamResource.custom('types/Mutation/fields/updateExample'), 'appsync:GraphQL') +api.grant(role, appsync.IamResource.custom('types/Mutation/fields/updateExample'), 'appsync:GraphQL'); ``` ### IamResource @@ -454,6 +458,9 @@ These include: - grantSubscription (use to grant access to Subscription fields) ```ts +declare const api: appsync.GraphqlApi; +declare const role: iam.Role; + // For generic types api.grantMutation(role, 'updateExample'); @@ -468,10 +475,12 @@ backend data source. Developers can compose operations (Functions) and execute them in sequence with Pipeline Resolvers. 
```ts -const appsyncFunction = new appsync.AppsyncFunction(stack, 'function', { +declare const api: appsync.GraphqlApi; + +const appsyncFunction = new appsync.AppsyncFunction(this, 'function', { name: 'appsync_function', - api: api, - dataSource: apiDataSource, + api, + dataSource: api.addNoneDataSource('none'), requestMappingTemplate: appsync.MappingTemplate.fromFile('request.vtl'), responseMappingTemplate: appsync.MappingTemplate.fromFile('response.vtl'), }); @@ -481,10 +490,14 @@ AppSync Functions are used in tandem with pipeline resolvers to compose multiple operations. ```ts -const pipelineResolver = new appsync.Resolver(stack, 'pipeline', { - name: 'pipeline_resolver', - api: api, - dataSource: apiDataSource, +declare const api: appsync.GraphqlApi; +declare const appsyncFunction: appsync.AppsyncFunction; + +const pipelineResolver = new appsync.Resolver(this, 'pipeline', { + api, + dataSource: api.addNoneDataSource('none'), + typeName: 'typeName', + fieldName: 'fieldName', requestMappingTemplate: appsync.MappingTemplate.fromFile('beforeRequest.vtl'), pipelineConfig: [appsyncFunction], responseMappingTemplate: appsync.MappingTemplate.fromFile('afterResponse.vtl'), @@ -537,48 +550,38 @@ Above we see a schema that allows for generating paginated responses. For exampl we can query `allFilms(first: 100)` since `FilmConnection` acts as an intermediary for holding `FilmEdges` we can write a resolver to return the first 100 films. -In a separate file, we can declare our scalar types: `scalar-types.ts`. - -```ts -import { GraphqlType } from '@aws-cdk/aws-appsync'; - -export const string = appsync.GraphqlType.string(); -export const int = appsync.GraphqlType.int(); -``` - -In another separate file, we can declare our object types and related functions. +In a separate file, we can declare our object types and related functions. We will call this file `object-types.ts` and we will have created it in a way that allows us to generate other `XxxConnection` and `XxxEdges` in the future. 
-```ts -const pluralize = require('pluralize'); -import * as scalar from './scalar-types.ts'; +```ts nofixture import * as appsync from '@aws-cdk/aws-appsync'; +const pluralize = require('pluralize'); export const args = { - after: scalar.string, - first: scalar.int, - before: scalar.string, - last: scalar.int, + after: appsync.GraphqlType.string(), + first: appsync.GraphqlType.int(), + before: appsync.GraphqlType.string(), + last: appsync.GraphqlType.int(), }; export const Node = new appsync.InterfaceType('Node', { - definition: { id: scalar.string } + definition: { id: appsync.GraphqlType.string() } }); -export const FilmNode = new appsync.ObjectType.implementInterface('FilmNode', { +export const FilmNode = new appsync.ObjectType('FilmNode', { interfaceTypes: [Node], - definition: { filmName: scalar.string } + definition: { filmName: appsync.GraphqlType.string() } }); export function generateEdgeAndConnection(base: appsync.ObjectType) { const edge = new appsync.ObjectType(`${base.name}Edge`, { - definition: { node: base.attribute(), cursor: scalar.string } + definition: { node: base.attribute(), cursor: appsync.GraphqlType.string() } }); const connection = new appsync.ObjectType(`${base.name}Connection`, { definition: { - edges: edges.attribute({ isList: true }), + edges: edge.attribute({ isList: true }), [pluralize(base.name)]: base.attribute({ isList: true }), - totalCount: scalar.int, + totalCount: appsync.GraphqlType.int(), } }); return { edge: edge, connection: connection }; @@ -588,29 +591,30 @@ export function generateEdgeAndConnection(base: appsync.ObjectType) { Finally, we will go to our `cdk-stack` and combine everything together to generate our schema. -```ts -import * as appsync from '@aws-cdk/aws-appsync'; -import * as schema from './object-types'; +```ts fixture=with-objects +declare const dummyRequest: appsync.MappingTemplate; +declare const dummyResponse: appsync.MappingTemplate; -const api = new appsync.GraphqlApi(stack, 'Api', { +const api = new appsync.GraphqlApi(this, 'Api', { name: 'demo', }); -this.objectTypes = [ schema.Node, schema.Film ]; +const objectTypes = [ Node, FilmNode ]; -const filmConnections = schema.generateEdgeAndConnection(schema.Film); +const filmConnections = generateEdgeAndConnection(FilmNode); api.addQuery('allFilms', new appsync.ResolvableField({ - returnType: filmConnections.connection.attribute(), - args: schema.args, - dataSource: dummyDataSource, - requestMappingTemplate: dummyRequest, - responseMappingTemplate: dummyResponse, - }), -}); + returnType: filmConnections.connection.attribute(), + args: args, + dataSource: api.addNoneDataSource('none'), + requestMappingTemplate: dummyRequest, + responseMappingTemplate: dummyResponse, +})); -this.objectTypes.map((t) => api.addType(t)); -Object.keys(filmConnections).forEach((key) => api.addType(filmConnections[key])); +api.addType(Node); +api.addType(FilmNode); +api.addType(filmConnections.edge); +api.addType(filmConnections.connection); ``` Notice how we can utilize the `generateEdgeAndConnection` function to generate @@ -701,6 +705,9 @@ type Info { The CDK code required would be: ```ts +declare const api: appsync.GraphqlApi; +declare const dummyRequest: appsync.MappingTemplate; +declare const dummyResponse: appsync.MappingTemplate; const info = new appsync.ObjectType('Info', { definition: { node: new appsync.ResolvableField({ @@ -729,6 +736,9 @@ type Query { The CDK code required would be: ```ts +declare const api: appsync.GraphqlApi; +declare const dummyRequest: appsync.MappingTemplate; +declare 
const dummyResponse: appsync.MappingTemplate; const query = new appsync.ObjectType('Query', { definition: { get: new appsync.ResolvableField({ @@ -784,12 +794,12 @@ To learn more about **Interface Types**, read the docs [here](https://graphql.or the `demo` variable is an **Object Type**. **Object Types** are defined by GraphQL Types and are only usable when linked to a GraphQL Api. -You can create Object Types in three ways: +You can create Object Types in two ways: 1. Object Types can be created ***externally***. ```ts - const api = new appsync.GraphqlApi(stack, 'Api', { + const api = new appsync.GraphqlApi(this, 'Api', { name: 'demo', }); const demo = new appsync.ObjectType('Demo', { @@ -799,34 +809,28 @@ You can create Object Types in three ways: }, }); - api.addType(object); + api.addType(demo); ``` > This method allows for reusability and modularity, ideal for larger projects. For example, imagine moving all Object Type definition outside the stack. - `scalar-types.ts` - a file for scalar type definitions - - ```ts - export const required_string = appsync.GraphqlType.string({ isRequired: true }); - ``` - `object-types.ts` - a file for object type definitions - ```ts - import { required_string } from './scalar-types'; + ```ts nofixture + import * as appsync from '@aws-cdk/aws-appsync'; export const demo = new appsync.ObjectType('Demo', { definition: { - id: required_string, - version: required_string, + id: appsync.GraphqlType.string({ isRequired: true }), + version: appsync.GraphqlType.string({ isRequired: true }), }, }); ``` `cdk-stack.ts` - a file containing our cdk stack - ```ts - import { demo } from './object-types'; + ```ts fixture=with-objects + declare const api: appsync.GraphqlApi; api.addType(demo); ``` @@ -869,6 +873,7 @@ enum Episode { The above GraphQL Enumeration Type can be expressed in CDK as the following: ```ts +declare const api: appsync.GraphqlApi; const episode = new appsync.EnumType('Episode', { definition: [ 'NEWHOPE', @@ -896,10 +901,11 @@ input Review { The above GraphQL Input Type can be expressed in CDK as the following: ```ts +declare const api: appsync.GraphqlApi; const review = new appsync.InputType('Review', { definition: { - stars: GraphqlType.int({ isRequired: true }), - commentary: GraphqlType.string(), + stars: appsync.GraphqlType.int({ isRequired: true }), + commentary: appsync.GraphqlType.string(), }, }); api.addType(review); @@ -923,6 +929,7 @@ The above GraphQL Union Type encompasses the Object Types of Human, Droid and St can be expressed in CDK as the following: ```ts +declare const api: appsync.GraphqlApi; const string = appsync.GraphqlType.string(); const human = new appsync.ObjectType('Human', { definition: { name: string } }); const droid = new appsync.ObjectType('Droid', { definition: { name: string } }); @@ -945,6 +952,11 @@ To add fields for these queries, we can simply run the `addQuery` function to ad to the schema's `Query` type. ```ts +declare const api: appsync.GraphqlApi; +declare const filmConnection: appsync.InterfaceType; +declare const dummyRequest: appsync.MappingTemplate; +declare const dummyResponse: appsync.MappingTemplate; + const string = appsync.GraphqlType.string(); const int = appsync.GraphqlType.int(); api.addQuery('allFilms', new appsync.ResolvableField({ @@ -968,10 +980,15 @@ To add fields for these mutations, we can simply run the `addMutation` function to the schema's `Mutation` type. 
```ts +declare const api: appsync.GraphqlApi; +declare const filmNode: appsync.ObjectType; +declare const dummyRequest: appsync.MappingTemplate; +declare const dummyResponse: appsync.MappingTemplate; + const string = appsync.GraphqlType.string(); const int = appsync.GraphqlType.int(); api.addMutation('addFilm', new appsync.ResolvableField({ - returnType: film.attribute(), + returnType: filmNode.attribute(), args: { name: string, film_number: int }, dataSource: api.addNoneDataSource('none'), requestMappingTemplate: dummyRequest, @@ -994,10 +1011,13 @@ To add fields for these subscriptions, we can simply run the `addSubscription` f to the schema's `Subscription` type. ```ts +declare const api: appsync.GraphqlApi; +declare const film: appsync.InterfaceType; + api.addSubscription('addedFilm', new appsync.Field({ returnType: film.attribute(), args: { id: appsync.GraphqlType.id({ isRequired: true }) }, - directive: [appsync.Directive.subscribe('addFilm')], + directives: [appsync.Directive.subscribe('addFilm')], })); ``` diff --git a/packages/@aws-cdk/aws-appsync/lib/graphqlapi.ts b/packages/@aws-cdk/aws-appsync/lib/graphqlapi.ts index 93c9078e32358..71cd6e32060c1 100644 --- a/packages/@aws-cdk/aws-appsync/lib/graphqlapi.ts +++ b/packages/@aws-cdk/aws-appsync/lib/graphqlapi.ts @@ -150,7 +150,7 @@ export interface OpenIdConnectConfig { /** * The client identifier of the Relying party at the OpenID identity provider. * A regular expression can be specified so AppSync can validate against multiple client identifiers at a time. - * @example - 'ABCD|CDEF' where ABCD and CDEF are two different clientId + * @example - 'ABCD|CDEF' // where ABCD and CDEF are two different clientId * @default - * (All) */ readonly clientId?: string; diff --git a/packages/@aws-cdk/aws-appsync/rosetta/default.ts-fixture b/packages/@aws-cdk/aws-appsync/rosetta/default.ts-fixture new file mode 100644 index 0000000000000..2b84336958cc7 --- /dev/null +++ b/packages/@aws-cdk/aws-appsync/rosetta/default.ts-fixture @@ -0,0 +1,15 @@ +// Fixture with packages imported, but nothing else +import { Construct, RemovalPolicy, Stack } from '@aws-cdk/core'; +import appsync = require('@aws-cdk/aws-appsync'); +import ec2 = require('@aws-cdk/aws-ec2'); +import dynamodb = require('@aws-cdk/aws-dynamodb'); +import iam = require('@aws-cdk/aws-iam'); +import rds = require('@aws-cdk/aws-rds'); +import path = require('path'); + +class Fixture extends Stack { + constructor(scope: Construct, id: string) { + super(scope, id); + /// here + } +} diff --git a/packages/@aws-cdk/aws-appsync/rosetta/with-objects.ts-fixture b/packages/@aws-cdk/aws-appsync/rosetta/with-objects.ts-fixture new file mode 100644 index 0000000000000..1251aad728423 --- /dev/null +++ b/packages/@aws-cdk/aws-appsync/rosetta/with-objects.ts-fixture @@ -0,0 +1,49 @@ +// Fixture with packages imported, but nothing else +import { Construct, Stack } from '@aws-cdk/core'; +import appsync = require('@aws-cdk/aws-appsync'); +const pluralize = require('pluralize'); + +const args = { + after: appsync.GraphqlType.string(), + first: appsync.GraphqlType.int(), + before: appsync.GraphqlType.string(), + last: appsync.GraphqlType.int(), +}; + +const Node = new appsync.InterfaceType('Node', { + definition: { id: appsync.GraphqlType.string() } +}); + +const FilmNode = new appsync.ObjectType('FilmNode', { + interfaceTypes: [Node], + definition: { filmName: appsync.GraphqlType.string() } +}); + +function generateEdgeAndConnection(base: appsync.ObjectType) { + const edge = new 
appsync.ObjectType(`${base.name}Edge`, { + definition: { node: base.attribute(), cursor: appsync.GraphqlType.string() } + }); + const connection = new appsync.ObjectType(`${base.name}Connection`, { + definition: { + edges: edge.attribute({ isList: true }), + [pluralize(base.name)]: base.attribute({ isList: true }), + totalCount: appsync.GraphqlType.int(), + } + }); + return { edge: edge, connection: connection }; +} + +const demo = new appsync.ObjectType('Demo', { + definition: { + id: appsync.GraphqlType.string({ isRequired: true }), + version: appsync.GraphqlType.string({ isRequired: true }), + }, +}); + +class Fixture extends Stack { + constructor(scope: Construct, id: string) { + super(scope, id); + + /// here + } +} From 3ec683283e96159d588797bd46d33c82ff3076f1 Mon Sep 17 00:00:00 2001 From: nom3ad <19239479+nom3ad@users.noreply.github.com> Date: Wed, 20 Oct 2021 00:17:46 +0530 Subject: [PATCH 26/37] fix(opensearch): add validation to domainName property (#17017) Add validation to domainName property as per https://docs.aws.amazon.com/opensearch-service/latest/developerguide/configuration-api.html#configuration-api-datatypes-domainname Fixes #17016 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../@aws-cdk/aws-elasticsearch/lib/domain.ts | 22 +++++++++++++++---- .../aws-elasticsearch/test/domain.test.ts | 15 +++++++++++++ .../aws-opensearchservice/lib/domain.ts | 22 +++++++++++++++---- .../aws-opensearchservice/test/domain.test.ts | 15 +++++++++++++ 4 files changed, 66 insertions(+), 8 deletions(-) diff --git a/packages/@aws-cdk/aws-elasticsearch/lib/domain.ts b/packages/@aws-cdk/aws-elasticsearch/lib/domain.ts index 842071fa0ec68..173b13834d233 100644 --- a/packages/@aws-cdk/aws-elasticsearch/lib/domain.ts +++ b/packages/@aws-cdk/aws-elasticsearch/lib/domain.ts @@ -1540,9 +1540,9 @@ export class Domain extends DomainBase implements IDomain, ec2.IConnectable { if (props.logging?.auditLogEnabled) { this.auditLogGroup = props.logging.auditLogGroup ?? - new logs.LogGroup(this, 'AuditLogs', { - retention: logs.RetentionDays.ONE_MONTH, - }); + new logs.LogGroup(this, 'AuditLogs', { + retention: logs.RetentionDays.ONE_MONTH, + }); logGroups.push(this.auditLogGroup); }; @@ -1692,7 +1692,21 @@ export class Domain extends DomainBase implements IDomain, ec2.IConnectable { if (logGroupResourcePolicy) { this.domain.node.addDependency(logGroupResourcePolicy); } - if (props.domainName) { this.node.addMetadata('aws:cdk:hasPhysicalName', props.domainName); } + if (props.domainName) { + if (!cdk.Token.isUnresolved(props.domainName)) { + // https://docs.aws.amazon.com/opensearch-service/latest/developerguide/configuration-api.html#configuration-api-datatypes-domainname + if (!props.domainName.match(/^[a-z0-9\-]+$/)) { + throw new Error(`Invalid domainName '${props.domainName}'. Valid characters are a-z (lowercase only), 0-9, and – (hyphen).`); + } + if (props.domainName.length < 3 || props.domainName.length > 28) { + throw new Error(`Invalid domainName '${props.domainName}'. It must be between 3 and 28 characters`); + } + if (props.domainName[0] < 'a' || props.domainName[0] > 'z') { + throw new Error(`Invalid domainName '${props.domainName}'. 
It must start with a lowercase letter`); + } + } + this.node.addMetadata('aws:cdk:hasPhysicalName', props.domainName); + } this.domainName = this.getResourceNameAttribute(this.domain.ref); diff --git a/packages/@aws-cdk/aws-elasticsearch/test/domain.test.ts b/packages/@aws-cdk/aws-elasticsearch/test/domain.test.ts index 6966882c55549..f1de6b640898b 100644 --- a/packages/@aws-cdk/aws-elasticsearch/test/domain.test.ts +++ b/packages/@aws-cdk/aws-elasticsearch/test/domain.test.ts @@ -1317,6 +1317,21 @@ describe('custom error responses', () => { })).toThrow(/Unknown Elasticsearch version: 5\.4/); }); + test('error when invalid domain name is given', () => { + expect(() => new Domain(stack, 'Domain1', { + version: ElasticsearchVersion.V7_4, + domainName: 'InvalidName', + })).toThrow(/Valid characters are a-z/); + expect(() => new Domain(stack, 'Domain2', { + version: ElasticsearchVersion.V7_4, + domainName: 'a'.repeat(29), + })).toThrow(/It must be between 3 and 28 characters/); + expect(() => new Domain(stack, 'Domain3', { + version: ElasticsearchVersion.V7_4, + domainName: '123domain', + })).toThrow(/It must start with a lowercase letter/); + }); + test('error when error log publishing is enabled for elasticsearch version < 5.1', () => { const error = /Error logs publishing requires Elasticsearch version 5.1 or later/; expect(() => new Domain(stack, 'Domain1', { diff --git a/packages/@aws-cdk/aws-opensearchservice/lib/domain.ts b/packages/@aws-cdk/aws-opensearchservice/lib/domain.ts index 1279d9525f831..600e1bee8f49a 100644 --- a/packages/@aws-cdk/aws-opensearchservice/lib/domain.ts +++ b/packages/@aws-cdk/aws-opensearchservice/lib/domain.ts @@ -1472,9 +1472,9 @@ export class Domain extends DomainBase implements IDomain, ec2.IConnectable { if (props.logging?.auditLogEnabled) { this.auditLogGroup = props.logging.auditLogGroup ?? - new logs.LogGroup(this, 'AuditLogs', { - retention: logs.RetentionDays.ONE_MONTH, - }); + new logs.LogGroup(this, 'AuditLogs', { + retention: logs.RetentionDays.ONE_MONTH, + }); logGroups.push(this.auditLogGroup); }; @@ -1624,7 +1624,21 @@ export class Domain extends DomainBase implements IDomain, ec2.IConnectable { if (logGroupResourcePolicy) { this.domain.node.addDependency(logGroupResourcePolicy); } - if (props.domainName) { this.node.addMetadata('aws:cdk:hasPhysicalName', props.domainName); } + if (props.domainName) { + if (!cdk.Token.isUnresolved(props.domainName)) { + // https://docs.aws.amazon.com/opensearch-service/latest/developerguide/configuration-api.html#configuration-api-datatypes-domainname + if (!props.domainName.match(/^[a-z0-9\-]+$/)) { + throw new Error(`Invalid domainName '${props.domainName}'. Valid characters are a-z (lowercase only), 0-9, and – (hyphen).`); + } + if (props.domainName.length < 3 || props.domainName.length > 28) { + throw new Error(`Invalid domainName '${props.domainName}'. It must be between 3 and 28 characters`); + } + if (props.domainName[0] < 'a' || props.domainName[0] > 'z') { + throw new Error(`Invalid domainName '${props.domainName}'. 
It must start with a lowercase letter`); + } + } + this.node.addMetadata('aws:cdk:hasPhysicalName', props.domainName); + } this.domainName = this.getResourceNameAttribute(this.domain.ref); diff --git a/packages/@aws-cdk/aws-opensearchservice/test/domain.test.ts b/packages/@aws-cdk/aws-opensearchservice/test/domain.test.ts index 4ad11ac372f97..80cc7a1473b40 100644 --- a/packages/@aws-cdk/aws-opensearchservice/test/domain.test.ts +++ b/packages/@aws-cdk/aws-opensearchservice/test/domain.test.ts @@ -1319,6 +1319,21 @@ describe('custom error responses', () => { })).toThrow('Unknown Elasticsearch version: 5.4'); }); + test('error when invalid domain name is given', () => { + expect(() => new Domain(stack, 'Domain1', { + version: EngineVersion.OPENSEARCH_1_0, + domainName: 'InvalidName', + })).toThrow(/Valid characters are a-z/); + expect(() => new Domain(stack, 'Domain2', { + version: EngineVersion.OPENSEARCH_1_0, + domainName: 'a'.repeat(29), + })).toThrow(/It must be between 3 and 28 characters/); + expect(() => new Domain(stack, 'Domain3', { + version: EngineVersion.OPENSEARCH_1_0, + domainName: '123domain', + })).toThrow(/It must start with a lowercase letter/); + }); + test('error when error log publishing is enabled for Elasticsearch version < 5.1', () => { const error = /Error logs publishing requires Elasticsearch version 5.1 or later or OpenSearch version 1.0 or later/; expect(() => new Domain(stack, 'Domain1', { From 13def19c66c7c82fadb8ad4c0b148ab4ec62700f Mon Sep 17 00:00:00 2001 From: Peter Woodworth <44349620+peterwoodworth@users.noreply.github.com> Date: Tue, 19 Oct 2021 15:56:27 -0700 Subject: [PATCH 27/37] chore: assign PRs on submission (#17063) ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .github/workflows/issue-label-assign.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/issue-label-assign.yml b/.github/workflows/issue-label-assign.yml index 1c9c5997781bd..b82a4040658d8 100644 --- a/.github/workflows/issue-label-assign.yml +++ b/.github/workflows/issue-label-assign.yml @@ -11,6 +11,7 @@ jobs: test: permissions: issues: write + pull-requests: write runs-on: ubuntu-latest steps: - uses: peterwoodworth/issue-action@main From 583813c623bbf17be4b51a6ea0adbe451a6027cc Mon Sep 17 00:00:00 2001 From: Shweta Sahu <34386180+shwetasahuit@users.noreply.github.com> Date: Wed, 20 Oct 2021 01:25:49 -0700 Subject: [PATCH 28/37] refactor(region-info): AppMesh ECR accounts for cn-north-1 and cn-northwest-1 (#16836) ---- AppMesh is launched in 'cn-noth-1' and 'cn-northwest-1' regions. These regions have separate accounts for storing Envoy images in ECR. Added the ECR account information. These regions belong to new partition 'aws-cn', removed the harcoded 'aws' partition and modified it to work for all partitions. 
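A minimal sketch of the partition-aware repository lookup this change describes (the actual code change is in the diff below); `scope` and `ownerAccount` are stand-ins for the extension's own values:

```ts
import * as cdk from '@aws-cdk/core';
import * as ecr from '@aws-cdk/aws-ecr';

declare const scope: cdk.Construct;
declare const ownerAccount: string;

// Resolve the region and partition from the enclosing stack instead of hardcoding the 'aws'
// partition, so the ARN is also valid in the 'aws-cn' partition (cn-north-1, cn-northwest-1).
const region = cdk.Stack.of(scope).region;
const partition = cdk.Stack.of(scope).partition;

const appMeshRepo = ecr.Repository.fromRepositoryAttributes(scope, 'envoy-repo', {
  repositoryName: 'aws-appmesh-envoy',
  repositoryArn: `arn:${partition}:ecr:${region}:${ownerAccount}:repository/aws-appmesh-envoy`,
});
```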
*By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .gitallowed | 2 ++ .../lib/extensions/appmesh.ts | 5 ++- .../integ.all-service-addons.expected.json | 36 +++++++++++++++++-- .../integ.multiple-environments.expected.json | 24 +++++++++++-- 4 files changed, 61 insertions(+), 6 deletions(-) diff --git a/.gitallowed b/.gitallowed index 43827f7ad99b7..abe8ccb913ca4 100644 --- a/.gitallowed +++ b/.gitallowed @@ -23,6 +23,8 @@ account: '856666278305' account: '840364872350' account: '422531588944' account: '924023996002' +account: '919366029133' #cn-north-1 +account: '919830735681' #cn-northwest-1 # The account IDs of password rotation applications of Serverless Application Repository # https://docs.aws.amazon.com/secretsmanager/latest/userguide/enable-rotation-rds.html diff --git a/packages/@aws-cdk-containers/ecs-service-extensions/lib/extensions/appmesh.ts b/packages/@aws-cdk-containers/ecs-service-extensions/lib/extensions/appmesh.ts index ccd9a2ece2bc2..14cd5aaecfdac 100644 --- a/packages/@aws-cdk-containers/ecs-service-extensions/lib/extensions/appmesh.ts +++ b/packages/@aws-cdk-containers/ecs-service-extensions/lib/extensions/appmesh.ts @@ -138,6 +138,7 @@ export class AppMeshExtension extends ServiceExtension { public useTaskDefinition(taskDefinition: ecs.TaskDefinition) { var region = cdk.Stack.of(this.scope).region; + var partition = cdk.Stack.of(this.scope).partition; var appMeshRepo; // This is currently necessary because App Mesh has different images in each region, @@ -151,6 +152,8 @@ export class AppMeshExtension extends ServiceExtension { 'ap-southeast-1': this.accountIdForRegion('ap-southeast-1'), 'ap-southeast-2': this.accountIdForRegion('ap-southeast-1'), 'ca-central-1': this.accountIdForRegion('ca-central-1'), + 'cn-north-1': this.accountIdForRegion('cn-north-1'), + 'cn-northwest-1': this.accountIdForRegion('cn-northwest-1'), 'eu-central-1': this.accountIdForRegion('eu-central-1'), 'eu-north-1': this.accountIdForRegion('eu-north-1'), 'eu-south-1': this.accountIdForRegion('eu-south-1'), @@ -177,7 +180,7 @@ export class AppMeshExtension extends ServiceExtension { `${this.parentService.id}-envoy-repo`, { repositoryName: 'aws-appmesh-envoy', - repositoryArn: `arn:aws:ecr:${region}:${ownerAccount}:repository/aws-appmesh-envoy`, + repositoryArn: `arn:${partition}:ecr:${region}:${ownerAccount}:repository/aws-appmesh-envoy`, }, ); diff --git a/packages/@aws-cdk-containers/ecs-service-extensions/test/integ.all-service-addons.expected.json b/packages/@aws-cdk-containers/ecs-service-extensions/test/integ.all-service-addons.expected.json index 47ba97c3ee10d..afdf0319a819a 100644 --- a/packages/@aws-cdk-containers/ecs-service-extensions/test/integ.all-service-addons.expected.json +++ b/packages/@aws-cdk-containers/ecs-service-extensions/test/integ.all-service-addons.expected.json @@ -949,7 +949,11 @@ "Fn::Join": [ "", [ - "arn:aws:ecr:", + "arn:", + { + "Ref": "AWS::Partition" + }, + ":ecr:", { "Ref": "AWS::Region" }, @@ -1818,7 +1822,11 @@ "Fn::Join": [ "", [ - "arn:aws:ecr:", + "arn:", + { + "Ref": "AWS::Partition" + }, + ":ecr:", { "Ref": "AWS::Region" }, @@ -2806,7 +2814,11 @@ "Fn::Join": [ "", [ - "arn:aws:ecr:", + "arn:", + { + "Ref": "AWS::Partition" + }, + ":ecr:", { "Ref": "AWS::Region" }, @@ -3316,6 +3328,12 @@ "ca-central-1": { "ecrRepo": "840364872350" }, + "cn-north-1": { + "ecrRepo": "919366029133" + }, + "cn-northwest-1": { + "ecrRepo": "919830735681" + }, "eu-central-1": { "ecrRepo": "840364872350" }, @@ 
-3378,6 +3396,12 @@ "ca-central-1": { "ecrRepo": "840364872350" }, + "cn-north-1": { + "ecrRepo": "919366029133" + }, + "cn-northwest-1": { + "ecrRepo": "919830735681" + }, "eu-central-1": { "ecrRepo": "840364872350" }, @@ -3440,6 +3464,12 @@ "ca-central-1": { "ecrRepo": "840364872350" }, + "cn-north-1": { + "ecrRepo": "919366029133" + }, + "cn-northwest-1": { + "ecrRepo": "919830735681" + }, "eu-central-1": { "ecrRepo": "840364872350" }, diff --git a/packages/@aws-cdk-containers/ecs-service-extensions/test/integ.multiple-environments.expected.json b/packages/@aws-cdk-containers/ecs-service-extensions/test/integ.multiple-environments.expected.json index 3a3aaddac6fce..d5f00cb5708c7 100644 --- a/packages/@aws-cdk-containers/ecs-service-extensions/test/integ.multiple-environments.expected.json +++ b/packages/@aws-cdk-containers/ecs-service-extensions/test/integ.multiple-environments.expected.json @@ -1304,7 +1304,11 @@ "Fn::Join": [ "", [ - "arn:aws:ecr:", + "arn:", + { + "Ref": "AWS::Partition" + }, + ":ecr:", { "Ref": "AWS::Region" }, @@ -1828,7 +1832,11 @@ "Fn::Join": [ "", [ - "arn:aws:ecr:", + "arn:", + { + "Ref": "AWS::Partition" + }, + ":ecr:", { "Ref": "AWS::Region" }, @@ -2135,6 +2143,12 @@ "ca-central-1": { "ecrRepo": "840364872350" }, + "cn-north-1": { + "ecrRepo": "919366029133" + }, + "cn-northwest-1": { + "ecrRepo": "919830735681" + }, "eu-central-1": { "ecrRepo": "840364872350" }, @@ -2197,6 +2211,12 @@ "ca-central-1": { "ecrRepo": "840364872350" }, + "cn-north-1": { + "ecrRepo": "919366029133" + }, + "cn-northwest-1": { + "ecrRepo": "919830735681" + }, "eu-central-1": { "ecrRepo": "840364872350" }, From 0947b21c1e3186042324820ec5ab433237246f58 Mon Sep 17 00:00:00 2001 From: Jericho Tolentino <68654047+jericht@users.noreply.github.com> Date: Wed, 20 Oct 2021 04:17:58 -0500 Subject: [PATCH 29/37] feat(ec2): add aspect to require imdsv2 (#16051) Partially fixes: https://github.com/aws/aws-cdk/issues/5137 Related PR: https://github.com/aws/aws-cdk/pull/16052 **Note:** This PR and the above related PR have common code that has been duplicated across these two PRs because I decided it made more sense for these Aspects to be in the same package with the constructs they work with. However, it means I had to duplicate some of the base class code across the two PRs. Looking for an opinion on what's better here: - Should we keep it as is (2 PRs) so these Aspects are cleanly separated? or, - Does it make sense to either combine them in some way (e.g. a new package `@aws-cdk/aspects`) or have one reference the other (maybe the AutoScalingGroup aspect can reference the code in this PR since it already depends on this package). ### Changes Adds an aspect that can enable/disable IMDSv1 on Instances and Launch Templates. 
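For reference, a brief usage sketch of the new aspects (mirroring the README additions in this PR); `stack` is assumed to be an existing `cdk.Stack`:

```ts
import * as cdk from '@aws-cdk/core';
import * as ec2 from '@aws-cdk/aws-ec2';

declare const stack: cdk.Stack;

// Require IMDSv2 on every EC2 Instance in the stack; the aspect associates each
// instance with a generated Launch Template that sets HttpTokens to 'required'.
cdk.Aspects.of(stack).add(new ec2.InstanceRequireImdsv2Aspect());

// Require IMDSv2 on every Launch Template defined in the stack.
cdk.Aspects.of(stack).add(new ec2.LaunchTemplateRequireImdsv2Aspect());
```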
### Testing Added unit tests ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/aws-ec2/README.md | 27 +++ .../@aws-cdk/aws-ec2/lib/aspects/index.ts | 1 + .../lib/aspects/require-imdsv2-aspect.ts | 150 +++++++++++++ packages/@aws-cdk/aws-ec2/lib/index.ts | 1 + packages/@aws-cdk/aws-ec2/lib/instance.ts | 14 +- .../@aws-cdk/aws-ec2/lib/launch-template.ts | 13 ++ .../aspects/require-imdsv2-aspect.test.ts | 205 ++++++++++++++++++ .../@aws-cdk/aws-ec2/test/instance.test.ts | 32 ++- .../aws-ec2/test/launch-template.test.ts | 16 ++ 9 files changed, 457 insertions(+), 2 deletions(-) create mode 100644 packages/@aws-cdk/aws-ec2/lib/aspects/index.ts create mode 100644 packages/@aws-cdk/aws-ec2/lib/aspects/require-imdsv2-aspect.ts create mode 100644 packages/@aws-cdk/aws-ec2/test/aspects/require-imdsv2-aspect.test.ts diff --git a/packages/@aws-cdk/aws-ec2/README.md b/packages/@aws-cdk/aws-ec2/README.md index a9ac3794580b4..eecc6f3857da4 100644 --- a/packages/@aws-cdk/aws-ec2/README.md +++ b/packages/@aws-cdk/aws-ec2/README.md @@ -994,6 +994,33 @@ instance.userData.addCommands( ); ``` +### Configuring Instance Metadata Service (IMDS) + +#### Toggling IMDSv1 + +You can configure [EC2 Instance Metadata Service](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html) options to either +allow both IMDSv1 and IMDSv2 or enforce IMDSv2 when interacting with the IMDS. + +To do this for a single `Instance`, you can use the `requireImdsv2` property. +The example below demonstrates IMDSv2 being required on a single `Instance`: + +```ts +new ec2.Instance(this, 'Instance', { + requireImdsv2: true, + // ... +}); +``` + +You can also use the either the `InstanceRequireImdsv2Aspect` for EC2 instances or the `LaunchTemplateRequireImdsv2Aspect` for EC2 launch templates +to apply the operation to multiple instances or launch templates, respectively. + +The following example demonstrates how to use the `InstanceRequireImdsv2Aspect` to require IMDSv2 for all EC2 instances in a stack: + +```ts +const aspect = new ec2.InstanceRequireImdsv2Aspect(); +Aspects.of(stack).add(aspect); +``` + ## VPC Flow Logs VPC Flow Logs is a feature that enables you to capture information about the IP traffic going to and from network interfaces in your VPC. Flow log data can be published to Amazon CloudWatch Logs and Amazon S3. After you've created a flow log, you can retrieve and view its data in the chosen destination. (). diff --git a/packages/@aws-cdk/aws-ec2/lib/aspects/index.ts b/packages/@aws-cdk/aws-ec2/lib/aspects/index.ts new file mode 100644 index 0000000000000..5685e9b46d036 --- /dev/null +++ b/packages/@aws-cdk/aws-ec2/lib/aspects/index.ts @@ -0,0 +1 @@ +export * from './require-imdsv2-aspect'; diff --git a/packages/@aws-cdk/aws-ec2/lib/aspects/require-imdsv2-aspect.ts b/packages/@aws-cdk/aws-ec2/lib/aspects/require-imdsv2-aspect.ts new file mode 100644 index 0000000000000..f1a5270f1fb08 --- /dev/null +++ b/packages/@aws-cdk/aws-ec2/lib/aspects/require-imdsv2-aspect.ts @@ -0,0 +1,150 @@ +import * as cdk from '@aws-cdk/core'; +import { CfnLaunchTemplate } from '../ec2.generated'; +import { Instance } from '../instance'; +import { LaunchTemplate } from '../launch-template'; + +/** + * Properties for `RequireImdsv2Aspect`. + */ +interface RequireImdsv2AspectProps { + /** + * Whether warning annotations from this Aspect should be suppressed or not. 
+ * + * @default - false + */ + readonly suppressWarnings?: boolean; +} + +/** + * Base class for Aspect that makes IMDSv2 required. + */ +abstract class RequireImdsv2Aspect implements cdk.IAspect { + protected readonly suppressWarnings: boolean; + + constructor(props?: RequireImdsv2AspectProps) { + this.suppressWarnings = props?.suppressWarnings ?? false; + } + + abstract visit(node: cdk.IConstruct): void; + + /** + * Adds a warning annotation to a node, unless `suppressWarnings` is true. + * + * @param node The scope to add the warning to. + * @param message The warning message. + */ + protected warn(node: cdk.IConstruct, message: string) { + if (this.suppressWarnings !== true) { + cdk.Annotations.of(node).addWarning(`${RequireImdsv2Aspect.name} failed on node ${node.node.id}: ${message}`); + } + } +} + +/** + * Properties for `InstanceRequireImdsv2Aspect`. + */ +export interface InstanceRequireImdsv2AspectProps extends RequireImdsv2AspectProps { + /** + * Whether warnings that would be raised when an Instance is associated with an existing Launch Template + * should be suppressed or not. + * + * You can set this to `true` if `LaunchTemplateImdsAspect` is being used alongside this Aspect to + * suppress false-positive warnings because any Launch Templates associated with Instances will still be covered. + * + * @default - false + */ + readonly suppressLaunchTemplateWarning?: boolean; +} + +/** + * Aspect that applies IMDS configuration on EC2 Instance constructs. + * + * This aspect configures IMDS on an EC2 instance by creating a Launch Template with the + * IMDS configuration and associating that Launch Template with the instance. If an Instance + * is already associated with a Launch Template, a warning will (optionally) be added to the + * construct node and it will be skipped. + * + * To cover Instances already associated with Launch Templates, use `LaunchTemplateImdsAspect`. + */ +export class InstanceRequireImdsv2Aspect extends RequireImdsv2Aspect { + private readonly suppressLaunchTemplateWarning: boolean; + + constructor(props?: InstanceRequireImdsv2AspectProps) { + super(props); + this.suppressLaunchTemplateWarning = props?.suppressLaunchTemplateWarning ?? false; + } + + visit(node: cdk.IConstruct): void { + if (!(node instanceof Instance)) { + return; + } + if (node.instance.launchTemplate !== undefined) { + this.warn(node, 'Cannot toggle IMDSv1 because this Instance is associated with an existing Launch Template.'); + return; + } + + const name = `${node.node.id}LaunchTemplate`; + const launchTemplate = new CfnLaunchTemplate(node, 'LaunchTemplate', { + launchTemplateData: { + metadataOptions: { + httpTokens: 'required', + }, + }, + launchTemplateName: name, + }); + node.instance.launchTemplate = { + launchTemplateName: name, + version: launchTemplate.getAtt('LatestVersionNumber').toString(), + }; + } + + protected warn(node: cdk.IConstruct, message: string) { + if (this.suppressLaunchTemplateWarning !== true) { + super.warn(node, message); + } + } +} + +/** + * Properties for `LaunchTemplateRequireImdsv2Aspect`. + */ +export interface LaunchTemplateRequireImdsv2AspectProps extends RequireImdsv2AspectProps {} + +/** + * Aspect that applies IMDS configuration on EC2 Launch Template constructs. 
+ * + * @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-launchtemplate-launchtemplatedata-metadataoptions.html + */ +export class LaunchTemplateRequireImdsv2Aspect extends RequireImdsv2Aspect { + constructor(props?: LaunchTemplateRequireImdsv2AspectProps) { + super(props); + } + + visit(node: cdk.IConstruct): void { + if (!(node instanceof LaunchTemplate)) { + return; + } + + const launchTemplate = node.node.tryFindChild('Resource') as CfnLaunchTemplate; + const data = launchTemplate.launchTemplateData; + if (cdk.isResolvableObject(data)) { + this.warn(node, 'LaunchTemplateData is a CDK token.'); + return; + } + + const metadataOptions = (data as CfnLaunchTemplate.LaunchTemplateDataProperty).metadataOptions; + if (cdk.isResolvableObject(metadataOptions)) { + this.warn(node, 'LaunchTemplateData.MetadataOptions is a CDK token.'); + return; + } + + const newData: CfnLaunchTemplate.LaunchTemplateDataProperty = { + ...data, + metadataOptions: { + ...metadataOptions, + httpTokens: 'required', + }, + }; + launchTemplate.launchTemplateData = newData; + } +} diff --git a/packages/@aws-cdk/aws-ec2/lib/index.ts b/packages/@aws-cdk/aws-ec2/lib/index.ts index 1b10e6fa1d566..4b0741044e4dd 100644 --- a/packages/@aws-cdk/aws-ec2/lib/index.ts +++ b/packages/@aws-cdk/aws-ec2/lib/index.ts @@ -1,3 +1,4 @@ +export * from './aspects'; export * from './bastion-host'; export * from './connections'; export * from './cfn-init'; diff --git a/packages/@aws-cdk/aws-ec2/lib/instance.ts b/packages/@aws-cdk/aws-ec2/lib/instance.ts index 813d4d5f43880..85b05fc71734b 100644 --- a/packages/@aws-cdk/aws-ec2/lib/instance.ts +++ b/packages/@aws-cdk/aws-ec2/lib/instance.ts @@ -1,8 +1,9 @@ import * as crypto from 'crypto'; import * as iam from '@aws-cdk/aws-iam'; -import { Annotations, Duration, Fn, IResource, Lazy, Resource, Stack, Tags } from '@aws-cdk/core'; +import { Annotations, Aspects, Duration, Fn, IResource, Lazy, Resource, Stack, Tags } from '@aws-cdk/core'; import { Construct } from 'constructs'; +import { InstanceRequireImdsv2Aspect } from './aspects'; import { CloudFormationInit } from './cfn-init'; import { Connections, IConnectable } from './connections'; import { CfnInstance } from './ec2.generated'; @@ -230,6 +231,13 @@ export interface InstanceProps { * @default - default options */ readonly initOptions?: ApplyCloudFormationInitOptions; + + /** + * Whether IMDSv2 should be required on this instance. + * + * @default - false + */ + readonly requireImdsv2?: boolean; } /** @@ -408,6 +416,10 @@ export class Instance extends Resource implements IInstance { return `${originalLogicalId}${digest}`; }, })); + + if (props.requireImdsv2) { + Aspects.of(this).add(new InstanceRequireImdsv2Aspect()); + } } /** diff --git a/packages/@aws-cdk/aws-ec2/lib/launch-template.ts b/packages/@aws-cdk/aws-ec2/lib/launch-template.ts index fdc03755c0268..ae7f5316c01af 100644 --- a/packages/@aws-cdk/aws-ec2/lib/launch-template.ts +++ b/packages/@aws-cdk/aws-ec2/lib/launch-template.ts @@ -12,8 +12,10 @@ import { TagType, Tags, Token, + Aspects, } from '@aws-cdk/core'; import { Construct } from 'constructs'; +import { LaunchTemplateRequireImdsv2Aspect } from '.'; import { Connections, IConnectable } from './connections'; import { CfnLaunchTemplate } from './ec2.generated'; import { InstanceType } from './instance-types'; @@ -332,6 +334,13 @@ export interface LaunchTemplateProps { * @default No security group is assigned. 
*/ readonly securityGroup?: ISecurityGroup; + + /** + * Whether IMDSv2 should be required on launched instances. + * + * @default - false + */ + readonly requireImdsv2?: boolean; } /** @@ -637,6 +646,10 @@ export class LaunchTemplate extends Resource implements ILaunchTemplate, iam.IGr this.latestVersionNumber = resource.attrLatestVersionNumber; this.launchTemplateId = resource.ref; this.versionNumber = Token.asString(resource.getAtt('LatestVersionNumber')); + + if (props.requireImdsv2) { + Aspects.of(this).add(new LaunchTemplateRequireImdsv2Aspect()); + } } /** diff --git a/packages/@aws-cdk/aws-ec2/test/aspects/require-imdsv2-aspect.test.ts b/packages/@aws-cdk/aws-ec2/test/aspects/require-imdsv2-aspect.test.ts new file mode 100644 index 0000000000000..ade2eaeab1f1d --- /dev/null +++ b/packages/@aws-cdk/aws-ec2/test/aspects/require-imdsv2-aspect.test.ts @@ -0,0 +1,205 @@ +import { + countResources, + expect as expectCDK, + haveResourceLike, +} from '@aws-cdk/assert-internal'; +import '@aws-cdk/assert-internal/jest'; +import * as cdk from '@aws-cdk/core'; +import { + CfnLaunchTemplate, + Instance, + InstanceRequireImdsv2Aspect, + InstanceType, + LaunchTemplate, + LaunchTemplateRequireImdsv2Aspect, + MachineImage, + Vpc, +} from '../../lib'; + +describe('RequireImdsv2Aspect', () => { + let app: cdk.App; + let stack: cdk.Stack; + let vpc: Vpc; + + beforeEach(() => { + app = new cdk.App(); + stack = new cdk.Stack(app, 'Stack'); + vpc = new Vpc(stack, 'Vpc'); + }); + + test('suppresses warnings', () => { + // GIVEN + const aspect = new LaunchTemplateRequireImdsv2Aspect({ + suppressWarnings: true, + }); + const errmsg = 'ERROR'; + const visitMock = jest.spyOn(aspect, 'visit').mockImplementation((node) => { + // @ts-ignore + aspect.warn(node, errmsg); + }); + const construct = new cdk.Construct(stack, 'Construct'); + + // WHEN + aspect.visit(construct); + + // THEN + expect(visitMock).toHaveBeenCalled(); + expect(construct.node.metadataEntry).not.toContainEqual({ + data: expect.stringContaining(errmsg), + type: 'aws:cdk:warning', + trace: undefined, + }); + }); + + describe('InstanceRequireImdsv2Aspect', () => { + test('requires IMDSv2', () => { + // GIVEN + const instance = new Instance(stack, 'Instance', { + vpc, + instanceType: new InstanceType('t2.micro'), + machineImage: MachineImage.latestAmazonLinux(), + }); + const aspect = new InstanceRequireImdsv2Aspect(); + + // WHEN + cdk.Aspects.of(stack).add(aspect); + app.synth(); + + // THEN + const launchTemplate = instance.node.tryFindChild('LaunchTemplate') as LaunchTemplate; + expect(launchTemplate).toBeDefined(); + expectCDK(stack).to(haveResourceLike('AWS::EC2::LaunchTemplate', { + LaunchTemplateName: stack.resolve(launchTemplate.launchTemplateName), + LaunchTemplateData: { + MetadataOptions: { + HttpTokens: 'required', + }, + }, + })); + expectCDK(stack).to(haveResourceLike('AWS::EC2::Instance', { + LaunchTemplate: { + LaunchTemplateName: stack.resolve(launchTemplate.launchTemplateName), + }, + })); + }); + + test('does not toggle when Instance has a LaunchTemplate', () => { + // GIVEN + const instance = new Instance(stack, 'Instance', { + vpc, + instanceType: new InstanceType('t2.micro'), + machineImage: MachineImage.latestAmazonLinux(), + }); + instance.instance.launchTemplate = { + launchTemplateName: 'name', + version: 'version', + }; + const aspect = new InstanceRequireImdsv2Aspect(); + + // WHEN + cdk.Aspects.of(stack).add(aspect); + + // THEN + // Aspect normally creates a LaunchTemplate for the Instance to toggle IMDSv1, + // so we 
can assert that one was not created + expectCDK(stack).to(countResources('AWS::EC2::LaunchTemplate', 0)); + expect(instance.node.metadataEntry).toContainEqual({ + data: expect.stringContaining('Cannot toggle IMDSv1 because this Instance is associated with an existing Launch Template.'), + type: 'aws:cdk:warning', + trace: undefined, + }); + }); + + test('suppresses Launch Template warnings', () => { + // GIVEN + const instance = new Instance(stack, 'Instance', { + vpc, + instanceType: new InstanceType('t2.micro'), + machineImage: MachineImage.latestAmazonLinux(), + }); + instance.instance.launchTemplate = { + launchTemplateName: 'name', + version: 'version', + }; + const aspect = new InstanceRequireImdsv2Aspect({ + suppressLaunchTemplateWarning: true, + }); + + // WHEN + aspect.visit(instance); + + // THEN + expect(instance.node.metadataEntry).not.toContainEqual({ + data: expect.stringContaining('Cannot toggle IMDSv1 because this Instance is associated with an existing Launch Template.'), + type: 'aws:cdk:warning', + trace: undefined, + }); + }); + }); + + describe('LaunchTemplateRequireImdsv2Aspect', () => { + test('warns when LaunchTemplateData is a CDK token', () => { + // GIVEN + const launchTemplate = new LaunchTemplate(stack, 'LaunchTemplate'); + const cfnLaunchTemplate = launchTemplate.node.tryFindChild('Resource') as CfnLaunchTemplate; + cfnLaunchTemplate.launchTemplateData = fakeToken(); + const aspect = new LaunchTemplateRequireImdsv2Aspect(); + + // WHEN + aspect.visit(launchTemplate); + + // THEN + expect(launchTemplate.node.metadataEntry).toContainEqual({ + data: expect.stringContaining('LaunchTemplateData is a CDK token.'), + type: 'aws:cdk:warning', + trace: undefined, + }); + }); + + test('warns when MetadataOptions is a CDK token', () => { + // GIVEN + const launchTemplate = new LaunchTemplate(stack, 'LaunchTemplate'); + const cfnLaunchTemplate = launchTemplate.node.tryFindChild('Resource') as CfnLaunchTemplate; + cfnLaunchTemplate.launchTemplateData = { + metadataOptions: fakeToken(), + } as CfnLaunchTemplate.LaunchTemplateDataProperty; + const aspect = new LaunchTemplateRequireImdsv2Aspect(); + + // WHEN + aspect.visit(launchTemplate); + + // THEN + expect(launchTemplate.node.metadataEntry).toContainEqual({ + data: expect.stringContaining('LaunchTemplateData.MetadataOptions is a CDK token.'), + type: 'aws:cdk:warning', + trace: undefined, + }); + }); + + test('requires IMDSv2', () => { + // GIVEN + new LaunchTemplate(stack, 'LaunchTemplate'); + const aspect = new LaunchTemplateRequireImdsv2Aspect(); + + // WHEN + cdk.Aspects.of(stack).add(aspect); + + // THEN + expectCDK(stack).to(haveResourceLike('AWS::EC2::LaunchTemplate', { + LaunchTemplateData: { + MetadataOptions: { + HttpTokens: 'required', + }, + }, + })); + }); + }); +}); + +function fakeToken(): cdk.IResolvable { + return { + creationStack: [], + resolve: (_c) => {}, + toString: () => '', + }; +} diff --git a/packages/@aws-cdk/aws-ec2/test/instance.test.ts b/packages/@aws-cdk/aws-ec2/test/instance.test.ts index 884021f518a84..a3a389d94aa9d 100644 --- a/packages/@aws-cdk/aws-ec2/test/instance.test.ts +++ b/packages/@aws-cdk/aws-ec2/test/instance.test.ts @@ -7,7 +7,7 @@ import * as cxschema from '@aws-cdk/cloud-assembly-schema'; import { Stack } from '@aws-cdk/core'; import { AmazonLinuxImage, BlockDeviceVolume, CloudFormationInit, - EbsDeviceVolumeType, InitCommand, Instance, InstanceArchitecture, InstanceClass, InstanceSize, InstanceType, UserData, Vpc, + EbsDeviceVolumeType, InitCommand, Instance, 
InstanceArchitecture, InstanceClass, InstanceSize, InstanceType, LaunchTemplate, UserData, Vpc, } from '../lib'; @@ -361,6 +361,36 @@ describe('instance', () => { }); + + test('instance requires IMDSv2', () => { + // WHEN + const instance = new Instance(stack, 'Instance', { + vpc, + machineImage: new AmazonLinuxImage(), + instanceType: new InstanceType('t2.micro'), + requireImdsv2: true, + }); + + // Force stack synth so the InstanceRequireImdsv2Aspect is applied + SynthUtils.synthesize(stack); + + // THEN + const launchTemplate = instance.node.tryFindChild('LaunchTemplate') as LaunchTemplate; + expect(launchTemplate).toBeDefined(); + expect(stack).toHaveResourceLike('AWS::EC2::LaunchTemplate', { + LaunchTemplateName: stack.resolve(launchTemplate.launchTemplateName), + LaunchTemplateData: { + MetadataOptions: { + HttpTokens: 'required', + }, + }, + }); + expect(stack).toHaveResourceLike('AWS::EC2::Instance', { + LaunchTemplate: { + LaunchTemplateName: stack.resolve(launchTemplate.launchTemplateName), + }, + }); + }); }); diff --git a/packages/@aws-cdk/aws-ec2/test/launch-template.test.ts b/packages/@aws-cdk/aws-ec2/test/launch-template.test.ts index 27399affe8149..6243a409bc007 100644 --- a/packages/@aws-cdk/aws-ec2/test/launch-template.test.ts +++ b/packages/@aws-cdk/aws-ec2/test/launch-template.test.ts @@ -509,6 +509,22 @@ describe('LaunchTemplate', () => { }, }); }); + + test('Requires IMDSv2', () => { + // WHEN + new LaunchTemplate(stack, 'Template', { + requireImdsv2: true, + }); + + // THEN + expect(stack).toHaveResourceLike('AWS::EC2::LaunchTemplate', { + LaunchTemplateData: { + MetadataOptions: { + HttpTokens: 'required', + }, + }, + }); + }); }); describe('LaunchTemplate marketOptions', () => { From 4f392a1e1ccb8ce3cb19802932746c7f8d97831a Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Wed, 20 Oct 2021 12:10:53 +0200 Subject: [PATCH 30/37] chore: improve yarn lock error checking (#17001) The yarn lock checking script was always wrong: it would assume a package was local if its version number was `0.0.0`... but this is no longer true after running `align-version.sh`. I don't understand why this doesn't fail on the build server, but it definitely fails for me on my machine when trying to do a full build "as if" I was the build server. ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- scripts/check-yarn-lock.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/check-yarn-lock.js b/scripts/check-yarn-lock.js index 8fd59b5a60bc8..1941ad074ece6 100755 --- a/scripts/check-yarn-lock.js +++ b/scripts/check-yarn-lock.js @@ -36,9 +36,11 @@ async function main() { const yarnPackages = yarnLockPackages(); const projects = await new Project(repoRoot()).getPackages(); + const localPackageNames = new Set(projects.map(p => p.name)); + function errorIfNotInYarnLock(package, dependencyName, dependencyVersion) { const dependencyId = `${dependencyName}@${dependencyVersion}`; - const isLocalDependency = dependencyVersion === '0.0.0' || dependencyVersion === '^0.0.0'; + const isLocalDependency = localPackageNames.has(dependencyName); if (!isLocalDependency && !yarnPackages.has(dependencyId)) { throw new Error(`ERROR! Dependency ${dependencyId} from ${package.name} not present in yarn.lock. 
Please run 'yarn install' and try again!`); } From ac54842ba15991b2359181a27d2530158622d9f9 Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Wed, 20 Oct 2021 13:48:58 +0200 Subject: [PATCH 31/37] chore(assertions): remove `rosetta:extract` from build command (#17072) This form of executing Rosetta is not mocked by the jsii integ tests (which try executing a CDK build using a new version of the jsii tools). The jsii integ tests rely on passing environment variables `$CDK_BUILD_JSII`, `$PACMAK` and `$ROSETTA` (instead of replacing symlinks in the Node module farm). This leads to the generation of `.jsii.tabl.json` during build using the NPM-installed version of `jsii-rosetta`, which subsequently interferes with the run of `$PACMAK` which *is* the new version (since Rosetta tablets are supposed to be short-lived, there is no backwards compatibility guarantee between different versions). There will be a supported mechanism to achieve what this single post-build command is trying to achieve, so remove it. ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- packages/@aws-cdk/assertions/package.json | 5 ----- 1 file changed, 5 deletions(-) diff --git a/packages/@aws-cdk/assertions/package.json b/packages/@aws-cdk/assertions/package.json index e87d1c9526302..ac9a08ae297d6 100644 --- a/packages/@aws-cdk/assertions/package.json +++ b/packages/@aws-cdk/assertions/package.json @@ -55,11 +55,6 @@ } } }, - "cdk-build": { - "post": [ - "yarn rosetta:extract" - ] - }, "author": { "name": "Amazon Web Services", "url": "https://aws.amazon.com", From 403d3ce3bc0f4e30e9694e5c20743f0032009464 Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Wed, 20 Oct 2021 16:03:10 +0200 Subject: [PATCH 32/37] fix(pipelines): `additionalInputs` fails for deep directory (#17074) If the directory is nested deeper than one level underneath `.` or `..`, the wrong directory gets created. Also add in protection against the directory already existing, in which case the same behavior would happen. Fixes #16936. ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../lib/codepipeline/_codebuild-factory.ts | 7 ++- .../test/codepipeline/codebuild-step.test.ts | 44 +++++++++++++++++++ .../pipelines/test/compliance/synths.test.ts | 4 +- 3 files changed, 51 insertions(+), 4 deletions(-) create mode 100644 packages/@aws-cdk/pipelines/test/codepipeline/codebuild-step.test.ts diff --git a/packages/@aws-cdk/pipelines/lib/codepipeline/_codebuild-factory.ts b/packages/@aws-cdk/pipelines/lib/codepipeline/_codebuild-factory.ts index 59abcbe8e287d..fb523cf7d6818 100644 --- a/packages/@aws-cdk/pipelines/lib/codepipeline/_codebuild-factory.ts +++ b/packages/@aws-cdk/pipelines/lib/codepipeline/_codebuild-factory.ts @@ -333,8 +333,11 @@ function generateInputArtifactLinkCommands(artifacts: ArtifactMap, inputs: FileS return inputs.map(input => { const fragments = []; - if (!['.', '..'].includes(path.dirname(input.directory))) { - fragments.push(`mkdir -p -- "${input.directory}"`); + fragments.push(`[[ ! -d "${input.directory}" ]] || { echo 'additionalInputs: "${input.directory}" must not exist yet. 
If you want to merge multiple artifacts, use a "cp" command.'; exit 1; }`); + + const parentDirectory = path.dirname(input.directory); + if (!['.', '..'].includes(parentDirectory)) { + fragments.push(`mkdir -p -- "${parentDirectory}"`); } const artifact = artifacts.toCodePipeline(input.fileSet); diff --git a/packages/@aws-cdk/pipelines/test/codepipeline/codebuild-step.test.ts b/packages/@aws-cdk/pipelines/test/codepipeline/codebuild-step.test.ts new file mode 100644 index 0000000000000..78f1e0f471655 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/codepipeline/codebuild-step.test.ts @@ -0,0 +1,44 @@ +import { Template, Match } from '@aws-cdk/assertions'; +import { Stack } from '@aws-cdk/core'; +import * as cdkp from '../../lib'; +import { PIPELINE_ENV, TestApp } from '../testhelpers'; + +let app: TestApp; +let pipelineStack: Stack; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); +}); + +afterEach(() => { + app.cleanup(); +}); + +test('additionalinputs creates the right commands', () => { + // WHEN + new cdkp.CodePipeline(pipelineStack, 'Pipeline', { + synth: new cdkp.CodeBuildStep('Synth', { + commands: ['/bin/true'], + input: cdkp.CodePipelineSource.gitHub('test/test', 'main'), + additionalInputs: { + 'some/deep/directory': cdkp.CodePipelineSource.gitHub('test2/test2', 'main'), + }, + }), + }); + + // THEN + Template.fromStack(pipelineStack).hasResourceProperties('AWS::CodeBuild::Project', { + Source: { + BuildSpec: Match.serializedJson(Match.objectLike({ + phases: { + install: { + commands: [ + '[[ ! -d "some/deep/directory" ]] || { echo \'additionalInputs: "some/deep/directory" must not exist yet. If you want to merge multiple artifacts, use a "cp" command.\'; exit 1; } && mkdir -p -- "some/deep" && ln -s -- "$CODEBUILD_SRC_DIR_test2_test2_Source" "some/deep/directory"', + ], + }, + }, + })), + }, + }); +}); \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/compliance/synths.test.ts b/packages/@aws-cdk/pipelines/test/compliance/synths.test.ts index f8e39a536309f..4b5a072099469 100644 --- a/packages/@aws-cdk/pipelines/test/compliance/synths.test.ts +++ b/packages/@aws-cdk/pipelines/test/compliance/synths.test.ts @@ -947,8 +947,8 @@ behavior('Multiple input sources in side-by-side directories', (suite) => { phases: { install: { commands: [ - 'ln -s -- "$CODEBUILD_SRC_DIR_foo_bar_Source" "../sibling"', - 'ln -s -- "$CODEBUILD_SRC_DIR_Prebuild_Output" "sub"', + '[[ ! -d "../sibling" ]] || { echo \'additionalInputs: "../sibling" must not exist yet. If you want to merge multiple artifacts, use a "cp" command.\'; exit 1; } && ln -s -- "$CODEBUILD_SRC_DIR_foo_bar_Source" "../sibling"', + '[[ ! -d "sub" ]] || { echo \'additionalInputs: "sub" must not exist yet. 
If you want to merge multiple artifacts, use a "cp" command.\'; exit 1; } && ln -s -- "$CODEBUILD_SRC_DIR_Prebuild_Output" "sub"', ], }, build: { From 1fa1876e65b5124a4348d71ae6c1f8b153d34eb2 Mon Sep 17 00:00:00 2001 From: Otavio Macedo Date: Wed, 20 Oct 2021 16:27:30 +0100 Subject: [PATCH 33/37] chore: replaced merge action with a queue action (#17052) Co-authored-by: Otavio Macedo --- .mergify.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.mergify.yml b/.mergify.yml index 97f3ce42a91be..be30275403f3e 100644 --- a/.mergify.yml +++ b/.mergify.yml @@ -1,4 +1,8 @@ # See https://doc.mergify.io +queue_rules: + - name: default + conditions: + - status-success~=AWS CodeBuild us-east-1 pull_request_rules: - name: label core @@ -12,10 +16,9 @@ pull_request_rules: actions: comment: message: Thank you for contributing! Your pull request will be updated from master and then merged automatically (do not update manually, and be sure to [allow changes to be pushed to your fork](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/allowing-changes-to-a-pull-request-branch-created-from-a-fork)). - merge: - strict: smart + queue: + name: default method: squash - strict_method: merge commit_message: title+body conditions: - base!=release From a129046495a926561f94f5ce1f41c34b1df3afde Mon Sep 17 00:00:00 2001 From: Ryan Parker Date: Wed, 20 Oct 2021 14:49:13 -0700 Subject: [PATCH 34/37] fix(lambda-layer-node-proxy-agent): Replace use of package.json with Dockerfile command `npm install [package]@[version]` (#17078) ## Summary This PR fixes an issue where the package.json file used in `lambda-layer-node-proxy-agent` was not being bundled into monocdk / CDK v2. This is due to limitations of Ubergen. Ubergen only copies .ts files, README.md files and some special .json files. This PR: - Removes the package.json from `packages/@aws-cdk/lambda-layer-node-proxy-agent/layer` - Adds a step in the layer's Dockerfile to install a specific version of `proxy-agent` i.e. `npm install [package]@[version]`. This is so that we can do without the `package.json` file while still managing which version we bundle with the layer. 
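For illustration only, a minimal sketch of the approach this PR takes (the authoritative change is in the Dockerfile diff below; the base image and default version shown here are copied from that diff):

```dockerfile
# Sketch: pin the proxy-agent version via a build ARG and install it directly
# into the layer's node_modules, so no package.json needs to be copied into
# the image or bundled with the published package.
FROM public.ecr.aws/lambda/nodejs:latest

ARG PROXY_AGENT_VERSION=5.0.0

RUN mkdir -p /opt/nodejs \
    && cd /opt/nodejs \
    && npm install proxy-agent@${PROXY_AGENT_VERSION}
```

Because the pinned version now lives in the Dockerfile, the asset hash is computed from the Dockerfile instead of the removed package.json (see the change to `node-proxy-agent-layer.ts` below).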
Fixes: https://github.com/aws/aws-cdk/issues/17061 ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- .../lambda-layer-node-proxy-agent/layer/.dockerignore | 1 - .../layer/.no-packagejson-validator | 0 .../lambda-layer-node-proxy-agent/layer/Dockerfile | 5 +++-- .../lambda-layer-node-proxy-agent/layer/package.json | 9 --------- .../lib/node-proxy-agent-layer.ts | 4 ++-- 5 files changed, 5 insertions(+), 14 deletions(-) delete mode 100644 packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/.no-packagejson-validator delete mode 100644 packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/package.json diff --git a/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/.dockerignore b/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/.dockerignore index 69b73f61d249a..88a84e55aa43b 100644 --- a/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/.dockerignore +++ b/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/.dockerignore @@ -1,2 +1 @@ build.sh -.no-packagejson-validator diff --git a/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/.no-packagejson-validator b/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/.no-packagejson-validator deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/Dockerfile b/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/Dockerfile index 8c747a0e2b95f..2e3f644258652 100644 --- a/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/Dockerfile +++ b/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/Dockerfile @@ -1,6 +1,8 @@ # base lambda image FROM public.ecr.aws/lambda/nodejs:latest +ARG PROXY_AGENT_VERSION=5.0.0 + USER root RUN mkdir -p /opt WORKDIR /tmp @@ -17,8 +19,7 @@ RUN yum update -y \ # RUN mkdir -p /opt/nodejs -COPY package.json /opt/nodejs -RUN cd /opt/nodejs && npm install +RUN cd /opt/nodejs && npm install proxy-agent@${PROXY_AGENT_VERSION} # # create the bundle diff --git a/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/package.json b/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/package.json deleted file mode 100644 index 102dd83c99391..0000000000000 --- a/packages/@aws-cdk/lambda-layer-node-proxy-agent/layer/package.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "name": "node-proxy-agent-layer", - "private": true, - "version": "0.0.1", - "license": "Apache-2.0", - "devDependencies": { - "proxy-agent": "^5.0.0" - } -} diff --git a/packages/@aws-cdk/lambda-layer-node-proxy-agent/lib/node-proxy-agent-layer.ts b/packages/@aws-cdk/lambda-layer-node-proxy-agent/lib/node-proxy-agent-layer.ts index 9e6471a2da2c6..60b8c8697dea7 100644 --- a/packages/@aws-cdk/lambda-layer-node-proxy-agent/lib/node-proxy-agent-layer.ts +++ b/packages/@aws-cdk/lambda-layer-node-proxy-agent/lib/node-proxy-agent-layer.ts @@ -11,8 +11,8 @@ export class NodeProxyAgentLayer extends lambda.LayerVersion { constructor(scope: Construct, id: string) { super(scope, id, { code: lambda.Code.fromAsset(path.join(__dirname, 'layer.zip'), { - // we hash the package.json (it contains the tools versions) because hashing the zip is non-deterministic - assetHash: hashFile(path.join(__dirname, '..', 'layer', 'package.json')), + // we hash the Dockerfile (it contains the tools versions) because hashing the zip is non-deterministic + assetHash: hashFile(path.join(__dirname, '..', 'layer', 'Dockerfile')), }), description: '/opt/nodejs/node_modules/proxy-agent', }); From 74066ca5c19509b60d95fc35ac7f732f44c6d8ae Mon 
Sep 17 00:00:00 2001 From: Otavio Macedo Date: Thu, 21 Oct 2021 10:15:39 +0100 Subject: [PATCH 35/37] chore: bumped jsii dependencies to 1.40.0 (#17087) ---- *By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license* --- package.json | 8 +- packages/aws-cdk/package.json | 2 +- packages/awslint/package.json | 4 +- packages/decdk/package.json | 4 +- tools/@aws-cdk/cdk-build-tools/package.json | 6 +- yarn.lock | 103 +++++++++++--------- 6 files changed, 68 insertions(+), 59 deletions(-) diff --git a/package.json b/package.json index 97612d72cea6f..7db1f7c20c0ad 100644 --- a/package.json +++ b/package.json @@ -20,10 +20,10 @@ "fs-extra": "^9.1.0", "graceful-fs": "^4.2.8", "jest-junit": "^12.3.0", - "jsii-diff": "^1.39.0", - "jsii-pacmak": "^1.39.0", - "jsii-reflect": "^1.39.0", - "jsii-rosetta": "^1.39.0", + "jsii-diff": "^1.40.0", + "jsii-pacmak": "^1.40.0", + "jsii-reflect": "^1.40.0", + "jsii-rosetta": "^1.40.0", "lerna": "^4.0.0", "patch-package": "^6.4.7", "standard-version": "^9.3.1", diff --git a/packages/aws-cdk/package.json b/packages/aws-cdk/package.json index 1e6f3b7bef7fe..4492f92549b45 100644 --- a/packages/aws-cdk/package.json +++ b/packages/aws-cdk/package.json @@ -71,7 +71,7 @@ "@aws-cdk/cloudformation-diff": "0.0.0", "@aws-cdk/cx-api": "0.0.0", "@aws-cdk/region-info": "0.0.0", - "@jsii/check-node": "1.39.0", + "@jsii/check-node": "1.40.0", "archiver": "^5.3.0", "aws-sdk": "^2.979.0", "camelcase": "^6.2.0", diff --git a/packages/awslint/package.json b/packages/awslint/package.json index f035267fc9292..1bae6a09ba6cb 100644 --- a/packages/awslint/package.json +++ b/packages/awslint/package.json @@ -18,11 +18,11 @@ "awslint": "bin/awslint" }, "dependencies": { - "@jsii/spec": "^1.39.0", + "@jsii/spec": "^1.40.0", "camelcase": "^6.2.0", "colors": "^1.4.0", "fs-extra": "^9.1.0", - "jsii-reflect": "^1.39.0", + "jsii-reflect": "^1.40.0", "yargs": "^16.2.0" }, "devDependencies": { diff --git a/packages/decdk/package.json b/packages/decdk/package.json index 6ee38d4e401a4..8a87b4a7001f8 100644 --- a/packages/decdk/package.json +++ b/packages/decdk/package.json @@ -243,7 +243,7 @@ "@aws-cdk/region-info": "0.0.0", "constructs": "^3.3.69", "fs-extra": "^9.1.0", - "jsii-reflect": "^1.39.0", + "jsii-reflect": "^1.40.0", "jsonschema": "^1.4.0", "yaml": "1.10.2", "yargs": "^16.2.0" @@ -254,7 +254,7 @@ "@types/yaml": "1.9.7", "@types/yargs": "^15.0.14", "jest": "^26.6.3", - "jsii": "^1.39.0" + "jsii": "^1.40.0" }, "keywords": [ "aws", diff --git a/tools/@aws-cdk/cdk-build-tools/package.json b/tools/@aws-cdk/cdk-build-tools/package.json index d2b03befabebb..84005357a2458 100644 --- a/tools/@aws-cdk/cdk-build-tools/package.json +++ b/tools/@aws-cdk/cdk-build-tools/package.json @@ -55,9 +55,9 @@ "fs-extra": "^9.1.0", "jest": "^26.6.3", "jest-junit": "^11.1.0", - "jsii": "^1.39.0", - "jsii-pacmak": "^1.39.0", - "jsii-reflect": "^1.39.0", + "jsii": "^1.40.0", + "jsii-pacmak": "^1.40.0", + "jsii-reflect": "^1.40.0", "markdownlint-cli": "^0.29.0", "nyc": "^15.1.0", "semver": "^7.3.5", diff --git a/yarn.lock b/yarn.lock index 3fc7477ad872b..442e61841ae13 100644 --- a/yarn.lock +++ b/yarn.lock @@ -576,18 +576,18 @@ "@types/yargs" "^15.0.0" chalk "^4.0.0" -"@jsii/check-node@1.39.0": - version "1.39.0" - resolved "https://registry.npmjs.org/@jsii/check-node/-/check-node-1.39.0.tgz#31a22f6270c790b5f2bb0f7d2950511e6d7c0c9e" - integrity sha512-tqx5o0Zw6WrVKmB9S1X0E8AajfXjWu9yoOUtUdYVCR6NAdi8mY/NQ3uYJ8II0AF1MtC0PdASOjb/6i3h02komw== 
+"@jsii/check-node@1.40.0": + version "1.40.0" + resolved "https://registry.npmjs.org/@jsii/check-node/-/check-node-1.40.0.tgz#49882a61ad1b3a37cd35c35fa1a2301955f1c058" + integrity sha512-rk0hFXxFQR8rDGUfsZT9ua6OufOpnLQWsNFyFU86AvpoKQ0ciw2KlGdWs7OYFnzPq8sQGhSS+iuBrUboaHW3jg== dependencies: chalk "^4.1.2" semver "^7.3.5" -"@jsii/spec@^1.39.0": - version "1.39.0" - resolved "https://registry.npmjs.org/@jsii/spec/-/spec-1.39.0.tgz#17b2d55f8261da3ed4a670e9a4c5dfa18a7e050a" - integrity sha512-NbCmAYOB938uyWHwXj6fhdeIzznhHbxLmvl4Jtwe08Nrz5Gs4n79snV29XWIQulDMa4HYkNh1yqhBXOHkd+GAg== +"@jsii/spec@^1.40.0": + version "1.40.0" + resolved "https://registry.npmjs.org/@jsii/spec/-/spec-1.40.0.tgz#027dd2a9c2c0b49e5974ad6445728dde91569fe3" + integrity sha512-SJ9Kwz0C53bomYWb5PlESt6v8JmfgqqFjc1annNK+foHxcaUzs3trhKbBXgxhcoApE2pMnUIBj3DG9gLNmKdWw== dependencies: jsonschema "^1.4.0" @@ -2845,6 +2845,15 @@ codemaker@^1.39.0: decamelize "^5.0.1" fs-extra "^9.1.0" +codemaker@^1.40.0: + version "1.40.0" + resolved "https://registry.npmjs.org/codemaker/-/codemaker-1.40.0.tgz#80ed75a433fb08976c602b9080dc7fffbb13dbb9" + integrity sha512-X0dMlXILO5r9/YhNAbiLl9kNIfhATfGS8nAT7xC09zREipANnCEbjZuF8jtFGzrD942/k5QNROmqRtqRaZJ1QQ== + dependencies: + camelcase "^6.2.0" + decamelize "^5.0.1" + fs-extra "^9.1.0" + collect-v8-coverage@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" @@ -6011,70 +6020,70 @@ jsesc@^2.5.1: resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== -jsii-diff@^1.39.0: - version "1.39.0" - resolved "https://registry.npmjs.org/jsii-diff/-/jsii-diff-1.39.0.tgz#e6dc9ffc5689bfcfa2bcd9532829054003547470" - integrity sha512-O49YB3IElNIyP7zbBXLTYmLPwxxJY4Zs4rcZ4zpO73Yv373edT+TmoKkZV05DhKNcU79nFPB+axr6sKP6ElzFw== +jsii-diff@^1.40.0: + version "1.40.0" + resolved "https://registry.npmjs.org/jsii-diff/-/jsii-diff-1.40.0.tgz#97668273bc6c7f8ea6c6c27ebd8d70d433e1208d" + integrity sha512-Q0ctTmPE3wZ03CP++MxjPMBV3ynonDHq1gsd5mFUk9DW+cTyKb78KUkyjhgQnuiehXLRDQtoTlWJkH9C5xhEnQ== dependencies: - "@jsii/check-node" "1.39.0" - "@jsii/spec" "^1.39.0" + "@jsii/check-node" "1.40.0" + "@jsii/spec" "^1.40.0" fs-extra "^9.1.0" - jsii-reflect "^1.39.0" + jsii-reflect "^1.40.0" log4js "^6.3.0" typescript "~3.9.10" yargs "^16.2.0" -jsii-pacmak@^1.39.0: - version "1.39.0" - resolved "https://registry.npmjs.org/jsii-pacmak/-/jsii-pacmak-1.39.0.tgz#b5c66eb32a62390e02c0273f6edca3871600cc74" - integrity sha512-+B2Z62v/MQ8fQcvd1nhKUWv+ZoNEArwa6OiTSvAuMsRoZpZ7Uvabscu71Uu3zq1XzJ6WQStw90ENHkw40l/o7w== +jsii-pacmak@^1.40.0: + version "1.40.0" + resolved "https://registry.npmjs.org/jsii-pacmak/-/jsii-pacmak-1.40.0.tgz#5c0ecd5ff9c0917931bbe66773402dfe5517fbec" + integrity sha512-8IyvvWiD2eUpVhw0WXrYJILz+NSeNEwcWfQB+fUmn2gL8q27hlPZhHE7BVlr8+rb+EJVVLeHmpAMgA/SF9g/vQ== dependencies: - "@jsii/check-node" "1.39.0" - "@jsii/spec" "^1.39.0" + "@jsii/check-node" "1.40.0" + "@jsii/spec" "^1.40.0" clone "^2.1.2" - codemaker "^1.39.0" + codemaker "^1.40.0" commonmark "^0.30.0" escape-string-regexp "^4.0.0" fs-extra "^9.1.0" - jsii-reflect "^1.39.0" - jsii-rosetta "^1.39.0" + jsii-reflect "^1.40.0" + jsii-rosetta "^1.40.0" semver "^7.3.5" spdx-license-list "^6.4.0" xmlbuilder "^15.1.1" yargs "^16.2.0" -jsii-reflect@^1.39.0: - version "1.39.0" - resolved 
"https://registry.npmjs.org/jsii-reflect/-/jsii-reflect-1.39.0.tgz#c35a395b7ec14c4e94aef8bf195a0ff329311534" - integrity sha512-HEMpGHJBDtUbhdnfYUH0M/NTrYxaXrb0B2DXglzN/EYzKJsdp4FAmDPzpKEwnGVK3ReJLZ68YRogTq3msyuQDQ== +jsii-reflect@^1.40.0: + version "1.40.0" + resolved "https://registry.npmjs.org/jsii-reflect/-/jsii-reflect-1.40.0.tgz#f8715f1506059d49294b32fe2c710753dd9545ba" + integrity sha512-/ccIjkRSfbHCl1MCfwWFaz2RjoAAiNH5teE95Qi11a4gbTu52WcOFIg3Y+8llzHmmLykr9jTDqBtgyzi9WI6dw== dependencies: - "@jsii/check-node" "1.39.0" - "@jsii/spec" "^1.39.0" + "@jsii/check-node" "1.40.0" + "@jsii/spec" "^1.40.0" colors "^1.4.0" fs-extra "^9.1.0" - oo-ascii-tree "^1.39.0" + oo-ascii-tree "^1.40.0" yargs "^16.2.0" -jsii-rosetta@^1.39.0: - version "1.39.0" - resolved "https://registry.npmjs.org/jsii-rosetta/-/jsii-rosetta-1.39.0.tgz#b4251bb9b0295d2a8c2c7a7d8b1d8d744f432305" - integrity sha512-Fx+kQ+IDEMILQvTESW9TMXLxzQa7h/nm4EKXuDKAeglr5RNhzvTvhsgJy+WshJoMsNcT9ImCV8gmvqAqdSBrWA== +jsii-rosetta@^1.40.0: + version "1.40.0" + resolved "https://registry.npmjs.org/jsii-rosetta/-/jsii-rosetta-1.40.0.tgz#eff34919ed9d4193ddb4a684f6108c82db3feb7c" + integrity sha512-Gb257CdUbHV8ZRFYflZy7F7alH5X49T+pX2133F7eaoMpRqc0V6jQsphaL4V+S/jK29XOfXtANmq55AvmwsWLQ== dependencies: - "@jsii/check-node" "1.39.0" - "@jsii/spec" "^1.39.0" + "@jsii/check-node" "1.40.0" + "@jsii/spec" "^1.40.0" "@xmldom/xmldom" "^0.7.5" commonmark "^0.30.0" fs-extra "^9.1.0" typescript "~3.9.10" yargs "^16.2.0" -jsii@^1.39.0: - version "1.39.0" - resolved "https://registry.npmjs.org/jsii/-/jsii-1.39.0.tgz#68554dd5c20ac4b7da118f748d5297e5f9e58384" - integrity sha512-2ReD7t6rGhT+c41xovFoAMc4XU5/O2VqGRh3Ud/wN+Nn1ISjZFQa4doQ1xtZLFb1065Vxyv5VCqWp80t6Xw2iA== +jsii@^1.40.0: + version "1.40.0" + resolved "https://registry.npmjs.org/jsii/-/jsii-1.40.0.tgz#cc04f2bad5ae9495513af921cfcaca99dc8753d3" + integrity sha512-QUPmQzq7c/FREvtfw9+eIU16LB45hxRPtdLO2Ci2ZX1df4E4+vegtfvvjUJ21diVo2hwVp4UCftKqrXZ/cXEFg== dependencies: - "@jsii/check-node" "1.39.0" - "@jsii/spec" "^1.39.0" + "@jsii/check-node" "1.40.0" + "@jsii/spec" "^1.40.0" case "^1.6.3" colors "^1.4.0" deep-equal "^2.0.5" @@ -7413,10 +7422,10 @@ onetime@^5.1.0, onetime@^5.1.2: dependencies: mimic-fn "^2.1.0" -oo-ascii-tree@^1.39.0: - version "1.39.0" - resolved "https://registry.npmjs.org/oo-ascii-tree/-/oo-ascii-tree-1.39.0.tgz#7dfc1fc11e0c7c7bf34d0b91591db9bfe0406cbb" - integrity sha512-a0g33GTdCizt5jnQzY9j6cRNyx5xITmZb+b3C21+KNweaERltcR1BQO/tLUuuVEFRVWvZcUqrFDVa8f8nqOafA== +oo-ascii-tree@^1.40.0: + version "1.40.0" + resolved "https://registry.npmjs.org/oo-ascii-tree/-/oo-ascii-tree-1.40.0.tgz#69005b8f5f140ed23a81e90b3659750dc3a62522" + integrity sha512-nkiEc8TJZwGxPdEB1jRxHWyc/qBTPQSf70KhO+WjuiWzVfLVEWF/dksWRjm8e510YmPrBjfYCJOn+BVlOUojSQ== open@^7.4.2: version "7.4.2" From 76b9efadbf85cedd88838616f556f99c0dda4979 Mon Sep 17 00:00:00 2001 From: AWS CDK Team Date: Thu, 21 Oct 2021 12:40:24 +0000 Subject: [PATCH 36/37] chore(release): 1.129.0 --- CHANGELOG.md | 26 ++++++++++++++++++++++++++ version.v1.json | 2 +- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f254623e858c1..8c9fd17ee41a3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,32 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+## [1.129.0](https://github.com/aws/aws-cdk/compare/v1.128.0...v1.129.0) (2021-10-21) + + +### Features + +* **aws-autoscaling:** add flag and aspect to require imdsv2 ([#16052](https://github.com/aws/aws-cdk/issues/16052)) ([ef7e20d](https://github.com/aws/aws-cdk/commit/ef7e20df08b4321f210bfc050afa42d7b4901931)) +* **codebuild:** add support for small ARM machine type ([#16635](https://github.com/aws/aws-cdk/issues/16635)) ([55fbc86](https://github.com/aws/aws-cdk/commit/55fbc866ef0195fdfc722206e4d69a1f4469cd40)), closes [#16633](https://github.com/aws/aws-cdk/issues/16633) +* **dynamodb:** add option to skip waiting for global replication to finish ([#16983](https://github.com/aws/aws-cdk/issues/16983)) ([254601f](https://github.com/aws/aws-cdk/commit/254601f477a4da309e81f5384140427f1b958bfd)), closes [#16611](https://github.com/aws/aws-cdk/issues/16611) +* **ec2:** add aspect to require imdsv2 ([#16051](https://github.com/aws/aws-cdk/issues/16051)) ([0947b21](https://github.com/aws/aws-cdk/commit/0947b21c1e3186042324820ec5ab433237246f58)) +* **eks:** configure serviceIpv4Cidr on the cluster ([#16957](https://github.com/aws/aws-cdk/issues/16957)) ([72102c7](https://github.com/aws/aws-cdk/commit/72102c750bfd6564cd51c1a5d8abc79b1ba1d3ce)), closes [/docs.aws.amazon.com/eks/latest/APIReference/API_KubernetesNetworkConfigRequest.html#AmazonEKS-Type-KubernetesNetworkConfigRequest-serviceIpv4](https://github.com/aws//docs.aws.amazon.com/eks/latest/APIReference/API_KubernetesNetworkConfigRequest.html/issues/AmazonEKS-Type-KubernetesNetworkConfigRequest-serviceIpv4) [#16541](https://github.com/aws/aws-cdk/issues/16541) +* **events:** Add DLQ support for SQS target ([#16916](https://github.com/aws/aws-cdk/issues/16916)) ([7fda903](https://github.com/aws/aws-cdk/commit/7fda90318e18b3a5d126b040e35a0146634d5f2d)), closes [#16417](https://github.com/aws/aws-cdk/issues/16417) +* **msk:** add Kafka version 2.8.1 ([#16881](https://github.com/aws/aws-cdk/issues/16881)) ([7db5c8c](https://github.com/aws/aws-cdk/commit/7db5c8cdafe7b9b22b6b40cb25ed8bd1946301f4)) +* **stepfunctions-tasks:** add `enableNetworkIsolation` property to `SageMakerCreateTrainingJobProps` ([#16792](https://github.com/aws/aws-cdk/issues/16792)) ([69ac520](https://github.com/aws/aws-cdk/commit/69ac520452b219bf242f2fbb4740f6b1b8b8790f)), closes [#16779](https://github.com/aws/aws-cdk/issues/16779) + + +### Bug Fixes + +* **apigatewayv2:** unable to retrieve domain url for default stage ([#16854](https://github.com/aws/aws-cdk/issues/16854)) ([c6db91e](https://github.com/aws/aws-cdk/commit/c6db91eee2cb658ce347c7ac6d6e3c95bc5977dc)), closes [#16638](https://github.com/aws/aws-cdk/issues/16638) +* **cfn-diff:** correctly handle Date strings in diff ([#16591](https://github.com/aws/aws-cdk/issues/16591)) ([86f2714](https://github.com/aws/aws-cdk/commit/86f2714613f06aaf2bcee27da2f66066c8e863d0)), closes [#16444](https://github.com/aws/aws-cdk/issues/16444) +* **ecs:** imported services don't have account & region set correctly ([#16997](https://github.com/aws/aws-cdk/issues/16997)) ([dc6f743](https://github.com/aws/aws-cdk/commit/dc6f7433f01b9bc2c8206fb03d72ab8404fe4f6a)), closes [#11199](https://github.com/aws/aws-cdk/issues/11199) [#11199](https://github.com/aws/aws-cdk/issues/11199) [#15944](https://github.com/aws/aws-cdk/issues/15944) +* **events:** PhysicalName.GENERATE_IF_NEEDED does not work for 
EventBus ([#17008](https://github.com/aws/aws-cdk/issues/17008)) ([707fa00](https://github.com/aws/aws-cdk/commit/707fa003a458039878a1ae5173b6665a84c1170b)), closes [#14337](https://github.com/aws/aws-cdk/issues/14337) +* **lambda:** docker image function fails when insightsVersion is specified ([#16781](https://github.com/aws/aws-cdk/issues/16781)) ([d0e15cc](https://github.com/aws/aws-cdk/commit/d0e15ccaca22c5e05b9186aa1a241e744d67c96a)), closes [#16642](https://github.com/aws/aws-cdk/issues/16642) +* **lambda-layer-node-proxy-agent:** Replace use of package.json with Dockerfile command `npm install [package]@[version]` ([#17078](https://github.com/aws/aws-cdk/issues/17078)) ([a129046](https://github.com/aws/aws-cdk/commit/a129046495a926561f94f5ce1f41c34b1df3afde)) +* **opensearch:** add validation to domainName property ([#17017](https://github.com/aws/aws-cdk/issues/17017)) ([3ec6832](https://github.com/aws/aws-cdk/commit/3ec683283e96159d588797bd46d33c82ff3076f1)), closes [#17016](https://github.com/aws/aws-cdk/issues/17016) +* **pipelines:** `additionalInputs` fails for deep directory ([#17074](https://github.com/aws/aws-cdk/issues/17074)) ([403d3ce](https://github.com/aws/aws-cdk/commit/403d3ce3bc0f4e30e9694e5c20743f0032009464)), closes [#16936](https://github.com/aws/aws-cdk/issues/16936) + ## [1.128.0](https://github.com/aws/aws-cdk/compare/v1.127.0...v1.128.0) (2021-10-14) diff --git a/version.v1.json b/version.v1.json index c24ba0065c588..cdb974a76d7f9 100644 --- a/version.v1.json +++ b/version.v1.json @@ -1,3 +1,3 @@ { - "version": "1.128.0" + "version": "1.129.0" } \ No newline at end of file From 3987808d16dbb37b6d5c16ad7a23d07bc6c2e9ec Mon Sep 17 00:00:00 2001 From: Otavio Macedo Date: Thu, 21 Oct 2021 13:45:13 +0100 Subject: [PATCH 37/37] Update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8c9fd17ee41a3..511ed1a25dabd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,7 +11,7 @@ All notable changes to this project will be documented in this file. 
See [standa * **codebuild:** add support for small ARM machine type ([#16635](https://github.com/aws/aws-cdk/issues/16635)) ([55fbc86](https://github.com/aws/aws-cdk/commit/55fbc866ef0195fdfc722206e4d69a1f4469cd40)), closes [#16633](https://github.com/aws/aws-cdk/issues/16633) * **dynamodb:** add option to skip waiting for global replication to finish ([#16983](https://github.com/aws/aws-cdk/issues/16983)) ([254601f](https://github.com/aws/aws-cdk/commit/254601f477a4da309e81f5384140427f1b958bfd)), closes [#16611](https://github.com/aws/aws-cdk/issues/16611) * **ec2:** add aspect to require imdsv2 ([#16051](https://github.com/aws/aws-cdk/issues/16051)) ([0947b21](https://github.com/aws/aws-cdk/commit/0947b21c1e3186042324820ec5ab433237246f58)) -* **eks:** configure serviceIpv4Cidr on the cluster ([#16957](https://github.com/aws/aws-cdk/issues/16957)) ([72102c7](https://github.com/aws/aws-cdk/commit/72102c750bfd6564cd51c1a5d8abc79b1ba1d3ce)), closes [/docs.aws.amazon.com/eks/latest/APIReference/API_KubernetesNetworkConfigRequest.html#AmazonEKS-Type-KubernetesNetworkConfigRequest-serviceIpv4](https://github.com/aws//docs.aws.amazon.com/eks/latest/APIReference/API_KubernetesNetworkConfigRequest.html/issues/AmazonEKS-Type-KubernetesNetworkConfigRequest-serviceIpv4) [#16541](https://github.com/aws/aws-cdk/issues/16541) +* **eks:** configure serviceIpv4Cidr on the cluster ([#16957](https://github.com/aws/aws-cdk/issues/16957)) ([72102c7](https://github.com/aws/aws-cdk/commit/72102c750bfd6564cd51c1a5d8abc79b1ba1d3ce)), closes [#16541](https://github.com/aws/aws-cdk/issues/16541) * **events:** Add DLQ support for SQS target ([#16916](https://github.com/aws/aws-cdk/issues/16916)) ([7fda903](https://github.com/aws/aws-cdk/commit/7fda90318e18b3a5d126b040e35a0146634d5f2d)), closes [#16417](https://github.com/aws/aws-cdk/issues/16417) * **msk:** add Kafka version 2.8.1 ([#16881](https://github.com/aws/aws-cdk/issues/16881)) ([7db5c8c](https://github.com/aws/aws-cdk/commit/7db5c8cdafe7b9b22b6b40cb25ed8bd1946301f4)) * **stepfunctions-tasks:** add `enableNetworkIsolation` property to `SageMakerCreateTrainingJobProps` ([#16792](https://github.com/aws/aws-cdk/issues/16792)) ([69ac520](https://github.com/aws/aws-cdk/commit/69ac520452b219bf242f2fbb4740f6b1b8b8790f)), closes [#16779](https://github.com/aws/aws-cdk/issues/16779)