
Fix issues in image processing CD pipeline
- Changed the image processing pipeline to get the manifest pipeline bucket names
from the other stacks instead of requiring them via context
- Changed image processing to use the env config to get the rbsc bucket,
since that bucket changes per environment
- Fixed an issue with the target stack names. We had renamed them from
`marble-image` to `marble-image-processing` but had missed updating the
pipeline to use the new names when deploying.
- Fixed an issue with the code terminating too soon during synth, which
prevented cdk from seeing dependencies correctly. We need an alternative to
the `if (!fs.existsSync(props.lambdaCodePath)) { ...; return }` checks, one
that lets synth continue but still throws an error when that stack is
actually deployed. In this specific case, cdk was not seeing the dependency
between image processing and the manifest pipeline because the manifest
pipeline was exiting too soon due to the missing files. For now, I just moved
these fs checks further down and forced the export/import to happen in the
manifest pipeline. This is a pretty fragile solution, and it's not very
intuitive what's happening when it kicks in, so I'm going to look for a more
general solution, but this fixes the immediate issue for now. See aws/aws-cdk#6743
for the original issue we were trying to prevent with these checks.
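
The forced export/import in the last bullet works by writing the bucket ARN to a named CloudFormation export and then handing consumers an imported reference built from `Fn.importValue`, so the cross-stack dependency is recorded in the templates even if a consuming stack later aborts synth. The sketch below reduces the pattern from `manifest-pipeline-stack.ts` (in the diff further down) to a standalone example; the `ProducerStack` class and the surrounding bucket setup are simplified for illustration, not the exact committed code.

```ts
import { Bucket, IBucket } from '@aws-cdk/aws-s3'
import { CfnOutput, Construct, Fn, Stack, StackProps } from '@aws-cdk/core'

export class ProducerStack extends Stack {
  // Consumers receive an imported reference, not the concrete Bucket construct.
  public readonly processBucket: IBucket

  constructor(scope: Construct, id: string, props?: StackProps) {
    super(scope, id, props)

    // The real bucket lives in this stack.
    const processBucket = new Bucket(this, 'ProcessBucket')

    // Manually control and force the export of this bucket.
    const processBucketExportName = `${this.stackName}:ProcessBucketArn`
    new CfnOutput(this, 'ProcessBucketArnOutput', {
      value: processBucket.bucketArn,
      exportName: processBucketExportName,
    })

    // Anything that references this.processBucket goes through the import,
    // which keeps the export alive and records the cross-stack dependency.
    this.processBucket = Bucket.fromBucketArn(this, 'BucketImport', Fn.importValue(processBucketExportName))
  }
}
```

The trade-off, as noted above, is that the indirection is easy to miss when reading the stack, which is why a more general fix is still planned.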
Justin Gondron committed Sep 11, 2020
1 parent f9259b1 commit 6ecea8a
Showing 5 changed files with 62 additions and 66 deletions.
16 changes: 9 additions & 7 deletions deploy/cdk/bin/services.ts
@@ -56,13 +56,6 @@ export const instantiateStacks = (app: App, namespace: string, contextEnv: Conte
...userContentContext,
})

const imageProcessingContext = getContextByNamespace('imageProcessing')
const imageProcessingStack = new imageProcessing.ImagesStack(app, `${namespace}-image-processing`, {
foundationStack,
...commonProps,
...imageProcessingContext,
})

const elasticsearchContext = getContextByNamespace('elasticsearch')
const elasticSearchStack = new elasticsearch.ElasticStack(app, `${namespace}-elastic`, {
foundationStack,
@@ -82,6 +75,15 @@ export const instantiateStacks = (app: App, namespace: string, contextEnv: Conte
...manifestPipelineContext,
})

const imageProcessingContext = getContextByNamespace('imageProcessing')
const imageProcessingStack = new imageProcessing.ImagesStack(app, `${namespace}-image-processing`, {
foundationStack,
rbscBucketName: contextEnv.rBSCS3ImageBucketName,
manifestPipelineStack,
...commonProps,
...imageProcessingContext,
})

return {
foundationStack,
website,
6 changes: 0 additions & 6 deletions deploy/cdk/cdk.context.json
@@ -43,12 +43,6 @@
"iiifImageService:qaRepoName": "iiif-qa",
"iiifImageService:qaSourceBranch": "master",

"imageProcessing:prodRbscBucketName": "libnd-smb-rbsc",
"imageProcessing:prodProcessBucketName": "marble-manifest-prod-processbucket-kskqchthxshg",
"imageProcessing:prodImageBucketName": "marble-data-broker-publicbucket-1kvqtwnvkhra2",
"imageProcessing:rbscBucketName": "marble-rbsc",
"imageProcessing:processBucketName": "marble-image",
"imageProcessing:imageBucketName": "marble-image",
"imageProcessing:lambdaCodePath": "../../../marble-images/s3_event",
"imageProcessing:dockerfilePath": "../../../marble-images/",
"imageProcessing:appRepoOwner": "ndlib",
25 changes: 11 additions & 14 deletions deploy/cdk/lib/image-processing/deployment-pipeline.ts
@@ -8,6 +8,8 @@ import cdk = require('@aws-cdk/core')
import { SlackApproval, PipelineNotifications } from '@ndlib/ndlib-cdk'
import { CDKPipelineDeploy } from '../cdk-pipeline-deploy'
import { NamespacedPolicy, GlobalActions } from '../namespaced-policy'
import { ManifestPipelineStack } from '../manifest-pipeline'
import { FoundationStack } from '../foundation'


export interface IDeploymentPipelineStackProps extends cdk.StackProps {
@@ -16,9 +18,9 @@ export interface IDeploymentPipelineStackProps extends cdk.StackProps {
readonly contextEnvName: string;
readonly owner: string;
readonly contact: string;
rbscBucketName: string;
processBucketName: string;
imageBucketName: string;
readonly rbscBucketName: string;
readonly testFoundationStack: FoundationStack;
readonly prodFoundationStack: FoundationStack;
readonly lambdaCodePath: string;
readonly dockerfilePath: string;
readonly appRepoOwner: string;
@@ -35,11 +37,11 @@ export class DeploymentPipelineStack extends cdk.Stack {
constructor(scope: cdk.Construct, id: string, props: IDeploymentPipelineStackProps) {
super(scope, id, props)

const testStackName = `${props.namespace}-test-image`
const prodStackName = `${props.namespace}-prod-image`
const testStackName = `${props.namespace}-test-image-processing`
const prodStackName = `${props.namespace}-prod-image-processing`

// Helper for creating a Pipeline project and action with deployment permissions needed by this pipeline
const createDeploy = (targetStack: string, namespace: string) => {
const createDeploy = (targetStack: string, namespace: string, foundationStack: FoundationStack) => {
const cdkDeploy = new CDKPipelineDeploy(this, `${namespace}-deploy`, {
targetStack,
dependsOnStacks: [],
@@ -54,9 +56,7 @@ export class DeploymentPipelineStack extends cdk.Stack {
projectName: "marble",
owner: props.owner,
contact: props.contact,
"imageProcessing:rbscBucketName": props.rbscBucketName,
"imageProcessing:processBucketName": props.processBucketName,
"imageProcessing:imageBucketName": props.imageBucketName,
"imageProcessing:imageBucketName": foundationStack.publicBucket.bucketName,
"imageProcessing:lambdaCodePath": "$CODEBUILD_SRC_DIR_AppCode/s3_event",
"imageProcessing:dockerfilePath": "$CODEBUILD_SRC_DIR_AppCode/",
},
@@ -110,7 +110,7 @@ export class DeploymentPipelineStack extends cdk.Stack {
})

// Deploy to Test
const deployTest = createDeploy(testStackName, `${props.namespace}-test`)
const deployTest = createDeploy(testStackName, `${props.namespace}-test`, props.testFoundationStack)

// Approval
const appRepoUrl = `https://github.com/${props.appRepoOwner}/${props.appRepoName}`
@@ -129,10 +129,7 @@ export class DeploymentPipelineStack extends cdk.Stack {
}

// Deploy to Production
props.imageBucketName = this.node.tryGetContext('imageProcessing:prodImageBucketName')
props.rbscBucketName = this.node.tryGetContext('imageProcessing:prodRbscBucketName')
props.processBucketName = this.node.tryGetContext('imageProcessing:prodProcessBucketName')
const deployProd = createDeploy(prodStackName, `${props.namespace}-prod`)
const deployProd = createDeploy(prodStackName, `${props.namespace}-prod`, props.prodFoundationStack)

// Pipeline
const pipeline = new codepipeline.Pipeline(this, 'DeploymentPipeline', {
36 changes: 17 additions & 19 deletions deploy/cdk/lib/image-processing/images-stack.ts
@@ -9,35 +9,24 @@ import cdk = require('@aws-cdk/core')
import fs = require('fs')
import { FoundationStack } from '../foundation'
import { S3NotificationToLambdaCustomResource } from './s3ToLambda'
import { ManifestPipelineStack } from '../manifest-pipeline'

export interface ImagesStackProps extends cdk.StackProps {
readonly lambdaCodePath: string;
readonly dockerfilePath: string;
readonly rbscBucketName: string;
readonly processBucketName: string;
readonly imageBucketName: string;
readonly manifestPipelineStack: ManifestPipelineStack;
readonly foundationStack: FoundationStack;
}

export class ImagesStack extends cdk.Stack {
constructor(scope: cdk.Construct, id: string, props: ImagesStackProps) {
super(scope, id, props)

if(!fs.existsSync(props.lambdaCodePath)) {
this.node.addError(`Cannot deploy this stack. Asset path not found ${props.lambdaCodePath}`)
return
}
if(!fs.existsSync(props.dockerfilePath)) {
this.node.addError(`Cannot deploy this stack. Asset path not found ${props.dockerfilePath}`)
return
}

const rbscBucketName = props.rbscBucketName
const rbscBucket = s3.Bucket.fromBucketName(this, 'RbscBucket', rbscBucketName)
const processBucketName = props.processBucketName
const processBucket = s3.Bucket.fromBucketName(this, 'ProcessBucket', processBucketName)
const imageBucketName = props.imageBucketName
const imageBucket = s3.Bucket.fromBucketName(this, 'ImageBucket', imageBucketName)
const processBucket = props.manifestPipelineStack.processBucket
const imageBucket = props.foundationStack.publicBucket

/* get rbsc bucket and attach object listener */
const changedImgRole = new iam.Role(this, 'S3ImageRole', {
@@ -56,19 +45,23 @@ export class ImagesStack extends cdk.Stack {
const roleLoggingPolicy = iam.ManagedPolicy.fromAwsManagedPolicyName(awsLambdaLoggingPolicy)
changedImgRole.addManagedPolicy(roleLoggingPolicy)

if(!fs.existsSync(props.lambdaCodePath)) {
this.node.addError(`Cannot deploy this stack. Asset path not found ${props.lambdaCodePath}`)
return
}
const imageTracker = new lambda.Function(this, 'Handler', {
runtime: lambda.Runtime.PYTHON_3_8,
code: lambda.Code.fromAsset(props.lambdaCodePath),
handler: 'generate.handler',
environment: {
PROCESS_BUCKET: processBucketName,
PROCESS_BUCKET: processBucket.bucketName,
},
role: changedImgRole,
})
// https://github.com/aws/aws-cdk/issues/2004
new S3NotificationToLambdaCustomResource(this, id, rbscBucket, imageTracker)

const cluster: ecs.Cluster = props.foundationStack.cluster
const cluster = props.foundationStack.cluster as ecs.Cluster
cluster.addCapacity('Ec2Group', {
instanceType: ec2.InstanceType.of(ec2.InstanceClass.T3, ec2.InstanceSize.MICRO),
minCapacity: 1,
@@ -113,15 +106,20 @@

const taskDef = new ecs.Ec2TaskDefinition(this, "TaskDefinition", { taskRole })
const logging = new ecs.AwsLogDriver({ streamPrefix: 'marbleimg' })

if(!fs.existsSync(props.dockerfilePath)) {
this.node.addError(`Cannot deploy this stack. Asset path not found ${props.dockerfilePath}`)
return
}
taskDef.addContainer("AppContainer", {
image: ecs.ContainerImage.fromAsset(props.dockerfilePath),
memoryLimitMiB: 512,
logging,
environment: {
LEVEL0: 'enable',
RBSC_BUCKET: rbscBucketName,
PROCESS_BUCKET: processBucketName,
IMAGE_BUCKET: imageBucketName,
PROCESS_BUCKET: processBucket.bucketName,
IMAGE_BUCKET: imageBucket.bucketName,
},
})

45 changes: 25 additions & 20 deletions deploy/cdk/lib/manifest-pipeline/manifest-pipeline-stack.ts
@@ -7,7 +7,7 @@ import { Bucket, HttpMethods, IBucket } from "@aws-cdk/aws-s3"
import { ParameterType, StringParameter } from '@aws-cdk/aws-ssm'
import { Choice, Condition, Errors, Fail, LogLevel, StateMachine, Succeed } from '@aws-cdk/aws-stepfunctions'
import * as tasks from '@aws-cdk/aws-stepfunctions-tasks'
import { Construct, Duration, Fn, Stack, StackProps } from "@aws-cdk/core"
import { Construct, Duration, Fn, Stack, StackProps, CfnOutput } from "@aws-cdk/core"
import fs = require('fs')
import path = require('path')
import { FoundationStack } from '../foundation'
@@ -143,23 +143,24 @@ export class ManifestPipelineStack extends Stack {
this.node.addError(`hostnamePrefix does not match legal pattern.`)
}

if (!fs.existsSync(props.lambdaCodeRootPath)) {
this.node.addError(`Cannot deploy this stack. Asset path not found ${props.lambdaCodeRootPath}`)
return
}


// Create Origin Access Id
const originAccessId = new OriginAccessIdentity(this, 'OriginAccessIdentity', {
comment: Fn.sub('Static assets in ${AWS::StackName}'),
})


// Create buckets needed
this.processBucket = new Bucket(this, 'ProcessBucket', {
const processBucket = new Bucket(this, 'ProcessBucket', {
serverAccessLogsBucket: props.foundationStack.logBucket,
serverAccessLogsPrefix: 's3/data-broker/',
})
// Manually control and force the export of this bucket
const processBucketExportName = `${this.stackName}:ProcessBucketArn`
new CfnOutput(this, 'ProcessBucketArnOutput', {
value: processBucket.bucketArn,
exportName: processBucketExportName,
})
// Construct the import for anything that references this bucket
this.processBucket = Bucket.fromBucketArn(this, 'BucketImport', Fn.importValue(processBucketExportName))

this.manifestBucket = new Bucket(this, 'ManifestBucket', {
cors: [
Expand Down Expand Up @@ -242,7 +243,7 @@ export class ManifestPipelineStack extends Stack {
new StringParameter(this, 'sSMProcessBucket', {
type: ParameterType.STRING,
parameterName: `${props.appConfigPath}/process-bucket`,
stringValue: this.processBucket.bucketName,
stringValue: processBucket.bucketName,
description: 'S3 Bucket to accumulate assets during processing',
})

@@ -329,6 +330,10 @@
})
}

if (!fs.existsSync(props.lambdaCodeRootPath)) {
this.node.addError(`Cannot deploy this stack. Asset path not found ${props.lambdaCodeRootPath}`)
return
}
const initManifestLambda = new Function(this, 'InitManifestLambdaFunction', {
code: Code.fromAsset(path.join(props.lambdaCodeRootPath, 'init/')),
description: 'Initializes the manifest pipeline step functions',
@@ -345,7 +350,7 @@
timeout: Duration.seconds(90),
})

this.processBucket.grantReadWrite(initManifestLambda)
processBucket.grantReadWrite(initManifestLambda)


const processManifestLambda = new Function(this, 'ProcessManifestLambdaFunction', {
Expand All @@ -363,7 +368,7 @@ export class ManifestPipelineStack extends Stack {
timeout: Duration.seconds(900),
})

this.processBucket.grantReadWrite(processManifestLambda)
processBucket.grantReadWrite(processManifestLambda)


const finalizeManifestLambda = new Function(this, 'FinalizeManifestLambdaFunction', {
Expand All @@ -373,7 +378,7 @@ export class ManifestPipelineStack extends Stack {
runtime: Runtime.PYTHON_3_8,
environment: {
SENTRY_DSN: props.sentryDsn,
PROCESS_BUCKET: this.processBucket.bucketArn,
PROCESS_BUCKET: processBucket.bucketArn,
},
initialPolicy: [
ManifestPipelineStack.ssmPolicy(props.appConfigPath),
@@ -388,7 +393,7 @@
})

this.manifestBucket.grantReadWrite(finalizeManifestLambda)
this.processBucket.grantReadWrite(finalizeManifestLambda)
processBucket.grantReadWrite(finalizeManifestLambda)
props.foundationStack.publicBucket.grantReadWrite(finalizeManifestLambda)

// Create tasks for state machine
Expand Down Expand Up @@ -470,7 +475,7 @@ export class ManifestPipelineStack extends Stack {
})

this.manifestBucket.grantReadWrite(museumExportLambda)
this.processBucket.grantReadWrite(museumExportLambda)
processBucket.grantReadWrite(museumExportLambda)


const alephExportLambda = new Function(this, 'AlephExportLambda', {
Expand All @@ -491,7 +496,7 @@ export class ManifestPipelineStack extends Stack {
})

this.manifestBucket.grantReadWrite(alephExportLambda)
this.processBucket.grantReadWrite(alephExportLambda)
processBucket.grantReadWrite(alephExportLambda)


const curateExportLambda = new Function(this, 'CurateExportLambda', {
Expand All @@ -512,7 +517,7 @@ export class ManifestPipelineStack extends Stack {
})

this.manifestBucket.grantReadWrite(curateExportLambda)
this.processBucket.grantReadWrite(curateExportLambda)
processBucket.grantReadWrite(curateExportLambda)


const archivesSpaceExportLambda = new Function(this, 'ArchivesSpaceExportLambda', {
Expand All @@ -533,7 +538,7 @@ export class ManifestPipelineStack extends Stack {
})

this.manifestBucket.grantReadWrite(archivesSpaceExportLambda)
this.processBucket.grantReadWrite(archivesSpaceExportLambda)
processBucket.grantReadWrite(archivesSpaceExportLambda)


const collectionsApiLambda = new Function(this, 'CollectionsApiLambda', {
Expand Down Expand Up @@ -561,7 +566,7 @@ export class ManifestPipelineStack extends Stack {
})

this.manifestBucket.grantReadWrite(collectionsApiLambda)
this.processBucket.grantReadWrite(collectionsApiLambda)
processBucket.grantReadWrite(collectionsApiLambda)

const objectFilesApiLambda = new Function(this, 'ObjectFilesApiLambda', {
code: Code.fromAsset(path.join(props.lambdaCodeRootPath, 'object_files_api/')),
Expand All @@ -582,7 +587,7 @@ export class ManifestPipelineStack extends Stack {
})

this.manifestBucket.grantReadWrite(objectFilesApiLambda)
this.processBucket.grantReadWrite(objectFilesApiLambda)
processBucket.grantReadWrite(objectFilesApiLambda)


// Create tasks for harvest state machine
