repo_name string | dataset string | owner string | lang string | func_name string | code string | docstring string | url string | sha string |
|---|---|---|---|---|---|---|---|---|
project-lakechain | github_2023 | awslabs | typescript | MultiOutputPipeline.createDeliveryStream | private createDeliveryStream() {
const firehoseStorageConnectorBucket = new s3.Bucket(this, 'FirehoseStorageConnectorBucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The IAM role to be used by the delivery stream.
const deliveryRole = this.getFirehoseRole('FirehoseDeliveryRole', firehoseStorageConnectorBucket);
// Create the delivery stream.
const deliveryStream = new firehose.CfnDeliveryStream(this, 'Stream', {
deliveryStreamType: 'DirectPut',
s3DestinationConfiguration: {
bucketArn: firehoseStorageConnectorBucket.bucketArn,
bufferingHints: {
intervalInSeconds: 60,
sizeInMBs: 1
},
roleArn: deliveryRole.roleArn
}
});
return ({ deliveryStream, firehoseStorageConnectorBucket });
} | /**
* Creates the Firehose delivery stream.
* @returns the created delivery stream.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/storage-connector-pipeline/stack.ts#L186-L212 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
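Since the delivery stream above is of type `DirectPut`, producers write to it with the Firehose API rather than through Kinesis Data Streams; records are buffered per the hints above (60 seconds or 1 MB, whichever comes first) before being flushed to S3. A minimal sketch using the AWS SDK v3 — the stream name here is an assumption, in practice it would be resolved from the stack's outputs:

```typescript
import { FirehoseClient, PutRecordCommand } from '@aws-sdk/client-firehose';

const main = async () => {
  const client = new FirehoseClient({});
  await client.send(new PutRecordCommand({
    // Assumed name — resolve it from the stack's outputs in practice.
    DeliveryStreamName: 'my-delivery-stream',
    Record: {
      Data: new TextEncoder().encode(JSON.stringify({ hello: 'world' }) + '\n')
    }
  }));
};

main().catch(console.error);
```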
project-lakechain | github_2023 | awslabs | typescript | MultiOutputPipeline.getFirehoseRole | private getFirehoseRole(id: string, bucket: s3.IBucket): iam.IRole {
const role = new iam.Role(this, id, {
assumedBy: new iam.ServicePrincipal('firehose.amazonaws.com')
});
role.addToPolicy(new iam.PolicyStatement({
actions: [
's3:AbortMultipartUpload',
's3:GetBucketLocation',
's3:GetObject',
's3:ListBucket',
's3:ListBucketMultipartUploads',
's3:PutObject'
],
resources: [
bucket.bucketArn,
bucket.arnForObjects('*')
]
}));
return (role);
} | /**
* Creates the IAM role to be used by the Firehose
* delivery stream.
* @param id the identifier of the role.
* @param bucket the S3 bucket the role is granted access to.
* @returns the created role.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/storage-connector-pipeline/stack.ts#L220-L241 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
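The explicit policy statement above pins the exact action set documented for Firehose S3 destinations. For comparison, the CDK grant API achieves similar wiring in one call, at the cost of a slightly broader action set (its write grant includes `s3:DeleteObject*`, for instance) — a sketch of that alternative:

```typescript
// Grant-based alternative: the bucket construct attaches read/write
// permissions to the role. Broader than the hand-written statement
// above, which is why the explicit policy may be preferable here.
const role = new iam.Role(this, 'FirehoseDeliveryRole', {
  assumedBy: new iam.ServicePrincipal('firehose.amazonaws.com')
});
bucket.grantReadWrite(role);
```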
project-lakechain | github_2023 | awslabs | typescript | AudioRecordingSummarizationStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline summarizing audio recordings using Amazon Transcribe and Amazon Bedrock.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// We are using the `TranscribeAudioProcessor` component to transcribe
// audio recordings into a VTT file.
const transcribe = new TranscribeAudioProcessor.Builder()
.withScope(this)
.withIdentifier('TranscribeTextProcessor')
.withCacheStorage(cache)
.withSource(trigger)
.withOutputFormats('vtt')
.build();
// We are using the `AnthropicTextProcessor` component to summarize
// the input text.
const textSummarizer = new AnthropicTextProcessor.Builder()
.withScope(this)
.withIdentifier('AnthropicTextProcessor')
.withCacheStorage(cache)
.withSource(transcribe)
.withRegion('us-east-1')
.withModel(AnthropicTextModel.ANTHROPIC_CLAUDE_V3_SONNET)
.withPrompt(`
Give a very detailed summary of the VTT transcription file with the following constraints:
- Write a verbose and very detailed summary of the transcription in plain text.
- Keep all the data points of the conversation.
- Do not say "Here is a summary", just write the summary as is.
- If you cannot summarize the text, just return an empty string without explanation.
`)
.withModelParameters({
temperature: 0.5,
max_tokens: 4096
})
.build();
// Write both the transcription and the summarization results
// to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSources([
transcribe,
textSummarizer
])
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/audio-recording-summarization-pipeline/stack.ts#L45-L145 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
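For context, a stack like this is typically instantiated from a CDK entry point. A minimal sketch — the file layout and environment resolution are assumptions, not part of the example above:

```typescript
import * as cdk from 'aws-cdk-lib';

const app = new cdk.App();

// Instantiate the pipeline stack; account and region are resolved from
// the CLI environment (assumed setup).
new AudioRecordingSummarizationStack(app, 'AudioRecordingSummarizationStack', {
  env: {
    account: process.env.CDK_DEFAULT_ACCOUNT,
    region: process.env.CDK_DEFAULT_REGION
  }
});
```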
project-lakechain | github_2023 | awslabs | typescript | BedrockSummarizationStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline summarizing text documents using Amazon Bedrock.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Convert PDF documents to text.
const pdfConverter = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PdfConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
const pandocConverter = new PandocTextConverter.Builder()
.withScope(this)
.withIdentifier('PandocConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// We are using the `AnthropicTextProcessor` component to summarize
// the input text.
const textSummarizer = new AnthropicTextProcessor.Builder()
.withScope(this)
.withIdentifier('AnthropicTextProcessor')
.withCacheStorage(cache)
.withSources([
pdfConverter,
pandocConverter,
trigger
])
.withRegion('us-east-1')
.withModel(AnthropicTextModel.ANTHROPIC_CLAUDE_V3_HAIKU)
.withPrompt(`
Provide a detailed summary of the given text with the following constraints:
- Write a very detailed summary in the same language as the original text.
- Keep the original meaning, style, and tone of the text in the summary.
- Do not say "Here is a summary", just write the summary as is.
- If you cannot summarize the text, just return an empty string without explanation.
`)
.withModelParameters({
temperature: 0.5,
max_tokens: 4096
})
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSource(textSummarizer)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/claude-summarization-pipeline/stack.ts#L50-L156 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | ExtractiveSummarizationStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline summarizing text documents using BERT extractive summarizer.',
...env
});
// The VPC in which the summarization models will be deployed.
const vpc = this.createVpc('Vpc');
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
trigger
// Summarize text documents using the Bert
// extractive summarizer.
.pipe(
new BertExtractiveSummarizer.Builder()
.withScope(this)
.withIdentifier('BertExtractiveSummarizer')
.withCacheStorage(cache)
.withVpc(vpc)
// Optionally set the summarization ratio.
.withRatio(0.2)
.withComputeType(ComputeType.GPU)
.build()
)
// Write the results to the destination bucket.
.pipe(
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.build()
);
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/extractive-summarization-pipeline/stack.ts#L51-L134 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
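Note that this stack wires middlewares with `.pipe()`, whereas most other stacks in this section declare producers on the consumer with `.withSource()`. The two styles express the same connection, which is why a builder inside a `.pipe()` chain does not also need an explicit source. A sketch of the two equivalent wirings, reusing the names from the stack above:

```typescript
// Style 1 — builder-side: the consumer declares its producer.
const summarizerA = new BertExtractiveSummarizer.Builder()
  .withScope(this)
  .withIdentifier('SummarizerA')
  .withCacheStorage(cache)
  .withSource(trigger) // consumes events emitted by the trigger.
  .withVpc(vpc)
  .build();

// Style 2 — chain-side: `.pipe()` performs the wiring, so the builder
// omits `.withSource()` entirely.
const summarizerB = new BertExtractiveSummarizer.Builder()
  .withScope(this)
  .withIdentifier('SummarizerB')
  .withCacheStorage(cache)
  .withVpc(vpc)
  .build();
trigger.pipe(summarizerB);
```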
project-lakechain | github_2023 | awslabs | typescript | ExtractiveSummarizationStack.createVpc | private createVpc(id: string): ec2.IVpc {
return (new ec2.Vpc(this, id, {
enableDnsSupport: true,
enableDnsHostnames: true,
ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
maxAzs: 3,
subnetConfiguration: [{
// Used by NAT Gateways to provide Internet access
// to the containers.
name: 'public',
subnetType: ec2.SubnetType.PUBLIC,
cidrMask: 28
}, {
// Used by summarization containers.
name: 'private',
subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
cidrMask: 24
}, {
// Used by EFS.
name: 'isolated',
subnetType: ec2.SubnetType.PRIVATE_ISOLATED,
cidrMask: 28
}]
}));
} | /**
* @param id the VPC identifier.
* @returns a new VPC with public, private, and isolated
* subnets for the pipeline.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/extractive-summarization-pipeline/stack.ts#L141-L165 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
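Downstream constructs placed in this VPC select subnets by type; for instance, the summarization containers land in the egress-enabled private subnets. A minimal sketch using the standard CDK selection API, assuming it runs inside the stack after `createVpc`:

```typescript
// Select the private (egress) subnets created above — one per AZ.
const privateSubnets = vpc.selectSubnets({
  subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS
});

// Select the isolated subnets used by EFS.
const isolatedSubnets = vpc.selectSubnets({
  subnetType: ec2.SubnetType.PRIVATE_ISOLATED
});

console.log(privateSubnets.subnetIds.length, isolatedSubnets.subnetIds.length);
```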
project-lakechain | github_2023 | awslabs | typescript | LlamaSummarizationPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline summarizing text documents using Llama models on Amazon Bedrock.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Convert PDF documents to text.
const pdfConverter = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PdfConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
const pandocConverter = new PandocTextConverter.Builder()
.withScope(this)
.withIdentifier('PandocConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// We are using the `LlamaTextProcessor` component to summarize
// the input text.
const textSummarizer = new LlamaTextProcessor.Builder()
.withScope(this)
.withIdentifier('LlamaTextProcessor')
.withCacheStorage(cache)
.withSources([
pdfConverter,
pandocConverter,
trigger
])
.withRegion('us-west-2')
.withModel(LlamaModel.LLAMA3_1_8B_INSTRUCT_V1)
.withPrompt('Provide a short summary of the given text')
.withModelParameters({
temperature: 0.5,
maxTokens: 2048
})
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSource(textSummarizer)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/llama-summarization-pipeline/stack.ts#L50-L150 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | MistralSummarizationPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline summarizing text documents using Mistral models on Amazon Bedrock.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Convert PDF documents to text.
const pdfConverter = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PdfConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
const pandocConverter = new PandocTextConverter.Builder()
.withScope(this)
.withIdentifier('PandocConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// We are using the `MistralTextProcessor` component to summarize
// the input text.
const textSummarizer = new MistralTextProcessor.Builder()
.withScope(this)
.withIdentifier('MistralTextProcessor')
.withCacheStorage(cache)
.withSources([
pdfConverter,
pandocConverter,
trigger
])
.withRegion('us-west-2')
.withModel(MistralTextModel.MISTRAL_LARGE_2)
.withPrompt('Provide a short summary of the given text')
.withModelParameters({
temperature: 0.5,
maxTokens: 2048
})
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSource(textSummarizer)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/mistral-summarization-pipeline/stack.ts#L50-L150 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | OllamaSummarizationStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline summarizing text documents using Ollama.',
...env
});
// The VPC in which the Ollama model will be deployed.
const vpc = this.createVpc('Vpc');
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Convert PDF documents to text.
const pdfConverter = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PdfConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
const pandocConverter = new PandocTextConverter.Builder()
.withScope(this)
.withIdentifier('PandocConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// We are using the `OllamaProcessor` component to summarize
// the input text.
const ollama = new OllamaProcessor.Builder()
.withScope(this)
.withIdentifier('OllamaProcessor')
.withCacheStorage(cache)
.withVpc(vpc)
.withSources([
pdfConverter,
pandocConverter,
trigger
])
.withModel(OllamaModel.LLAMA_3)
.withPrompt(`Provide a detailed summary of the document`)
.withInfrastructure(new InfrastructureDefinition.Builder()
.withMaxMemory(15 * 1024)
.withGpus(1)
.withInstanceType(ec2.InstanceType.of(
ec2.InstanceClass.G4DN,
ec2.InstanceSize.XLARGE2
))
.build())
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSource(ollama)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/ollama-summarization-pipeline/stack.ts#L56-L163 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
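The infrastructure definition above requests a single-GPU `g4dn.2xlarge`. Smaller models can also run on CPU; a hedged sketch of a CPU-only variant using the same builder options shown above (the instance choice is illustrative — check the middleware's documentation for supported types):

```typescript
// CPU-only sizing — illustrative values, not a tested configuration.
const cpuInfrastructure = new InfrastructureDefinition.Builder()
  .withMaxMemory(30 * 1024)
  .withGpus(0)
  .withInstanceType(ec2.InstanceType.of(
    ec2.InstanceClass.C6I,
    ec2.InstanceSize.XLARGE4
  ))
  .build();
```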
project-lakechain | github_2023 | awslabs | typescript | OllamaSummarizationStack.createVpc | private createVpc(id: string): ec2.IVpc {
return (new ec2.Vpc(this, id, {
enableDnsSupport: true,
enableDnsHostnames: true,
ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
maxAzs: 1,
subnetConfiguration: [{
// Used by NAT Gateways to provide Internet access
// to the containers.
name: 'public',
subnetType: ec2.SubnetType.PUBLIC,
cidrMask: 28
}, {
// Used by summarization containers.
name: 'private',
subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
cidrMask: 24
}, {
// Used by EFS.
name: 'isolated',
subnetType: ec2.SubnetType.PRIVATE_ISOLATED,
cidrMask: 28
}]
}));
} | /**
* @param id the VPC identifier.
* @returns a new VPC with public, private, and isolated
* subnets for the pipeline.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/ollama-summarization-pipeline/stack.ts#L170-L194 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | TitanSummarizationPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline summarizing text documents using Amazon Titan.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Convert PDF documents to text.
const pdfConverter = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PdfConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
const pandocConverter = new PandocTextConverter.Builder()
.withScope(this)
.withIdentifier('PandocConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// We are using the `TitanTextProcessor` component to summarize
// the input text.
const textSummarizer = new TitanTextProcessor.Builder()
.withScope(this)
.withIdentifier('TitanTextProcessor')
.withCacheStorage(cache)
.withSources([
pdfConverter,
pandocConverter,
trigger
])
.withRegion('us-east-1')
.withModel(TitanTextModel.AMAZON_TITAN_TEXT_PREMIER_V1)
.withPrompt('Provide a short summary of the given text')
.withModelParameters({
temperature: 0.5,
maxTokens: 2048
})
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSource(textSummarizer)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/titan-summarization-pipeline/stack.ts#L50-L150 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | intent | const intent = async (events: CloudEvent[], ffmpeg: Ffmpeg, utils: FfmpegUtils) => {
const videos = events.filter(
(event) => event.data().document().mimeType() === 'video/mp4'
);
// Create the FFMPEG chain.
return (ffmpeg()
.input(utils.file(videos[0]))
.noVideo()
.save('output.mp3')
);
}; | /**
* This intent is a function that will get executed in the cloud
* by the FFMPEG middleware. It takes a video input and extracts
* the audio from it.
* @param events the events to process, in this case there will
* be only one event, as video files are processed sequentially.
* @param ffmpeg the FFMPEG instance.
* @param utils a set of utilities to interact with the FFMPEG
* middleware.
* @returns the FFMPEG chain.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/video-summarization-pipeline/stack.ts#L74-L85 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
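Because the FFMPEG middleware exposes a fluent, fluent-ffmpeg-style chain, other intents follow the same shape. A hedged sketch of a variant that captures a single thumbnail frame instead of the audio track (method names assume fluent-ffmpeg semantics):

```typescript
const thumbnailIntent = async (events: CloudEvent[], ffmpeg: Ffmpeg, utils: FfmpegUtils) => {
  const videos = events.filter(
    (event) => event.data().document().mimeType() === 'video/mp4'
  );
  // Seek 5 seconds into the video and save a single frame.
  return (ffmpeg()
    .input(utils.file(videos[0]))
    .seekInput(5)
    .frames(1)
    .save('thumbnail.png')
  );
};
```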
project-lakechain | github_2023 | awslabs | typescript | VideoSummarizationStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline summarizing video documents using Amazon Bedrock.',
...env
});
// The VPC in which the FFMPEG processor will be deployed.
const vpc = this.createVpc('Vpc');
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
trigger
// The FFMPEG processor extracts the audio from the video.
.pipe(
new FfmpegProcessor.Builder()
.withScope(this)
.withIdentifier('FfmpegProcessor')
.withCacheStorage(cache)
.withVpc(vpc)
.withIntent(intent)
.build()
)
// We are using the `TranscribeAudioProcessor` component to transcribe
// audio into a VTT file.
.pipe(
new TranscribeAudioProcessor.Builder()
.withScope(this)
.withIdentifier('TranscribeTextProcessor')
.withCacheStorage(cache)
.withOutputFormats('vtt')
.build()
)
// We are using the `StructuredEntityExtractor` middleware to summarize
// the input text and extract structured metadata from it.
.pipe(
new StructuredEntityExtractor.Builder()
.withScope(this)
.withIdentifier('StructuredEntityExtractor')
.withCacheStorage(cache)
.withRegion('us-east-1')
.withSchema(schema)
.build()
)
// Write the results to the destination bucket.
.pipe(
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.build()
);
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/video-summarization-pipeline/stack.ts#L103-L203 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
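The `schema` passed to the `StructuredEntityExtractor` above is defined elsewhere in the stack file and is not shown here. As a purely hypothetical sketch — assuming the middleware accepts a zod schema with described fields, as Lakechain's documentation examples do — it might resemble:

```typescript
import { z } from 'zod';

// Hypothetical schema — field names are illustrative only and are not
// the ones used by the actual example.
const schema = z.object({
  summary: z
    .string()
    .describe('A detailed summary of the video transcript'),
  topics: z
    .array(z.string())
    .describe('The main topics discussed in the video')
});
```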
project-lakechain | github_2023 | awslabs | typescript | VideoSummarizationStack.createVpc | private createVpc(id: string): ec2.IVpc {
return (new ec2.Vpc(this, id, {
enableDnsSupport: true,
enableDnsHostnames: true,
ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
maxAzs: 1,
subnetConfiguration: [{
// Used by NAT Gateways to provide Internet access
// to the containers.
name: 'public',
subnetType: ec2.SubnetType.PUBLIC,
cidrMask: 28
}, {
// Used by the containers.
name: 'private',
subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
cidrMask: 24
}, {
// Used by EFS.
name: 'isolated',
subnetType: ec2.SubnetType.PRIVATE_ISOLATED,
cidrMask: 28
}]
}));
} | /**
* @param id the VPC identifier.
* @returns a new VPC with public, private, and isolated
* subnets for the pipeline.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/summarization-pipelines/video-summarization-pipeline/stack.ts#L210-L234 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | time | const time = (offset = 10): Date => {
const date = new Date();
date.setMinutes(date.getMinutes() + offset);
return (date);
}; | /**
* @param offset the offset, in minutes, to add to the current time.
* @returns a date in the local timezone, offset from the current
* time by the given number of minutes (10 by default).
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-processing-pipelines/article-curation-pipeline/stack.ts#L34-L38 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
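For instance:

```typescript
// The helper offsets the current local time by `offset` minutes.
const in10 = time();   // now + 10 minutes (default offset).
const in30 = time(30); // now + 30 minutes.
console.log(in30.getTime() - in10.getTime()); // ≈ 1_200_000 ms (20 minutes).
```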
project-lakechain | github_2023 | awslabs | typescript | ArticleStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline converting HTML articles into plain text and extracting their metadata.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// The URIs of the feeds to process.
const uris = [
'https://aws.amazon.com/blogs/aws/feed/',
'https://aws.amazon.com/blogs/architecture/feed/'
];
// Schedules the execution of the pipeline 10 minutes
// after the deployment of the stack. The execution time is
// computed once so that the schedule and the outputs below agree.
const executionTime = time();
const trigger = new SchedulerEventTrigger.Builder()
.withScope(this)
.withIdentifier('SchedulerEventTrigger')
.withCacheStorage(cache)
.withSchedule(
scheduler.ScheduleExpression.at(executionTime)
)
.withDocuments(uris)
.build();
trigger
// Process the RSS syndication feeds.
.pipe(
new SyndicationFeedProcessor.Builder()
.withScope(this)
.withIdentifier('SyndicationFeedProcessor')
.withCacheStorage(cache)
.build()
)
// Parse HTML articles from the feed into plain text.
.pipe(
new Newspaper3kParser.Builder()
.withScope(this)
.withIdentifier('Newspaper3kParser')
.withCacheStorage(cache)
.build()
)
// Store the results in S3.
.pipe(
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.build()
);
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
// Display the execution time of the pipeline in the console.
new cdk.CfnOutput(this, 'ExecutionTime', {
description: 'The time the pipeline will be executed.',
value: executionTime.toLocaleString()
});
// Display the trigger time on stderr.
console.error(`💁 The pipeline will be triggered at ${executionTime.toLocaleString()}`);
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-processing-pipelines/article-curation-pipeline/stack.ts#L56-L141 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | EmailNlpPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline showing how to analyze e-mails.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
trigger
// Convert e-mails to text.
.pipe(
new EmailTextProcessor.Builder()
.withScope(this)
.withIdentifier('EmailTextProcessor')
.withCacheStorage(cache)
.withOutputFormat('text')
.build()
)
// Perform NLP analysis of the text.
.pipe(
new NlpTextProcessor.Builder()
.withScope(this)
.withIdentifier('NlpTextProcessor')
.withCacheStorage(cache)
.withIntent(
l.nlp()
.language()
.pii()
.sentiment()
.stats()
.readingTime()
)
.build()
)
// Write the results to the destination bucket.
.pipe(
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('Storage')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.build()
);
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-processing-pipelines/email-nlp-pipeline/stack.ts#L46-L139 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | JMESPathPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline parsing and transforming JSON documents.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
trigger
.pipe(
// Transform an input JSON document using JMESPath expressions.
// Feel free to modify the expression to suit the structure of
// your JSON documents.
new JMESPathProcessor.Builder()
.withScope(this)
.withIdentifier('Parser')
.withCacheStorage(cache)
.withExpression('array[*].name')
.build()
)
.pipe(
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.build()
);
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-processing-pipelines/jmespath-parsing-pipeline/stack.ts#L44-L123 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
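To make the expression concrete: `array[*].name` projects the `name` field out of every element of a top-level `array` key. A worked example with the standard `jmespath` package, whose expression syntax the processor follows:

```typescript
import * as jmespath from 'jmespath';

const document = {
  array: [
    { name: 'alpha', size: 1 },
    { name: 'beta', size: 2 }
  ]
};

// Projects the `name` field of each array element.
console.log(jmespath.search(document, 'array[*].name'));
// => [ 'alpha', 'beta' ]
```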
project-lakechain | github_2023 | awslabs | typescript | NlpStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline analyzing text documents using NLP.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Convert PDF documents to text.
const pdfConverter = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PdfConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
const pandocConverter = new PandocTextConverter.Builder()
.withScope(this)
.withIdentifier('PandocConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Extracts metadata from text documents.
const nlpProcessor = new NlpTextProcessor.Builder()
.withScope(this)
.withIdentifier('NlpProcessor')
.withCacheStorage(cache)
.withSources([
pdfConverter,
pandocConverter,
trigger
])
.withIntent(
l.nlp()
.language()
.pii(l.confidence(0.9))
.entities(l.filter('PERSON'))
.pos(l.confidence(0.9), l.filter('ADJ', 'NOUN'))
.readingTime()
.stats()
)
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSource(nlpProcessor)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-processing-pipelines/nlp-pipeline/stack.ts#L50-L151 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | PdfVisionPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline converting PDF documents into text.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket where results are stored.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects in the bucket.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// In this step, we extract all the pages from the PDF document,
// as individual PDF documents.
// We also enable layout extraction to determine the number of
// tables and images in the document.
const pageExtractor = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PageExtractor')
.withCacheStorage(cache)
.withSource(trigger)
.withMaxMemorySize(2048)
.withTask(new ExtractPagesTask.Builder()
.withOutputType('pdf')
.withLayoutExtraction(true)
.build()
)
.build();
// This condition determines whether the PDF page contains
// complex elements such as tables or images.
const isComplexDocument = new Condition.Builder()
.withScope(this)
.withIdentifier('IsComplexDocument')
.withCacheStorage(cache)
.withSource(pageExtractor)
.withConditional(async (event: CloudEvent) => {
const metadata = event.data().metadata()
if (metadata.properties?.kind === 'text'
&& typeof metadata.properties.attrs?.layout?.tableCount !== 'undefined'
&& typeof metadata.properties.attrs?.layout?.imageCount !== 'undefined'
) {
const layout = metadata.properties.attrs.layout;
return (layout.tableCount! > 0 || layout.imageCount! > 0);
}
return (false);
})
.build();
// This step is used to convert the PDF page into an image if it
// has been identified as containing complex elements. The image
// will be further passed to the Anthropic image model.
const pdfToImage = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PdfToImage')
.withCacheStorage(cache)
.withTask(new ExtractDocumentTask.Builder()
.withOutputType('image')
.build()
)
.build();
// The image transform step is used to resize the image
// and convert it to a JPEG format. This is to ensure that
// the image is of a small enough size for the Anthropic model.
const imageTransform = new SharpImageTransform.Builder()
.withScope(this)
.withIdentifier('SharpImageTransform')
.withCacheStorage(cache)
.withSource(pdfToImage)
.withSharpTransforms(
sharp()
.resize(1024)
.jpeg()
)
.build();
// This step is used to convert the PDF page into text
// directly, if it does not contain any complex elements.
const pdfConverter = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PdfConverter')
.withCacheStorage(cache)
.withTask(new ExtractDocumentTask.Builder()
.withOutputType('text')
.build()
)
.build();
// Condition branch if the document contains complex elements.
isComplexDocument.onMatch(pdfToImage);
// Condition branch if the document does not contain complex elements.
isComplexDocument.onMismatch(pdfConverter);
// The `AnthropicTextProcessor` is used to generate a markdown
// representation of the text in the image. This is useful for
// generating a structured representation of the document when
// the document contains complex elements.
const anthropic = new AnthropicTextProcessor.Builder()
.withScope(this)
.withIdentifier('AnthropicTextProcessor')
.withCacheStorage(cache)
.withRegion('us-east-1')
.withModel(AnthropicTextModel.ANTHROPIC_CLAUDE_V3_SONNET)
.withSource(imageTransform)
.withPrompt(`
Here is a document associated with a page from a PDF document.
Your role is to accurately transcribe the pixels of the page into accurate markdown; keep the text untouched.
Transcribe the content of tables into a structured and formatted markdown table inlined with the text.
Provide a short contextual caption for each image inlined with the text.
Format your output as a clean and readable markdown document.
Do not say "this document", only output the transcript.
Skip the preamble; go straight into the transcription.
In case you cannot transcribe a document, output an empty string, like "".
`)
.withModelParameters({
temperature: 0.1,
max_tokens: 4096,
top_p: 0.9,
top_k: 250
})
.build();
// We reduce all the pages together into a single event within
// a 6-minute time window.
const reducer = new Reducer.Builder()
.withScope(this)
.withIdentifier('Reducer')
.withCacheStorage(cache)
.withSources([ pdfConverter, anthropic ])
.withReducerStrategy(new TimeWindowStrategy.Builder()
.withTimeWindow(cdk.Duration.minutes(6))
.withJitter(cdk.Duration.seconds(15))
.build()
)
.build();
// We concatenate all the pages together into a single document.
const transform = new Transform.Builder()
.withScope(this)
.withIdentifier('Concat')
.withCacheStorage(cache)
.withSource(reducer)
.withTransformExpression(concat)
.build();
// Write the resulting document to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withSource(transform)
.withDestinationBucket(destination)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-processing-pipelines/pdf-vision-pipeline/stack.ts#L49-L249 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
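The routing decision hinges entirely on the `IsComplexDocument` predicate. Here is the same logic restated as a standalone function and exercised against a hand-written metadata sample — the metadata shape is inferred from the predicate above and is illustrative only (undefined counts are treated as zero):

```typescript
interface LayoutAttrs {
  tableCount?: number;
  imageCount?: number;
}

interface SampleMetadata {
  properties?: {
    kind?: string;
    attrs?: { layout?: LayoutAttrs };
  };
}

// Mirrors the conditional wired into `IsComplexDocument`.
const isComplex = (metadata: SampleMetadata): boolean => {
  const layout = metadata.properties?.attrs?.layout;
  if (metadata.properties?.kind !== 'text' || !layout) {
    return (false);
  }
  return ((layout.tableCount ?? 0) > 0 || (layout.imageCount ?? 0) > 0);
};

// A page with two tables is routed to the vision branch.
console.log(isComplex({
  properties: { kind: 'text', attrs: { layout: { tableCount: 2, imageCount: 0 } } }
})); // => true
```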
project-lakechain | github_2023 | awslabs | typescript | PiiRedactionStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A PII redaction pipeline using Project Lakechain.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Convert PDF documents to text.
const pdfConverter = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PdfConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
const pandocConverter = new PandocTextConverter.Builder()
.withScope(this)
.withIdentifier('PandocConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Extracts metadata from text documents.
const nlpProcessor = new NlpTextProcessor.Builder()
.withScope(this)
.withIdentifier('NlpProcessor')
.withCacheStorage(cache)
.withSources([
pdfConverter,
pandocConverter,
trigger
])
.withIntent(
l.nlp()
.language()
.pii(l.confidence(0.9))
)
.build();
// We use the text transform processor to transform
// PII data into a redacted format.
// @see https://docs.aws.amazon.com/comprehend/latest/dg/how-pii.html
const textTransform = new TextTransformProcessor.Builder()
.withScope(this)
.withIdentifier('TextTransformProcessor')
.withCacheStorage(cache)
.withSource(nlpProcessor)
.withIntent(
t.text().redact(t.pii())
)
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSource(textTransform)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-processing-pipelines/pii-redaction-pipeline/stack.ts#L54-L164 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | TextModerationPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline demonstrating how to use Amazon Comprehend for text moderation.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The moderated texts bucket.
const moderated = new s3.Bucket(this, 'Moderated', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The safe texts bucket.
const safe = new s3.Bucket(this, 'Safe', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Monitor a bucket for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// The NLP text processor identifies PII information
// and performs sentiment analysis.
const nlpProcessor = new NlpTextProcessor.Builder()
.withScope(this)
.withIdentifier('NlpTextProcessor')
.withCacheStorage(cache)
.withSource(trigger)
.withIntent(
l.nlp()
.language()
.sentiment()
.pii(l.confidence(0.9))
)
.build()
const condition = new Condition.Builder()
.withScope(this)
.withIdentifier('Condition')
.withCacheStorage(cache)
.withSource(nlpProcessor)
.withConditional(async (event: CloudEvent) => {
const metadata = event.data().metadata();
const attrs = metadata.properties?.attrs as TextMetadata;
const hasPii = attrs.stats?.piis !== 0;
const nonNegativeSentiment = attrs.sentiment === 'positive' || attrs.sentiment === 'neutral';
return (!hasPii && nonNegativeSentiment);
})
.build();
// Writes the results to the moderated bucket when
// PII data exists in the document metadata or the
// sentiment is negative or mixed.
condition.onMismatch(
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('ModeratedStorage')
.withCacheStorage(cache)
.withDestinationBucket(moderated)
.build()
);
// Writes the results to the safe bucket when no PII
// data exists in the document metadata and the
// sentiment is positive or neutral.
condition.onMatch(
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('SafeStorage')
.withCacheStorage(cache)
.withDestinationBucket(safe)
.build()
);
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the moderated bucket information in the console.
new cdk.CfnOutput(this, 'ModeratedBucketName', {
description: 'The name of the bucket containing moderated documents.',
value: moderated.bucketName
});
// Display the safe bucket information in the console.
new cdk.CfnOutput(this, 'SafeBucketName', {
description: 'The name of the bucket containing safe documents.',
value: safe.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-processing-pipelines/text-moderation-pipeline/stack.ts#L44-L168 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | TextSplittingStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline showcasing how to split text at scale.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Creates the character text splitter, which will
// split the text into chunks of 4000 characters with
// a 200-character overlap, based on the LangChain
// `CharacterTextSplitter` implementation.
const characterTextSplitter = new CharacterTextSplitter.Builder()
.withScope(this)
.withIdentifier('CharacterTextSplitter')
.withCacheStorage(cache)
.withSource(trigger)
.withChunkSize(4000)
.withChunkOverlap(200)
.build();
// Creates the recursive character text splitter, which will
// split the text into chunks of up to 4000 characters with
// a 200-character overlap, based on the LangChain
// `RecursiveCharacterTextSplitter` implementation.
const recursiveTextSplitter = new RecursiveCharacterTextSplitter.Builder()
.withScope(this)
.withIdentifier('RecursiveCharacterTextSplitter')
.withCacheStorage(cache)
.withSource(trigger)
.withChunkSize(4000)
.withChunkOverlap(200)
.build();
// Creates the tiling text splitter, which uses
// the NLTK tiling algorithm to split the text
// into paragraphs.
const tilingTextSplitter = new TilingTextSplitter.Builder()
.withScope(this)
.withIdentifier('TilingTextSplitter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Creates the sentence text splitter, which uses
// the NLTK sentence tokenizer to split the text
// into chunks of at most 4000 bytes while preserving
// sentence boundaries.
const sentenceTextSplitter = new SentenceTextSplitter.Builder()
.withScope(this)
.withIdentifier('SentenceTextSplitter')
.withCacheStorage(cache)
.withSource(trigger)
.withMaxBytesLength(4000)
.build();
// Creates the regexp text splitter, which uses
// a string or regular expression to split the text into
// paragraphs.
const regexpTextSplitter = new RegexpTextSplitter.Builder()
.withScope(this)
.withIdentifier('RegexpTextSplitter')
.withCacheStorage(cache)
.withSource(trigger)
.withSeparator('\n\n')
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSources([
characterTextSplitter,
recursiveTextSplitter,
tilingTextSplitter,
sentenceTextSplitter,
regexpTextSplitter
])
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-processing-pipelines/text-splitting-pipeline/stack.ts#L42-L170 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | TextToSpeechStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline converting text to speech using the Bark model.',
...env
});
// The VPC in which the Bark model will be deployed.
const vpc = this.createVpc('Vpc');
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Monitor a bucket for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Detect the language of the text using the NLP
// text processor.
const nlpProcessor = new NlpTextProcessor.Builder()
.withScope(this)
.withIdentifier('Nlp')
.withCacheStorage(cache)
.withSource(trigger)
.withIntent(
l.nlp().language()
)
.build();
// Convert the text to speech using the Bark model
// running on AWS ECS. This middleware will use the
// language detected by the NLP processor to select
// the appropriate voice.
const synthesizer = new BarkSynthesizer.Builder()
.withScope(this)
.withIdentifier('BarkTextToSpeech')
.withCacheStorage(cache)
.withVpc(vpc)
.withSource(nlpProcessor)
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSource(synthesizer)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-to-speech-pipelines/bark-synthesizer/stack.ts#L47-L137 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | TextToSpeechStack.createVpc | private createVpc(id: string): ec2.IVpc {
return (new ec2.Vpc(this, id, {
enableDnsSupport: true,
enableDnsHostnames: true,
ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
subnetConfiguration: [{
// Used by NAT Gateways to provide Internet access
// to the containers.
name: 'public',
subnetType: ec2.SubnetType.PUBLIC,
cidrMask: 28
}, {
// Used by Bark containers.
name: 'private',
subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
cidrMask: 24
}, {
// Used by EFS.
name: 'isolated',
subnetType: ec2.SubnetType.PRIVATE_ISOLATED,
cidrMask: 28
}]
}));
} | /**
* @param id the VPC identifier.
* @returns a new VPC with public, private, and isolated
* subnets for the pipeline.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-to-speech-pipelines/bark-synthesizer/stack.ts#L144-L167 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
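// Sizing note for the VPC above: within the 10.0.0.0/20 range, each /28
// subnet yields 16 addresses (11 usable, as AWS reserves 5 per subnet)
// for the NAT gateways and EFS mount targets, while the /24 private
// subnet yields 256 addresses (251 usable) for the containers.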
project-lakechain | github_2023 | awslabs | typescript | ElevenLabsSynthesisPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline converting text to speech using the ElevenLabs API.',
...env
});
// Checking whether environment variables are defined.
if (!process.env.ELEVENLABS_API_KEY_SECRET_NAME) {
throw new Error(`
Missing the ELEVENLABS_API_KEY_SECRET_NAME environment variable.
`);
}
// The ElevenLabs API key.
const elevenLabsApiKey = secrets.Secret.fromSecretNameV2(
this,
'ElevenLabsApiKey',
process.env.ELEVENLABS_API_KEY_SECRET_NAME
);
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Monitor a bucket for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Convert the text to speech using the ElevenLabs API.
const synthesizer = new ElevenLabsSynthesizer.Builder()
.withScope(this)
.withIdentifier('ElevenLabsSynthesizer')
.withCacheStorage(cache)
.withSource(trigger)
.withApiKey(elevenLabsApiKey)
// Rachel voice.
.withVoice('EXAVITQu4vr4xnSDxMaL')
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withSource(synthesizer)
.withDestinationBucket(destination)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-to-speech-pipelines/elevenlabs-synthesizer/stack.ts#L51-L139 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | TextToSpeechStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline converting text to speech using Amazon Polly.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Monitor a bucket for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Detect the language of the text using the NLP
// processor.
const nlpProcessor = new NlpTextProcessor.Builder()
.withScope(this)
.withIdentifier('Nlp')
.withCacheStorage(cache)
.withSource(trigger)
.withIntent(
l.nlp().language()
)
.build();
// Convert the text to speech using Amazon Polly
// based on the language detected by the NLP processor.
const synthesizer = new PollySynthesizer.Builder()
.withScope(this)
.withIdentifier('PollySynthesizer')
.withCacheStorage(cache)
.withSource(nlpProcessor)
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSource(synthesizer)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-to-speech-pipelines/polly-synthesizer/stack.ts#L45-L129 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | BedrockTranslationPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline demonstrating how to use Amazon Bedrock to translate documents.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Monitor a bucket for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
// Convert PDF documents to text.
const pdfConverter = new PdfTextConverter.Builder()
.withScope(this)
.withIdentifier('PdfConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Convert text-oriented documents (Docx, Markdown, HTML, etc.) to text.
const pandocConverter = new PandocTextConverter.Builder()
.withScope(this)
.withIdentifier('PandocConverter')
.withCacheStorage(cache)
.withSource(trigger)
.build();
// Split text documents into chunks of at most 4096 bytes while
// preserving sentence boundaries.
// @note This is because a single invocation of a Bedrock model can only
// output a maximum of 4096 tokens.
const sentenceTextSplitter = new SentenceTextSplitter.Builder()
.withScope(this)
.withIdentifier('SentenceTextSplitter')
.withCacheStorage(cache)
.withSources([
pdfConverter,
pandocConverter,
trigger
])
.withMaxBytesLength(4096)
.build();
// The `StructuredEntityExtractor` middleware will translate the text
// and return the result as a structured JSON document.
const extractor = new StructuredEntityExtractor.Builder()
.withScope(this)
.withIdentifier('StructuredEntityExtractor')
.withCacheStorage(cache)
.withRegion('us-east-1')
.withSource(sentenceTextSplitter)
.withSchema(schema)
.withInstructions(`
You must accurately translate the given text to ${TARGET_LANGUAGE} while ensuring
that you translate exactly the entire text, sentence by sentence.
`)
.build();
// The reducer middleware will reduce all the translated
// chunks into a single aggregated document that will be
// passed to the next middleware.
const reducer = new Reducer.Builder()
.withScope(this)
.withIdentifier('Reducer')
.withCacheStorage(cache)
.withSource(extractor)
.withReducerStrategy(new ConditionalStrategy.Builder()
.withConditional(conditional)
.build()
)
.build();
// The transform middleware will sort and concatenate all the
// translated documents into a single text file.
const transform = new Transform.Builder()
.withScope(this)
.withIdentifier('Transform')
.withCacheStorage(cache)
.withSource(reducer)
.withTransformExpression(concat)
.build();
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.withSource(transform)
.build();
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-translation-pipelines/bedrock-translation-pipeline/stack.ts#L53-L187 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
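// For reference, the data flow of the translation pipeline above:
//
//   trigger ──► pdfConverter ───┐
//   trigger ──► pandocConverter ┼─► sentenceTextSplitter (4096-byte chunks)
//   trigger ─────────────────────┘        │
//                                         ▼
//   destination ◄── transform ◄── reducer ◄── extractor (translation)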
project-lakechain | github_2023 | awslabs | typescript | TextTranslationPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline demonstrating how to translate documents using Amazon Translate.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Monitor a bucket for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
trigger
// Translate text documents using Amazon Translate.
.pipe(
new TranslateTextProcessor.Builder()
.withScope(this)
.withIdentifier('TranslateTextProcessor')
.withCacheStorage(cache)
.withSource(trigger)
// In this example, we translate input documents
// to both French and Spanish.
.withOutputLanguages(['fr', 'es'])
.build()
)
// Write the results to the destination bucket.
.pipe(
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.build()
);
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/text-translation-pipelines/translate-pipeline/stack.ts#L42-L119 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | TranscribeStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline transcribing audio into text using Amazon Transcribe.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Monitor a bucket for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
trigger
// Transcribe audio files using Amazon Transcribe.
.pipe(
new TranscribeAudioProcessor.Builder()
.withScope(this)
.withIdentifier('TranscribeAudioProcessor')
.withCacheStorage(cache)
.withSource(trigger)
.withOutputFormats('json', 'vtt')
.build()
)
// Store the results in the destination bucket.
.pipe(
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.build()
);
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/transcription-pipelines/transcribe-pipeline/stack.ts#L43-L118 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | WhisperStack.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline transcribing audio into text using the OpenAI Whisper model.',
...env
});
// The VPC in which the whisper containers will be deployed.
const vpc = this.createVpc('Vpc');
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Monitor a bucket for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
trigger
// Transcribe audio files with OpenAI Whisper.
// The processing will run by default on GPU accelerated hardware.
// Use `.withComputeType()` to run this model on CPU.
.pipe(
new WhisperTranscriber.Builder()
.withScope(this)
.withIdentifier('WhisperTranscriber')
.withCacheStorage(cache)
.withVpc(vpc)
.withModel('base')
.withOutputFormat('vtt')
.build()
)
// Store the results in the destination bucket.
.pipe(
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('S3StorageConnector')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.build()
);
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/transcription-pipelines/whisper-pipeline/stack.ts#L45-L127 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | WhisperStack.createVpc | private createVpc(id: string): ec2.IVpc {
return (new ec2.Vpc(this, id, {
enableDnsSupport: true,
enableDnsHostnames: true,
ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
subnetConfiguration: [{
// Used by NAT Gateways to provide Internet access
// to the containers.
name: 'public',
subnetType: ec2.SubnetType.PUBLIC,
cidrMask: 28
}, {
// Used by Whisper containers.
name: 'private',
subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
cidrMask: 24
}, {
// Used by EFS.
name: 'isolated',
subnetType: ec2.SubnetType.PRIVATE_ISOLATED,
cidrMask: 28
}]
}));
} | /**
* @param id the VPC identifier.
* @returns a new VPC with public, private, and isolated
* subnets for the pipeline.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/transcription-pipelines/whisper-pipeline/stack.ts#L134-L157 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | intent | const intent = async (events: CloudEvent[], ffmpeg: Ffmpeg, utils: FfmpegUtils) => {
const videos = events.filter(
(event) => event.data().document().mimeType() === 'video/mp4'
);
// Create the FFMPEG chain.
return (ffmpeg()
.input(utils.file(videos[0]))
.noVideo()
.save('output.mp3')
);
}; | /**
* This intent is a function that will get executed in the cloud
* by the FFMPEG middleware. It takes a video input and extracts
* the audio from it.
* @param events the events to process; in this case there will
* be only one event, as video files are processed sequentially.
* @param ffmpeg the FFMPEG instance.
* @param utils a set of utilities to interact with the FFMPEG
* middleware.
* @returns the FFMPEG chain.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/video-processing-pipelines/audio-extraction-pipeline/stack.ts#L46-L57 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
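// A hypothetical variant of the intent above (illustrative only — it
// assumes the `ffmpeg` handle exposes the usual fluent-ffmpeg chain
// methods, such as `audioCodec` and `audioBitrate`, and reuses the
// `CloudEvent`, `Ffmpeg` and `FfmpegUtils` types seen above).
const mp3Intent = async (events: CloudEvent[], ffmpeg: Ffmpeg, utils: FfmpegUtils) => {
  const videos = events.filter(
    (event) => event.data().document().mimeType() === 'video/mp4'
  );
  // Extract the audio track, forcing the MP3 codec and a fixed bitrate.
  return (ffmpeg()
    .input(utils.file(videos[0]))
    .noVideo()
    .audioCodec('libmp3lame')  // Assumed fluent-ffmpeg method.
    .audioBitrate('192k')      // Assumed fluent-ffmpeg method.
    .save('output.mp3')
  );
};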
project-lakechain | github_2023 | awslabs | typescript | AudioExtractionPipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A pipeline using FFMPEG to extract audio from video files.',
...env
});
// The VPC in which the FFMPEG processor will be deployed.
const vpc = this.createVpc('Vpc');
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache', {});
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
trigger
.pipe(
// The FFMPEG processor extracts the audio from the video.
new FfmpegProcessor.Builder()
.withScope(this)
.withIdentifier('FfmpegProcessor')
.withCacheStorage(cache)
.withVpc(vpc)
.withIntent(intent)
.build()
)
.pipe(
// Write the results to the destination bucket.
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('Storage')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.build()
);
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/video-processing-pipelines/audio-extraction-pipeline/stack.ts#L76-L155 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | AudioExtractionPipeline.createVpc | private createVpc(id: string): ec2.IVpc {
return (new ec2.Vpc(this, id, {
enableDnsSupport: true,
enableDnsHostnames: true,
ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
maxAzs: 1,
subnetConfiguration: [{
// Used by NAT Gateways to provide Internet access
// to the containers.
name: 'public',
subnetType: ec2.SubnetType.PUBLIC,
cidrMask: 28
}, {
// Used by the containers.
name: 'private',
subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
cidrMask: 24
}, {
// Used by EFS.
name: 'isolated',
subnetType: ec2.SubnetType.PRIVATE_ISOLATED,
cidrMask: 28
}]
}));
} | /**
* @param id the VPC identifier.
* @returns a new VPC with public, private, and isolated
* subnets for the pipeline.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/video-processing-pipelines/audio-extraction-pipeline/stack.ts#L162-L186 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | SamplePipeline.constructor | constructor(scope: Construct, id: string, env: cdk.StackProps) {
super(scope, id, {
description: 'A sample pipeline using a custom middleware.',
...env
});
///////////////////////////////////////////
/////// S3 Storage ///////
///////////////////////////////////////////
// The source bucket.
const source = new s3.Bucket(this, 'Bucket', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The destination bucket.
const destination = new s3.Bucket(this, 'Destination', {
encryption: s3.BucketEncryption.S3_MANAGED,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: true,
removalPolicy: cdk.RemovalPolicy.DESTROY,
enforceSSL: true
});
// The cache storage.
const cache = new CacheStorage(this, 'Cache');
///////////////////////////////////////////
/////// Lakechain Pipeline ///////
///////////////////////////////////////////
// Create the S3 trigger monitoring the bucket
// for uploaded objects.
const trigger = new S3EventTrigger.Builder()
.withScope(this)
.withIdentifier('Trigger')
.withCacheStorage(cache)
.withBucket(source)
.build();
trigger
// Count the words in text documents using the custom middleware.
.pipe(
new SimpleTextProcessor.Builder()
.withScope(this)
.withIdentifier('SimpleTextProcessor')
.withCacheStorage(cache)
.withSource(trigger)
.build()
)
// Write the results to the destination bucket.
.pipe(
new S3StorageConnector.Builder()
.withScope(this)
.withIdentifier('Storage')
.withCacheStorage(cache)
.withDestinationBucket(destination)
.build()
);
// Display the source bucket information in the console.
new cdk.CfnOutput(this, 'SourceBucketName', {
description: 'The name of the source bucket.',
value: source.bucketName
});
// Display the destination bucket information in the console.
new cdk.CfnOutput(this, 'DestinationBucketName', {
description: 'The name of the destination bucket.',
value: destination.bucketName
});
} | /**
* Stack constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/cli/lib/templates/middleware/typescript/example/stack.ts#L46-L121 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | SimpleTextProcessorBuilder.build | public build(): SimpleTextProcessor {
return (new SimpleTextProcessor(
this.scope,
this.identifier, {
...this.props
}
));
} | /**
* @returns a new instance of the `SimpleTextProcessor`
* service constructed with the given parameters.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/cli/lib/templates/middleware/typescript/src/index.ts#L72-L79 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | SimpleTextProcessor.constructor | constructor(scope: Construct, id: string, private props: MiddlewareProps) {
super(scope, id, description, {
...props,
queueVisibilityTimeout: cdk.Duration.seconds(
3 * PROCESSING_TIMEOUT.toSeconds()
)
});
///////////////////////////////////////////
/////// Processing Function ///////
///////////////////////////////////////////
// The lambda function.
this.processor = new node.NodejsFunction(this, 'Compute', {
description: 'A function counting the number of words in a text document.',
entry: path.resolve(__dirname, 'lambdas', 'word-counter', 'index.js'),
vpc: this.props.vpc,
memorySize: this.props.maxMemorySize ?? DEFAULT_MEMORY_SIZE,
timeout: PROCESSING_TIMEOUT,
runtime: EXECUTION_RUNTIME,
architecture: lambda.Architecture.ARM_64,
tracing: lambda.Tracing.ACTIVE,
environmentEncryption: this.props.kmsKey,
logGroup: this.logGroup,
insightsVersion: this.props.cloudWatchInsights ?
LAMBDA_INSIGHTS_VERSION :
undefined,
environment: {
POWERTOOLS_SERVICE_NAME: description.name,
POWERTOOLS_METRICS_NAMESPACE: NAMESPACE,
SNS_TARGET_TOPIC: this.eventBus.topicArn,
LAKECHAIN_CACHE_STORAGE: this.props.cacheStorage.id()
},
bundling: {
minify: true,
externalModules: [
'@aws-sdk/client-s3',
'@aws-sdk/client-sns'
]
}
});
// Allows this construct to act as a `IGrantable`
// for other middlewares to grant the processing
// lambda permissions to access their resources.
this.grantPrincipal = this.processor.grantPrincipal;
// Plug the SQS queue into the lambda function.
this.processor.addEventSource(new sources.SqsEventSource(this.eventQueue, {
batchSize: props.batchSize ?? 5,
maxBatchingWindow: props.batchingWindow,
reportBatchItemFailures: true
}));
// Function permissions.
this.eventBus.grantPublish(this.processor);
super.bind();
} | /**
* Provider constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/cli/lib/templates/middleware/typescript/src/index.ts#L103-L161 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | SimpleTextProcessor.grantReadProcessedDocuments | grantReadProcessedDocuments(grantee: iam.IGrantable): iam.Grant {
// Since this middleware simply passes through the data
// from the previous middleware, we grant any subsequent
// middlewares in the pipeline read access to the
// data of all source middlewares.
for (const source of this.sources) {
source.grantReadProcessedDocuments(grantee);
}
return ({} as iam.Grant);
} | /**
* Allows a grantee to read from the processed documents
* generated by this middleware.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/cli/lib/templates/middleware/typescript/src/index.ts#L167-L176 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | SimpleTextProcessor.supportedInputTypes | supportedInputTypes(): string[] {
return ([
'text/plain'
]);
} | /**
* @returns an array of mime-types supported as input
* type by the data producer.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/cli/lib/templates/middleware/typescript/src/index.ts#L182-L186 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | SimpleTextProcessor.supportedOutputTypes | supportedOutputTypes(): string[] {
return ([
'text/plain'
]);
} | /**
* @returns an array of mime-types supported as output
* type by the data producer.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/cli/lib/templates/middleware/typescript/src/index.ts#L192-L196 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | SimpleTextProcessor.supportedComputeTypes | supportedComputeTypes(): ComputeType[] {
return ([
ComputeType.CPU
]);
} | /**
* @returns the supported compute types by a given
* middleware.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/cli/lib/templates/middleware/typescript/src/index.ts#L202-L206 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | SimpleTextProcessor.conditional | conditional() {
return (super
.conditional()
.and(when('type').equals('document-created'))
);
} | /**
* @returns the middleware conditional statement defining
* in which conditions this middleware should be executed.
* In this case, we want the middleware to only be invoked
* when the document mime-type is supported, and the event
* type is `document-created`.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/cli/lib/templates/middleware/typescript/src/index.ts#L215-L220 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
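// A hypothetical extension of the conditional above (illustrative only —
// the `data.document.type` attribute path is an assumption, not a
// documented filter path; only `when('type')` is taken from the source):
conditional() {
  return (super
    .conditional()
    .and(when('type').equals('document-created'))
    .and(when('data.document.type').equals('text/plain'))
  );
}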
project-lakechain | github_2023 | awslabs | typescript | Lambda.handler | async handler(event: SQSEvent, _: Context): Promise<SQSBatchResponse> {
return (await processPartialResponse(
event,
(record: SQSRecord) => this.processEvent(
CloudEvent.from(JSON.parse(record.body))
),
processor
));
} | // eslint-disable-next-line @typescript-eslint/no-unused-vars | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/cli/lib/templates/middleware/typescript/src/lambdas/word-counter/index.ts#L83-L91 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
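// Note on the handler above: `processPartialResponse` comes from the AWS
// Lambda Powertools batch-processing utility. Combined with the
// `reportBatchItemFailures: true` setting on the SQS event source, it
// reports only the records that failed, so successfully processed
// messages in the batch are not redelivered.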
project-lakechain | github_2023 | awslabs | typescript | EcsCluster.constructor | constructor(scope: Construct, id: string, private props: EcsClusterProps) {
super(scope, id);
// Validate the properties.
this.props = EcsClusterPropsSchema.parse(props);
///////////////////////////////////////////
///////// Security Group //////////
///////////////////////////////////////////
// The security group used to control access to the
// ECS cluster.
const securityGroup = new ec2.SecurityGroup(this, 'SecurityGroup', {
vpc: this.props.vpc
});
///////////////////////////////////////////
/////// Elastic File-system ///////
///////////////////////////////////////////
if (this.props.fileSystem) {
// The filesystem will contain the temporary files and
// act as a caching layer.
this.fileSystem = new efs.FileSystem(this, 'Filesystem', {
removalPolicy: cdk.RemovalPolicy.DESTROY,
vpc: this.props.vpc,
throughputMode: efs.ThroughputMode.ELASTIC,
encrypted: true,
kmsKey: this.props.kmsKey,
vpcSubnets: {
subnetType: ec2.SubnetType.PRIVATE_ISOLATED
}
});
// Allow services in the VPC to access the EFS.
this.fileSystem.connections.allowFrom(
securityGroup,
ec2.Port.tcp(2049),
'Provides access to the EFS from the container tasks.'
);
if (this.props.fileSystem.accessPoint) {
// Creating an access point to the EFS which allows specific
// POSIX users to access the filesystem.
this.accessPoint = new efs.AccessPoint(this, 'AccessPoint', {
fileSystem: this.fileSystem,
path: this.props.fileSystem?.containerPath ?? '/cache',
createAcl: {
ownerGid: `${this.props.fileSystem.accessPoint.gid}`,
ownerUid: `${this.props.fileSystem.accessPoint.uid}`,
permissions: `${this.props.fileSystem.accessPoint.permission}`
},
posixUser: {
gid: `${this.props.fileSystem.accessPoint.gid}`,
uid: `${this.props.fileSystem.accessPoint.uid}`
}
});
}
}
///////////////////////////////////////////
////////// ECS Cluster //////////
///////////////////////////////////////////
// The ECS cluster running processing jobs.
const cluster = new ecs.Cluster(this, 'Cluster', {
vpc: this.props.vpc,
containerInsights: this.props.containerInsights
});
// Creating a launch template that will define the characteristics
// of the EC2 instances to run within the cluster.
const launchTemplate = new ec2.LaunchTemplate(this, 'Template', {
...this.props.launchTemplateProps,
// Providing the EC2 instances with the ability to
// be managed by AWS Systems Manager.
role: new iam.Role(this, 'InstanceRole', {
assumedBy: new iam.ServicePrincipal('ec2.amazonaws.com'),
managedPolicies: [
iam.ManagedPolicy.fromManagedPolicyArn(this, 'SSMStandard',
'arn:aws:iam::aws:policy/AmazonSSMManagedInstanceCore'
)
]
}),
securityGroup,
instanceMetadataTags: true
});
// The task execution role.
const taskExecutionRole = new iam.Role(this, 'ExecutionRole', {
managedPolicies: [
iam.ManagedPolicy.fromManagedPolicyArn(this, 'ECSTaskExecutionRole',
'arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy'
)
],
assumedBy: new iam.CompositePrincipal(
new iam.ServicePrincipal('ecs.amazonaws.com'),
new iam.ServicePrincipal('ecs-tasks.amazonaws.com')
)
});
// The task definition for running processing containers.
this.taskDefinition = new ecs.TaskDefinition(this, 'TaskDefinition', {
networkMode: ecs.NetworkMode.HOST,
compatibility: ecs.Compatibility.EC2,
executionRole: taskExecutionRole
});
this.taskRole = this.taskDefinition.taskRole;
// Allow the tasks to consume messages from the queue
// and to publish events to the event bus.
this.props.eventQueue.grantConsumeMessages(this.taskRole);
this.props.eventBus.grantPublish(this.taskRole);
// Allow the tasks to write logs to X-Ray.
this.taskRole.addManagedPolicy(
iam.ManagedPolicy.fromAwsManagedPolicyName('AWSXRayDaemonWriteAccess')
);
// Creating the capacity provider to be used by the cluster.
// The capacity provider uses an autoscaling group allowing
// to spread containers across a cluster of instances.
const capacityProvider = new ecs.AsgCapacityProvider(this, 'CapacityProvider', {
autoScalingGroup: new autoscaling.AutoScalingGroup(this, 'Asg', {
vpc: this.props.vpc,
vpcSubnets: {
subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS
},
minCapacity: this.props.autoScaling.minInstanceCapacity,
maxCapacity: this.props.autoScaling.maxInstanceCapacity,
launchTemplate
})
});
cluster.addAsgCapacityProvider(capacityProvider);
///////////////////////////////////////////
/////// User-Provider Container ///////
///////////////////////////////////////////
this.container = this.taskDefinition.addContainer(this.props.containerProps.containerName, {
image: this.props.containerProps.image,
cpu: this.props.containerProps.cpuLimit,
memoryLimitMiB: this.props.containerProps.memoryLimitMiB,
gpuCount: this.props.containerProps.gpuCount,
logging: ecs.LogDrivers.awsLogs({
streamPrefix: this.props.containerProps.containerName,
logGroup: this.props.logGroup
}),
environment: {
INPUT_QUEUE_URL: this.props.eventQueue.queueUrl,
SNS_TARGET_TOPIC: this.props.eventBus.topicArn,
CACHE_DIR: this.props.fileSystem?.containerPath ?? '/cache',
AWS_DEFAULT_REGION: cdk.Stack.of(this).region,
...this.props.containerProps.environment
}
});
if (this.props.fileSystem) {
// Mounting the elastic filesystem as a volume
// for the container.
this.taskDefinition.addVolume({
name: 'efs',
efsVolumeConfiguration: {
fileSystemId: this.fileSystem.fileSystemId,
transitEncryption: 'ENABLED',
authorizationConfig: this.accessPoint && {
accessPointId: this.accessPoint.accessPointId,
iam: 'ENABLED'
}
}
});
// Reference the volume as a mount point.
this.container.addMountPoints({
containerPath: this.props.fileSystem?.containerPath ?? '/cache',
readOnly: this.props.fileSystem?.readonly ?? false,
sourceVolume: 'efs'
});
}
// If a CMK is specified, we grant the task role
// the permission to decrypt using it.
if (this.props.kmsKey) {
this.props.kmsKey.grantDecrypt(this.taskRole);
}
///////////////////////////////////////////
/////// X-Ray Sidecar Container ///////
///////////////////////////////////////////
if (this.props.xraySidecar) {
this.xraySidecar = this.taskDefinition.addContainer('xray', {
image: ecs.ContainerImage.fromRegistry('amazon/aws-xray-daemon'),
memoryLimitMiB: 256,
cpu: 32,
portMappings: [{
hostPort: 2000,
containerPort: 2000,
protocol: ecs.Protocol.UDP
}],
logging: ecs.LogDrivers.awsLogs({
streamPrefix: 'xray',
logGroup: this.props.logGroup
}),
environment: {
AWS_REGION: cdk.Stack.of(this).region
}
});
// Reference the X-Ray daemon address as an environment variable
// to the user-provider container.
this.container.addEnvironment('AWS_XRAY_DAEMON_ADDRESS', '127.0.0.1:2000');
}
///////////////////////////////////////////
//////// ECS Tasks Autoscaler //////
///////////////////////////////////////////
// The goal of the autoscaler lambda function is to scale the number of
// tasks in the cluster based on the number of messages
// visible on the SQS queue.
//
// A deliberate choice was made to use a Lambda function
// to perform the scaling instead of using an ECS service
// and a CloudWatch alarm in order to reduce the latency
// in scaling the tasks, while providing more flexibility
// in deciding how many tasks to schedule.
//
// For example, the autoscaler takes into account the number
// of tasks currently running in the cluster and the number
// of tasks that are pending to be run, in addition to the
// number of messages visible on the SQS queue.
//
// We also did not want tasks to be killed by the ECS service
// auto-scaling while they are processing messages; each task
// consumes messages from the queue for as long as there are
// visible messages in the queue.
this.autoScaler = new node.NodejsFunction(this, 'AutoScaler', {
description: 'Manages auto-scaling of tasks in the ECS cluster.',
entry: path.resolve(__dirname, 'lambdas', 'ecs-task-autoscaler', 'index.js'),
runtime: lambda.Runtime.NODEJS_18_X,
tracing: lambda.Tracing.ACTIVE,
environmentEncryption: this.props.kmsKey,
logGroup: this.props.logGroup,
environment: {
POWERTOOLS_SERVICE_NAME: this.props.containerProps.containerName,
POWERTOOLS_METRICS_NAMESPACE: NAMESPACE,
CONTAINER_SQS_QUEUE_URL: this.props.eventQueue.queueUrl,
ECS_CLUSTER_ARN: cluster.clusterArn,
ECS_TASK_DEFINITION_ARN: this.taskDefinition.taskDefinitionArn,
CAPACITY_PROVIDER_NAME: capacityProvider.capacityProviderName,
MAX_TASK_NUMBER: `${this.props.autoScaling.maxTaskCapacity}`,
MESSAGES_PER_TASK: `${this.props.autoScaling.maxMessagesPerTask}`
},
bundling: {
minify: true,
externalModules: [
'@aws-sdk/client-sqs',
'@aws-sdk/client-ecs'
]
}
});
// Allow the autoscaler function to start new ECS tasks.
this.taskDefinition.grantRun(this.autoScaler);
// Allow the autoscaler to read properties from the queue.
this.props.eventQueue.grant(this.autoScaler, 'sqs:GetQueueAttributes');
// Allow the autoscaler to retrieve information about tasks
// running in the cluster. That is, knowing whether a given
// task is currently running or pending.
this.autoScaler.addToRolePolicy(new iam.PolicyStatement({
effect: iam.Effect.ALLOW,
actions: [
'ecs:ListTasks',
'ecs:DescribeTasks'
],
resources: ['*'],
conditions: {
ArnEquals: {
'ecs:cluster': cluster.clusterArn
}
}
}));
// CloudWatch Event rule to trigger the autoscaler.
new events.Rule(this, 'PeriodicRule', {
schedule: events.Schedule.rate(cdk.Duration.minutes(1)),
targets: [new eventTargets.LambdaFunction(this.autoScaler)]
});
} | /**
* Constructs a new instance of the `EcsCluster` class.
* @param scope the construct scope.
* @param id the construct identifier.
* @param props the construct properties.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/ecs-cluster/src/index.ts#L96-L388 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
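// A minimal usage sketch of the `EcsCluster` construct (illustrative
// only — the values are placeholders, and the property names are taken
// from the properties referenced in the constructor above):
const cluster = new EcsCluster(this, 'Cluster', {
  vpc,
  eventQueue,
  eventBus,
  logGroup,
  containerInsights: true,
  xraySidecar: true,
  containerProps: {
    containerName: 'processor',
    image: ecs.ContainerImage.fromRegistry('public.ecr.aws/docker/library/busybox'),
    memoryLimitMiB: 4096,
    cpuLimit: 2048,
    environment: {}
  },
  autoScaling: {
    minInstanceCapacity: 0,
    maxInstanceCapacity: 5,
    maxTaskCapacity: 10,
    maxMessagesPerTask: 5
  },
  fileSystem: {
    containerPath: '/cache',
    readonly: false,
    accessPoint: { uid: 1000, gid: 1000, permission: 750 }
  }
});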
project-lakechain | github_2023 | awslabs | typescript | Lambda.runTasks | runTasks(taskNumber: number): Promise<any> {
// Create an ECS task to process the document.
return (ecs.send(new RunTaskCommand({
cluster: ECS_CLUSTER_ARN,
taskDefinition: ECS_TASK_DEFINITION_ARN,
capacityProviderStrategy: [{
capacityProvider: CAPACITY_PROVIDER_NAME,
base: 0,
weight: 1
}],
count: taskNumber
})));
} | /**
* Runs the specified number of tasks on the ECS cluster.
* @param taskNumber the number of tasks to run.
* @returns a promise that resolves when all the tasks
* have been started.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/ecs-cluster/src/lambdas/ecs-task-autoscaler/index.ts#L92-L104 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Lambda.calculateTasksToRun | calculateTasksToRun(
sqsVisibleMessages: number,
currentTasksRunning: number,
currentTasksPending: number
) {
const currentTasksScheduled = currentTasksRunning + currentTasksPending;
// If there are no visible messages on the SQS queue,
// it means that there either are no documents to process,
// or the pressure on the consumers is low.
if (sqsVisibleMessages === 0) {
return (0);
}
// If the number of scheduled tasks is equal or higher
// than the maximum expected number of tasks, there is
// no need to run new tasks.
if (currentTasksScheduled >= MAX_TASK_NUMBER) {
return (0);
}
// The compute potential of tasks which are currently
// pending execution. It is important to take into account
// the compute potential of pending tasks in order to avoid
// starting too many tasks at once.
const pendingTasksPotential = currentTasksPending * MESSAGES_PER_TASK;
// If there is no compute potential in the pending tasks,
// we need to start new tasks proportionally to the number
// of visible messages on the SQS queue.
if (pendingTasksPotential === 0) {
return (Math.min(
Math.ceil(sqsVisibleMessages / MESSAGES_PER_TASK),
MAX_TASK_NUMBER - currentTasksScheduled
));
}
// If there is compute potential in the pending tasks,
// we take into account the remaining potential in the
// calculation of the number of tasks to run.
return (Math.min(
Math.floor(sqsVisibleMessages / pendingTasksPotential),
MAX_TASK_NUMBER - currentTasksScheduled
));
} | /**
* A rudimentary algorithm to calculate the number of tasks
* to start based on the number of visible messages on the
* target SQS queue and the number of running tasks in the
* target ECS cluster.
*
* The main objective of this algorithm is to be able to scale
* tasks much faster than we would using an alarm and an ECS
* service.
* The algorithm is based on the number of messages that each
* task can process simultaneously, while avoiding starting
* too many tasks at once.
*
* @param sqsVisibleMessages the number of visible messages
* on the target SQS queue.
* @param currentTasksRunning the number of running tasks
* in the target ECS cluster.
* @param currentTasksPending the number of pending tasks
* in the target ECS cluster.
* @returns the number of tasks to start.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/ecs-cluster/src/lambdas/ecs-task-autoscaler/index.ts#L127-L171 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
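// A worked trace of `calculateTasksToRun` above, assuming hypothetical
// values MESSAGES_PER_TASK = 5 and MAX_TASK_NUMBER = 10:
//   - 23 visible messages, 2 running, 0 pending:
//     pendingTasksPotential = 0, so min(ceil(23 / 5), 10 - 2) = 5 tasks.
//   - 23 visible messages, 2 running, 1 pending:
//     pendingTasksPotential = 5, so min(floor(23 / 5), 10 - 3) = 4 tasks.
//   - 0 visible messages: 0 tasks, regardless of the cluster state.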
project-lakechain | github_2023 | awslabs | typescript | Lambda.getTasksDetails | async getTasksDetails(taskArns: string[]): Promise<TaskDetails> {
const tasks = (await ecs.send(new DescribeTasksCommand({
cluster: ECS_CLUSTER_ARN,
tasks: taskArns
}))).tasks ?? [];
// We group the tasks by status, categorizing them into
// pending and running tasks.
return (tasks.reduce((acc: TaskDetails, task: Task) => {
if (runningStates.includes(task.lastStatus ?? '')) {
acc.running.push(task);
} else if (pendingStates.includes(task.lastStatus ?? '')) {
acc.pending.push(task);
}
return (acc);
}, { running: [], pending: [] }));
} | /**
* @param taskArns the list of task ARNs to retrieve details for.
* @returns a promise that resolves an array of both running and
* pending tasks.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/ecs-cluster/src/lambdas/ecs-task-autoscaler/index.ts#L178-L194 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Lambda.handler | async handler(event: any, context: Context) {
// Detailed statistics about tasks.
let tasksRunning = 0;
let tasksPending = 0;
// Retrieve the number of messages visible on the target
// SQS queue and the number of running tasks in the target
// ECS cluster.
const results = await Promise.all([
sqs.send(new GetQueueAttributesCommand({
QueueUrl: CONTAINER_SQS_QUEUE_URL,
AttributeNames: ['ApproximateNumberOfMessages']
})),
ecs.send(new ListTasksCommand({
cluster: ECS_CLUSTER_ARN,
desiredStatus: 'RUNNING'
}))
]);
// Extract the number of visible messages on the SQS queue.
const sqsVisibleMessages = parseInt(
results[0].Attributes?.ApproximateNumberOfMessages as string
);
// The tasks scheduled on the cluster.
const tasksScheduled = results[1].taskArns ?? [];
// If there are currently scheduled tasks on the
// cluster, we retrieve their details to be able to
// distinguish between running and pending tasks.
if (tasksScheduled.length > 0) {
const tasksDetails = await this.getTasksDetails(tasksScheduled);
tasksRunning = tasksDetails.running.length;
tasksPending = tasksDetails.pending.length;
}
// Calculate the number of tasks to run.
const tasksToRun = this.calculateTasksToRun(
sqsVisibleMessages,
tasksRunning,
tasksPending
);
logger.info(
`Tasks to run: ${tasksToRun}, Tasks running: ${tasksRunning}, Tasks pending: ${tasksPending}, visible messages: ${sqsVisibleMessages}`
);
return (tasksToRun ? await this.runTasks(tasksToRun) : Promise.resolve());
} | // eslint-disable-next-line @typescript-eslint/no-unused-vars | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/ecs-cluster/src/lambdas/ecs-task-autoscaler/index.ts#L204-L252 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Collection.fromCollectionAttributes | public static fromCollectionAttributes(scope: Construct, id: string, attrs: CollectionAttributes): ICollection {
return new class extends CollectionBase {
public readonly collectionName = attrs.collectionName;
public readonly collectionArn = attrs.collectionArn;
public readonly collectionId = attrs.collectionId;
public readonly collectionEndpoint = attrs.collectionEndpoint;
public readonly dashboardEndpoint = attrs.dashboardEndpoint;
constructor() { super(scope, id); }
};
} | /**
* Creates a collection construct that represents an external collection.
* @param scope The parent creating construct (usually `this`).
* @param id The construct's name.
* @param attrs A `CollectionAttributes` object.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-collection/src/index.ts#L160-L170 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
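// A usage sketch for `fromCollectionAttributes` (illustrative only — the
// ARN, identifier and endpoints below are placeholders):
const imported = Collection.fromCollectionAttributes(this, 'Imported', {
  collectionName: 'my-collection',
  collectionArn: 'arn:aws:aoss:us-east-1:111111111111:collection/abcd1234',
  collectionId: 'abcd1234',
  collectionEndpoint: 'https://abcd1234.us-east-1.aoss.amazonaws.com',
  dashboardEndpoint: 'https://abcd1234.us-east-1.aoss.amazonaws.com/_dashboards'
});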
project-lakechain | github_2023 | awslabs | typescript | Collection.constructor | constructor(scope: Construct, id: string, props: CollectionProps) {
super(scope, id);
this.collectionName = props.name;
// The security group associated with the domain.
const securityGroup = new ec2.SecurityGroup(this, 'SecurityGroup', {
vpc: props.vpc
});
// Allow VPC instances to communicate with the domain.
securityGroup.addIngressRule(
ec2.Peer.ipv4(props.vpc.vpcCidrBlock),
ec2.Port.tcp(443)
);
// Create the VPC endpoint.
const vpcEndpoint = new opensearchserverless.CfnVpcEndpoint(this, 'VpcEndpoint', {
name: formatName(`vpc-endpoint-${props.name}`),
subnetIds: props.vpc.privateSubnets.map((subnet) => subnet.subnetId),
vpcId: props.vpc.vpcId,
securityGroupIds: [securityGroup.securityGroupId]
});
// The network security policy.
const networkSecurityPolicy = new opensearchserverless.CfnSecurityPolicy(this, 'NetworkSecurityPolicy', {
name: formatName(`network-security-policy-${props.name}`),
type: 'network',
description: 'The collection network security policy.',
policy: JSON.stringify([{
Rules: [{
ResourceType: 'collection',
Resource: [`collection/${this.collectionName}`],
}],
AllowFromPublic: false,
SourceVPCEs: [vpcEndpoint.attrId]
}])
});
networkSecurityPolicy.addDependency(vpcEndpoint);
// The encryption security policy.
const encryptionSecurityPolicy = new opensearchserverless.CfnSecurityPolicy(this, 'EncryptionSecurityPolicy', {
name: formatName(`encryption-security-policy-${props.name}`),
type: 'encryption',
description: 'The collection security encryption policy.',
policy: JSON.stringify({
Rules: [{
ResourceType: 'collection',
Resource: [`collection/${this.collectionName}`]
}],
AWSOwnedKey: true
})
});
encryptionSecurityPolicy.addDependency(networkSecurityPolicy);
// Create the OpenSearch collection.
this.cfnCollection = new opensearchserverless.CfnCollection(this, 'Resource', {
name: props.name,
description: props.description,
type: props.type
});
this.cfnCollection.addDependency(networkSecurityPolicy);
this.cfnCollection.addDependency(encryptionSecurityPolicy);
this.collectionArn = this.cfnCollection.attrArn;
this.collectionId = this.cfnCollection.attrId;
this.collectionEndpoint = this.cfnCollection.attrCollectionEndpoint;
this.dashboardEndpoint = this.cfnCollection.attrDashboardEndpoint;
} | /**
* OpenSearch collection constructor.
* @param scope the scope of the construct
* @param id the id of the construct
* @param props the collection properties
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-collection/src/index.ts#L178-L249 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
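A hedged sketch of instantiating the construct; the props are inferred from the constructor above, the import path is an assumption, and `VECTORSEARCH` is one of the collection types accepted by OpenSearch Serverless.

```typescript
import * as cdk from 'aws-cdk-lib';
import * as ec2 from 'aws-cdk-lib/aws-ec2';
import { Collection } from '@project-lakechain/opensearch-collection'; // Assumed import path.

class ExampleStack extends cdk.Stack {
  constructor(scope: cdk.App, id: string) {
    super(scope, id);
    const vpc = new ec2.Vpc(this, 'Vpc');
    // The construct wires up the VPC endpoint, network policy and
    // encryption policy shown above before creating the collection.
    new Collection(this, 'Collection', {
      name: 'documents',
      description: 'Stores document embeddings.',
      type: 'VECTORSEARCH',
      vpc
    });
  }
}
```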
project-lakechain | github_2023 | awslabs | typescript | Collection.addAccessPolicy | public addAccessPolicy(
name: string,
principal: (string | undefined)[],
permissions: string[]
) {
new opensearchserverless.CfnAccessPolicy(this, `AccessPolicy-${name}`, {
name: formatName(`access-policy-${name}`),
type: 'data',
policy: JSON.stringify([{
Rules: [{
ResourceType: 'index',
Resource: [`index/${this.collectionName}/*`],
Permission: permissions
}],
Principal: principal
}])
});
} | /**
* Creates a new access policy for the collection.
* @param name the name of the access policy.
* @param principal the principal to grant access to.
* @param permissions the permissions to grant.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-collection/src/index.ts#L257-L274 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
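For example, a consumer role could be granted data-plane access to the collection's indexes as below; the permission names are standard OpenSearch Serverless data access permissions, and `collection` and `role` are assumed to exist in the enclosing scope.

```typescript
// Grant a role read/write access to every index in the collection.
// `collection` and `role` are assumed to be defined in the enclosing scope.
collection.addAccessPolicy(
  'consumer-access',
  [role.roleArn],
  ['aoss:ReadDocument', 'aoss:WriteDocument', 'aoss:DescribeIndex']
);
```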
project-lakechain | github_2023 | awslabs | typescript | OpenSearchDomain.constructor | constructor(scope: Construct, id: string, props: OpenSearchDomainProps) {
super(scope, id);
// The security group associated with the domain.
this.securityGroup = new ec2.SecurityGroup(this, 'SecurityGroup', {
vpc: props.vpc,
allowAllOutbound: true
});
// Allow VPC instances to communicate with the domain.
this.securityGroup.addIngressRule(
ec2.Peer.ipv4(props.vpc.vpcCidrBlock),
ec2.Port.tcp(443)
);
// Create a user pool that will be integrated with the domain
// for authentication purposes.
this.userPool = props.existingUserPool ?? new cognito.UserPool(this, 'Userpool', {
signInAliases: { username: true, email: false },
removalPolicy: cdk.RemovalPolicy.DESTROY,
passwordPolicy: {
minLength: 8,
requireDigits: true,
requireUppercase: true,
requireLowercase: true,
requireSymbols: true
},
selfSignUpEnabled: false
});
// Set a domain name on the user pool to allow users to sign in
// to the OpenSearch dashboard.
this.userPool.addDomain('UserPoolDomain', {
cognitoDomain: {
domainPrefix: cdk.Fn.join('-', [
cdk.Fn.select(0,
cdk.Fn.split('-', cdk.Fn.select(2, cdk.Fn.split('/', cdk.Stack.of(this).stackId)))
)
])
}
});
// The identity pool associated with the domain.
const identityPool = new cognito.CfnIdentityPool(this, 'IdentityPool', {
allowUnauthenticatedIdentities: false
});
// Create an authenticated role for the identity pool.
const authenticatedRole = new iam.Role(this, 'AuthenticatedRole', {
assumedBy: new iam.FederatedPrincipal('cognito-identity.amazonaws.com', {
StringEquals: {
'cognito-identity.amazonaws.com:aud': identityPool.ref,
},
'ForAnyValue:StringLike': {
'cognito-identity.amazonaws.com:amr': 'authenticated',
},
},
'sts:AssumeRoleWithWebIdentity'
)
});
// Attach the authenticated role to the identity pool.
new cognito.CfnIdentityPoolRoleAttachment(this, 'identityPoolRoleAttachment', {
identityPoolId: identityPool.ref,
roles: { authenticated: authenticatedRole.roleArn },
});
// Create the OpenSearch domain.
this.domain = new opensearch.Domain(this, 'Domain', {
version: opensearch.EngineVersion.OPENSEARCH_2_11,
enforceHttps: true,
nodeToNodeEncryption: true,
encryptionAtRest: { enabled: true },
removalPolicy: cdk.RemovalPolicy.DESTROY,
vpc: props.vpc,
securityGroups: [this.securityGroup],
tlsSecurityPolicy: opensearch.TLSSecurityPolicy.TLS_1_2,
cognitoDashboardsAuth: {
identityPoolId: identityPool.ref,
userPoolId: this.userPool.userPoolId,
role: new iam.Role(this, 'CognitoAccessForAmazonOpenSearch', {
assumedBy: new iam.ServicePrincipal('opensearchservice.amazonaws.com'),
managedPolicies: [{
managedPolicyArn: 'arn:aws:iam::aws:policy/AmazonOpenSearchServiceCognitoAccess'
}]
})
},
...props.opts
});
// Allow IAM users from the current account to interact with the domain.
this.domain.addAccessPolicies(
new iam.PolicyStatement({
effect: iam.Effect.ALLOW,
principals: [new iam.AccountPrincipal(cdk.Aws.ACCOUNT_ID)],
actions: [
'es:ESHttpGet',
'es:ESHttpPost'
],
resources: [
`${this.domain.domainArn}/*`
]
})
);
// Allow authenticated user pool users to interact with the domain.
this.domain.addAccessPolicies(
new iam.PolicyStatement({
effect: iam.Effect.ALLOW,
principals: [new iam.ArnPrincipal(authenticatedRole.roleArn)],
actions: [
'es:*'
],
resources: [
`${this.domain.domainArn}/*`
]
})
);
} | /**
* OpenSearch domain constructor.
* @param scope the scope of the construct
* @param id the id of the construct
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-domain/src/index.ts#L74-L192 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
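A minimal usage sketch, assuming the import path from the package location; only `vpc` is required by the constructor shown above, with `existingUserPool` and `opts` as optional overrides.

```typescript
import * as cdk from 'aws-cdk-lib';
import * as ec2 from 'aws-cdk-lib/aws-ec2';
import { OpenSearchDomain } from '@project-lakechain/opensearch-domain'; // Assumed import path.

class SearchStack extends cdk.Stack {
  constructor(scope: cdk.App, id: string) {
    super(scope, id);
    const vpc = new ec2.Vpc(this, 'Vpc');
    // Creates the domain along with the Cognito user pool and the
    // identity pool wiring shown above.
    const search = new OpenSearchDomain(this, 'Search', { vpc });
  }
}
```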
project-lakechain | github_2023 | awslabs | typescript | OpenSearchIndex.constructor | constructor(scope: Construct, id: string, props: OpenSearchIndexProps) {
super(scope, id);
// Set the index name.
this.indexName = props.indexName;
// The OpenSearch endpoint.
const endpoint = this.getEndpoint(props.endpoint);
// The identifier of the OpenSearch service.
const serviceIdentifier = this.getServiceIdentifier(props.endpoint);
// The path to the lambda function directory.
const processorPath = path.resolve(__dirname, 'lambdas', 'index-manager');
// Create the lambda function that will create the index.
const processor = new node.NodejsFunction(this, 'Compute', {
description: 'A custom resource managing the state of an OpenSearch index.',
entry: path.resolve(processorPath, 'index.js'),
vpc: props.vpc,
timeout: cdk.Duration.seconds(10),
runtime: lambda.Runtime.NODEJS_18_X,
architecture: lambda.Architecture.ARM_64,
logGroup: props.logGroup,
environment: {
OPENSEARCH_ENDPOINT: endpoint,
OPENSEARCH_INDEX_NAME: props.indexName,
BODY_PARAMETERS: JSON.stringify(props.body),
SERVICE_IDENTIFIER: serviceIdentifier
},
bundling: {
minify: true,
externalModules: [
'@aws-sdk/credential-provider-node'
]
}
});
if (serviceIdentifier === 'es') {
// Allow the lambda function to manage the index.
(props.endpoint as opensearch.Domain).grantWrite(processor);
} else if (serviceIdentifier === 'aoss') {
const endpoint = props.endpoint as oss.Collection;
// If the endpoint is a collection, we also need to create an
// access policy on the collection to allow the lambda function
// to manage the index.
endpoint.addAccessPolicy(
this.indexName,
[processor.role!.roleArn],
['aoss:CreateIndex', 'aoss:DeleteIndex', 'aoss:UpdateIndex']
);
// We also need to grant the lambda function permissions
// to write to the OpenSearch index.
processor.addToRolePolicy(new iam.PolicyStatement({
effect: iam.Effect.ALLOW,
actions: ['aoss:APIAccessAll'],
resources: [endpoint.collectionArn]
}));
}
// Create a custom resource that will manage the index.
const resource = new cdk.CustomResource(this, 'Resource', {
serviceToken: new custom.Provider(this, 'Provider', {
onEventHandler: processor,
logGroup: props.logGroup
}).serviceToken,
resourceType: 'Custom::OpenSearchIndex',
properties: {
IndexName: props.indexName,
Body: props.body,
Endpoint: endpoint
}
});
resource.node.addDependency(props.endpoint);
} | /**
* OpenSearch Index constructor
* @param scope the scope of the construct
* @param id the id of the construct
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-index/src/index.ts#L86-L164 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
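As an illustration, the construct can provision a k-NN index against either endpoint type; the `body` below is an ordinary create-index payload passed through verbatim to OpenSearch, while `this`, `collection`, `vpc` and the import path are assumptions.

```typescript
import { OpenSearchIndex } from '@project-lakechain/opensearch-index'; // Assumed import path.

// Inside a construct scope: create a vector index on an existing
// collection. `collection` and `vpc` are assumed to be defined above.
new OpenSearchIndex(this, 'VectorIndex', {
  indexName: 'embeddings',
  endpoint: collection,
  vpc,
  body: {
    settings: { index: { knn: true } },
    mappings: {
      properties: {
        embeddings: { type: 'knn_vector', dimension: 1536 }
      }
    }
  }
});
```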
project-lakechain | github_2023 | awslabs | typescript | OpenSearchIndex.getEndpoint | private getEndpoint(endpoint: opensearch.IDomain | oss.ICollection): string {
const serviceIdentifier = this.getServiceIdentifier(endpoint);
if (serviceIdentifier === 'es') {
return (`https://${(endpoint as opensearch.Domain).domainEndpoint}`);
} else if (serviceIdentifier === 'aoss') {
return ((endpoint as oss.Collection).collectionEndpoint);
} else {
throw new Error('Invalid endpoint.');
}
} | /**
* Get the URL of the OpenSearch endpoint.
* @param endpoint the OpenSearch endpoint.
* @returns the endpoint URL.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-index/src/index.ts#L171-L181 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | OpenSearchIndex.getServiceIdentifier | private getServiceIdentifier(endpoint: opensearch.IDomain | oss.ICollection): ServiceIdentifier {
const e = endpoint as any;
if (e.domainArn
&& e.domainName
&& e.domainId
&& e.domainEndpoint) {
return ('es');
} else if (e.collectionName
&& e.collectionArn
&& e.collectionId
&& e.collectionEndpoint) {
return ('aoss');
} else {
throw new Error('Invalid endpoint.');
}
} | /**
* Get the service identifier of the OpenSearch endpoint.
* @param endpoint the OpenSearch endpoint.
* @returns the service identifier.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-index/src/index.ts#L188-L204 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | onCreate | const onCreate = async () => {
try {
await client.indices.create({
index: INDEX_NAME,
body: BODY_PARAMETERS
});
} catch (error: any) {
// Ignore errors indicating that the index already exists.
if (error.meta?.body?.error?.type !== 'resource_already_exists_exception') {
throw error;
}
}
return {
PhysicalResourceId: INDEX_NAME
};
}; | /**
* Creates the index associated with the given event.
* @returns the response to the custom resource event
* @throws if an error occurs
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-index/src/lambdas/index-manager/index.ts#L62-L78 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | onDelete | const onDelete = async () => {
try {
await client.indices.delete({ index: INDEX_NAME });
} catch (error: any) {
// The index might not exist anymore.
if (error.statusCode !== 404) {
throw error;
}
}
return {
PhysicalResourceId: INDEX_NAME
};
}; | /**
* Deletes the index associated with the given event.
* @returns the response to the custom resource event
* @throws if an error occurs
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-index/src/lambdas/index-manager/index.ts#L85-L98 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
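The Lambda entry point routing CloudFormation events to `onCreate` and `onDelete` is not part of these records; below is a minimal sketch of what such a dispatcher conventionally looks like. Treating `Update` as a re-create is an assumption, justified here only because `onCreate` ignores already-exists errors.

```typescript
// Hypothetical dispatcher: the actual entry point is not shown in these
// records. CloudFormation custom resources emit Create/Update/Delete events.
export const handler = async (event: { RequestType: string }) => {
  switch (event.RequestType) {
    case 'Create':
    case 'Update':
      // Assumption: re-running the creation logic is safe because
      // `resource_already_exists_exception` errors are ignored above.
      return (onCreate());
    case 'Delete':
      return (onDelete());
    default:
      throw new Error(`Unsupported request type: ${event.RequestType}`);
  }
};
```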
project-lakechain | github_2023 | awslabs | typescript | OpenSearchSavedObject.constructor | constructor(scope: Construct, id: string, props: OpenSearchSavedObjectProps) {
super(scope, id);
// The domain endpoint.
const domain = `https://${props.domain.domainEndpoint}`;
// The path to the lambda function directory.
const processorPath = path.resolve(__dirname, 'lambdas', 'object-manager');
// The lambda function that will create the index.
const processor = new node.NodejsFunction(this, 'Compute', {
description: 'A custom resource allowing to restore saved objects on an OpenSearch domain.',
entry: path.resolve(processorPath, 'index.js'),
vpc: props.vpc,
runtime: lambda.Runtime.NODEJS_18_X,
architecture: lambda.Architecture.ARM_64,
logGroup: props.logGroup,
environment: {
OPENSEARCH_DOMAIN: domain
},
bundling: {
minify: true,
externalModules: [
'@aws-sdk/credential-provider-node'
]
}
});
// Allow the lambda function to manage the index.
props.domain.grantWrite(processor);
// Create a custom resource that will upload the saved object.
const resource = new cdk.CustomResource(this, 'Resource', {
serviceToken: new custom.Provider(this, 'Provider', {
onEventHandler: processor,
logGroup: props.logGroup
}).serviceToken,
resourceType: 'Custom::OpenSearchSavedObject',
properties: {
SavedObject: props.savedObject
}
});
resource.node.addDependency(props.domain);
} | /**
* OpenSearch Saved Object constructor
* @param scope the scope of the construct
* @param id the id of the construct
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-saved-object/src/index.ts#L76-L120 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | asFile | const asFile = (obj: any) => {
const boundary = `----WebKitFormBoundary${Date.now().toString(16)}`;
let data = `--${boundary}\r\n`;
data += 'Content-Disposition: form-data; name="file"; filename="a.ndjson"\r\n';
data += 'Content-Type: application/ndjson\r\n\r\n';
data += `${obj}\r\n`;
data += `--${boundary}--`;
return ({ file: data, boundary });
}; | /**
* Creates a multipart/form-data file from the given object.
* @param obj the object to wrap as a multipart/form-data file
* @returns a string representing the multipart/form-data file
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-saved-object/src/lambdas/object-manager/index.ts#L41-L51 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
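To make the wire format concrete, here is what `asFile` produces for a one-line NDJSON payload (the boundary suffix is time-based, so the value shown is illustrative):

```typescript
const { file, boundary } = asFile('{"type":"dashboard","id":"example"}');
// `boundary` resembles: ----WebKitFormBoundary18c2f4a1b2c
// `file` contains:
// ------WebKitFormBoundary18c2f4a1b2c
// Content-Disposition: form-data; name="file"; filename="a.ndjson"
// Content-Type: application/ndjson
//
// {"type":"dashboard","id":"example"}
// ------WebKitFormBoundary18c2f4a1b2c--
```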
project-lakechain | github_2023 | awslabs | typescript | onCreate | const onCreate = async (event: any) => {
const { SavedObject } = event.ResourceProperties;
const { file, boundary } = asFile(SavedObject.data);
// Upload the saved object.
await client.transport.request({
method: 'POST',
path: '_dashboards/api/saved_objects/_import?overwrite=true',
body: file
}, {
headers: {
'Content-Type': `multipart/form-data; boundary=${boundary}`,
'osd-xsrf': 'true'
}
});
return {
PhysicalResourceId: SavedObject.name
};
}; | /**
* Imports the saved object associated with the given event.
* @param event the CloudFormation custom resource event
* @returns the response to the custom resource event
* @throws if an error occurs
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/opensearch-saved-object/src/lambdas/object-manager/index.ts#L59-L78 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | onCreate | const onCreate = async (event: any) => {
const { ServiceLinkedRole } = event.ResourceProperties;
try {
// Checking if the service linked role exists.
await iam.send(new GetRoleCommand({
RoleName: SERVICE_LINKED_ROLE_NAME
}));
} catch (err: any) {
if (err.name === 'NoSuchEntityException') {
await iam.send(new CreateServiceLinkedRoleCommand({
AWSServiceName: SERVICE_DOMAIN,
Description: ROLE_DESCRIPTION
}));
} else {
throw err;
}
}
return {
PhysicalResourceId: ServiceLinkedRole
};
}; | /**
* Creates the service linked role associated with the given service.
* @param event the CloudFormation custom resource event
* @returns the response to the custom resource event
* @throws if an error occurs
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/constructs/service-linked-role/src/lambdas/handler/index.ts#L52-L74 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | CacheStorage.constructor | constructor(scope: Construct, id: string, props: CacheStorageProps = {}) {
super(scope, id);
// By default, we destroy the cache storage when the stack is destroyed.
if (!props.removalPolicy) {
props.removalPolicy = cdk.RemovalPolicy.DESTROY;
}
// The cache storage.
this.storage = new s3.Bucket(this, 'Storage', {
encryption: props.encryptionKey ?
s3.BucketEncryption.KMS :
s3.BucketEncryption.S3_MANAGED,
encryptionKey: props.encryptionKey,
blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
autoDeleteObjects: props.removalPolicy === cdk.RemovalPolicy.DESTROY,
enforceSSL: true,
removalPolicy: props.removalPolicy,
lifecycleRules: [{
expiration: cdk.Duration.days(1),
abortIncompleteMultipartUploadAfter: cdk.Duration.days(1)
}]
});
} | /**
* `CacheStorage` constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/cache-storage.ts#L54-L77 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
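A usage sketch, assuming the construct is exported from the core package: passing an `encryptionKey` switches the bucket to KMS encryption, as the constructor above shows.

```typescript
import * as kms from 'aws-cdk-lib/aws-kms';
import { CacheStorage } from '@project-lakechain/core'; // Assumed import path.

// Inside a construct scope (`this`): create a KMS-encrypted cache whose
// objects expire after one day, per the lifecycle rule above.
const key = new kms.Key(this, 'CacheKey');
const cache = new CacheStorage(this, 'Cache', {
  encryptionKey: key
});
```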
project-lakechain | github_2023 | awslabs | typescript | CacheStorage.getBucket | public getBucket(): s3.IBucket {
return (this.storage);
} | /**
* @returns the bucket associated with the cache storage.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/cache-storage.ts#L82-L84 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | CacheStorage.id | public id(): string {
return (this.storage.bucketName);
} | /**
* @returns the identifier of the cache storage.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/cache-storage.ts#L89-L91 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | CacheStorage.kmsKey | public kmsKey(): kms.IKey | undefined {
return (this.storage.encryptionKey);
} | /**
* @returns the KMS key used to encrypt the cache storage.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/cache-storage.ts#L96-L98 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | CacheStorage.grantRead | public grantRead(identity: iam.IGrantable, objectsKeyPattern?: any): iam.Grant {
return (this.storage.grantRead(identity, objectsKeyPattern));
} | /**
* Grant read permissions for the cache storage and its contents to an IAM
* principal (Role/Group/User).
*
* If encryption is used, permission to use the key to decrypt the contents
* of the cache storage will also be granted to the same principal.
*
* @param identity The principal
* @param objectsKeyPattern Restrict the permission to a certain key pattern (default '*'). Parameter type is `any` but `string` should be passed in.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/cache-storage.ts#L110-L112 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | CacheStorage.grantWrite | public grantWrite(identity: iam.IGrantable, objectsKeyPattern?: any): iam.Grant {
return (this.storage.grantWrite(identity, objectsKeyPattern));
} | /**
* Grant write permissions to the cache storage to an IAM principal.
*
* If encryption is used, permission to use the key to encrypt the contents
* of written files will also be granted to the same principal.
*
* @param identity The principal
* @param objectsKeyPattern Restrict the permission to a certain key pattern (default '*'). Parameter type is `any` but `string` should be passed in.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/cache-storage.ts#L123-L125 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | CacheStorage.grantReadWrite | public grantReadWrite(identity: iam.IGrantable, objectsKeyPattern?: any): iam.Grant {
return (this.storage.grantReadWrite(identity, objectsKeyPattern));
} | /**
* Grant read/write permissions to the cache storage to an IAM principal.
*
* If encryption is used, permission to use the key to encrypt/decrypt the contents
* of the cache storage will also be granted to the same principal.
*
* @param identity The principal
* @param objectsKeyPattern Restrict the permission to a certain key pattern (default '*'). Parameter type is `any` but `string` should be passed in.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/cache-storage.ts#L136-L138 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | CacheStorage.grantDelete | public grantDelete(identity: iam.IGrantable, objectsKeyPattern?: any): iam.Grant {
return (this.storage.grantDelete(identity, objectsKeyPattern));
} | /**
* Grant delete permissions to the cache storage to an IAM principal.
*
* @param identity The principal
* @param objectsKeyPattern Restrict the permission to a certain key pattern (default '*'). Parameter type is `any` but `string` should be passed in.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/cache-storage.ts#L146-L148 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | CacheStorage.grantPut | public grantPut(identity: iam.IGrantable, objectsKeyPattern?: any): iam.Grant {
return (this.storage.grantPut(identity, objectsKeyPattern));
} | /**
* Grants s3:PutObject* and s3:Abort* permissions for the bucket associated with
* the cache storage to an IAM principal.
*
* If encryption is used, permission to use the key to encrypt the contents
* of written files will also be granted to the same principal.
*
* @param identity The principal
* @param objectsKeyPattern Restrict the permission to a certain key pattern (default '*'). Parameter type is `any` but `string` should be passed in.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/cache-storage.ts#L160-L162 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
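Taken together, the grant helpers simply forward to the underlying bucket. For instance, with `fn` assumed to be any `iam.IGrantable` such as a Lambda function:

```typescript
// Grant full read/write on the cache, then a read-only grant scoped
// to a hypothetical key prefix.
cache.grantReadWrite(fn);
cache.grantRead(fn, 'embeddings/*');
```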
project-lakechain | github_2023 | awslabs | typescript | Service.constructor | constructor(scope: Construct, id: string, protected description: ServiceDescription) {
super(scope, id);
// Verifying whether the version is valid.
if (!valid(description.version)) {
throw new Error(`Semver value '${description.version}' is invalid`);
}
// Exporting the service attributes.
for (const [key, value] of Object.entries(description.attrs ?? {})) {
this.addProperty(key, value);
}
// Exporting the description.
this.addProperty('description', description.description);
if (!process.env.DISABLE_TRACKING_CODE) {
// Apply the tracking code to the current stack.
this.applyTrackingCode(description, TRACKING_CODE);
}
// Adding service tags.
cdk.Tags.of(this).add('Context', 'project-lakechain');
cdk.Tags.of(this).add('Service', description.name);
cdk.Tags.of(this).add('Version', description.version);
} | /**
* Construct constructor.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/service.ts#L79-L104 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Service.serviceDescription | public serviceDescription(): ServiceDescription {
return (this.description);
} | /**
* @returns the service description.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/service.ts#L109-L111 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Service.addProperty | public addProperty(key: string, value: string) {
return (new ssm.StringParameter(this, key, {
description: `Attribute '${key}' for service '${this.description.name}'.`,
parameterName: `/services/${this.node.addr}/${this.description.name}/${this.description.version}/${key}`,
stringValue: value
}));
} | /**
* Registers a new SSM parameter associated with the
* given key and value.
* @returns the newly created SSM parameter.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/service.ts#L118-L124 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
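Given the parameter name template above, a call such as the following (the service name, version, key and value are all hypothetical) materializes a parameter under `/services/<node-addr>/<name>/<version>/<key>`:

```typescript
// Hypothetical service attribute; the resulting parameter name resembles:
//   /services/c83a9b.../text-splitter/1.0.0/chunk-size
service.addProperty('chunk-size', '4096');
```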
project-lakechain | github_2023 | awslabs | typescript | Service.applyTrackingCode | private applyTrackingCode(serviceDescription: ServiceDescription, code: string) {
const stack = cdk.Stack.of(this);
const description = stack.templateOptions.description ?? '';
const fullDescription = TRACKING_REGEXP.exec(description);
const tag = serviceDescription.name.split(TRACKING_TAG_SEPARATOR).join('_');
if (fullDescription == null) {
stack.templateOptions.description = `${description} (${code}) (version:${serviceDescription.version}) (tag:${tag})`;
} else {
const description = fullDescription[1];
const existingTags = fullDescription[5];
let newTags;
if (existingTags) {
const tags = existingTags.split(TRACKING_TAG_SEPARATOR);
if (tags.includes(tag)) {
newTags = existingTags;
} else {
newTags = existingTags + TRACKING_TAG_SEPARATOR + tag;
}
} else {
newTags = tag;
}
stack.templateOptions.description = `${description} (${code}) (version:${serviceDescription.version}) (tag:${newTags})`;
}
} | /**
* Adds the given tracking code to the stack description.
* @param serviceDescription the service description.
* @param code the tracking code to apply.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/service.ts#L131-L157 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
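A small self-contained illustration of the description format `applyTrackingCode` produces; the tracking code below is a placeholder (the real `TRACKING_CODE` constant is not shown), and the `'_'` tag separator is assumed from the tag normalization above.

```typescript
// Replicates the output format only; not the actual implementation.
const format = (description: string, code: string, version: string, tags: string[]) =>
  `${description} (${code}) (version:${version}) (tag:${tags.join('_')})`;

console.log(format('My pipeline stack', 'tracking-code-placeholder', '0.1.0', ['s3-trigger']));
// My pipeline stack (tracking-code-placeholder) (version:0.1.0) (tag:s3-trigger)
```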
project-lakechain | github_2023 | awslabs | typescript | Condition.constructor | constructor(subject: string) {
this.negated = false;
this.subject = subject;
this.scope = this.createScope(subject);
} | /**
* Condition constructor.
* @param subject the element in the message metadata
* to apply the conditional statement on.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L58-L62 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.createScope | private createScope(subject: string) {
const paths = subject.split('.');
const value = {};
// Transform the key object path into a scope hierarchy and set
// the subject to the last key.
paths.reduce((scope: any, key) => {
scope[key] = {};
return (scope[key]);
}, value);
return (value);
} | /**
* Initializes the scope of the conditional statement.
* The scope represents the hierarchy of objects used to
* address a given attribute.
* For example, in SNS filtering, the subject `foo.bar` will
* result in the following scope:
*
* {
* "foo": {
* "bar": {}
* }
* }
* @param subject the element in the message metadata to
* apply the conditional statement on.
* @returns the created scope.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L80-L92 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.equals | public equals(value: string | number) {
if (typeof value === 'string') {
return (this.includes(value));
} else {
if (this.negated) {
throw new Error(
'Strict equal numeric value conditionals with the `not` operator are not supported by SNS.'
);
} else {
set(this.scope, this.subject, [{"numeric": ["=", value]}]);
}
}
return (new ConditionalStatement(this.scope, this.subject));
} | /**
* The `equals` operator expresses a strict equality
* conditional on a string or numeric value.
* @param value the value to check the subject against.
* @returns a new conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L100-L113 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
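Since `equals` delegates to `includes` for strings, stringifying the resulting statement yields the SNS filter policy directly; the subject path below is illustrative.

```typescript
const statement = new Condition('data.document.type').equals('image/png');
console.log(JSON.stringify(statement));
// Expected output:
// {"data":{"document":{"type":["image/png"]}}}
```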
project-lakechain | github_2023 | awslabs | typescript | Condition.lt | public lt(value: number) {
if (this.negated) {
set(this.scope, this.subject, [{"numeric": [">=", value]}]);
} else {
set(this.scope, this.subject, [{"numeric": ["<", value]}]);
}
return (new ConditionalStatement(this.scope, this.subject));
} | /**
* The `lt` operator expresses a less-than
* conditional on a numeric value.
* @param value the value to check the subject against.
* @returns a new conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L121-L128 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.gt | public gt(value: number) {
if (this.negated) {
set(this.scope, this.subject, [{"numeric": ["<=", value]}]);
} else {
set(this.scope, this.subject, [{"numeric": [">", value]}]);
}
return (new ConditionalStatement(this.scope, this.subject));
} | /**
* The `gt` operator expresses a greater-than
* conditional on a numeric value.
* @param value the value to check the subject against.
* @returns a new conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L136-L143 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.lte | public lte(value: number) {
return (this.not().gt(value));
} | /**
* The `lte` operator expresses a less-than-or-equal
* conditional on a numeric value.
* @param value the value to check the subject against.
* @returns a new conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L151-L153 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.gte | public gte(value: number) {
return (this.not().lt(value));
} | /**
* The `gte` operator expresses a greater-than-or-equal
* conditional on a numeric value.
* @param value the value to check the subject against.
* @returns a new conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L161-L163 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.between | public between(operand_1: number, operand_2: number) {
const gteOp: RangeOperator = '>=';
const lteOp: RangeOperator = '<=';
if (this.negated) {
throw new Error('Range comparisons do not support the `not` operator.');
}
set(this.scope, this.subject, [{"numeric": [gteOp, operand_1, lteOp, operand_2]}]);
return (new ConditionalStatement(this.scope, this.subject));
} | /**
* The `between` operator expresses an inclusive range
* conditional on a numeric value.
* @param operand_1 the lower bound of the range.
* @param operand_2 the upper bound of the range.
* @returns a new conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L171-L180 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.includes | public includes(...values: string[] | number[]) {
if (typeof values[0] === 'string') {
if (this.negated) {
set(this.scope, this.subject, [{"anything-but": [...values]}]);
} else {
set(this.scope, this.subject, [...values]);
}
} else {
if (!this.negated) {
throw new Error('Numeric values are only supported by `includes` when negated.');
}
set(this.scope, this.subject, [{"anything-but": [...values]}]);
}
return (new ConditionalStatement(this.scope, this.subject));
} | /**
* The `includes` operator expresses a conditional
* on a set of string or numeric values.
* @param values the values to check the subject against.
* @returns a new conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L188-L202 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.startsWith | public startsWith(value: string) {
if (this.negated) {
set(this.scope, this.subject, [{"anything-but": {"prefix": value}}]);
} else {
set(this.scope, this.subject, [{"prefix": value}]);
}
return (new ConditionalStatement(this.scope, this.subject));
} | /**
* The `startsWith` operator expresses a conditional
* on a string prefix.
* @param value the prefix to check the subject against.
* @returns a new conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L210-L217 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.exists | public exists() {
set(this.scope, this.subject, [{ exists: !this.negated }]);
return (new ConditionalStatement(this.scope, this.subject));
} | /**
* The `exists` operator expresses a conditional
* on the existence of a given attribute.
* @returns a new conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L224-L227 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.not | public not() {
this.negated = !this.negated;
return (this);
} | /**
* The `not` operator negates the current
* conditional statement.
* @returns the current conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L234-L237 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.isNegated | public isNegated() {
return (this.negated);
} | /**
* @returns whether the current conditional statement
* is negated.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L243-L245 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.getScope | public getScope() {
return (this.scope);
} | /**
* @returns the scope object backing the current
* conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L251-L253 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | Condition.getSubject | public getSubject() {
return (this.subject);
} | /**
* @returns the subject of the conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L258-L260 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | ConditionalStatement.empty | static empty() {
return (new ConditionalStatement({}, ''));
} | /**
* @returns an empty conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L279-L281 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | ConditionalStatement.and | public and(condition: ConditionalStatement) {
const existing = get(this.aggregate, condition.subject());
if (existing) {
throw new Error(
`Cannot apply multiple conditions on the same subject: ${condition.subject()}`
);
}
merge(this.aggregate, condition.value());
return (this);
} | /**
* The `and` operator combines multiple conditions,
* each on a distinct subject, into a single statement.
* @param condition the condition to add to the current
* conditional statement.
* @returns the current conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L290-L300 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
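Combining statements across distinct subjects merges their scopes into one filter policy, for example:

```typescript
const statement = new Condition('data.document.type')
  .equals('text/plain')
  .and(new Condition('data.metadata.pages').gt(10));
console.log(JSON.stringify(statement));
// Expected output:
// {"data":{"document":{"type":["text/plain"]},
//          "metadata":{"pages":[{"numeric":[">",10]}]}}}
```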
project-lakechain | github_2023 | awslabs | typescript | ConditionalStatement.value | public value() {
return (this.aggregate);
} | /**
* @returns the SNS filtering syntax associated with
* the conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L306-L308 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | ConditionalStatement.subject | public subject() {
return (this.subjectValue);
} | /**
* @returns the subject of the conditional statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L313-L315 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | ConditionalStatement.toJSON | public toJSON() {
return (this.value());
} | /**
* @returns the JSON representation of the conditional
* statement.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/src/dsl/vocabulary/conditions.ts#L321-L323 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | DefaultMiddlewareBuilder.build | public build(): Middleware {
return (new DefaultTestMiddleware(
this.scope,
this.identifier, {
...this.props
}
));
} | /**
* @returns a new instance of the `TestMiddleware`
* service constructed with the given parameters.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/test/unit/middleware/middleware-conditional.test.ts#L31-L38 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | ProducerMiddlewareBuilder.build | public build(): ProducerMiddleware {
return (new ProducerMiddleware(
this.scope,
this.identifier, {
...this.props
}
));
} | /**
* @returns a new instance of the `ProducerMiddleware`
* service constructed with the given parameters.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/test/unit/middleware/middleware-connection.test.ts#L29-L36 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | ConsumerMiddlewareBuilder.build | public build(): ConsumerMiddleware {
return (new ConsumerMiddleware(
this.scope,
this.identifier, {
...this.props
}
));
} | /**
* @returns a new instance of the `ConsumerMiddleware`
* service constructed with the given parameters.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/test/unit/middleware/middleware-connection.test.ts#L71-L78 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | TestMiddlewareBuilder.build | public build(): TestMiddleware {
return (new TestMiddleware(
this.scope,
this.identifier, {
...this.props
}
));
} | /**
* @returns a new instance of the `TestMiddleware`
* service constructed with the given parameters.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/test/unit/middleware/middleware-defaults.test.ts#L46-L53 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |
project-lakechain | github_2023 | awslabs | typescript | TestMiddlewareBuilder.build | public build(): TestMiddleware {
return (new TestMiddleware(
this.scope,
this.identifier, {
...this.props
}
));
} | /**
* @returns a new instance of the `TestMiddleware`
* service constructed with the given parameters.
*/ | https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/packages/core/test/unit/middleware/middleware-kms.test.ts#L31-L38 | 4285173e80584eedfc1a8424d3d1b6c1a7038088 |