repo_name
string
dataset
string
owner
string
lang
string
func_name
string
code
string
docstring
string
url
string
sha
string
nodite-light
github_2023
nodite
typescript
RoleService.selectMenuPerms
public async selectMenuPerms(roleId: number): Promise<Pick<IMenu, 'menuId' | 'perms'>[]> { if (await RoleMenuModel.hasFullPerms(roleId)) { return [{ menuId: '*', perms: '*:*:*' }]; } const role = await RoleModel.findOne({ attributes: [], where: { roleId }, include: [ { model: MenuModel, attributes: ['menuId', 'perms'], required: false, }, ], }); return role?.menus || []; }
/** * Select menu list. * @param roleId * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/role/role.service.ts#L110-L128
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
RoleService.updateMenuPerms
public async updateMenuPerms(roleId: number, menuIds: string[]): Promise<void> { if (roleId === 1) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'Role is not allow update!'); } // start transaction. const transaction = await RoleMenuModel.sequelize.transaction(); // role menu associate. await RoleMenuModel.destroy({ where: { roleId }, transaction }); await RoleMenuModel.bulkCreate( menuIds.map((menuId) => ({ roleId, menuId })), { transaction }, ); // update casbin. await CasbinModel.removeRolePolicies(roleId, transaction); if (!lodash.isEmpty(menuIds)) { const menuPerms = menuIds.includes('*') ? ['*:*:*'] : lodash .chain( await MenuModel.findAll({ attributes: ['perms'], where: { menuId: menuIds }, transaction, }), ) .map('perms') .filter() .value(); await CasbinModel.addRolePolicies(roleId, menuPerms, transaction); } // commit transaction. await transaction.commit(); }
/** * Save menu perms. * @param roleId * @param menuIds */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/role/role.service.ts#L135-L174
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
RoleService.selectUsersOfRole
public async selectUsersOfRole(roleId: number): Promise<IUserWithRoles[]> { const userAttrs = ['userId', 'username', 'nickname', 'email', 'status', 'createTime']; const roleAttrs = ['roleId']; const users = await UserModel.findAll({ attributes: userAttrs, include: [ { model: RoleModel, attributes: roleAttrs, where: { roleId }, required: false, }, ], }); return users; }
/** * Select role's users. * @param roleId * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/role/role.service.ts#L181-L198
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
RoleService.assignRoleToUsers
public async assignRoleToUsers( roleId: number, userIds: number[], transaction?: Transaction, ): Promise<void> { if (lodash.isEmpty(userIds)) return; // start transaction. const tac = transaction || (await RoleUserModel.sequelize.transaction()); // role user associate. await RoleUserModel.bulkCreate( userIds.map((userId) => ({ roleId, userId })), { transaction: tac }, ); // update casbin. await Promise.all( userIds.map((userId) => CasbinModel.assignRolesToUser([roleId], userId, tac)), ); // commit transaction. await tac.commit(); }
/** * Assign role to users. * @param roleId * @param userIds */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/role/role.service.ts#L205-L228
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
RoleService.unassignRoleOfUsers
public async unassignRoleOfUsers( roleId: number, userIds: number[], transaction?: Transaction, ): Promise<void> { if (lodash.isEmpty(userIds)) return; // start transaction. const tac = transaction || (await RoleUserModel.sequelize.transaction()); // role user associate. await RoleUserModel.destroy({ where: { roleId, userId: userIds }, transaction: tac }); // update casbin. await Promise.all( userIds.map((userId) => CasbinModel.unassignRolesOfUser([roleId], userId, tac)), ); // commit transaction. await tac.commit(); }
/** * Unassign role of users. * @param roleId * @param userIds */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/role/role.service.ts#L235-L255
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
RoleMenuModel.hasFullPerms
public static async hasFullPerms(roleId: number): Promise<boolean> { const count = await this.count({ where: { roleId, menuId: '*' } }); return count > 0; }
/** * Check if the role has full permissions. * @param roleId * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/role/role_menu.model.ts#L31-L34
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserModel.bcryptPassword
bcryptPassword(): void { if (this.skipBcryptPassword) return; const salt = bcrypt.genSaltSync(10, 'a'); const pass = this.getDataValue('password'); if (!pass) return; this.setDataValue('password', bcrypt.hashSync(this.getDataValue('password'), salt)); }
/** * bcryptPassword. * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.model.ts#L101-L107
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserModel.validPassword
static validPassword(rawPassword: string, encodedPassword: string): boolean { if (!bcrypt.compareSync(rawPassword, encodedPassword)) { throw new AppError(httpStatus.UNAUTHORIZED, 'Invalid password'); } return true; }
/** * validPassword. * @param rawPassword * @param encodedPassword * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.model.ts#L115-L120
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.selectUserList
public async selectUserList(params?: QueryParams): Promise<SequelizePagination<IUser>> { const page = await UserModel.paginate({ attributes: ['userId', 'username', 'nickname', 'email', 'status', 'createTime'], where: UserModel.buildQueryWhere(params), ...lodash.pick(params, ['itemsPerPage', 'page']), }); return { ...page, items: page.items.map((i) => i.toJSON()), }; }
/** * Search users. * @param user * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L37-L48
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.selectUserById
public async selectUserById(id?: number): Promise<IUser> { const user = await UserModel.findOne({ where: { userId: id } }); if (!user) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'User not found'); } return user.toJSON(); }
/** * Select user by id * @param id * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L55-L61
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.selectProfile
public async selectProfile(id: number): Promise<IProfile> { const user = await UserModel.findOne({ where: { userId: id }, include: [{ model: RoleModel, required: false }], }); if (!user) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'User not found'); } const userJson = user.toJSON(); const perms = await Promise.all( lodash.map(userJson.roles, async (role) => this.roleService.selectMenuPerms(role.roleId)), ); return { ...lodash.omit(userJson, ['roles']), roles: lodash.map(userJson.roles, 'roleKey'), perms: lodash.chain(perms).flatten().map('perms').uniq().value(), }; }
/** * Select user profile. * @param id * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L68-L89
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.getByUsername
public async getByUsername(username: string): Promise<IUser> { const user = await UserModel.findOne({ where: { username } }); if (!user) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'User not found'); } return user.toJSON(); }
/** * Get by Username. * @param username * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L96-L102
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.getByEmail
public async getByEmail(email: string): Promise<IUser> { const user = await UserModel.findOne({ where: { email } }); if (!user) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'User not found'); } return user.toJSON(); }
/** * Get by Email. * @param email * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L109-L115
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.create
public async create(user: IUserCreate): Promise<IUser> { return UserModel.create(user); }
/** * Create. * @param user * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L122-L124
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.update
public async update(id: number, body: IUserUpdate): Promise<IUser> { const preUser = await UserModel.findOne({ where: { userId: id } }); if (!preUser) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'User not found'); } preUser.skipBcryptPassword = true; // update user. const user = await preUser.update(body); return user; }
/** * Update. * @param id * @param body * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L132-L145
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.resetPassword
public async resetPassword(id: number, data: IPasswordReset): Promise<IUser> { if (data.password === '') { throw new AppError( httpStatus.BAD_REQUEST, 'Password cannot be empty string, please set null or remove it if you want to keep the old password', ); } const user = await UserModel.findOne({ where: { userId: id } }); if (!user) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'User not found'); } if (!data.password || user.getDataValue('password') === data.password) { user.skipBcryptPassword = true; } else { user.skipBcryptPassword = false; } return user.update({ password: data.password }); }
/** * Reset password. * @param id * @param data * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L153-L174
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.delete
public async delete(id: number): Promise<void> { if (await this.isAdmin(id)) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'Cannot delete admin user!'); } const requester = httpContext.get('user') as AuthorizedRequest['user']; if (id === requester.userId) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'Cannot delete yourself!'); } const user = await UserModel.findOne({ where: { userId: id } }); if (!user) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'User not found'); } if (user.getDataValue('deleted') === 9) { throw new AppError(httpStatus.UNPROCESSABLE_ENTITY, 'User is not allow delete!'); } return user.destroy(); }
/** * Delete. * @param id * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L181-L203
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.selectRolesWithUser
public async selectRolesWithUser(userId: number): Promise<IRoleWithUsers[]> { const roleAttrs = ['roleId', 'roleName', 'roleKey', 'orderNum', 'status', 'createTime']; const userAttrs = ['userId']; const roles = await RoleModel.findAll({ attributes: roleAttrs, include: [ { model: UserModel, attributes: userAttrs, where: { userId }, required: false, }, ], }); return roles; }
/** * Select user's roles. * @param userId * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L210-L227
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.assignRolesToUser
public async assignRolesToUser( roleIds: number[], userId: number, transaction?: Transaction, ): Promise<void> { if (lodash.isEmpty(roleIds)) return; // start transaction. const tac = transaction || (await RoleUserModel.sequelize.transaction()); // role user associate. await RoleUserModel.bulkCreate( roleIds.map((roleId) => ({ roleId, userId })), { transaction: tac }, ); // update casbin. await CasbinModel.assignRolesToUser(roleIds, userId, tac); // commit transaction. await tac.commit(); }
/** * Assign roles to user. * @param roleIds * @param userId * @param transaction * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L236-L257
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.unassignRolesOfUser
public async unassignRolesOfUser( roleIds: number[], userId: number, transaction?: Transaction, ): Promise<void> { if (lodash.isEmpty(roleIds)) return; // start transction. const tac = transaction || (await RoleUserModel.sequelize.transaction()); // role user associate. await RoleUserModel.destroy({ where: { roleId: roleIds, userId }, transaction: tac }); // update casbin. await CasbinModel.unassignRolesOfUser(roleIds, userId, tac); // commit transaction. await tac.commit(); }
/** * Unassign roles of user. * @param roleIds * @param userId * @param transaction * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L266-L284
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.isAdmin
public async isAdmin(userId?: number): Promise<boolean> { if (userId === 1) return true; const hasAdminRole = await RoleUserModel.findOne({ where: { userId, roleId: 1 } }); return !!hasAdminRole; }
/** * Is admin? * @param userId * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L291-L295
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
UserService.validPassword
public validPassword(rawPassword: string, encodedPassword: string): boolean { return UserModel.validPassword(rawPassword, encodedPassword); }
/** * Valid password. * @param user * @param rawPassword * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/services/admin-api/src/components/user/user.service.ts#L303-L305
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
initCharts
const initCharts = () => { const el = unref(elRef); if (!el || !unref(el)) { return; } chartInstance = echarts.init(el, theme); };
// 初始化echart
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/websites/admin-web/src/plugins/echarts.ts#L82-L88
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
setOption
const setOption = (option: EChartsOption) => { nextTick(() => { if (!chartInstance) { initCharts(); if (!chartInstance) return; } chartInstance.setOption(option); hideLoading(); }); };
// 更新/设置配置
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/websites/admin-web/src/plugins/echarts.ts#L91-L101
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
getInstance
function getInstance(): echarts.ECharts | null { if (!chartInstance) { initCharts(); } return chartInstance; }
// 获取echart实例
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/websites/admin-web/src/plugins/echarts.ts#L104-L109
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
resize
function resize() { chartInstance?.resize(); }
// 更新大小
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/websites/admin-web/src/plugins/echarts.ts#L112-L114
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
watchEl
function watchEl() { // 给元素添加过渡 if (animation) { elRef.value.style.transition = 'width 1s, height 1s'; } const resizeObserver = new ResizeObserver((entries) => resize()); resizeObserver.observe(elRef.value); }
// 监听元素大小
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/websites/admin-web/src/plugins/echarts.ts#L117-L124
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
showLoading
function showLoading() { if (!chartInstance) { initCharts(); } chartInstance?.showLoading(); }
// 显示加载状态
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/websites/admin-web/src/plugins/echarts.ts#L127-L132
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
hideLoading
function hideLoading() { if (!chartInstance) { initCharts(); } chartInstance?.hideLoading(); }
// 显示加载状态
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/websites/admin-web/src/plugins/echarts.ts#L134-L139
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
getCurrLang
function getCurrLang(): string { let langcode = document.querySelector('html')?.getAttribute('lang'); if (langcode) return langcode; try { const { 0: navLangcode } = navigator.language.split('-'); if (Object.keys(messages).includes(navLangcode)) langcode = navLangcode; if (navLangcode == 'zh') langcode = 'zhHans'; } catch (e) { /* empty */ } return langcode || 'en'; }
/** * Get current langcode. * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/websites/admin-web/src/utils/locale.ts#L15-L29
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
nodite-light
github_2023
nodite
typescript
getDefLang
function getDefLang(): string { return document.querySelector('html')?.getAttribute('def-lang') || getCurrLang() || 'en'; }
/** * Get default langcode. * @returns */
https://github.com/nodite/nodite-light/blob/f8827d69f0c67e7c59042fd0f3e246a67e80e0ec/websites/admin-web/src/utils/locale.ts#L35-L37
f8827d69f0c67e7c59042fd0f3e246a67e80e0ec
project-lakechain
github_2023
awslabs
typescript
DocumentIndexPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline extracting metadata from documents and indexing them in OpenSearch.', ...env }); // The VPC in which OpenSearch will be deployed. const vpc = this.createVpc('Vpc'); // The OpenSearch domain. const openSearch = this.createOpenSearchDomain(vpc); // The source bucket. const bucket = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// ////// Pipeline Data Sources ////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(bucket) .build(); /////////////////////////////////////////// /// Pipeline Document Converters /// /////////////////////////////////////////// // Convert PDF documents to text. const pdfConverter = new PdfTextConverter.Builder() .withScope(this) .withIdentifier('PdfConverter') .withCacheStorage(cache) .withSource(trigger) .build(); // Convert text-oriented documents (Docx, Markdown, HTML, etc) to text. const pandocConverter = new PandocTextConverter.Builder() .withScope(this) .withIdentifier('PandocConverter') .withCacheStorage(cache) .withSource(trigger) .build(); /////////////////////////////////////////// /// Pipeline Metadata Extractors /// /////////////////////////////////////////// // Extracts metadata from audio files. const audioMetadata = new AudioMetadataExtractor.Builder() .withScope(this) .withIdentifier('AudioMetadata') .withCacheStorage(cache) .withSource(trigger) .build(); // Extracts metadata from images. 
const imageMetadata = new ImageMetadataExtractor.Builder() .withScope(this) .withIdentifier('ImageMetadata') .withCacheStorage(cache) .withSource(trigger) .build(); // Transform all images to PNG format, // resize them to keep a smaller image to pass // to the Rekognition image processor. const sharpTransform = new SharpImageTransform.Builder() .withScope(this) .withIdentifier('SharpImageTransform') .withCacheStorage(cache) .withSource(imageMetadata) .withSharpTransforms( sharp() .png() .resize(1024) ) .build(); // The Rekognition image processor will identify labels in processed images // in order to extract relevant keywords about the image that will be indexed. const rekognition = new RekognitionImageProcessor.Builder() .withScope(this) .withIdentifier('RekognitionImageProcessor') .withCacheStorage(cache) .withSource(sharpTransform) .withIntent( r.detect().labels(r.confidence(90)) ) .build(); // Extracts metadata from text documents. const nlpProcessor = new NlpTextProcessor.Builder() .withScope(this) .withIdentifier('NlpProcessor') .withCacheStorage(cache) .withSources([ trigger, pdfConverter, pandocConverter ]) .withIntent( l.nlp() .language() .readingTime() .stats() ) .build(); // Extracts metadata from video files. const videoMetadata = new VideoMetadataExtractor.Builder() .withScope(this) .withIdentifier('VideoMetadata') .withCacheStorage(cache) .withSource(trigger) .build(); /////////////////////////////////////////// //// Pipeline Storage Providers //// /////////////////////////////////////////// // Indexes the extracted metadata in OpenSearch. new OpenSearchStorageConnector.Builder() .withScope(this) .withIdentifier('OpenSearchStorage') .withCacheStorage(cache) .withSources([ audioMetadata, nlpProcessor, videoMetadata, rekognition ]) .withDomain(openSearch.domain) .withVpc(vpc) .withIndexName(OPENSEARCH_INDEX_NAME) .build(); // Display the source bucket information in the console. 
new cdk.CfnOutput(this, 'BucketName', { description: 'The name of the source bucket.', value: bucket.bucketName }); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'DashboardUrl', { description: 'The OpenSearch dashboard URL.', value: `https://${openSearch.domain.domainEndpoint}/_dashboards` }); // Display the user pool address. new cdk.CfnOutput(this, 'UserPoolUrl', { description: 'The Cognito user pool user management address.', value: `https://${cdk.Aws.REGION}.console.aws.amazon.com/cognito/v2/idp/user-pools/${openSearch.userPool.userPoolId}/users` }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-document-index/stack.ts#L56-L223
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
DocumentIndexPipeline.createOpenSearchDomain
private createOpenSearchDomain(vpc: ec2.IVpc) { const openSearch = new OpenSearchDomain(this, 'Domain', { vpc }); // Create the OpenSearch index. new OpenSearchIndex(this, 'Index', { indexName: OPENSEARCH_INDEX_NAME, endpoint: openSearch.domain, vpc, body: { mappings: { properties: { time: { type: 'date' } } } } }); // Upload the dashboard for visualizing documents // on OpenSearch. new OpenSearchSavedObject(this, 'Dashboard', { domain: openSearch.domain, vpc, savedObject: { data: fs.readFileSync( path.resolve(__dirname, 'assets', 'dashboard.ndjson') ).toString('utf-8'), name: 'dashboard.ndjson' } }); return (openSearch); }
/** * Creates a new OpenSearch domain for this example, * and automatically creates the index and dashboard * for visualizing the documents. * @param vpc the VPC in which the OpenSearch domain * should be deployed. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-document-index/stack.ts#L232-L267
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
DocumentIndexPipeline.createVpc
private createVpc(id: string): ec2.IVpc { return (new ec2.Vpc(this, id, { enableDnsSupport: true, enableDnsHostnames: true, ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'), maxAzs: 1, subnetConfiguration: [{ name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 }, { name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 }] })); }
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-document-index/stack.ts#L274-L290
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
PodcastGeneratorStack.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline creating AWS news podcast episodes on the new releases of the day.', ...env }); // The VPC in which the FFMPEG processor will be deployed. const vpc = this.createVpc('Vpc'); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Schedules the execution of the pipeline every 24 hours. // The AWS News RSS feed will be forwarded as an RSS document // to the pipeline. const trigger = new SchedulerEventTrigger.Builder() .withScope(this) .withIdentifier('SchedulerEventTrigger') .withCacheStorage(cache) .withSchedule( scheduler.ScheduleExpression.rate(cdk.Duration.hours(24)) ) .withDocuments([ 'https://aws.amazon.com/fr/blogs/aws/feed/' ]) .build(); // The syndication feed processor will parse the RSS feed // associated with the input URL, and create a new HTML document // for each feed item. const feeds = new SyndicationFeedProcessor.Builder() .withScope(this) .withIdentifier('SyndicationFeedProcessor') .withCacheStorage(cache) .withSource(trigger) .build(); // This condition will filter out all RSS feed items that // have not been created today. const filter = new Condition.Builder() .withScope(this) .withIdentifier('Condition') .withCacheStorage(cache) .withSource(feeds) .withConditional(filterOut) .build(); // The newspaper3k parser will extract the relevant text // from each HTML document associated with an RSS feed item. 
const parser = new Newspaper3kParser.Builder() .withScope(this) .withIdentifier('Newspaper3kParser') .withCacheStorage(cache) .build(); filter.onMatch(parser); // The feed reducer will aggregate all parsed text documents // from the different RSS feed items into a single event. // We use a 15 seconds time window during which we aggregate // the feed items. const feedReducer = new Reducer.Builder() .withScope(this) .withIdentifier('FeedReducer') .withCacheStorage(cache) .withSource(parser) .withReducerStrategy(new TimeWindowStrategy.Builder() .withTimeWindow(cdk.Duration.seconds(15)) .withJitter(cdk.Duration.seconds(5)) .build()) .build(); // This step uses Amazon Bedrock to generate a podcast story // using the aggregated input document containing the text // for all gathered blog posts. // This is generated as a structured JSON. const podcastGenerator = new StructuredEntityExtractor.Builder() .withScope(this) .withIdentifier('StructuredEntityExtractor') .withCacheStorage(cache) .withRegion('us-east-1') .withSource(feedReducer) .withSchema(schema) .withInstructions(prompt) .withModelParameters({ temperature: 0.5 }) .build(); // The transform will create a new document containing each // voice from the different personas in the podcast episode, // with the metadata containing the voice used to synthesize // the conversation, and the order of the conversation in the // original document. const transform = new Transform.Builder() .withScope(this) .withIdentifier('Transform') .withCacheStorage(cache) .withSource(podcastGenerator) .withTransformExpression(transformExpression) .build(); // The host synthesizer will synthesize the conversations // associated with the host using the host voice. // Note that longform voices are only available in the us-east-1 region. 
const hostSynthesizer = new PollySynthesizer.Builder() .withScope(this) .withIdentifier('HostSynthesizer') .withCacheStorage(cache) .withSource(transform, when('data.metadata.custom.voice').equals('Ruth')) .withLanguageOverride('en') .withVoiceMapping('en', cdk.Aws.REGION === 'us-east-1' ? v.longform('Ruth') : v.neural('Ruth')) .build(); // The guest synthesizer will synthesize the conversations // associated with the guest using the guest voice. // Note that longform voices are only available in the us-east-1 region. const guestSynthesizer = new PollySynthesizer.Builder() .withScope(this) .withIdentifier('GuestSynthesizer') .withCacheStorage(cache) .withSource(transform, when('data.metadata.custom.voice').equals('Gregory')) .withLanguageOverride('en') .withVoiceMapping('en', cdk.Aws.REGION === 'us-east-1' ? v.longform('Gregory') : v.neural('Gregory')) .build(); // The reducer middleware will aggregate all synthesized voice // documents into a single event. const voiceReducer = new Reducer.Builder() .withScope(this) .withIdentifier('PollyReducer') .withCacheStorage(cache) .withSources([ hostSynthesizer, guestSynthesizer ]) .withReducerStrategy(new TimeWindowStrategy.Builder() .withTimeWindow(cdk.Duration.minutes(2)) .withJitter(cdk.Duration.seconds(10)) .build()) .build(); // The FFMPEG processor will concatenate the audio files // generated by the speech synthesizer. const ffmpeg = new FfmpegProcessor.Builder() .withScope(this) .withIdentifier('FfmpegProcessor') .withCacheStorage(cache) .withVpc(vpc) .withSource(voiceReducer) .withIntent(concat) .build(); // Write the results to the destination bucket. new S3StorageConnector.Builder() .withScope(this) .withIdentifier('S3StorageConnector') .withCacheStorage(cache) .withDestinationBucket(destination) .withSource(ffmpeg) .build(); // Display the destination bucket information in the console. 
new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-podcast-generator/stack.ts#L64-L248
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
PodcastGeneratorStack.createVpc
/**
 * Creates the VPC used by the pipeline's compute resources.
 * @param id the VPC identifier.
 * @returns a new single-AZ VPC with a public, private and
 * isolated subnet for the pipeline.
 */
private createVpc(id: string): ec2.IVpc {
  // Subnet layout: a small public subnet used by NAT Gateways to
  // provide Internet access to the containers, a larger private
  // subnet used by the containers, and an isolated subnet used by EFS.
  const subnets: ec2.SubnetConfiguration[] = [
    { name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 },
    { name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 },
    { name: 'isolated', subnetType: ec2.SubnetType.PRIVATE_ISOLATED, cidrMask: 28 }
  ];
  return new ec2.Vpc(this, id, {
    enableDnsSupport: true,
    enableDnsHostnames: true,
    ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
    maxAzs: 1,
    subnetConfiguration: subnets
  });
}
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-podcast-generator/stack.ts#L255-L279
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
RagPipeline.constructor
/**
 * Stack constructor.
 * Builds a RAG ingestion pipeline: documents uploaded to the source
 * bucket are converted to text (PDF, Pandoc-supported formats, or
 * audio transcriptions summarized by Claude), split into chunks,
 * embedded with a Cohere model on Amazon Bedrock, and stored in an
 * OpenSearch vector index.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the stack environment properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'An end-to-end RAG pipeline using Amazon Bedrock and Amazon OpenSearch.',
    ...env
  });

  // The VPC in which OpenSearch will be deployed.
  const vpc = this.createVpc('Vpc');

  // The OpenSearch domain.
  const openSearch = new OpenSearchDomain(this, 'Domain', {
    vpc
  });

  // The source bucket.
  const bucket = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  //////      Pipeline Data Sources    //////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(bucket)
    .build();

  ///////////////////////////////////////////
  ///    Pipeline Document Converters     ///
  ///////////////////////////////////////////

  // Convert PDF documents to text.
  const pdfConverter = new PdfTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PdfConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
  const pandocConverter = new PandocTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PandocConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // Convert audio recordings to text.
  const transcribe = new TranscribeAudioProcessor.Builder()
    .withScope(this)
    .withIdentifier('TranscribeTextProcessor')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withOutputFormats('vtt')
    .build();

  // Convert the VTT transcription file to a summarized
  // version of the conversation.
  const textProcessor = new AnthropicTextProcessor.Builder()
    .withScope(this)
    .withIdentifier('TextProcessor')
    .withCacheStorage(cache)
    .withSource(transcribe)
    .withModel(AnthropicTextModel.ANTHROPIC_CLAUDE_V3_SONNET)
    .withRegion('us-east-1')
    .withPrompt(`
      Give a very comprehensive description of the content of this transcription file with these constraints:
      - Summarize all the data points of the transcript.
      - Focus only on the content of the transcript, not the formatting.
      - Don't say "This is a transcription of an audio file" or anything similar, just output the summary.
      - The output should be spread in multiple paragraphs.
    `)
    .build();

  ///////////////////////////////////////////
  //////         Text Splitter         //////
  ///////////////////////////////////////////

  // Split the text into chunks.
  const textSplitter = new RecursiveCharacterTextSplitter.Builder()
    .withScope(this)
    .withIdentifier('RecursiveCharacterTextSplitter')
    .withCacheStorage(cache)
    .withChunkSize(2000)
    .withSources([
      trigger,
      pdfConverter,
      pandocConverter,
      textProcessor
    ])
    .build();

  /////////////////////////////////////
  ////   Embeddings with Bedrock   ////
  /////////////////////////////////////

  // Creates embeddings for the text using a Cohere embedding
  // model hosted on Amazon Bedrock.
  const embeddingProcessor = new CohereEmbeddingProcessor.Builder()
    .withScope(this)
    .withIdentifier('CohereEmbeddingProcessor')
    .withCacheStorage(cache)
    .withSource(textSplitter)
    // You can specify the embedding model to use.
    .withModel(CohereEmbeddingModel.COHERE_EMBED_MULTILINGUAL_V3)
    // You can also specify a region that supports Amazon Bedrock.
    .withRegion('us-east-1')
    .build();

  ///////////////////////////////////////////
  ////    Pipeline Storage Providers     ////
  ///////////////////////////////////////////

  // Vector storage for text.
  new OpenSearchVectorStorageConnector.Builder()
    .withScope(this)
    .withIdentifier('TextVectorStorage')
    .withCacheStorage(cache)
    .withEndpoint(openSearch.domain)
    .withSource(embeddingProcessor)
    .withVpc(vpc)
    // Store the chunk content alongside its vector so retrieval
    // can return the document text directly.
    .withIncludeDocument(true)
    .withIndex(new OpenSearchVectorIndexDefinition.Builder()
      .withIndexName('text-vectors')
      .withKnnMethod('hnsw')
      .withKnnEngine('nmslib')
      .withSpaceType('l2')
      // NOTE: must match the output dimensionality of the selected
      // Cohere embedding model above.
      .withDimensions(1024)
      .withParameters({
        'ef_construction': 512,
        'm': 16
      })
      .build()
    )
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucket', {
    description: 'The name of the source bucket.',
    value: bucket.bucketName
  });

  // Display the OpenSearch endpoint.
  new cdk.CfnOutput(this, 'OpenSearchEndpoint', {
    description: 'The endpoint of the OpenSearch domain.',
    value: `https://${openSearch.domain.domainEndpoint}`
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-rag-pipeline/stack.ts#L45-L201
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
RagPipeline.createVpc
/**
 * Creates the VPC in which the OpenSearch domain and the
 * pipeline's compute resources are deployed.
 * @param id the VPC identifier.
 * @returns a new single-AZ VPC with a public, private and
 * isolated subnet for the pipeline.
 */
private createVpc(id: string): ec2.IVpc {
  // Three-tier subnet layout for the pipeline.
  const subnets: ec2.SubnetConfiguration[] = [
    { name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 },
    { name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 },
    { name: 'isolated', subnetType: ec2.SubnetType.PRIVATE_ISOLATED, cidrMask: 24 }
  ];
  return new ec2.Vpc(this, id, {
    enableDnsSupport: true,
    enableDnsHostnames: true,
    ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
    maxAzs: 1,
    subnetConfiguration: subnets
  });
}
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-rag-pipeline/stack.ts#L208-L228
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
getContent
const getContent = (question: string, chunks: string[]) => { const content = [{ text: USER_PROMPT.replace('{{ question }}', question) }]; // Add the document to the prompt. for (const chunk of chunks) { content.push({ text: `<document>${chunk}</document>` }); } return (content); }
/** * Creates the content array to pass to the model. * @param question The question to ask the model. * @param chunks The chunks to pass to the model. * @returns an array of messages to pass to the model. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-rag-pipeline/cli/src/common/query-llm.ts#L50-L63
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
SearchEnginePipeline.constructor
/**
 * Stack constructor.
 * Builds a multi-modal search-engine ingestion pipeline: uploaded
 * documents are converted to text (PDF, Pandoc formats) or resized
 * (images), topic-modeled with KeyBERT, chunked, embedded (Cohere
 * for text, CLIP for images), and stored in two OpenSearch vector
 * indices.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the stack environment properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'An end-to-end search engine multi-modal ingestion pipeline using Amazon OpenSearch.',
    ...env
  });

  // The VPC in which OpenSearch will be deployed.
  const vpc = this.createVpc('Vpc');

  // The OpenSearch domain.
  const openSearch = new OpenSearchDomain(this, 'Domain', {
    vpc
  });

  // The source bucket.
  const bucket = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  //////      Pipeline Data Sources    //////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(bucket)
    .build();

  ///////////////////////////////////////////
  ///    Pipeline Document Converters     ///
  ///////////////////////////////////////////

  // Convert PDF documents to text.
  const pdfConverter = new PdfTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PdfConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
  const pandocConverter = new PandocTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PandocConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // Resize images to a width of 512px and convert them to PNG.
  // 512px matches the input expected by the CLIP processor below.
  const imageTransform = new SharpImageTransform.Builder()
    .withScope(this)
    .withIdentifier('SharpTransform')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withSharpTransforms(
      sharp()
        .resize(512)
        .png()
    )
    .build();

  ///////////////////////////////////////////
  //////         Topic Modeling        //////
  ///////////////////////////////////////////

  // Performs topic modeling by extracting keywords
  // from text using KeyBERT.
  const keybertProcessor = new KeybertTextProcessor.Builder()
    .withScope(this)
    .withIdentifier('KeybertProcessor')
    .withCacheStorage(cache)
    .withSources([
      trigger,
      pdfConverter,
      pandocConverter
    ])
    .withVpc(vpc)
    .build();

  ///////////////////////////////////////////
  //////         Text Splitter         //////
  ///////////////////////////////////////////

  // Split the text into chunks.
  const textSplitter = new RecursiveCharacterTextSplitter.Builder()
    .withScope(this)
    .withIdentifier('RecursiveCharacterTextSplitter')
    .withCacheStorage(cache)
    .withChunkSize(2048)
    .withSource(keybertProcessor)
    .build();

  /////////////////////////////////////
  ////   Embeddings with Bedrock   ////
  /////////////////////////////////////

  // Create embeddings for each chunk of text using
  // the Cohere embedding multilingual model hosted
  // on Amazon Bedrock.
  const cohereProcessor = new CohereEmbeddingProcessor.Builder()
    .withScope(this)
    .withIdentifier('CohereEmbeddingProcessor')
    .withCacheStorage(cache)
    .withSource(textSplitter)
    .withRegion('us-east-1')
    .withModel(CohereEmbeddingModel.COHERE_EMBED_MULTILINGUAL_V3)
    .build();

  // Create embeddings for images using CLIP.
  const clipProcessor = new ClipImageProcessor.Builder()
    .withScope(this)
    .withIdentifier('ClipImageProcessor')
    .withCacheStorage(cache)
    .withSource(imageTransform)
    .withVpc(vpc)
    .build();

  ///////////////////////////////////////////
  ////    Pipeline Storage Providers     ////
  ///////////////////////////////////////////

  // Vector storage for text.
  new OpenSearchVectorStorageConnector.Builder()
    .withScope(this)
    .withIdentifier('TextVectorStorage')
    .withCacheStorage(cache)
    .withEndpoint(openSearch.domain)
    .withSource(cohereProcessor)
    .withVpc(vpc)
    .withIncludeDocument(true)
    .withIndex(new OpenSearchVectorIndexDefinition.Builder()
      .withIndexName('text-vectors')
      .withKnnMethod('hnsw')
      .withKnnEngine('nmslib')
      .withSpaceType('l2')
      // NOTE: must match the Cohere embedding model's output size.
      .withDimensions(1024)
      .withParameters({
        'ef_construction': 512,
        'm': 16
      })
      .build()
    )
    .build();

  // Vector storage for images.
  new OpenSearchVectorStorageConnector.Builder()
    .withScope(this)
    .withIdentifier('ImageVectorStorage')
    .withCacheStorage(cache)
    .withEndpoint(openSearch.domain)
    .withSource(clipProcessor)
    .withVpc(vpc)
    .withIndex(new OpenSearchVectorIndexDefinition.Builder()
      .withIndexName('image-vectors')
      .withKnnMethod('hnsw')
      .withKnnEngine('nmslib')
      .withSpaceType('cosinesimil')
      // NOTE: must match the CLIP embedding output size.
      .withDimensions(512)
      .withParameters({
        'ef_construction': 512,
        'm': 16
      })
      .build()
    )
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucket', {
    description: 'The name of the source bucket.',
    value: bucket.bucketName
  });

  // Display the OpenSearch endpoint.
  new cdk.CfnOutput(this, 'OpenSearchEndpoint', {
    description: 'The endpoint of the OpenSearch domain.',
    value: `https://${openSearch.domain.domainEndpoint}`
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-search-engine/stack.ts#L46-L228
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
SearchEnginePipeline.createVpc
/**
 * Creates the VPC hosting the OpenSearch domain and the
 * pipeline's compute resources.
 * @param id the VPC identifier.
 * @returns a new single-AZ VPC with a public, private and
 * isolated subnet for the pipeline.
 */
private createVpc(id: string): ec2.IVpc {
  // Three-tier subnet layout for the pipeline.
  const subnets: ec2.SubnetConfiguration[] = [
    { name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 },
    { name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 },
    { name: 'isolated', subnetType: ec2.SubnetType.PRIVATE_ISOLATED, cidrMask: 24 }
  ];
  return new ec2.Vpc(this, id, {
    enableDnsSupport: true,
    enableDnsHostnames: true,
    ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
    maxAzs: 1,
    subnetConfiguration: subnets
  });
}
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-search-engine/stack.ts#L235-L255
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
VideoChapteringStack.constructor
/**
 * Stack constructor.
 * Builds a video-chaptering pipeline: uploaded videos have their
 * audio extracted (FFMPEG) and transcribed (Transcribe); chapters
 * are extracted from the transcript with a structured-entity
 * extractor, then each chapter is cut out of the original video
 * and written to the destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the stack environment properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline extracting chapters associated with key moments in videos.',
    ...env
  });

  // The VPC in which the FFMPEG processor will be deployed.
  const vpc = this.createVpc('Vpc');

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////      Lakechain Pipeline     ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded videos.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // The FFMPEG processor extracts the audio from the video.
  const audioExtractor = new FfmpegProcessor.Builder()
    .withScope(this)
    .withIdentifier('FfmpegProcessor')
    .withCacheStorage(cache)
    .withVpc(vpc)
    .withSource(trigger)
    .withIntent(audioExtraction)
    .build();

  // We are using the `TranscribeAudioProcessor` component to transcribe
  // audio into a VTT file.
  const transcribe = new TranscribeAudioProcessor.Builder()
    .withScope(this)
    .withIdentifier('Transcribe')
    .withCacheStorage(cache)
    .withVpc(vpc)
    .withSource(audioExtractor)
    .withOutputFormats('vtt')
    .build();

  // The `StructuredEntityExtractor` component will extract
  // the chapters from the transcribed audio.
  const chapterCreator = new StructuredEntityExtractor.Builder()
    .withScope(this)
    .withIdentifier('StructuredEntityExtractor')
    .withCacheStorage(cache)
    .withRegion('us-east-1')
    .withSource(transcribe)
    .withSchema(schema)
    .withInstructions(instructions)
    .build();

  // The reducer middleware will aggregate the input video
  // and the produced JSON describing the key moments.
  const reducer = new Reducer.Builder()
    .withScope(this)
    .withIdentifier('Reducer')
    .withCacheStorage(cache)
    .withSources([ trigger, chapterCreator ])
    .withReducerStrategy(new StaticCounterStrategy.Builder()
      // Two events per document: the input video and the
      // chapter description JSON.
      .withEventCount(2)
      .build()
    )
    .build();

  // The FFMPEG processor will extract the chapters
  // as separate videos.
  const ffmpeg = new FfmpegProcessor.Builder()
    .withScope(this)
    .withIdentifier('ChapterExtractor')
    .withCacheStorage(cache)
    .withVpc(vpc)
    .withSource(reducer)
    .withIntent(shorten)
    .build();

  // Write the results to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('S3StorageConnector')
    .withCacheStorage(cache)
    .withDestinationBucket(destination)
    .withSources([ chapterCreator, ffmpeg ])
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-video-chaptering-service/stack.ts#L59-L189
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
VideoChapteringStack.createVpc
/**
 * Creates the VPC in which the FFMPEG processors are deployed.
 * @param id the VPC identifier.
 * @returns a new single-AZ VPC with a public, private and
 * isolated subnet for the pipeline.
 */
private createVpc(id: string): ec2.IVpc {
  // Subnet layout: a small public subnet used by NAT Gateways to
  // provide Internet access to the containers, a larger private
  // subnet used by the containers, and an isolated subnet used by EFS.
  const subnets: ec2.SubnetConfiguration[] = [
    { name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 },
    { name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 },
    { name: 'isolated', subnetType: ec2.SubnetType.PRIVATE_ISOLATED, cidrMask: 28 }
  ];
  return new ec2.Vpc(this, id, {
    enableDnsSupport: true,
    enableDnsHostnames: true,
    ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
    maxAzs: 1,
    subnetConfiguration: subnets
  });
}
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-video-chaptering-service/stack.ts#L196-L220
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
vttTimeToSeconds
/**
 * Converts the given VTT time in 'HH:MM:SS.sss' format to seconds
 * relative to the start of the video.
 * @param time the VTT time to convert.
 * @returns the time in seconds relative to the start of the video
 * (fractional seconds are preserved).
 */
const vttTimeToSeconds = (time: string) => {
  const [hours, minutes, seconds] = time.split(':');
  // Always parse with an explicit base-10 radix — `parseInt`
  // without a radix is fragile and flagged by most linters.
  return (parseInt(hours, 10) * 3600) + (parseInt(minutes, 10) * 60) + parseFloat(seconds);
};
/** * Converts the given VTT time in 'HH:MM:SS.sss' format to seconds * relative to the start of the video. * @param time the VTT time to convert. * @returns a number (possibly fractional) representing the time in seconds relative to * the start of the video. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-video-chaptering-service/funclets/ffmpeg.ts#L53-L57
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
VideoSubtitlingStack.constructor
/**
 * Stack constructor.
 * Builds a video subtitling pipeline: uploaded videos have their
 * audio extracted and transcribed to SRT; subtitles are parsed,
 * translated into multiple languages, re-packaged as SRT, and
 * merged back into the video with FFMPEG before being written to
 * the destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the stack environment properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline creating multi-lingual subtitles for videos.',
    ...env
  });

  // The VPC in which the FFMPEG processor will be deployed.
  const vpc = this.createVpc('Vpc');

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////      Lakechain Pipeline     ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded videos.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // The FFMPEG processor extracts the audio from the video.
  const audioExtractor = new FfmpegProcessor.Builder()
    .withScope(this)
    .withIdentifier('FfmpegProcessor')
    .withCacheStorage(cache)
    .withVpc(vpc)
    .withSource(trigger)
    .withIntent(audioExtraction)
    .build();

  // We are using the `TranscribeAudioProcessor` component to transcribe
  // audio into a VTT file.
  const transcribe = new TranscribeAudioProcessor.Builder()
    .withScope(this)
    .withIdentifier('Transcribe')
    .withCacheStorage(cache)
    .withVpc(vpc)
    .withSource(audioExtractor)
    .withOutputFormats('srt')
    .build();

  // The subtitle processor will parse the subtitle file produced
  // by the transcribe processor into both plain text, and structured JSON.
  // The plain text version will be passed to the translate middleware.
  const parser = new SubtitleProcessor.Builder()
    .withScope(this)
    .withIdentifier('SubtitleProcessor')
    .withCacheStorage(cache)
    .withSource(transcribe)
    .withOutputFormats('text', 'json')
    .build();

  // Translate the transcriptions into multiple languages.
  const translate = new TranslateTextProcessor.Builder()
    .withScope(this)
    .withIdentifier('Translate')
    .withCacheStorage(cache)
    .withSource(parser)
    .withOutputLanguages(OUTPUT_LANGUAGES)
    .build();

  // This condition ensures that the `reducer` only receives
  // the subtitle JSON description from the `parser`, and not
  // the plain text translation.
  const condition = new Condition.Builder()
    .withScope(this)
    .withIdentifier('Condition')
    .withCacheStorage(cache)
    .withSource(parser)
    .withConditional(async (event: CloudEvent) => {
      return (event.data().document().mimeType() === 'application/json');
    })
    .build();

  // The reducer middleware will aggregate the video, translated subtitles,
  // as well as the JSON structured document which maps subtitles
  // to the timing information.
  const reducer = new Reducer.Builder()
    .withScope(this)
    .withIdentifier('Reducer')
    .withCacheStorage(cache)
    .withSources([ trigger, translate ])
    .withReducerStrategy(new StaticCounterStrategy.Builder()
      // Input video + JSON subtitle + translations
      .withEventCount(OUTPUT_LANGUAGES.length + 2)
      .build()
    )
    .build();

  // Listen for the JSON description emitted by the subtitle processor.
  condition.onMatch(reducer);

  // The `transform` middleware will re-package the plain text translations
  // into the SRT format.
  const transform = new Transform.Builder()
    .withScope(this)
    .withIdentifier('Transform')
    .withCacheStorage(cache)
    .withSource(reducer)
    .withTransformExpression(repackageSubtitles)
    .build();

  // The FFMPEG processor will merge the subtitles with the video.
  const ffmpeg = new FfmpegProcessor.Builder()
    .withScope(this)
    .withIdentifier('MergeSubtitle')
    .withCacheStorage(cache)
    .withVpc(vpc)
    .withSource(transform)
    .withIntent(merge)
    .build();

  // Write the results to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('S3StorageConnector')
    .withCacheStorage(cache)
    .withDestinationBucket(destination)
    .withSource(ffmpeg)
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-video-subtitle-service/stack.ts#L58-L220
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
VideoSubtitlingStack.createVpc
/**
 * Creates the VPC in which the FFMPEG processors are deployed.
 * @param id the VPC identifier.
 * @returns a new single-AZ VPC with a public, private and
 * isolated subnet for the pipeline.
 */
private createVpc(id: string): ec2.IVpc {
  // Subnet layout: a small public subnet used by NAT Gateways to
  // provide Internet access to the containers, a larger private
  // subnet used by the containers, and an isolated subnet used by EFS.
  const subnets: ec2.SubnetConfiguration[] = [
    { name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 },
    { name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 },
    { name: 'isolated', subnetType: ec2.SubnetType.PRIVATE_ISOLATED, cidrMask: 28 }
  ];
  return new ec2.Vpc(this, id, {
    enableDnsSupport: true,
    enableDnsHostnames: true,
    ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
    maxAzs: 1,
    subnetConfiguration: subnets
  });
}
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/end-to-end-use-cases/building-a-video-subtitle-service/stack.ts#L227-L251
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
DeflatePipelineStack.constructor
/**
 * Stack constructor.
 * Builds an archiving pipeline: uploaded images are resized to each
 * width in `sizes`, the original and all resized variants are
 * aggregated by a reducer, and the result is stored in both ZIP and
 * gzipped TAR archives in the destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the stack environment properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline storing documents into Zip and Tar archives.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////      Lakechain Pipeline     ///////
  ///////////////////////////////////////////

  const sharpTransforms = [];

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // Creating the sharp transforms for each size.
  for (const [idx, size] of sizes.entries()) {
    sharpTransforms.push(new SharpImageTransform.Builder()
      .withScope(this)
      .withIdentifier(`SharpTransform-${idx}`)
      .withCacheStorage(cache)
      .withSource(trigger)
      .withSharpTransforms(
        sharp().resize(size.width)
      )
      .build());
  }

  // Reduce the original image, and the resized images.
  const reducer = new Reducer.Builder()
    .withScope(this)
    .withIdentifier('Reducer')
    .withCacheStorage(cache)
    .withSources([ trigger, ...sharpTransforms ])
    .withReducerStrategy(new StaticCounterStrategy.Builder()
      // One event per resized variant, plus the original image.
      // Derived from `sizes` so adding or removing a size cannot
      // desynchronize the counter (was previously hard-coded to 4).
      .withEventCount(sizes.length + 1)
      .build())
    .build();

  // ZIP documents.
  const zip = new ZipDeflateProcessor.Builder()
    .withScope(this)
    .withIdentifier('ZipProcessor')
    .withCacheStorage(cache)
    .withSource(reducer)
    .build();

  // TAR documents.
  const tar = new TarDeflateProcessor.Builder()
    .withScope(this)
    .withIdentifier('TarProcessor')
    .withCacheStorage(cache)
    .withSource(reducer)
    // Compress the TAR archive with gzip.
    .withGzip(true)
    .build();

  // Write the results to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('S3StorageConnector')
    .withCacheStorage(cache)
    .withDestinationBucket(destination)
    .withSources([ zip, tar ])
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/archive-processing-pipelines/deflate-pipeline/stack.ts#L49-L159
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
InflatePipelineStack.constructor
/**
 * Stack constructor.
 * Builds an archive-inflating pipeline: ZIP and TAR archives
 * uploaded to the source bucket are extracted and their contents
 * written to the destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the stack environment properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline inflating ZIP and TAR archives.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // Hardening options shared by both buckets.
  const bucketOpts = {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  };

  // The source bucket, monitored for uploaded archives.
  const source = new s3.Bucket(this, 'Bucket', bucketOpts);

  // The destination bucket, receiving the inflated documents.
  const destination = new s3.Bucket(this, 'Destination', bucketOpts);

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////      Lakechain Pipeline     ///////
  ///////////////////////////////////////////

  // The S3 trigger monitoring the source bucket for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // Extracts the content of ZIP archives.
  const unzipper = new ZipInflateProcessor.Builder()
    .withScope(this)
    .withIdentifier('UnzipProcessor')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // Extracts the content of TAR archives.
  const untarrer = new TarInflateProcessor.Builder()
    .withScope(this)
    .withIdentifier('TarProcessor')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // Persists the inflated documents in the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('S3StorageConnector')
    .withCacheStorage(cache)
    .withDestinationBucket(destination)
    .withSources([unzipper, untarrer])
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/archive-processing-pipelines/inflate-pipeline/stack.ts#L55-L135
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
KnowledgeGraphPipeline.constructor
/**
 * Stack constructor.
 * Builds a knowledge-graph pipeline: uploaded documents are
 * converted to text (PDF, Pandoc formats) or resized (images),
 * semantic ontology is extracted from them, and the result is
 * written to a Neo4j database.
 * Requires the NEO4J_URI and NEO4J_CREDENTIALS_SECRET_NAME
 * environment variables at synthesis time.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the stack environment properties.
 * @throws Error when a required environment variable is missing.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline extracting semantic ontology from documents.',
    ...env
  });

  // Fail fast at synthesis time when the Neo4j configuration
  // is not provided.
  if (!process.env.NEO4J_URI) {
    throw new Error(`
      Missing the NEO4J_URI environment variable.
    `);
  }
  if (!process.env.NEO4J_CREDENTIALS_SECRET_NAME) {
    throw new Error(`
      Missing the NEO4J_CREDENTIALS_SECRET_NAME environment variable.
    `);
  }

  // The Neo4j credentials secret.
  const neo4jCredentials = secrets.Secret.fromSecretNameV2(
    this,
    'Neo4jCredentials',
    process.env.NEO4J_CREDENTIALS_SECRET_NAME
  );

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////      Lakechain Pipeline     ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // Convert PDF documents to text.
  const pdfConverter = new PdfTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PdfConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
  const pandocConverter = new PandocTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PandocConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // Resize images before they are handled by the `SemanticOntologyExtractor`.
  const imageTransform = new SharpImageTransform.Builder()
    .withScope(this)
    .withIdentifier('SharpTransform')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withSharpTransforms(
      sharp()
        .resize(500)
        .png()
    )
    .build();

  // We are using the `SemanticOntologyExtractor` to extract
  // semantic information from the documents.
  const extractor = new SemanticOntologyExtractor.Builder()
    .withScope(this)
    .withIdentifier('SemanticOntologyExtractor')
    .withCacheStorage(cache)
    .withRegion('us-east-1')
    .withSources([
      pdfConverter,
      pandocConverter,
      imageTransform,
      trigger
    ])
    .build();

  // Write the results to the Neo4j database.
  new Neo4jStorageConnector.Builder()
    .withScope(this)
    .withIdentifier('Neo4jStorageConnector')
    .withCacheStorage(cache)
    .withSource(extractor)
    // The `as string` cast is safe: presence was validated above.
    .withUri(process.env.NEO4J_URI as string)
    .withCredentials(neo4jCredentials)
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/data-extraction-pipelines/knowledge-graph-pipeline/stack.ts#L61-L174
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
MetadataStack.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A simple pipeline extracting metadata from audio, image and video files.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // Extracts metadata from audio files. const audioMetadata = new AudioMetadataExtractor.Builder() .withScope(this) .withIdentifier('AudioMetadata') .withCacheStorage(cache) .withSource(trigger) .build(); // Extracts metadata from images. const imageMetadata = new ImageMetadataExtractor.Builder() .withScope(this) .withIdentifier('ImageMetadata') .withCacheStorage(cache) .withSource(trigger) .build(); // Extracts metadata from video files. const videoMetadata = new VideoMetadataExtractor.Builder() .withScope(this) .withIdentifier('VideoMetadata') .withCacheStorage(cache) .withSource(trigger) .build(); // Write the results to the destination bucket. 
new S3StorageConnector.Builder() .withScope(this) .withIdentifier('S3StorageConnector') .withCacheStorage(cache) .withDestinationBucket(destination) .withSources([audioMetadata, imageMetadata, videoMetadata]) // We ask the S3 storage provider to only copy the resulting // document metadata and not the source documents. .withCopyDocuments(false) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/data-extraction-pipelines/metadata-extraction-pipeline/stack.ts#L55-L146
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
StructuredDataExtractionPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline extracting structured data from documents.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // Convert PDF documents to text. const pdfConverter = new PdfTextConverter.Builder() .withScope(this) .withIdentifier('PdfConverter') .withCacheStorage(cache) .withSource(trigger) .build(); // Convert text-oriented documents (Docx, Markdown, HTML, etc) to text. const pandocConverter = new PandocTextConverter.Builder() .withScope(this) .withIdentifier('PandocConverter') .withCacheStorage(cache) .withSource(trigger) .build(); // We are using the `StructuredEntityExtractor` to extract // structured data from the documents. 
const extractor = new StructuredEntityExtractor.Builder() .withScope(this) .withIdentifier('StructuredEntityExtractor') .withCacheStorage(cache) .withRegion('us-east-1') .withSources([ pdfConverter, pandocConverter, trigger ]) .withSchema(schema) .build(); // Write the results to the destination bucket. new S3StorageConnector.Builder() .withScope(this) .withIdentifier('S3StorageConnector') .withCacheStorage(cache) .withSource(extractor) .withDestinationBucket(destination) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/data-extraction-pipelines/structured-data-extraction-pipeline/stack.ts#L50-L145
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
TopicModelingStack.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline using topic modeling on text documents using the KeyBERT model.', ...env }); // The VPC in which the EFS cache for the KeyBERT model will be deployed. const vpc = this.createVpc('Vpc'); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); trigger // Extract the main topics from text documents. .pipe( new KeybertTextProcessor.Builder() .withScope(this) .withIdentifier('KeybertTextProcessor') .withCacheStorage(cache) .withVpc(vpc) .withSource(trigger) // Optionally specify the top n topics to retrieve. .withTopN(10) .build() ) // Write the results to the destination bucket. .pipe( new S3StorageConnector.Builder() .withScope(this) .withIdentifier('S3StorageConnector') .withCacheStorage(cache) .withDestinationBucket(destination) .build() ); // Display the source bucket information in the console. 
new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/data-extraction-pipelines/topic-modeling-pipeline/stack.ts#L45-L126
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
TopicModelingStack.createVpc
private createVpc(id: string): ec2.IVpc { return (new ec2.Vpc(this, id, { enableDnsSupport: true, enableDnsHostnames: true, ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'), maxAzs: 3, subnetConfiguration: [{ // Used by NAT Gateways to provide Internet access // to the containers. name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 }, { // Used by KeyBERT containers. name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 }, { // Used by EFS. name: 'isolated', subnetType: ec2.SubnetType.PRIVATE_ISOLATED, cidrMask: 28 }] })); }
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/data-extraction-pipelines/topic-modeling-pipeline/stack.ts#L133-L157
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
BedrockLanceDbPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'An embedding storage pipeline using Amazon Bedrock and LanceDB.', ...env }); // The VPC required by the EFS storage. const vpc = this.createVpc('Vpc'); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket where input documents are uploaded. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The EFS file system used to store the embeddings. const fileSystem = new efs.FileSystem(this, 'FileSystem', { vpc, removalPolicy: cdk.RemovalPolicy.DESTROY, throughputMode: efs.ThroughputMode.ELASTIC, encrypted: true, vpcSubnets: { subnetType: ec2.SubnetType.PRIVATE_ISOLATED } }); // The cache storage. const cache = new CacheStorage(this, 'CacheStorage', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Monitor a bucket for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // Convert PDF documents to text. const pdfConverter = new PdfTextConverter.Builder() .withScope(this) .withIdentifier('PdfConverter') .withCacheStorage(cache) .withSource(trigger) .build(); // Convert text-oriented documents (Docx, Markdown, HTML, etc) to text. const pandocConverter = new PandocTextConverter.Builder() .withScope(this) .withIdentifier('PandocConverter') .withCacheStorage(cache) .withSource(trigger) .build(); // We use the `RecursiveCharacterTextSplitter` to split // input text into smaller chunks. This is required to ensure // that the generated embeddings are relevant. 
const textSplitter = new RecursiveCharacterTextSplitter.Builder() .withScope(this) .withIdentifier('RecursiveCharacterTextSplitter') .withCacheStorage(cache) .withSources([ pdfConverter, pandocConverter, trigger ]) .withChunkSize(4096) .build(); // Creates embeddings for text chunks using Amazon Titan. const embeddingProcessor = new TitanEmbeddingProcessor.Builder() .withScope(this) .withIdentifier('BedrockEmbeddingProcessor') .withCacheStorage(cache) .withSource(textSplitter) .withRegion('us-east-1') .build(); // Store the embeddings in LanceDB. new LanceDbStorageConnector.Builder() .withScope(this) .withIdentifier('LanceDbStorageConnector') .withCacheStorage(cache) .withSource(embeddingProcessor) .withVectorSize(1024) .withStorageProvider(new EfsStorageProvider.Builder() .withScope(this) .withIdentifier('EfsStorage') .withFileSystem(fileSystem) .withVpc(vpc) .build()) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/bedrock-lancedb-pipeline/stack.ts#L53-L161
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
BedrockLanceDbPipeline.createVpc
private createVpc(id: string): ec2.IVpc { return (new ec2.Vpc(this, id, { enableDnsSupport: true, enableDnsHostnames: true, maxAzs: 1, ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'), subnetConfiguration: [{ // Used by NAT Gateways to provide Internet access // to the containers. name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 }, { // Used by the embedding containers. name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 }, { // Used by EFS. name: 'isolated', subnetType: ec2.SubnetType.PRIVATE_ISOLATED, cidrMask: 28 }] })); }
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/bedrock-lancedb-pipeline/stack.ts#L168-L192
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
BedrockMultimodalPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'An embedding storage pipeline using Amazon Bedrock multimodal embedding models.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket where input documents are uploaded. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The S3 bucket used to store the embeddings. const storage = new s3.Bucket(this, 'Storage', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'CacheStorage', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Monitor a bucket for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); trigger // Resize images to a width of 512px and convert them to PNG. .pipe( new SharpImageTransform.Builder() .withScope(this) .withIdentifier('SharpTransform') .withCacheStorage(cache) .withSharpTransforms( sharp() .resize(512) .png() ) .build() ) // Creates embeddings for text chunks using Amazon Titan. .pipe( new TitanEmbeddingProcessor.Builder() .withScope(this) .withIdentifier('BedrockEmbeddingProcessor') .withCacheStorage(cache) .withModel(TitanEmbeddingModel.AMAZON_TITAN_EMBED_IMAGE_V1) .withRegion('us-east-1') .build() ) // Store the embeddings in LanceDB. 
.pipe( new LanceDbStorageConnector.Builder() .withScope(this) .withIdentifier('LanceDbStorageConnector') .withCacheStorage(cache) .withVectorSize(768) .withStorageProvider(new S3StorageProvider.Builder() .withScope(this) .withIdentifier('S3Storage') .withBucket(storage) .build()) .build() ); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/bedrock-multimodal-pipeline/stack.ts#L44-L131
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
BedrockEmbeddingPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'An embedding storage pipeline using Amazon Bedrock and OpenSearch.', ...env }); // The VPC in which OpenSearch will be deployed. const vpc = this.createVpc('Vpc'); // The OpenSearch domain. const domain = new OpenSearchDomain(this, 'Domain', { vpc }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket where input documents are uploaded. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'CacheStorage', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Monitor a bucket for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // Convert PDF documents to text. const pdfConverter = new PdfTextConverter.Builder() .withScope(this) .withIdentifier('PdfConverter') .withCacheStorage(cache) .withSource(trigger) .build(); // Convert text-oriented documents (Docx, Markdown, HTML, etc) to text. const pandocConverter = new PandocTextConverter.Builder() .withScope(this) .withIdentifier('PandocConverter') .withCacheStorage(cache) .withSource(trigger) .build(); // We use the `RecursiveCharacterTextSplitter` to split // input text into smaller chunks. This is required to ensure // that the generated embeddings are relevant. 
const textSplitter = new RecursiveCharacterTextSplitter.Builder() .withScope(this) .withIdentifier('RecursiveCharacterTextSplitter') .withCacheStorage(cache) .withSources([ pdfConverter, pandocConverter, trigger ]) .withChunkSize(4096) .build(); // Creates embeddings for text chunks using Amazon Titan. const embeddingProcessor = new TitanEmbeddingProcessor.Builder() .withScope(this) .withIdentifier('BedrockEmbeddingProcessor') .withCacheStorage(cache) .withSource(textSplitter) // You can optionally specify the embedding model to use. .withModel(TitanEmbeddingModel.AMAZON_TITAN_EMBED_TEXT_V2) // You can also use a region that supports Amazon Bedrock. .withRegion('us-east-1') .build(); // The vector storage will create a new index and store the vectors // from previous middlewares into OpenSearch. new OpenSearchVectorStorageConnector.Builder() .withScope(this) .withIdentifier('OpenSearchVectorStorageConnector') .withCacheStorage(cache) .withEndpoint(domain.domain) .withSource(embeddingProcessor) .withVpc(vpc) // Specifies that the text associated with the embeddings // should be stored in OpenSearch. .withIncludeDocument(true) .withIndex(new OpenSearchVectorIndexDefinition.Builder() .withIndexName('vector-index') .withKnnMethod('hnsw') .withKnnEngine('nmslib') .withSpaceType('l2') // The dimensions of Amazon Titan embeddings. .withDimensions(1024) .withParameters({ 'ef_construction': 512, 'm': 16 }) .build() ) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/bedrock-opensearch-pipeline/stack.ts#L54-L168
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
BedrockEmbeddingPipeline.createVpc
private createVpc(id: string): ec2.IVpc { return (new ec2.Vpc(this, id, { enableDnsSupport: true, enableDnsHostnames: true, ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'), maxAzs: 1, subnetConfiguration: [{ name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 }, { name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 }] })); }
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/bedrock-opensearch-pipeline/stack.ts#L175-L191
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
BedrockPineconePipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'An embedding storage pipeline using Amazon Bedrock and Pinecone.', ...env }); // Checking whether environment variables are defined. if (!process.env.PINECONE_API_KEY_SECRET_NAME) { throw new Error(` Missing the PINECONE_API_KEY_SECRET_NAME environment variable. `); } // The Pinecone API key. const pineconeApiKey = secrets.Secret.fromSecretNameV2( this, 'PineconeApiKey', process.env.PINECONE_API_KEY_SECRET_NAME ); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket where input documents are uploaded. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'CacheStorage', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Monitor a bucket for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // We use the `RecursiveCharacterTextSplitter` to split // input text into smaller chunks. This is required to ensure // that the generated embeddings are relevant. const textSplitter = new RecursiveCharacterTextSplitter.Builder() .withScope(this) .withIdentifier('RecursiveCharacterTextSplitter') .withCacheStorage(cache) .withSource(trigger) .withChunkSize(4096) .build(); // Creates embeddings for text chunks using Amazon Titan. 
const embeddingProcessor = new TitanEmbeddingProcessor.Builder() .withScope(this) .withIdentifier('BedrockEmbeddingProcessor') .withCacheStorage(cache) .withSource(textSplitter) .withModel(TitanEmbeddingModel.AMAZON_TITAN_EMBED_TEXT_V1) .withRegion('us-east-1') .build(); // Store the embeddings in Pinecone. new PineconeStorageConnector.Builder() .withScope(this) .withIdentifier('PineconeStorageConnector') .withCacheStorage(cache) .withSource(embeddingProcessor) .withIndexName('bedrock-index') .withApiKey(pineconeApiKey) .withIncludeText(true) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/bedrock-pinecone-pipeline/stack.ts#L53-L138
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
BedrockQdrantPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'An embedding storage pipeline using Amazon Bedrock and Qdrant.', ...env }); // Checking whether environment variables are defined. if (!process.env.QDRANT_API_KEY_SECRET_NAME) { throw new Error(` Missing the QDRANT_API_KEY_SECRET_NAME environment variable. `); } const qdrantApiKey = secrets.Secret.fromSecretNameV2( this, 'QdrantApiKey', process.env.QDRANT_API_KEY_SECRET_NAME ); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket where input documents are uploaded. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'CacheStorage', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Monitor a bucket for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // We use the `RecursiveCharacterTextSplitter` to split // input text into smaller chunks. This is required to ensure // that the generated embeddings are relevant. const textSplitter = new RecursiveCharacterTextSplitter.Builder() .withScope(this) .withIdentifier('RecursiveCharacterTextSplitter') .withCacheStorage(cache) .withSource(trigger) .withChunkSize(4096) .build(); // Creates embeddings for text chunks using Amazon Titan. 
const embeddingProcessor = new TitanEmbeddingProcessor.Builder() .withScope(this) .withIdentifier('BedrockEmbeddingProcessor') .withCacheStorage(cache) .withSource(textSplitter) .withModel(TitanEmbeddingModel.AMAZON_TITAN_EMBED_TEXT_V1) .withRegion('us-east-1') .build(); // Store the embeddings in Qdrant. new QdrantStorageConnector.Builder() .withScope(this) .withIdentifier('QdrantStorageConnector') .withCacheStorage(cache) .withSource(embeddingProcessor) .withCollectionName('aws') .withUrl('https://<example>.cloud.qdrant.io:6333') .withApiKey(qdrantApiKey) .withStoreText(true) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/bedrock-qdrant-pipeline/stack.ts#L53-L138
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ClipEmbeddingsPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline creating embeddings for images using CLIP.', ...env }); // The VPC required by sentence transformers models. const vpc = this.createVpc('Vpc'); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket where input documents are uploaded. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket where the embeddings are stored. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Monitor a bucket for uploaded objects. const ingestion = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // Creates embeddings for images using the OpenAI // CLIP model, running on a GPU instance by default. // @see the `withComputeType` method to change the // compute type to CPU. const clip = new ClipImageProcessor.Builder() .withScope(this) .withIdentifier('ClipProcessor') .withCacheStorage(cache) .withVpc(vpc) .withSource(ingestion) .build(); // Write the results to the destination bucket. new S3StorageConnector.Builder() .withScope(this) .withIdentifier('S3StorageConnector') .withCacheStorage(cache) .withDestinationBucket(destination) .withSource(clip) .build(); // Display the source bucket information in the console. 
new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/clip-embeddings-pipeline/stack.ts#L46-L124
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ClipEmbeddingsPipeline.createVpc
private createVpc(id: string): ec2.IVpc { return (new ec2.Vpc(this, id, { enableDnsSupport: true, enableDnsHostnames: true, maxAzs: 1, ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'), subnetConfiguration: [{ // Used by NAT Gateways to provide Internet access // to the containers. name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 }, { // Used by the embedding containers. name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 }, { // Used by EFS. name: 'isolated', subnetType: ec2.SubnetType.PRIVATE_ISOLATED, cidrMask: 28 }] })); }
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/clip-embeddings-pipeline/stack.ts#L131-L155
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
CohereEmbeddingPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline storage pipeline using Cohere models and OpenSearch.', ...env }); // The VPC in which OpenSearch will be deployed. const vpc = this.createVpc('Vpc'); // The OpenSearch collection. const collection = new oss.Collection(this, 'Collection', { name: 'vector-collection', description: 'A collection used to store embeddings.', vpc, type: 'VECTORSEARCH' }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket where input documents are uploaded. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Monitor a bucket for uploaded objects. const ingestion = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // We use the `RecursiveCharacterTextSplitter` to split // input text into smaller chunks. This is required to ensure // that the generated embeddings are relevant. const textSplitter = new RecursiveCharacterTextSplitter.Builder() .withScope(this) .withIdentifier('RecursiveCharacterTextSplitter') .withCacheStorage(cache) .withSource(ingestion) .withChunkSize(1024) .build(); // Creates embeddings for the text using a Cohere embedding // model hosted on Amazon Bedrock. const embeddingProcessor = new CohereEmbeddingProcessor.Builder() .withScope(this) .withIdentifier('CohereEmbeddingProcessor') .withCacheStorage(cache) .withSource(textSplitter) // You can specify the embedding model to use. 
.withModel(CohereEmbeddingModel.COHERE_EMBED_MULTILINGUAL_V3) // You can also use a region that supports Amazon Bedrock. .withRegion('us-east-1') .build(); // The vector storage will create a new index and store the vectors // from previous middlewares into OpenSearch. new OpenSearchVectorStorageConnector.Builder() .withScope(this) .withIdentifier('OpenSearchVectorStorageConnector') .withCacheStorage(cache) .withEndpoint(collection) .withSource(embeddingProcessor) .withVpc(vpc) // Specifies that the text associated with the embeddings // should be stored in OpenSearch. .withIncludeDocument(true) .withIndex(new OpenSearchVectorIndexDefinition.Builder() .withIndexName('vector-index') .withKnnMethod('hnsw') .withKnnEngine('nmslib') .withSpaceType('l2') // The dimensions of Amazon Titan embeddings. .withDimensions(1024) .withParameters({ 'ef_construction': 512, 'm': 16 }) .build() ) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/cohere-opensearch-pipeline/stack.ts#L47-L145
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
CohereEmbeddingPipeline.createVpc
/**
 * Creates the VPC hosting the pipeline's networked resources.
 * @param id the VPC identifier.
 * @returns a new VPC with a public and private subnet
 * for the pipeline.
 */
private createVpc(id: string): ec2.IVpc {
  // Subnet layout: a small public subnet for NAT/ingress and a
  // larger private subnet with egress for workloads.
  const subnets: ec2.SubnetConfiguration[] = [{
    name: 'public',
    subnetType: ec2.SubnetType.PUBLIC,
    cidrMask: 28
  }, {
    name: 'private',
    subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
    cidrMask: 24
  }];
  return new ec2.Vpc(this, id, {
    maxAzs: 1,
    ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
    enableDnsSupport: true,
    enableDnsHostnames: true,
    subnetConfiguration: subnets
  });
}
/** * @param id the VPC identifier. * @returns a new VPC with a public and private subnet * for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/cohere-opensearch-pipeline/stack.ts#L152-L168
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
OllamaLancedbPipeline.constructor
/**
 * Stack constructor.
 * Builds an S3-triggered pipeline that converts PDF and
 * text-oriented documents to text, splits them into chunks,
 * embeds the chunks with an Ollama model running on GPU
 * instances, and persists the vectors into LanceDB backed
 * by an S3 storage provider.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'An embedding storage pipeline using Ollama and LanceDB.',
    ...env
  });

  // The VPC required by the EFS storage.
  const vpc = this.createVpc('Vpc');

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket where input documents are uploaded.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The S3 bucket used to store the embeddings.
  const storage = new s3.Bucket(this, 'Storage', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'CacheStorage', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Monitor a bucket for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // Convert PDF documents to text.
  const pdfConverter = new PdfTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PdfConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
  const pandocConverter = new PandocTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PandocConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // We use the `RecursiveCharacterTextSplitter` to split
  // input text into smaller chunks. This is required to ensure
  // that the generated embeddings are relevant.
  const textSplitter = new RecursiveCharacterTextSplitter.Builder()
    .withScope(this)
    .withIdentifier('RecursiveCharacterTextSplitter')
    .withCacheStorage(cache)
    // Plain-text uploads flow straight from the trigger; PDF and
    // Pandoc-convertible documents come through their converters.
    .withSources([
      pdfConverter,
      pandocConverter,
      trigger
    ])
    .withChunkSize(4096)
    .build();

  // Creates embeddings for text chunks using Ollama.
  const embeddingProcessor = new OllamaEmbeddingProcessor.Builder()
    .withScope(this)
    .withIdentifier('OllamaEmbeddingProcessor')
    .withCacheStorage(cache)
    .withSource(textSplitter)
    .withVpc(vpc)
    // The batch size controls how many chunks the Ollama embedding processor
    // will process in a single batch.
    .withBatchSize(10)
    .withModel(OllamaEmbeddingModel.NOMIC_EMBED_TEXT)
    // GPU-backed infrastructure for the Ollama containers.
    .withInfrastructure(new InfrastructureDefinition.Builder()
      .withMaxMemory(15 * 1024)
      .withGpus(1)
      .withInstanceType(ec2.InstanceType.of(
        ec2.InstanceClass.G4DN,
        ec2.InstanceSize.XLARGE2
      ))
      .build())
    .build();

  // Store the embeddings in LanceDB.
  new LanceDbStorageConnector.Builder()
    .withScope(this)
    .withIdentifier('LanceDbStorageConnector')
    .withCacheStorage(cache)
    .withSource(embeddingProcessor)
    // 768 matches the output dimensionality of the
    // nomic-embed-text model selected above.
    .withVectorSize(768)
    .withStorageProvider(new S3StorageProvider.Builder()
      .withScope(this)
      .withIdentifier('S3Storage')
      .withBucket(storage)
      .build())
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/ollama-lancedb-pipeline/stack.ts#L57-L174
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
OllamaLancedbPipeline.createVpc
/**
 * Creates the VPC used by the pipeline's compute and storage.
 * @param id the VPC identifier.
 * @returns a new VPC with a public, private and isolated
 * subnets for the pipeline.
 */
private createVpc(id: string): ec2.IVpc {
  const subnets: ec2.SubnetConfiguration[] = [{
    // Used by NAT Gateways to provide Internet access
    // to the containers.
    name: 'public',
    subnetType: ec2.SubnetType.PUBLIC,
    cidrMask: 28
  }, {
    // Used by the embedding containers.
    name: 'private',
    subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
    cidrMask: 24
  }, {
    // Used by EFS.
    name: 'isolated',
    subnetType: ec2.SubnetType.PRIVATE_ISOLATED,
    cidrMask: 28
  }];
  return new ec2.Vpc(this, id, {
    ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
    maxAzs: 1,
    enableDnsSupport: true,
    enableDnsHostnames: true,
    subnetConfiguration: subnets
  });
}
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/ollama-lancedb-pipeline/stack.ts#L181-L205
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
PannsEmbeddingPipeline.constructor
/**
 * Stack constructor.
 * Builds an S3-triggered pipeline that embeds uploaded audio
 * documents with the PANNS model and stores the resulting
 * vectors in an OpenSearch domain deployed inside a VPC.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'An audio embedding pipeline using PANNS and OpenSearch.',
    ...env
  });

  // The VPC in which OpenSearch will be deployed.
  const vpc = this.createVpc('Vpc');

  // The OpenSearch domain.
  const domain = new OpenSearchDomain(this, 'Domain', {
    vpc
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket where input documents are uploaded.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'CacheStorage', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Monitor a bucket for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // Creates embeddings for audio documents.
  const embeddingProcessor = new PannsEmbeddingProcessor.Builder()
    .withScope(this)
    .withIdentifier('PannsEmbeddingProcessor')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withVpc(vpc)
    .build();

  // The vector storage will create a new index and store the vectors
  // from previous middlewares into OpenSearch.
  new OpenSearchVectorStorageConnector.Builder()
    .withScope(this)
    .withIdentifier('OpenSearchVectorStorageConnector')
    .withCacheStorage(cache)
    .withEndpoint(domain.domain)
    .withSource(embeddingProcessor)
    .withVpc(vpc)
    // Audio binaries are not stored next to their embeddings.
    .withIncludeDocument(false)
    .withIndex(new OpenSearchVectorIndexDefinition.Builder()
      .withIndexName('vector-index')
      .withKnnMethod('hnsw')
      .withKnnEngine('nmslib')
      .withSpaceType('cosinesimil')
      // The dimensions of PANNS embeddings.
      .withDimensions(2048)
      .withParameters({
        'ef_construction': 512,
        'm': 16
      })
      .build()
    )
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/panns-opensearch-pipeline/stack.ts#L46-L124
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
PannsEmbeddingPipeline.createVpc
/**
 * Creates the VPC hosting OpenSearch and the pipeline compute.
 * @param id the VPC identifier.
 * @returns a new VPC with a public, private and isolated
 * subnets for the pipeline.
 */
private createVpc(id: string): ec2.IVpc {
  // Note: the isolated subnet uses a /24 here, unlike sibling
  // stacks that use a /28.
  const subnets: ec2.SubnetConfiguration[] = [{
    name: 'public',
    subnetType: ec2.SubnetType.PUBLIC,
    cidrMask: 28
  }, {
    name: 'private',
    subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
    cidrMask: 24
  }, {
    name: 'isolated',
    subnetType: ec2.SubnetType.PRIVATE_ISOLATED,
    cidrMask: 24
  }];
  return new ec2.Vpc(this, id, {
    maxAzs: 1,
    ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
    enableDnsSupport: true,
    enableDnsHostnames: true,
    subnetConfiguration: subnets
  });
}
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/panns-opensearch-pipeline/stack.ts#L131-L151
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
SentenceTransformersPipeline.constructor
/**
 * Stack constructor.
 * Builds an S3-triggered pipeline that splits text documents,
 * embeds the chunks with a Sentence Transformers model running
 * in a VPC, and stores the vectors in an OpenSearch Serverless
 * collection.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline creating embeddings using sentence transformers.',
    ...env
  });

  // The VPC required by sentence transformers models.
  const vpc = this.createVpc('Vpc');

  // The OpenSearch collection.
  const collection = new oss.Collection(this, 'Collection', {
    name: 'vector-collection',
    description: 'A collection used to store embeddings.',
    vpc,
    type: 'VECTORSEARCH'
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket where input documents are uploaded.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Monitor a bucket for uploaded objects.
  const ingestion = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // We use the `RecursiveCharacterTextSplitter` to split
  // input text into smaller chunks. This is required to ensure
  // that the generated embeddings are relevant.
  const textSplitter = new RecursiveCharacterTextSplitter.Builder()
    .withScope(this)
    .withIdentifier('RecursiveCharacterTextSplitter')
    .withCacheStorage(cache)
    .withSource(ingestion)
    .withChunkSize(1024)
    .build();

  // Creates embeddings for text using Sentence Transformers
  // models.
  const sentenceTransformers = new SentenceTransformers.Builder()
    .withScope(this)
    .withIdentifier('SentenceTransformers')
    .withCacheStorage(cache)
    .withVpc(vpc)
    .withSource(textSplitter)
    // Optionally specify an embedding model to use.
    .withModel(SentenceTransformersModel.ALL_MPNET_BASE_V2)
    .build();

  // The vector storage will create a new index and store the vectors
  // from previous middlewares into OpenSearch.
  new OpenSearchVectorStorageConnector.Builder()
    .withScope(this)
    .withIdentifier('OpenSearchVectorStorageConnector')
    .withCacheStorage(cache)
    .withEndpoint(collection)
    .withSource(sentenceTransformers)
    .withVpc(vpc)
    // Specifies that the text associated with the embeddings
    // should be stored in OpenSearch.
    .withIncludeDocument(true)
    .withIndex(new OpenSearchVectorIndexDefinition.Builder()
      .withIndexName('vector-index')
      .withKnnMethod('hnsw')
      .withKnnEngine('nmslib')
      .withSpaceType('cosinesimil')
      // The dimensions of ALL_MPNET_BASE_V2 embeddings.
      .withDimensions(768)
      .withParameters({
        'ef_construction': 512,
        'm': 16
      })
      .build()
    )
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/sentence-transformers-pipeline/stack.ts#L48-L146
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
SentenceTransformersPipeline.createVpc
/**
 * Creates the VPC used by the embedding containers and EFS.
 * @param id the VPC identifier.
 * @returns a new VPC with a public, private and isolated
 * subnets for the pipeline.
 */
private createVpc(id: string): ec2.IVpc {
  const subnets: ec2.SubnetConfiguration[] = [{
    // Used by NAT Gateways to provide Internet access
    // to the containers.
    name: 'public',
    subnetType: ec2.SubnetType.PUBLIC,
    cidrMask: 28
  }, {
    // Used by embeddings containers.
    name: 'private',
    subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
    cidrMask: 24
  }, {
    // Used by EFS.
    name: 'isolated',
    subnetType: ec2.SubnetType.PRIVATE_ISOLATED,
    cidrMask: 28
  }];
  return new ec2.Vpc(this, id, {
    ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'),
    maxAzs: 1,
    enableDnsSupport: true,
    enableDnsHostnames: true,
    subnetConfiguration: subnets
  });
}
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/embedding-pipelines/sentence-transformers-pipeline/stack.ts#L153-L177
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ConditionalPipelineStack.constructor
/**
 * Stack constructor.
 * Demonstrates conditional flow control: uploaded JSON documents
 * are evaluated by a cloud-side conditional expression, and only
 * those matching the expected structure are copied to the
 * destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline demonstrating how to use conditionals.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // We combine a filter to only keep JSON documents,
  // and the `Condition` middleware to check if the
  // document matches the expected structure.
  const condition = new Condition.Builder()
    .withScope(this)
    .withIdentifier('Condition')
    .withCacheStorage(cache)
    // Only listen for JSON documents.
    .withSource(trigger, when('data.document.type').equals('application/json'))
    // 👇 The below expression will be executed in the Cloud at runtime.
    .withConditional(async (event: CloudEvent) => {
      const document = event.data().document();

      // Load the document in memory.
      // NOTE(review): the parsed payload is not schema-validated;
      // a non-object JSON body would simply fail the version check.
      const data = JSON.parse(
        (await document.data().asBuffer()).toString('utf-8')
      );

      return (data.version === '1.0.0');
    })
    .build();

  // If the condition is met, we store the document in the
  // destination bucket.
  condition.onMatch(
    new S3StorageConnector.Builder()
      .withScope(this)
      .withIdentifier('Storage')
      .withCacheStorage(cache)
      .withDestinationBucket(destination)
      .build()
  );

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/flow-control-pipelines/conditional-pipeline/stack.ts#L46-L134
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
DelayPipelineStack.constructor
/**
 * Stack constructor.
 * Demonstrates the `Delay` middleware: documents flow through a
 * first passthrough, wait 30 seconds in the delay node, then
 * continue to a second passthrough.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline showing how to delay a pipeline execution.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  trigger
    // Create the first passthrough node.
    .pipe(
      new Passthrough.Builder()
        .withScope(this)
        .withIdentifier('FirstPassthrough')
        .withCacheStorage(cache)
        .withSource(trigger)
        .build()
    )
    // Delay the pipeline execution by 30 seconds.
    .pipe(
      new Delay.Builder()
        .withScope(this)
        .withIdentifier('Delay')
        .withCacheStorage(cache)
        .withTime(cdk.Duration.seconds(30))
        .build()
    )
    // Create the second passthrough node.
    .pipe(
      new Passthrough.Builder()
        .withScope(this)
        .withIdentifier('SecondPassthrough')
        .withCacheStorage(cache)
        .build()
    );

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/flow-control-pipelines/delay-pipeline/stack.ts#L45-L113
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageOutpaintingPipeline.constructor
/**
 * Stack constructor.
 * Builds a color-guided image generation pipeline: uploaded
 * images are resized to the dimensions expected by Amazon
 * Titan, used as a reference image along with a text prompt
 * and a color palette, and the generated images are written
 * to the destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline using Amazon Bedrock and Amazon Titan to perform color guided image generation.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // The prompt to use for the image generation.
  const prompt = `
    a small amazon brown cardboard robot walking on a table
  `.trim();

  // 👇 Amazon colors.
  const palette = ['#ff9900', '#0079c1', '#146eb4', '#ff6a00'];

  // Create the S3 trigger monitoring the bucket
  // for uploaded images.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  trigger
    .pipe(
      // Ensure the input image dimensions are compatible with the
      // dimensions expected by the Titan model. We resize the image
      // to 1024x1024, and convert it to PNG.
      new SharpImageTransform.Builder()
        .withScope(this)
        .withIdentifier('ImageTransform')
        .withCacheStorage(cache)
        .withSharpTransforms(
          sharp()
            .resize({ width: 1024, height: 1024, fit: 'contain' })
            .png()
        )
        .build()
    )
    .pipe(
      // Modify the input images using Titan on Amazon Bedrock.
      new TitanImageGenerator.Builder()
        .withScope(this)
        .withIdentifier('ImageGenerator')
        .withCacheStorage(cache)
        .withRegion('us-east-1')
        .withTask(new ColorGuidedGenerationTask.Builder()
          .withTextPrompt(prompt)
          .withTextNegativePrompt('low quality, blurry, or poorly lit')
          // Using the input image as a reference.
          .withReferenceImage(r.reference(r.document()))
          .withColors(palette)
          .withImageGenerationParameters(new ImageGenerationParameters.Builder()
            .withNumberOfImages(5)
            .withQuality('premium')
            .withWidth(1280)
            .withHeight(768)
            .build())
          .build()
        )
        .build()
    )
    .pipe(
      // Store the generated images in the destination bucket.
      new S3StorageConnector.Builder()
        .withScope(this)
        .withIdentifier('Storage')
        .withCacheStorage(cache)
        .withDestinationBucket(destination)
        .build()
    );

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/generative-pipelines/color-guided-image-pipeline/stack.ts#L50-L162
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageArticlePipeline.constructor
/**
 * Stack constructor.
 * Builds a pipeline that converts uploaded articles to text,
 * asks Anthropic Claude (via Amazon Bedrock) to produce an
 * image-generation prompt for each article, renders images with
 * SDXL, and stores both originals and generated images in the
 * destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline generating images associated with articles.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket where results are stored.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects in the bucket.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // Convert PDF documents to text.
  const pdfConverter = new PdfTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PdfConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // Convert text-oriented documents (Docx, Markdown, HTML, etc) to text.
  const pandocConverter = new PandocTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PandocConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  // We are using the `AnthropicTextProcessor` component to generate
  // a prompt for image generation given a document.
  const promptGenerator = new AnthropicTextProcessor.Builder()
    .withScope(this)
    .withIdentifier('AnthropicTextProcessor')
    .withCacheStorage(cache)
    .withSources([
      pdfConverter,
      pandocConverter,
      trigger
    ])
    .withRegion('us-east-1')
    .withModel(AnthropicTextModel.ANTHROPIC_CLAUDE_V3_HAIKU)
    .withPrompt(`
      Here is a text document.
      I want you to generate a one sentence prompt used to generate an image associated with this document.
      - Don't specify a description of what is being generated, such as "Here is", or "This is".
      - Just provide the prompt and nothing else.
    `)
    // Low temperature keeps the generated prompt focused;
    // 512 tokens is ample for a one-sentence prompt.
    .withModelParameters({
      temperature: 0.5,
      max_tokens: 512
    })
    .build();

  // Create new images for the article using SDXL on Amazon Bedrock.
  const sdxlGenerator = new SdxlImageGenerator.Builder()
    .withScope(this)
    .withIdentifier('ImageGenerator')
    .withCacheStorage(cache)
    .withSource(promptGenerator)
    // You can override the region to use for Amazon Bedrock.
    // @see https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html#bedrock-regions
    .withRegion('us-east-1')
    // We reference the content of the input document as the prompt.
    .withPrompt(r.reference(r.document()))
    .withNegativePrompts([
      'low resolution',
      'low quality'
    ])
    // Customize the style of output images.
    .withModelParameters({
      style_preset: 'digital-art'
    })
    .build();

  // Write both the original document and the end result
  // to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('S3StorageConnector')
    .withCacheStorage(cache)
    .withSources([
      trigger,
      sdxlGenerator
    ])
    .withDestinationBucket(destination)
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/generative-pipelines/image-article-pipeline/stack.ts#L51-L181
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageConditioningPipeline.constructor
/**
 * Stack constructor.
 * Builds an image-conditioning pipeline: uploaded images are
 * resized to Titan-compatible dimensions, used as a condition
 * image (Canny edge control) along with a text prompt, and the
 * generated images are written to the destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline using Amazon Bedrock and Amazon Titan to perform image conditioning generation.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // The prompt to use for the image generation.
  const prompt = 'a tiger as a hand drawn sketch';

  // Create the S3 trigger monitoring the bucket
  // for uploaded images.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  trigger
    .pipe(
      // Ensure the input image dimensions are compatible with the
      // dimensions expected by the Titan model. We resize the image
      // to 1024x1024, and convert it to PNG.
      new SharpImageTransform.Builder()
        .withScope(this)
        .withIdentifier('ImageTransform')
        .withCacheStorage(cache)
        .withSharpTransforms(
          sharp()
            .resize({ width: 1024, height: 1024, fit: 'contain' })
            .png()
        )
        .build()
    )
    .pipe(
      // Image conditioning generation using Amazon Titan.
      new TitanImageGenerator.Builder()
        .withScope(this)
        .withIdentifier('ImageGenerator')
        .withCacheStorage(cache)
        .withRegion('us-east-1')
        // Titan Image Generator v2 is required for conditioning.
        .withImageModel(TitanImageModel.TITAN_IMAGE_GENERATOR_V2)
        .withTask(new TextToImageTask.Builder()
          .withPrompt(prompt)
          .withControlMode('CANNY_EDGE')
          // The resized input image drives the conditioning.
          .withConditionImage(r.reference(r.document()))
          .withImageGenerationParameters(new ImageGenerationParameters.Builder()
            .withWidth(1024)
            .withHeight(1024)
            .build())
          .build())
        .build()
    )
    .pipe(
      // Store the generated images in the destination bucket.
      new S3StorageConnector.Builder()
        .withScope(this)
        .withIdentifier('Storage')
        .withCacheStorage(cache)
        .withDestinationBucket(destination)
        .build()
    );

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/generative-pipelines/image-conditioning-pipeline/stack.ts#L51-L154
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
time
/**
 * Computes a point in time relative to now in the local timezone.
 * @param offset the number of minutes to add to the current time
 * (5 minutes by default).
 * @returns a date shifted by the given offset.
 */
const time = (offset = 5): Date => {
  const now = new Date();
  // `setMinutes` returns the updated epoch milliseconds, which we
  // wrap into a fresh `Date` instance.
  return new Date(now.setMinutes(now.getMinutes() + offset));
};
/** * @returns a date based on the local timezone * with a given offset which is by default 5 minutes. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/generative-pipelines/image-generation-pipeline/stack.ts#L37-L41
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageGenerationPipeline.constructor
/**
 * Stack constructor.
 * Builds a scheduler-triggered pipeline that generates images
 * from the same prompt with two Bedrock models (SDXL and Amazon
 * Titan) and stores every result in the destination bucket. The
 * pipeline fires once, 5 minutes after deployment.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline generating images using multiple Bedrock models.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The destination bucket where results are stored.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////           Trigger           ///////
  ///////////////////////////////////////////

  // Schedules the execution of the pipeline 5 minutes
  // after the deployment of the stack.
  const trigger = new SchedulerEventTrigger.Builder()
    .withScope(this)
    .withIdentifier('SchedulerEventTrigger')
    .withCacheStorage(cache)
    .withSchedule(
      scheduler.ScheduleExpression.at(time())
    )
    .build();

  ////////////////////////////////////////////
  ///////  Bedrock Image Generators    ///////
  ////////////////////////////////////////////

  const prompt = 'A cat riding a flying rocket';

  // Create new images using SDXL on Amazon Bedrock.
  const sdxlGenerator = new SdxlImageGenerator.Builder()
    .withScope(this)
    .withIdentifier('ImageGenerator')
    .withCacheStorage(cache)
    .withSource(trigger)
    // You can override the region to use for Amazon Bedrock.
    // @see https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html#bedrock-regions
    .withRegion('us-east-1')
    .withPrompt(prompt)
    .withNegativePrompts([
      'low resolution',
      'low quality'
    ])
    .build();

  // Create new images using Amazon Titan.
  const amazonGenerator = new TitanImageGenerator.Builder()
    .withScope(this)
    .withIdentifier('TitanImageGenerator')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withRegion('us-east-1')
    .withTask(new TextToImageTask.Builder()
      .withPrompt(prompt)
      .withNegativePrompt('low resolution, low quality')
      .build()
    )
    .build();

  ///////////////////////////////////////////
  ///////     Storage Connector       ///////
  ///////////////////////////////////////////

  // Write the results to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('S3StorageConnector')
    .withCacheStorage(cache)
    .withSources([
      sdxlGenerator,
      amazonGenerator
    ])
    .withDestinationBucket(destination)
    .build();

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });

  // Display the trigger time in stderr.
  // NOTE(review): stderr is used deliberately so the message is not
  // captured when CDK output is piped.
  console.error(`💁 The pipeline will be triggered at ${time().toLocaleString()}`);
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/generative-pipelines/image-generation-pipeline/stack.ts#L52-L149
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageOutpaintingPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline using Amazon Bedrock and Amazon Titan to perform image outpainting.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded images. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); trigger .pipe( // Ensure the input image dimensions are compatible with the // dimensions expected by the Titan model. We resize the image // to 1024x1024, and convert it to PNG. new SharpImageTransform.Builder() .withScope(this) .withIdentifier('ImageTransform') .withCacheStorage(cache) .withSharpTransforms( sharp() .resize({ width: 1024, height: 1024, fit: 'contain' }) .png() ) .build() ) .pipe( // Modify the input images using Titan on Amazon Bedrock. 
new TitanImageGenerator.Builder() .withScope(this) .withIdentifier('ImageGenerator') .withCacheStorage(cache) .withRegion('us-east-1') .withTask(new ImageOutpaintingTask.Builder() .withTextPrompt('Beautiful garden and swimming pool') .withMaskPrompt('house') .build() ) .build() ) .pipe( // Store the generated images in the destination bucket. new S3StorageConnector.Builder() .withScope(this) .withIdentifier('Storage') .withCacheStorage(cache) .withDestinationBucket(destination) .build() ); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/generative-pipelines/image-outpainting-pipeline/stack.ts#L48-L143
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageToImageStack.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline using Amazon Bedrock and SDXL to transform input images.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // Generates a short description of the image using the // Anthropic Claude v3 Haiku multi-modal model. const anthropic = new AnthropicTextProcessor.Builder() .withScope(this) .withIdentifier('Anthropic') .withCacheStorage(cache) .withSource(trigger) .withModel(AnthropicTextModel.ANTHROPIC_CLAUDE_V3_HAIKU) .withPrompt(` Create a short prompt of one sentence to generate an image similar to the provided image(s). `) .withModelParameters({ max_tokens: 256 }) .build(); // Generates new images using SDXL on Amazon Bedrock based on the // description of previous images. 
const imageGenerator = new SdxlImageGenerator.Builder() .withScope(this) .withIdentifier('ImageGenerator') .withCacheStorage(cache) .withSource(anthropic) .withRegion('us-east-1') // We use the output of the previous middleware // as the prompt for generating new images. .withPrompt(r.reference(r.document())) .build(); // Write both the initial image and the generated image // to the destination bucket. new S3StorageConnector.Builder() .withScope(this) .withIdentifier('Storage') .withCacheStorage(cache) .withDestinationBucket(destination) .withSources([ trigger, imageGenerator ]) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/generative-pipelines/image-to-image-pipeline/stack.ts#L47-L144
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
SdxlImageInpainting.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline using Amazon Bedrock and SDXL to perform image inpainting.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); // We upload the mask image in the source S3 bucket under // the `mask/` prefix. new s3deploy.BucketDeployment(this, 'DeployMask', { sources: [s3deploy.Source.asset('./assets/mask/')], destinationBucket: source, destinationKeyPrefix: 'mask/' }); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects in the `images/` prefix. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket({ bucket: source, prefix: 'images/' }) .build(); // Ensure the input image dimensions are compatible with the // dimensions expected by the SDXL model. We resize the image // to 512x512, and convert it to PNG. const imageTransform = new SharpImageTransform.Builder() .withScope(this) .withIdentifier('ImageTransform') .withCacheStorage(cache) .withSource(trigger) .withSharpTransforms( sharp() .resize(512, 512) .png() ) .build(); // Modify the input images using SDXL on Amazon Bedrock. 
const imageGenerator = new SdxlImageGenerator.Builder() .withScope(this) .withIdentifier('ImageGenerator') .withCacheStorage(cache) .withSource(imageTransform) // You can override the region to use for Amazon Bedrock. // @see https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html#bedrock-regions .withRegion('us-east-1') .withPrompt('A glowing red cloud') .withModelParameters({ // We reference the input document as the `init_image` parameter. init_image: r.reference(r.document()), // We reference the mask image as the `mask_image` parameter. mask_image: r.reference( r.url(`s3://${source.bucketName}/mask/mask.png`) ), mask_source: 'MASK_IMAGE_BLACK' }) .build(); // We grant the Bedrock image generator read access to the // input bucket, such that it can have access to the mask image. source.grantRead(imageGenerator); // Write both the resized image and the generated image // to the destination bucket. new S3StorageConnector.Builder() .withScope(this) .withIdentifier('Storage') .withCacheStorage(cache) .withDestinationBucket(destination) .withSources([ imageTransform, imageGenerator ]) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/generative-pipelines/sdxl-inpainting-pipeline/stack.ts#L47-L163
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
TitanInpaintingPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline using Amazon Bedrock and Amazon Titan to perform image inpainting.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded images. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); trigger .pipe( // Ensure the input image dimensions are compatible with the // dimensions expected by the Titan model. We resize the image // to 1024x1024, and convert it to PNG. new SharpImageTransform.Builder() .withScope(this) .withIdentifier('ImageTransform') .withCacheStorage(cache) .withSharpTransforms( sharp() .resize({ width: 1024, height: 1024, fit: 'contain' }) .png() ) .build() ) .pipe( // Modify the input images using Titan on Amazon Bedrock. 
new TitanImageGenerator.Builder() .withScope(this) .withIdentifier('ImageGenerator') .withCacheStorage(cache) .withRegion('us-east-1') .withTask(new ImageInpaintingTask.Builder() .withTextPrompt('Modern house') .withMaskPrompt('house') .build() ) .build() ) .pipe( // Write both the resized image and the generated image // to the destination bucket. new S3StorageConnector.Builder() .withScope(this) .withIdentifier('Storage') .withCacheStorage(cache) .withDestinationBucket(destination) .build() ); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/generative-pipelines/titan-inpainting-pipeline/stack.ts#L48-L144
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
CannyEdgeDetectionPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline performing canny edge detection on images.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination images bucket. const destination = new s3.Bucket(this, 'DestinationBucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); trigger .pipe( // Perform canny edge detection on uploaded images // and extract the edges. new CannyEdgeDetector.Builder() .withScope(this) .withIdentifier('CannyEdgeDetector') .withCacheStorage(cache) .withLowerThreshold(100) .withUpperThreshold(200) .withApertureSize(3) .withL2Gradient(false) .build() ) .pipe( // Write images to the destination bucket. new S3StorageConnector.Builder() .withScope(this) .withIdentifier('DestinationStorageConnector') .withCacheStorage(cache) .withDestinationBucket(destination) .build() ); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. 
new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/canny-edge-detection-pipeline/stack.ts#L44-L123
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
FaceDetectionStack.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline using computer vision to detect faces on images.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // The Rekognition image processor will // identify faces in processed images. const rekognition = new RekognitionImageProcessor.Builder() .withScope(this) .withIdentifier('Rekognition') .withCacheStorage(cache) .withSource(trigger) .withIntent( r.detect() .faces(r.confidence(80)) ) .build(); // Create a blurring processor that will blur // faces in detected images by the Rekognition processor. const layerProcessor = new ImageLayerProcessor.Builder() .withScope(this) .withIdentifier('ImageLayer') .withCacheStorage(cache) .withSource(rekognition) .withLayers( l.pixelate(l.faces()), l.highlight(l.landmarks()) ) .build(); // Write the results to the destination bucket. 
new S3StorageConnector.Builder() .withScope(this) .withIdentifier('Storage') .withCacheStorage(cache) .withDestinationBucket(destination) .withSource(layerProcessor) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/face-detection-pipeline/stack.ts#L46-L136
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
FaceExtractionPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline using computer vision to extract faces from images.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); // The Rekognition image processor will // identify faces in processed images. const rekognition = new RekognitionImageProcessor.Builder() .withScope(this) .withIdentifier('Rekognition') .withCacheStorage(cache) .withSource(trigger) .withIntent( r.detect() .faces(r.confidence(80)) ) .build(); // The face extractor uses a funclet leveraging the Sharp library // to yield the faces detected in the image into separate images. const faceExtractor = new SharpImageTransform.Builder() .withScope(this) .withIdentifier('FaceExtractor') .withCacheStorage(cache) .withSource(rekognition) .withSharpTransforms(funclet) .build(); // Write the results to the destination bucket. 
new S3StorageConnector.Builder() .withScope(this) .withIdentifier('Storage') .withCacheStorage(cache) .withDestinationBucket(destination) .withSource(faceExtractor) .build(); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/face-extraction-pipeline/stack.ts#L75-L162
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageBackgroundRemovalStack.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline demonstrating how to automatically remove image backgrounds using Rembg.', ...env }); // The VPC in which the Rembg middleware will be deployed. const vpc = this.createVpc('Vpc'); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); trigger // Remove the background from images. .pipe( new RembgImageProcessor.Builder() .withScope(this) .withIdentifier('RembgImageProcessor') .withCacheStorage(cache) .withVpc(vpc) .build() ) // Write the results to the destination bucket. .pipe( new S3StorageConnector.Builder() .withScope(this) .withIdentifier('S3StorageConnector') .withCacheStorage(cache) .withDestinationBucket(destination) .build() ); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. 
new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/image-background-removal/stack.ts#L44-L122
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageBackgroundRemovalStack.createVpc
private createVpc(id: string): ec2.IVpc { return (new ec2.Vpc(this, id, { enableDnsSupport: true, enableDnsHostnames: true, ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'), maxAzs: 1, subnetConfiguration: [{ // Used by NAT Gateways to provide Internet access // to the containers. name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 }, { // Used by the containers. name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 }, { // Used by EFS. name: 'isolated', subnetType: ec2.SubnetType.PRIVATE_ISOLATED, cidrMask: 28 }] })); }
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/image-background-removal/stack.ts#L129-L153
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageCaptioning.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline demonstrating how to implement image captioning using the BLIP2 model.', ...env }); // The VPC in which the BLIP2 model will be deployed. const vpc = this.createVpc('Vpc'); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); trigger // Caption images using the BLIP2 model. .pipe( new Blip2ImageProcessor.Builder() .withScope(this) .withIdentifier('ImageProcessor') .withCacheStorage(cache) .withVpc(vpc) .build() ) // Write the results to the destination bucket. .pipe( new S3StorageConnector.Builder() .withScope(this) .withIdentifier('S3StorageConnector') .withCacheStorage(cache) .withDestinationBucket(destination) .build() ); // Display the source bucket information in the console. new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. 
new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/image-captioning-pipeline/stack.ts#L44-L122
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageCaptioning.createVpc
private createVpc(id: string): ec2.IVpc { return (new ec2.Vpc(this, id, { enableDnsSupport: true, enableDnsHostnames: true, ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'), maxAzs: 1, subnetConfiguration: [{ // Used by NAT Gateways to provide Internet access // to the containers. name: 'public', subnetType: ec2.SubnetType.PUBLIC, cidrMask: 28 }, { // Used by the containers. name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 }, { // Used by EFS. name: 'isolated', subnetType: ec2.SubnetType.PRIVATE_ISOLATED, cidrMask: 28 }] })); }
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/image-captioning-pipeline/stack.ts#L129-L153
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageHashingPipeline.constructor
constructor(scope: Construct, id: string, env: cdk.StackProps) { super(scope, id, { description: 'A pipeline computing the hash of images using different algorithms.', ...env }); /////////////////////////////////////////// /////// S3 Storage /////// /////////////////////////////////////////// // The source bucket. const source = new s3.Bucket(this, 'Bucket', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The destination bucket. const destination = new s3.Bucket(this, 'Destination', { encryption: s3.BucketEncryption.S3_MANAGED, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, autoDeleteObjects: true, removalPolicy: cdk.RemovalPolicy.DESTROY, enforceSSL: true }); // The cache storage. const cache = new CacheStorage(this, 'Cache', {}); /////////////////////////////////////////// /////// Lakechain Pipeline /////// /////////////////////////////////////////// // Create the S3 trigger monitoring the bucket // for uploaded objects. const trigger = new S3EventTrigger.Builder() .withScope(this) .withIdentifier('Trigger') .withCacheStorage(cache) .withBucket(source) .build(); trigger // Compute the hash of images using different algorithms. .pipe( new HashingImageProcessor.Builder() .withScope(this) .withIdentifier('HashingImageProcessor') .withCacheStorage(cache) // Optionally specify which algorithms to use. .withAverageHashing(true) .withPerceptualHashing(true) .withDifferenceHashing(true) .withWaveletHashing(true) .withColorHashing(true) .build() ) // Write the images to the destination bucket. .pipe( new S3StorageConnector.Builder() .withScope(this) .withIdentifier('SharpStorageConnector') .withCacheStorage(cache) .withDestinationBucket(destination) .build() ); // Display the source bucket information in the console. 
new cdk.CfnOutput(this, 'SourceBucketName', { description: 'The name of the source bucket.', value: source.bucketName }); // Display the destination bucket information in the console. new cdk.CfnOutput(this, 'DestinationBucketName', { description: 'The name of the destination bucket.', value: destination.bucketName }); }
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/image-hashing-pipeline/stack.ts#L44-L124
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageModerationPipeline.constructor
/**
 * Stack constructor.
 * Builds a pipeline that classifies uploaded images using Amazon
 * Rekognition moderation labels and routes each image to either a
 * `moderated` or a `safe` destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline demonstrating how to classify moderated images.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket monitored for uploaded images.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The moderated images bucket.
  const moderated = new s3.Bucket(this, 'Moderated', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The safe images bucket.
  const safe = new s3.Bucket(this, 'Safe', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage shared by the pipeline middlewares.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // The Rekognition image processor will
  // identify moderated labels that have a confidence
  // that is at least 90%.
  const rekognition = new RekognitionImageProcessor.Builder()
    .withScope(this)
    .withIdentifier('RekognitionImageProcessor')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withIntent(
      r.detect()
        .labels(r.moderate(r.confidence(90)))
    )
    .build();

  // A reference to the moderations counter in the document metadata.
  const subject = 'data.metadata.properties.attrs.stats.moderations';

  // Writes the results to the moderated bucket when
  // moderated labels exist in the image metadata.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('ModeratedStorage')
    .withCacheStorage(cache)
    .withDestinationBucket(moderated)
    // We use a conditional to check whether the moderated
    // counter is greater than 0 to capture moderated images.
    .withSource(rekognition, when(subject).gt(0))
    .build();

  // Writes the results to the safe bucket when moderated
  // labels do not exist in the image metadata.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('SafeStorage')
    .withCacheStorage(cache)
    .withDestinationBucket(safe)
    // We use a conditional to check whether the moderated
    // counter is equal to 0 to capture safe images.
    .withSource(rekognition, when(subject).equals(0))
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the moderated bucket information in the console.
  new cdk.CfnOutput(this, 'ModeratedBucketName', {
    description: 'The name of the bucket containing moderated images.',
    value: moderated.bucketName
  });

  // Display the safe bucket information in the console.
  new cdk.CfnOutput(this, 'SafeBucketName', {
    description: 'The name of the bucket containing safe images.',
    value: safe.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/image-moderation-pipeline/stack.ts#L38-L149
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageResizePipeline.constructor
/**
 * Stack constructor.
 * Builds a pipeline that resizes every uploaded image into three
 * fixed sizes using a Sharp funclet, writing the PNG results to a
 * destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline resizing images.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket monitored for uploaded images.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket receiving the resized images.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage shared by the pipeline middlewares.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // The image resizing operation using the Sharp middleware
  // implements a funclet yielding the different sizes of images.
  // NOTE(review): this generator is serialized and executed remotely
  // by the middleware — its token stream must stay self-contained.
  const imageResize = new SharpImageTransform.Builder()
    .withScope(this)
    .withIdentifier('SharpTransform')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withSharpTransforms(async function*(event: CloudEvent, sharp: SharpFunction) {
      // The target sizes produced for each input image.
      const sizes = [
        { width: 100, height: 100 },
        { width: 200, height: 200 },
        { width: 300, height: 300 }
      ];
      // Load the image in memory.
      const buffer = await event.data().document().data().asBuffer();
      // Resize the image to the specified sizes,
      // yielding one PNG per size.
      for (const size of sizes) {
        yield sharp(buffer)
          .resize(size.width, size.height)
          .png();
      }
    })
    .build();

  // Write the results to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('S3StorageConnector')
    .withCacheStorage(cache)
    .withDestinationBucket(destination)
    .withSource(imageResize)
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/image-resize-pipeline/stack.ts#L43-L133
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageTransformsStack.constructor
/**
 * Stack constructor.
 * Builds a pipeline applying a fixed Sharp transform chain
 * (resize to 500px width, grayscale, vertical flip, PNG output)
 * to every uploaded image.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline applying transformations on images.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket monitored for uploaded images.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket receiving the transformed images.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage shared by the pipeline middlewares.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // Create a Sharp transform which will :
  // - Resize images to a width of 500px
  // - Grayscale images
  // - Flip images
  // - Convert images to PNG
  const imageTransform = new SharpImageTransform.Builder()
    .withScope(this)
    .withIdentifier('SharpTransform')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withSharpTransforms(
      sharp()
        .resize(500)
        .grayscale()
        .flip()
        .png()
    )
    .build();

  // Write the results to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('S3StorageConnector')
    .withCacheStorage(cache)
    .withDestinationBucket(destination)
    .withSource(imageTransform)
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/image-transforms-pipeline/stack.ts#L43-L126
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
ImageWatermarkingPipeline.constructor
/**
 * Stack constructor.
 * Builds a pipeline that composites a watermark image (deployed under
 * the `watermark/` prefix of the source bucket) onto every image
 * uploaded under the `images/` prefix.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline applying watermarks on images.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket holding both the input images and the watermark.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket receiving the watermarked images.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage shared by the pipeline middlewares.
  const cache = new CacheStorage(this, 'Cache', {});

  // We upload the watermark image in the source S3 bucket under
  // the `watermark/` prefix.
  new s3deploy.BucketDeployment(this, 'DeployWatermark', {
    sources: [s3deploy.Source.asset('./assets/watermark/')],
    destinationBucket: source,
    destinationKeyPrefix: 'watermark/'
  });

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  // Only the `images/` prefix is monitored, so the watermark
  // deployment itself does not re-trigger the pipeline.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket({
      bucket: source,
      prefix: 'images/'
    })
    .build();

  // Create a Sharp transform that will apply a watermark
  // on input images.
  const imageTransform = new SharpImageTransform.Builder()
    .withScope(this)
    .withIdentifier('SharpTransform')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withSharpTransforms(
      sharp()
        .resize(500)
        .composite([{
          input: r.reference(
            r.url(`s3://${source.bucketName}/watermark/watermark.png`)
          )
        }])
        .png()
    )
    .build();

  // We grant the Sharp middleware read access to the
  // input bucket, such that it can have access to the watermark image.
  source.grantRead(imageTransform);

  // Write the results to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('S3StorageConnector')
    .withCacheStorage(cache)
    .withDestinationBucket(destination)
    .withSource(imageTransform)
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/image-watermarking-pipeline/stack.ts#L45-L140
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
LaplacianVariancePipeline.constructor
/**
 * Stack constructor.
 * Builds a pipeline that computes the Laplacian variance of uploaded
 * images (a blur metric) and routes each image to a `sharp` or
 * `blurry` bucket depending on whether the variance reaches
 * `THRESHOLD` (declared elsewhere in this file).
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline computing the Laplacian variance of images.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket monitored for uploaded images.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The sharp images bucket.
  const sharp = new s3.Bucket(this, 'SharpBucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The blurry images bucket.
  const blurry = new s3.Bucket(this, 'BlurryBucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage shared by the pipeline middlewares.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // Compute the Laplacian variance of the images.
  const laplacian = new LaplacianImageProcessor.Builder()
    .withScope(this)
    .withIdentifier('LaplacianImageProcessor')
    .withCacheStorage(cache)
    .withSource(trigger)
    // Optionally specify the depth level.
    .withDepth(Depth.CV_64F)
    .build();

  // Write the sharp images to the destination bucket
  // when the computed variance is at least THRESHOLD.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('SharpStorageConnector')
    .withCacheStorage(cache)
    .withSource(laplacian, when('data.metadata.properties.attrs.variance').gte(THRESHOLD))
    .withDestinationBucket(sharp)
    .build();

  // Write the blurry images to the destination bucket
  // when the computed variance is below THRESHOLD.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('BlurryStorageConnector')
    .withCacheStorage(cache)
    .withSource(laplacian, when('data.metadata.properties.attrs.variance').lt(THRESHOLD))
    .withDestinationBucket(blurry)
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the sharp bucket information in the console.
  new cdk.CfnOutput(this, 'SharpBucketName', {
    description: 'The name of the sharp bucket destination.',
    value: sharp.bucketName
  });

  // Display the blurry bucket information in the console.
  new cdk.CfnOutput(this, 'BlurryBucketName', {
    description: 'The name of the blurry bucket destination.',
    value: blurry.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/laplacian-variance-pipeline/stack.ts#L49-L147
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
TitanImageBackgroundRemovalStack.constructor
/**
 * Stack constructor.
 * Builds a pipeline that resizes uploaded images for compatibility
 * with the Amazon Titan model, removes their background using the
 * Titan Image Generator v2, and stores the results in a destination
 * bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline demonstrating how to remove image backgrounds using the Amazon Titan model.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket monitored for uploaded images.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket receiving the generated images.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage shared by the pipeline middlewares.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  trigger
    .pipe(
      // Ensure the input image dimensions are compatible with the
      // dimensions expected by the Titan model.
      new SharpImageTransform.Builder()
        .withScope(this)
        .withIdentifier('ImageTransform')
        .withCacheStorage(cache)
        .withSharpTransforms(
          sharp()
            .resize(800)
            .png()
        )
        .build()
    )
    // Remove the background from images.
    .pipe(
      new TitanImageGenerator.Builder()
        .withScope(this)
        .withIdentifier('TitanImageGenerator')
        .withCacheStorage(cache)
        // The model is invoked in us-east-1.
        .withRegion('us-east-1')
        .withImageModel(TitanImageModel.TITAN_IMAGE_GENERATOR_V2)
        .withTask(new BackgroundRemovalTask.Builder()
          // We reference the current document.
          .withImage(r.reference(r.document()))
          .build())
        .build()
    )
    // Write the results to the destination bucket.
    .pipe(
      new S3StorageConnector.Builder()
        .withScope(this)
        .withIdentifier('S3StorageConnector')
        .withCacheStorage(cache)
        .withDestinationBucket(destination)
        .build()
    );

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/titan-image-background-removal/stack.ts#L49-L143
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
TitanObjectRemovalPipeline.constructor
/**
 * Stack constructor.
 * Builds a pipeline that normalizes uploaded images to 1024x1024 PNGs
 * and uses Amazon Bedrock's Titan image inpainting (mask prompt "cat",
 * no textual prompt) to remove matching objects before storing the
 * results.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline using Amazon Bedrock and Amazon Titan to perform object removal in images.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket monitored for uploaded images.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket receiving the generated images.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage shared by the pipeline middlewares.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded images.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  trigger
    .pipe(
      // Ensure the input image dimensions are compatible with the
      // dimensions expected by the Titan model. We resize the image
      // to 1024x1024, and convert it to PNG.
      new SharpImageTransform.Builder()
        .withScope(this)
        .withIdentifier('ImageTransform')
        .withCacheStorage(cache)
        .withSharpTransforms(
          sharp()
            .resize({
              width: 1024,
              height: 1024,
              fit: 'contain'
            })
            .png()
        )
        .build()
    )
    .pipe(
      // Remove cats from the image using an inpainting task,
      // and not providing any textual prompt.
      new TitanImageGenerator.Builder()
        .withScope(this)
        .withIdentifier('ImageGenerator')
        .withCacheStorage(cache)
        // The model is invoked in us-east-1.
        .withRegion('us-east-1')
        .withTask(new ImageInpaintingTask.Builder()
          .withMaskPrompt('cat')
          .build()
        )
        .build()
    )
    .pipe(
      // Store the generated images in the destination bucket.
      new S3StorageConnector.Builder()
        .withScope(this)
        .withIdentifier('Storage')
        .withCacheStorage(cache)
        .withDestinationBucket(destination)
        .build()
    );

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/image-processing-pipelines/titan-object-removal-pipeline/stack.ts#L49-L144
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
EncryptionPipelineStack.constructor
/**
 * Stack constructor.
 * Builds a KMS-encrypted pipeline: one customer managed key encrypts
 * the S3 buckets, a second one encrypts the data handled by the
 * middlewares (SQS queues, SNS topics, local storage). Documents are
 * converted with Pandoc and written to the destination bucket.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline using KMS to encrypt a pipeline.',
    ...env
  });

  // The customer managed key (CMK) used to encrypt
  // the storage buckets.
  const bucketKey = new kms.Key(this, 'BucketKey', {
    enableKeyRotation: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY
  });

  // The customer managed key (CMK) used to encrypt the data
  // handled by middlewares (i.e SQS queues, SNS topics, local storage).
  const key = new kms.Key(this, 'Key', {
    enableKeyRotation: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket, encrypted with the bucket CMK.
  const source = new s3.Bucket(this, 'Bucket', {
    encryptionKey: bucketKey,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket, encrypted with the bucket CMK.
  const destination = new s3.Bucket(this, 'Destination', {
    encryptionKey: bucketKey,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage, encrypted with the middleware CMK.
  const cache = new CacheStorage(this, 'Cache', {
    encryptionKey: key
  });

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .withKmsKey(key)
    .build();

  // Convert input documents to specific formats
  // using Pandoc.
  const pandoc = new PandocTextConverter.Builder()
    .withScope(this)
    .withIdentifier('Pandoc')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withKmsKey(key)
    .withConversions(
      from('markdown').to('html'),
      from('docx').to('html')
    )
    .build();

  // Write the results to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('Storage')
    .withCacheStorage(cache)
    .withDestinationBucket(destination)
    .withSource(pandoc)
    .withKmsKey(key)
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/security-hardened-pipelines/pipeline-encryption/stack.ts#L46-L142
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
VpcPrivatePipeline.constructor
/**
 * Stack constructor.
 * Builds a pipeline entirely deployed within a private VPC: detects
 * faces with Amazon Rekognition, pixelates them and highlights facial
 * landmarks with the image layer processor, then stores the results.
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline deployed within a private VPC.',
    ...env
  });

  // The VPC in which the EFS cache for the KeyBERT model will be deployed.
  const vpc = this.createVpc('Vpc');

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket monitored for uploaded images.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The destination bucket receiving the processed images.
  const destination = new s3.Bucket(this, 'Destination', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage shared by the pipeline middlewares.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects. Each middleware below is placed
  // inside the private VPC via `withVpc`.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .withVpc(vpc)
    .build();

  // The Rekognition image processor will
  // identify faces in processed images.
  const rekognition = new RekognitionImageProcessor.Builder()
    .withScope(this)
    .withIdentifier('Rekognition')
    .withCacheStorage(cache)
    .withSource(trigger)
    .withVpc(vpc)
    .withIntent(
      r.detect()
        .faces(r.confidence(80))
    )
    .build();

  // Create a blurring processor that will blur
  // faces in detected images by the Rekognition processor.
  const layerProcessor = new ImageLayerProcessor.Builder()
    .withScope(this)
    .withIdentifier('ImageLayer')
    .withCacheStorage(cache)
    .withSource(rekognition)
    .withVpc(vpc)
    .withLayers(
      l.pixelate(l.faces()),
      l.highlight(l.landmarks())
    )
    .build();

  // Write the results to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('Storage')
    .withCacheStorage(cache)
    .withDestinationBucket(destination)
    .withSource(layerProcessor)
    .withVpc(vpc)
    .build();

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });

  // Display the destination bucket information in the console.
  new cdk.CfnOutput(this, 'DestinationBucketName', {
    description: 'The name of the destination bucket.',
    value: destination.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/security-hardened-pipelines/vpc-private-pipeline/stack.ts#L47-L144
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
VpcPrivatePipeline.createVpc
private createVpc(id: string): ec2.IVpc { const vpc = new ec2.Vpc(this, id, { enableDnsSupport: true, enableDnsHostnames: true, ipAddresses: ec2.IpAddresses.cidr('10.0.0.0/20'), maxAzs: 1, natGateways: 0, subnetConfiguration: [{ // Used by KeyBERT containers. name: 'private', subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, cidrMask: 24 }] }); // Add a CloudWatch VPC endpoint to the VPC. vpc.addInterfaceEndpoint('CloudWatchEndpoint', { service: ec2.InterfaceVpcEndpointAwsService.CLOUDWATCH_LOGS }); // Add an S3 VPC endpoint to the VPC. vpc.addGatewayEndpoint('S3Endpoint', { service: ec2.GatewayVpcEndpointAwsService.S3 }); // Add an SQS VPC endpoint to the VPC. vpc.addInterfaceEndpoint('SQSEndpoint', { service: ec2.InterfaceVpcEndpointAwsService.SQS }); // Add an SNS VPC endpoint to the VPC. vpc.addInterfaceEndpoint('SNSEndpoint', { service: ec2.InterfaceVpcEndpointAwsService.SNS }); // Add a Rekognition VPC endpoint to the VPC. vpc.addInterfaceEndpoint('RekognitionEndpoint', { service: ec2.InterfaceVpcEndpointAwsService.REKOGNITION }); return (vpc); }
/** * @param id the VPC identifier. * @returns a new VPC with a public, private and isolated * subnets for the pipeline. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/security-hardened-pipelines/vpc-private-pipeline/stack.ts#L151-L192
4285173e80584eedfc1a8424d3d1b6c1a7038088
project-lakechain
github_2023
awslabs
typescript
MultiOutputPipeline.constructor
/**
 * Stack constructor.
 * Builds a pipeline that converts PDF documents to text and fans the
 * results out to three different connectors: an S3 bucket, an SQS
 * queue, and an Amazon Data Firehose delivery stream (created by the
 * sibling `createDeliveryStream` method).
 * @param scope the construct scope.
 * @param id the construct identifier.
 * @param env the CDK stack properties.
 */
constructor(scope: Construct, id: string, env: cdk.StackProps) {
  super(scope, id, {
    description: 'A pipeline forwarding its results to multiple AWS services.',
    ...env
  });

  ///////////////////////////////////////////
  ///////         S3 Storage          ///////
  ///////////////////////////////////////////

  // The source bucket monitored for uploaded documents.
  const source = new s3.Bucket(this, 'Bucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // The cache storage shared by the pipeline middlewares.
  const cache = new CacheStorage(this, 'Cache', {});

  ///////////////////////////////////////////
  ///////     Lakechain Pipeline      ///////
  ///////////////////////////////////////////

  // Create the S3 trigger monitoring the bucket
  // for uploaded objects.
  const trigger = new S3EventTrigger.Builder()
    .withScope(this)
    .withIdentifier('Trigger')
    .withCacheStorage(cache)
    .withBucket(source)
    .build();

  // Convert PDF documents to text.
  const pdfConverter = new PdfTextConverter.Builder()
    .withScope(this)
    .withIdentifier('PdfConverter')
    .withCacheStorage(cache)
    .withSource(trigger)
    .build();

  /////////////////////////////////////////////
  ///////     S3 Storage Connector      ///////
  /////////////////////////////////////////////

  // The S3 Storage Connector bucket.
  const s3StorageConnectorBucket = new s3.Bucket(this, 'S3StorageConnectorBucket', {
    encryption: s3.BucketEncryption.S3_MANAGED,
    blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
    autoDeleteObjects: true,
    removalPolicy: cdk.RemovalPolicy.DESTROY,
    enforceSSL: true
  });

  // Write the results to the destination bucket.
  new S3StorageConnector.Builder()
    .withScope(this)
    .withIdentifier('S3StorageConnector')
    .withCacheStorage(cache)
    .withDestinationBucket(s3StorageConnectorBucket)
    .withSource(pdfConverter)
    .build();

  // Display the S3 Storage Connector bucket information in the console.
  new cdk.CfnOutput(this, 'S3StorageConnectorBucketName', {
    description: 'The name of the S3 Storage Connector bucket.',
    value: s3StorageConnectorBucket.bucketName
  });

  //////////////////////////////////////////////
  ///////     SQS Storage Connector      ///////
  //////////////////////////////////////////////

  // The SQS Storage Connector queue.
  const sqsStorageConnectorQueue = new sqs.Queue(this, 'SQSStorageConnectorQueue', {
    encryption: sqs.QueueEncryption.SQS_MANAGED,
    removalPolicy: cdk.RemovalPolicy.DESTROY
  });

  // Write the results to the destination queue.
  new SqsStorageConnector.Builder()
    .withScope(this)
    .withIdentifier('SQSStorageConnector')
    .withCacheStorage(cache)
    .withDestinationQueue(sqsStorageConnectorQueue)
    .withSource(pdfConverter)
    .build();

  // Display the SQS Storage Connector queue information in the console.
  new cdk.CfnOutput(this, 'SQSStorageConnectorQueueUrl', {
    description: 'The URL of the SQS Storage Connector queue.',
    value: sqsStorageConnectorQueue.queueUrl
  });

  ///////////////////////////////////////////////
  ///////     Firehose Storage Connector  ///////
  ///////////////////////////////////////////////

  // Create the Firehose delivery stream.
  const { deliveryStream, firehoseStorageConnectorBucket } = this.createDeliveryStream();

  // Write the results to the destination delivery stream.
  new FirehoseStorageConnector.Builder()
    .withScope(this)
    .withIdentifier('FirehoseStorageConnector')
    .withCacheStorage(cache)
    .withDestinationStream(deliveryStream)
    .withSource(pdfConverter)
    .build();

  // Display the Firehose Storage Connector bucket information in the console.
  new cdk.CfnOutput(this, 'FirehoseStorageConnectorBucketName', {
    description: 'The name of the Firehose Storage Connector bucket.',
    value: firehoseStorageConnectorBucket.bucketName
  });

  // Display the source bucket information in the console.
  new cdk.CfnOutput(this, 'SourceBucketName', {
    description: 'The name of the source bucket.',
    value: source.bucketName
  });
}
/** * Stack constructor. */
https://github.com/awslabs/project-lakechain/blob/4285173e80584eedfc1a8424d3d1b6c1a7038088/examples/simple-pipelines/storage-connector-pipeline/stack.ts#L57-L180
4285173e80584eedfc1a8424d3d1b6c1a7038088