repo_name
string
dataset
string
owner
string
lang
string
func_name
string
code
string
docstring
string
url
string
sha
string
openai-deno-build
github_2023
openai
typescript
FileBatches.uploadAndPoll
async uploadAndPoll( vectorStoreId: string, { files, fileIds = [] }: { files: Uploadable[]; fileIds?: string[] }, options?: Core.RequestOptions & { pollIntervalMs?: number; maxConcurrency?: number; }, ): Promise<VectorStoreFileBatch> { if (files == null || files.length == 0) { throw new Error( `No \`files\` provided to process. If you've already uploaded files you should use \`.createAndPoll()\` instead`, ); } const configuredConcurrency = options?.maxConcurrency ?? 5; // We cap the number of workers at the number of files (so we don't start any unnecessary workers) const concurrencyLimit = Math.min(configuredConcurrency, files.length); const client = this._client; const fileIterator = files.values(); const allFileIds: string[] = [...fileIds]; // This code is based on this design. The libraries don't accommodate our environment limits. // https://stackoverflow.com/questions/40639432/what-is-the-best-way-to-limit-concurrency-when-using-es6s-promise-all async function processFiles(iterator: IterableIterator<Uploadable>) { for (let item of iterator) { const fileObj = await client.files.create({ file: item, purpose: "assistants", }, options); allFileIds.push(fileObj.id); } } // Start workers to process results const workers = Array(concurrencyLimit).fill(fileIterator).map( processFiles, ); // Wait for all processing to complete. await allSettledWithThrow(workers); return await this.createAndPoll(vectorStoreId, { file_ids: allFileIds, }); }
/** * Uploads the given files concurrently and then creates a vector store file batch. * * The concurrency limit is configurable using the `maxConcurrency` parameter. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/file-batches.ts#L172-L218
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
processFiles
// Worker: drains the shared iterator, uploading each file to the Files API
// with purpose "assistants" and recording the id of each created file object.
async function processFiles(iterator: IterableIterator<Uploadable>) {
  let next = iterator.next();
  while (!next.done) {
    const created = await client.files.create(
      { file: next.value, purpose: "assistants" },
      options,
    );
    allFileIds.push(created.id);
    next = iterator.next();
  }
}
// This code is based on this design. The libraries don't accommodate our environment limits.
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/file-batches.ts#L197-L205
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Files.create
/**
 * Creates a vector store file by attaching an existing File to a vector store.
 *
 * @param vectorStoreId - The vector store to attach the file to.
 * @param body - Creation parameters (e.g. the file id to attach).
 * @param options - Optional per-request overrides.
 */
create(
  vectorStoreId: string,
  body: FileCreateParams,
  options?: Core.RequestOptions,
): Core.APIPromise<VectorStoreFile> {
  // The Assistants v2 beta header is required for vector-store endpoints.
  const headers = { "OpenAI-Beta": "assistants=v2", ...options?.headers };
  return this._client.post(`/vector_stores/${vectorStoreId}/files`, {
    body,
    ...options,
    headers,
  });
}
/** * Create a vector store file by attaching a * [File](https://platform.openai.com/docs/api-reference/files) to a * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/files.ts#L16-L26
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Files.retrieve
/**
 * Retrieves a single vector store file.
 *
 * @param vectorStoreId - The vector store the file belongs to.
 * @param fileId - The file to fetch.
 * @param options - Optional per-request overrides.
 */
retrieve(
  vectorStoreId: string,
  fileId: string,
  options?: Core.RequestOptions,
): Core.APIPromise<VectorStoreFile> {
  const path = `/vector_stores/${vectorStoreId}/files/${fileId}`;
  return this._client.get(path, {
    ...options,
    headers: { "OpenAI-Beta": "assistants=v2", ...options?.headers },
  });
}
/** * Retrieves a vector store file. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/files.ts#L31-L40
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Files.del
/**
 * Removes a file from a vector store. The underlying File object itself is
 * NOT deleted — use the files delete endpoint for that.
 *
 * @param vectorStoreId - The vector store to remove the file from.
 * @param fileId - The file to detach.
 * @param options - Optional per-request overrides.
 */
del(
  vectorStoreId: string,
  fileId: string,
  options?: Core.RequestOptions,
): Core.APIPromise<VectorStoreFileDeleted> {
  const path = `/vector_stores/${vectorStoreId}/files/${fileId}`;
  const headers = { "OpenAI-Beta": "assistants=v2", ...options?.headers };
  return this._client.delete(path, { ...options, headers });
}
/** * Delete a vector store file. This will remove the file from the vector store but * the file itself will not be deleted. To delete the file, use the * [delete file](https://platform.openai.com/docs/api-reference/files/delete) * endpoint. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/files.ts#L79-L91
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Files.createAndPoll
/**
 * Attaches a file to the given vector store and waits until the attachment
 * has finished processing.
 *
 * @param vectorStoreId - The target vector store.
 * @param body - Creation parameters for the vector store file.
 * @param options - Request options plus an optional `pollIntervalMs`.
 */
async createAndPoll(
  vectorStoreId: string,
  body: FileCreateParams,
  options?: Core.RequestOptions & { pollIntervalMs?: number },
): Promise<VectorStoreFile> {
  const created = await this.create(vectorStoreId, body, options);
  return this.poll(vectorStoreId, created.id, options);
}
/** * Attach a file to the given vector store and wait for it to be processed. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/files.ts#L96-L103
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Files.poll
/**
 * Wait for the vector store file to finish processing.
 *
 * Note: this returns even if the file failed to process — callers must check
 * `file.last_error` and `file.status` to handle that case.
 *
 * @param vectorStoreId - The vector store the file belongs to.
 * @param fileId - The file being processed.
 * @param options - Request options plus an optional `pollIntervalMs` override.
 */
async poll(
  vectorStoreId: string,
  fileId: string,
  options?: Core.RequestOptions & { pollIntervalMs?: number },
): Promise<VectorStoreFile> {
  // Mark requests as coming from a polling helper so the server can
  // distinguish them; a custom interval is also advertised when supplied.
  const headers: { [key: string]: string } = {
    ...options?.headers,
    "X-Stainless-Poll-Helper": "true",
  };
  if (options?.pollIntervalMs) {
    headers["X-Stainless-Custom-Poll-Interval"] = options.pollIntervalMs
      .toString();
  }
  while (true) {
    const fileResponse = await this.retrieve(vectorStoreId, fileId, {
      ...options,
      headers,
    }).withResponse();
    const file = fileResponse.data;
    switch (file.status) {
      case "in_progress":
        // Sleep-interval priority: explicit option > server hint header
        // ("openai-poll-after-ms") > 5 s default.
        let sleepInterval = 5000;
        if (options?.pollIntervalMs) {
          sleepInterval = options.pollIntervalMs;
        } else {
          const headerInterval = fileResponse.response.headers.get(
            "openai-poll-after-ms",
          );
          if (headerInterval) {
            const headerIntervalMs = parseInt(headerInterval);
            if (!isNaN(headerIntervalMs)) {
              sleepInterval = headerIntervalMs;
            }
          }
        }
        await sleep(sleepInterval);
        break;
      case "failed":
      case "completed":
        // NOTE(review): a status outside these three cases would re-poll
        // immediately with no sleep — confirm the status union cannot contain
        // other values (e.g. "cancelled").
        return file;
    }
  }
}
/** * Wait for the vector store file to finish processing. * * Note: this will return even if the file failed to process, you need to check * file.last_error and file.status to handle these cases */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/files.ts#L111-L156
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Files.upload
/**
 * Uploads a file to the `files` API and then attaches it to the given vector
 * store. The attachment is processed asynchronously; use the polling helper
 * to wait for completion.
 *
 * @param vectorStoreId - The target vector store.
 * @param file - The file content to upload.
 * @param options - Optional per-request overrides.
 */
async upload(
  vectorStoreId: string,
  file: Uploadable,
  options?: Core.RequestOptions,
): Promise<VectorStoreFile> {
  const uploaded = await this._client.files.create(
    { file, purpose: "assistants" },
    options,
  );
  return this.create(vectorStoreId, { file_id: uploaded.id }, options);
}
/** * Upload a file to the `files` API and then attach it to the given vector store. * * Note the file will be asynchronously processed (you can use the alternative * polling helper method to wait for processing to complete). */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/files.ts#L164-L174
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Files.uploadAndPoll
/**
 * Uploads a file, attaches it to the vector store, and polls until
 * processing is complete.
 *
 * @param vectorStoreId - The target vector store.
 * @param file - The file content to upload.
 * @param options - Request options plus an optional `pollIntervalMs`.
 */
async uploadAndPoll(
  vectorStoreId: string,
  file: Uploadable,
  options?: Core.RequestOptions & { pollIntervalMs?: number },
): Promise<VectorStoreFile> {
  const attached = await this.upload(vectorStoreId, file, options);
  return this.poll(vectorStoreId, attached.id, options);
}
/** * Add a file to a vector store and poll until processing is complete. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/files.ts#L179-L186
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
VectorStores.create
/**
 * Creates a new vector store.
 *
 * @param body - Vector store creation parameters.
 * @param options - Optional per-request overrides.
 */
create(
  body: VectorStoreCreateParams,
  options?: Core.RequestOptions,
): Core.APIPromise<VectorStore> {
  const headers = { "OpenAI-Beta": "assistants=v2", ...options?.headers };
  return this._client.post("/vector_stores", { body, ...options, headers });
}
/** * Create a vector store. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/vector-stores.ts#L20-L29
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
VectorStores.retrieve
/**
 * Retrieves a vector store by id.
 *
 * @param vectorStoreId - The vector store to fetch.
 * @param options - Optional per-request overrides.
 */
retrieve(
  vectorStoreId: string,
  options?: Core.RequestOptions,
): Core.APIPromise<VectorStore> {
  const headers = { "OpenAI-Beta": "assistants=v2", ...options?.headers };
  return this._client.get(`/vector_stores/${vectorStoreId}`, {
    ...options,
    headers,
  });
}
/** * Retrieves a vector store. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/vector-stores.ts#L34-L42
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
VectorStores.update
/**
 * Modifies an existing vector store.
 *
 * @param vectorStoreId - The vector store to modify.
 * @param body - Fields to update.
 * @param options - Optional per-request overrides.
 */
update(
  vectorStoreId: string,
  body: VectorStoreUpdateParams,
  options?: Core.RequestOptions,
): Core.APIPromise<VectorStore> {
  const path = `/vector_stores/${vectorStoreId}`;
  return this._client.post(path, {
    body,
    ...options,
    headers: { "OpenAI-Beta": "assistants=v2", ...options?.headers },
  });
}
/** * Modifies a vector store. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/vector-stores.ts#L47-L57
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
VectorStores.del
/**
 * Deletes a vector store.
 *
 * @param vectorStoreId - The vector store to delete.
 * @param options - Optional per-request overrides.
 */
del(
  vectorStoreId: string,
  options?: Core.RequestOptions,
): Core.APIPromise<VectorStoreDeleted> {
  const path = `/vector_stores/${vectorStoreId}`;
  const headers = { "OpenAI-Beta": "assistants=v2", ...options?.headers };
  return this._client.delete(path, { ...options, headers });
}
/** * Delete a vector store. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/beta/vector-stores/vector-stores.ts#L86-L94
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Jobs.create
/**
 * Enqueues a fine-tuning job that creates a new model from a given dataset.
 * The response includes the job's status and, once complete, the names of
 * the fine-tuned models.
 *
 * @param body - Fine-tuning job parameters.
 * @param options - Optional per-request overrides.
 */
create(
  body: JobCreateParams,
  options?: Core.RequestOptions,
): Core.APIPromise<FineTuningJob> {
  const requestOptions = { body, ...options };
  return this._client.post("/fine_tuning/jobs", requestOptions);
}
/** * Creates a fine-tuning job which begins the process of creating a new model from * a given dataset. * * Response includes details of the enqueued job including job status and the name * of the fine-tuned models once complete. * * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/fine-tuning/jobs/jobs.ts#L24-L29
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Jobs.retrieve
/**
 * Fetches info about a fine-tuning job.
 *
 * @param fineTuningJobId - The job to fetch.
 * @param options - Optional per-request overrides.
 */
retrieve(
  fineTuningJobId: string,
  options?: Core.RequestOptions,
): Core.APIPromise<FineTuningJob> {
  const path = `/fine_tuning/jobs/${fineTuningJobId}`;
  return this._client.get(path, options);
}
/** * Get info about a fine-tuning job. * * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/fine-tuning/jobs/jobs.ts#L36-L41
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Jobs.cancel
/**
 * Immediately cancels a fine-tuning job.
 *
 * @param fineTuningJobId - The job to cancel.
 * @param options - Optional per-request overrides.
 */
cancel(
  fineTuningJobId: string,
  options?: Core.RequestOptions,
): Core.APIPromise<FineTuningJob> {
  const path = `/fine_tuning/jobs/${fineTuningJobId}/cancel`;
  return this._client.post(path, options);
}
/** * Immediately cancel a fine-tune job. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/fine-tuning/jobs/jobs.ts#L69-L77
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Parts.create
/**
 * Adds a Part (a chunk of bytes) to an Upload. Parts may be added in
 * parallel; their final order is decided when the Upload is completed.
 *
 * @param uploadId - The Upload to append to.
 * @param body - The part's byte content.
 * @param options - Optional per-request overrides.
 */
create(
  uploadId: string,
  body: PartCreateParams,
  options?: Core.RequestOptions,
): Core.APIPromise<UploadPart> {
  // Parts carry raw bytes, so the request is encoded as multipart form data.
  const formOptions = Core.multipartFormRequestOptions({ body, ...options });
  return this._client.post(`/uploads/${uploadId}/parts`, formOptions);
}
/** * Adds a * [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an * [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object. * A Part represents a chunk of bytes from the file you are trying to upload. * * Each Part can be at most 64 MB, and you can add Parts until you hit the Upload * maximum of 8 GB. * * It is possible to add multiple Parts in parallel. You can decide the intended * order of the Parts when you * [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete). */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/uploads/parts.ts#L21-L30
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Uploads.create
/**
 * Creates an intermediate Upload object that Parts can be added to. Once
 * completed, the Upload yields a regular File object containing all parts.
 *
 * @param body - Upload creation parameters (size, purpose, mime type, ...).
 * @param options - Optional per-request overrides.
 */
create(
  body: UploadCreateParams,
  options?: Core.RequestOptions,
): Core.APIPromise<Upload> {
  const requestOptions = { body, ...options };
  return this._client.post("/uploads", requestOptions);
}
/** * Creates an intermediate * [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object * that you can add * [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to. * Currently, an Upload can accept at most 8 GB in total and expires after an hour * after you create it. * * Once you complete the Upload, we will create a * [File](https://platform.openai.com/docs/api-reference/files/object) object that * contains all the parts you uploaded. This File is usable in the rest of our * platform as a regular File object. * * For certain `purpose`s, the correct `mime_type` must be specified. Please refer * to documentation for the supported MIME types for your use case: * * - [Assistants](https://platform.openai.com/docs/assistants/tools/file-search/supported-files) * * For guidance on the proper filename extensions for each purpose, please follow * the documentation on * [creating a File](https://platform.openai.com/docs/api-reference/files/create). */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/uploads/uploads.ts#L34-L39
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Uploads.cancel
/**
 * Cancels an Upload. No Parts may be added after cancellation.
 *
 * @param uploadId - The Upload to cancel.
 * @param options - Optional per-request overrides.
 */
cancel(
  uploadId: string,
  options?: Core.RequestOptions,
): Core.APIPromise<Upload> {
  const path = `/uploads/${uploadId}/cancel`;
  return this._client.post(path, options);
}
/** * Cancels the Upload. No Parts may be added after an Upload is cancelled. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/uploads/uploads.ts#L44-L49
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
openai-deno-build
github_2023
openai
typescript
Uploads.complete
/**
 * Completes an Upload, producing a nested File object ready for use. The
 * part order can be chosen via an ordered list of Part ids in `body`, and
 * the total uploaded bytes must match the size declared at creation.
 *
 * @param uploadId - The Upload to complete.
 * @param body - Completion parameters (e.g. ordered part ids).
 * @param options - Optional per-request overrides.
 */
complete(
  uploadId: string,
  body: UploadCompleteParams,
  options?: Core.RequestOptions,
): Core.APIPromise<Upload> {
  const path = `/uploads/${uploadId}/complete`;
  return this._client.post(path, { body, ...options });
}
/** * Completes the * [Upload](https://platform.openai.com/docs/api-reference/uploads/object). * * Within the returned Upload object, there is a nested * [File](https://platform.openai.com/docs/api-reference/files/object) object that * is ready to use in the rest of the platform. * * You can specify the order of the Parts by passing in an ordered list of the Part * IDs. * * The number of bytes uploaded upon completion must match the number of bytes * initially specified when creating the Upload object. No Parts may be added after * an Upload is completed. */
https://github.com/openai/openai-deno-build/blob/28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe/resources/uploads/uploads.ts#L66-L75
28ffdaa2c107c98db5eec50cf284f4fd37ad8fbe
explorer-kit
github_2023
solana-fm
typescript
patchIdlByAppending
/**
 * Applies an APPEND-type patch to an IDL: for every patch key that already
 * exists on the IDL as an array, the patch entries are concatenated onto it.
 * The original IDL object is not mutated; a shallow copy is returned.
 * Returns the input unchanged when there is nothing to patch.
 */
const patchIdlByAppending = (patch: Patch, idl?: AnchorIdl | ShankIdl | string) => {
  // Nothing to do: missing IDL, string-form IDL, or a non-append patch.
  if (!idl || checkIdlIsString(idl) || patch.type !== PatchType.APPEND) {
    return idl;
  }
  const patched = Object.assign({}, idl);
  for (const key of Object.keys(patch.patch)) {
    // Only keys that already exist on the IDL as arrays are extended;
    // everything else is silently skipped.
    if (key in patched) {
      const current = patched[key as keyof typeof patched];
      if (Array.isArray(current)) {
        patched[key as keyof typeof patched] = current.concat(
          patch.patch[key as keyof typeof patch.patch]
        );
      }
    }
  }
  return patched;
};
// TODO: Remember to check for the slots deployed when patching the IDL.
https://github.com/solana-fm/explorer-kit/blob/0b67fd478284659cdf46d0f9d34715ec45d6dc62/packages/explorerkit-idls/src/idls-patcher/patcher.ts#L40-L63
0b67fd478284659cdf46d0f9d34715ec45d6dc62
explorer-kit
github_2023
solana-fm
typescript
SolanaFMParser.constructor
/**
 * Creates a parser instance bound to a specific IDL and program.
 * @param idl The IDL used to drive parsing.
 * @param programHash The program hash.
 * @param accountHash The (optional) account hash.
 */
constructor(idl: IdlItem, programHash: string, accountHash?: string) {
  this.accountHash = accountHash;
  this.programHash = programHash;
  this.idlItem = idl;
}
/** * Creates a new instance of the FMClient class. * @param idl The IDL that's to be used for initializing the client. * @param programHash The program hash. * @param accountHash The account hash. */
https://github.com/solana-fm/explorer-kit/blob/0b67fd478284659cdf46d0f9d34715ec45d6dc62/packages/explorerkit-translator/src/SolanaFMParser.ts#L45-L49
0b67fd478284659cdf46d0f9d34715ec45d6dc62
explorer-kit
github_2023
solana-fm
typescript
SolanaFMParser.createParser
/**
 * Creates a parser for the requested parser type.
 * @param parserType Which kind of parser to build.
 * @returns The parser instance, or null for an unrecognized type.
 */
public createParser(parserType: ParserType): Parser {
  if (parserType === ParserType.ACCOUNT) {
    return createAccountParser(this.idlItem, this.programHash, this.accountHash);
  }
  if (parserType === ParserType.INSTRUCTION) {
    return createInstructionParser(this.idlItem, this.programHash);
  }
  if (parserType === ParserType.EVENT) {
    return createEventParser(this.idlItem, this.programHash);
  }
  if (parserType === ParserType.ERROR) {
    return createErrorParser(this.idlItem);
  }
  // Unknown parser types yield null (the Parser type evidently admits it).
  return null;
}
/** * Creates a parser based on the specified parser type. * @param parserType The parser type. * @returns A parser instance. */
https://github.com/solana-fm/explorer-kit/blob/0b67fd478284659cdf46d0f9d34715ec45d6dc62/packages/explorerkit-translator/src/SolanaFMParser.ts#L56-L73
0b67fd478284659cdf46d0f9d34715ec45d6dc62
explorer-kit
github_2023
solana-fm
typescript
SolanaFMParser.getProgramHash
/**
 * Returns the program hash this parser was constructed with.
 */
public getProgramHash(): string {
  const { programHash } = this;
  return programHash;
}
/** * Gets the program hash. * @returns The program hash. */
https://github.com/solana-fm/explorer-kit/blob/0b67fd478284659cdf46d0f9d34715ec45d6dc62/packages/explorerkit-translator/src/SolanaFMParser.ts#L79-L81
0b67fd478284659cdf46d0f9d34715ec45d6dc62
explorer-kit
github_2023
solana-fm
typescript
KinobiTreeGenerator.constructLayout
/**
 * Builds a map of discriminant -> serializer for a single shank-generated IDL.
 * Thin public facade over the private `_constructLayout` implementation.
 *
 * @param treeGeneratorType - The type of tree generator to construct.
 * @param interfaceDiscriminantMode - When true, uses an 8-byte discriminant
 *   for the map key name instead of the IDL's discriminant value.
 * @param interfacePrefixString - Optional prefix for the interface name.
 * @returns A map keyed by discriminant with the serializer as the value.
 */
public constructLayout(
  treeGeneratorType: KinobiTreeGeneratorType = KinobiTreeGeneratorType.INSTRUCTIONS,
  interfaceDiscriminantMode: boolean = false,
  interfacePrefixString?: string
): Map<number | string, FMShankSerializer> {
  return this._constructLayout(
    treeGeneratorType,
    interfaceDiscriminantMode,
    interfacePrefixString
  );
}
/** * Constructs a map with the discriminant as the key and the serializer as the value for a single shank-generated IDL * @param treeGeneratorType - The type of tree generator to construct * @param interfaceDiscriminantMode - Uses 8-bytes discriminant for the map key name if true, otherwise uses the discriminant value from the IDL * @param interfacePrefixString - String to prefix the interface name with * @returns A map with the discriminant as the key and the serializer as the value */
https://github.com/solana-fm/explorer-kit/blob/0b67fd478284659cdf46d0f9d34715ec45d6dc62/packages/explorerkit-translator/src/helpers/KinobiTreeGenerator.ts#L153-L159
0b67fd478284659cdf46d0f9d34715ec45d6dc62
explorer-kit
github_2023
solana-fm
typescript
sanitySerializeMetadataAccount
// Attempts a normal deserialization of a Metaplex metadata account; when that
// throws, falls back to a manual field-by-field walk of the buffer so that
// partially corrupted accounts can still be decoded as far as possible.
// Based on: https://github.com/metaplex-foundation/metaplex-program-library/blob/master/token-metadata/js/src/custom/metadata-deserializer.ts
const sanitySerializeMetadataAccount = (
  accountDataBuffer: Buffer,
  accountSerializer: FMShankSerializer,
  idl: ShankIdl
) => {
  try {
    // Tries to Deserialize the account
    const decodedAccountData = accountSerializer.serializer?.deserialize(accountDataBuffer);
    if (decodedAccountData && decodedAccountData[0]) {
      return decodedAccountData[0];
    }
  } catch (error) {
    if (error) {
      // Fallback path: build per-type serializers from the IDL's TYPES layout
      // and advance a cursor through the raw buffer one field at a time.
      const typeLayouts = new KinobiTreeGenerator(idl).constructLayout(KinobiTreeGeneratorType.TYPES);
      // Initialize all the serializer for each specific type
      const keySerializer = typeLayouts.get("Key")?.serializer;
      const dataSerializer = typeLayouts.get("Data")?.serializer;
      const tokenStandardSerializer = typeLayouts.get("TokenStandard")?.serializer;
      const collectionSerializer = typeLayouts.get("Collection")?.serializer;
      const usesSerializer = typeLayouts.get("Uses")?.serializer;
      const collectionDetailsSerializer = typeLayouts.get("CollectionDetails")?.serializer;
      const programmableConfigSerializer = typeLayouts.get("ProgrammableConfig")?.serializer;
      if (
        keySerializer &&
        dataSerializer &&
        tokenStandardSerializer &&
        collectionSerializer &&
        usesSerializer &&
        collectionDetailsSerializer &&
        programmableConfigSerializer
      ) {
        let cursor = 0;
        let customStruct: { [typeName: string]: any } = {};
        // Slowly Deserialize the account till editionNonce
        // key
        const [key, _] = keySerializer.deserialize(accountDataBuffer, cursor);
        cursor += keySerializer.maxSize ?? 1;
        customStruct[typeLayouts.get("Key")!.instructionName] = key;
        // updateAuthority (fixed 32-byte public key)
        const [updateAuthority] = publicKey().deserialize(accountDataBuffer, cursor);
        cursor += 32;
        customStruct["updateAuthority"] = updateAuthority;
        // mint (fixed 32-byte public key)
        const [mint] = publicKey().deserialize(accountDataBuffer, cursor);
        cursor += 32;
        customStruct["mint"] = mint;
        // data
        // dataDelta is the final offset of the data so we have to minus to get the proper bytes read from buffer
        const [data, dataDelta] = dataSerializer.deserialize(accountDataBuffer, cursor);
        cursor += dataDelta - cursor;
        customStruct[typeLayouts.get("Data")!.instructionName] = data;
        // primarySaleHappened
        const [primarySaleHappened] = bool().deserialize(accountDataBuffer, cursor);
        cursor += 1;
        customStruct["primarySaleHappened"] = primarySaleHappened;
        // isMutable
        const [isMutable] = bool().deserialize(accountDataBuffer, cursor);
        cursor += 1;
        customStruct["isMutable"] = isMutable;
        // editionNonce
        const [editionNonce, editionNonceDelta] = option(u8()).deserialize(accountDataBuffer, cursor);
        cursor += editionNonceDelta - cursor;
        customStruct["editionNonce"] = editionNonce;
        // -----------------
        // Possibly corrupted section
        // -----------------
        // NOTE: that we avoid trying to deserialize any subsequent fields if a
        // previous one was found to be corrupted just to save work
        // tokenStandard
        const [tokenStandard, tokenDelta, tokenCorrupted] = tryReadType(
          tokenStandardSerializer,
          accountDataBuffer,
          cursor
        );
        cursor += tokenDelta;
        customStruct[typeLayouts.get("TokenStandard")!.instructionName] = tokenStandard;
        // collection
        const [collection, collectionDelta, collectionCorrupted] = tokenCorrupted
          ? [null, 1, true]
          : tryReadType(collectionSerializer, accountDataBuffer, cursor);
        cursor += collectionDelta;
        customStruct[typeLayouts.get("Collection")!.instructionName] = collection;
        // uses
        const [uses, usesDelta, usesCorrupted] =
          tokenCorrupted || collectionCorrupted
            ? [null, 1, true]
            : tryReadType(usesSerializer, accountDataBuffer, cursor);
        cursor += usesDelta;
        customStruct[typeLayouts.get("Uses")!.instructionName] = uses;
        // collection_details
        const [collectionDetails, collectionDetailsDelta] =
          tokenCorrupted || collectionCorrupted || usesCorrupted
            ? [null, 1, true]
            : tryReadType(collectionDetailsSerializer, accountDataBuffer, cursor);
        cursor += collectionDetailsDelta;
        customStruct[typeLayouts.get("CollectionDetails")!.instructionName] = collectionDetails;
        // programmable_config
        const [programmableConfig, programmableConfigDelta] =
          tokenCorrupted || collectionCorrupted || usesCorrupted
            ? [null, 1, true]
            : tryReadType(programmableConfigSerializer, accountDataBuffer, cursor);
        cursor += programmableConfigDelta;
        customStruct[typeLayouts.get("ProgrammableConfig")!.instructionName] = programmableConfig;
        return customStruct;
      }
    }
    // Manual walk was not possible (missing serializers or falsy error):
    // rethrow with the raw account bytes and program id attached for triage.
    throw new Error(`Error parsing metadata account data - ${accountDataBuffer.toString("base64")}`, {
      cause: {
        decoderError: error,
        programId: idl.metadata.address ?? "",
      },
    });
  }
  // Primary deserialization produced no usable value without throwing.
  return null;
};
// Source: https://github.com/metaplex-foundation/metaplex-program-library/blob/master/token-metadata/js/src/custom/metadata-deserializer.ts
https://github.com/solana-fm/explorer-kit/blob/0b67fd478284659cdf46d0f9d34715ec45d6dc62/packages/explorerkit-translator/src/parsers/v2/account/metadata.ts#L93-L227
0b67fd478284659cdf46d0f9d34715ec45d6dc62
explorer-kit
github_2023
solana-fm
typescript
serializeExtension
/**
 * Default Token 2022 Extension Serializer.
 *
 * Decodes an extension instruction embedded in a parent token instruction:
 * the extension's own discriminant is read from byte offset 1 (byte 0 is the
 * parent token instruction's discriminant, which is sliced off before
 * deserializing).
 *
 * @param {Idl} extensionIDL - The IDL of the extension.
 * @param {Uint8Array} dataBuffer - The data buffer containing the extension instruction data.
 * @param {boolean} [mapTypes=false] - Whether to map the data types to their names.
 * @param {string[]} [accountKeys] - The account keys associated with the instruction.
 * @returns {ExtensionTypes | null} The serialized extension instruction data, or null if no serializer matches.
 */
const serializeExtension = (
  extensionIDL: Idl,
  dataBuffer: Uint8Array,
  mapTypes?: boolean,
  accountKeys?: string[]
): ExtensionTypes | null => {
  const extensionSerializerLayout = new KinobiTreeGenerator(extensionIDL).constructLayout();
  // Extension discriminant lives at byte offset 1 of the parent instruction.
  const extensionIxDiscriminant = Buffer.from(dataBuffer).readUint8(1);
  const extensionSerializer = extensionSerializerLayout.get(extensionIxDiscriminant);
  // This is to slice the initial discriminant from the parent token instruction data
  const dataToSerialize = dataBuffer.slice(1);
  if (extensionSerializer) {
    const decodedShankData = extensionSerializer.serializer?.deserialize(dataToSerialize);
    if (decodedShankData && decodedShankData[0]) {
      // Locate the matching IDL instruction so args/accounts can be mapped.
      const filteredIdlInstruction = extensionIDL.instructions?.filter(
        (instruction) => instruction.discriminant?.value === extensionIxDiscriminant
      );
      if (mapTypes) {
        decodedShankData[0] = mapDataTypeToName(
          decodedShankData[0],
          filteredIdlInstruction[0]?.args,
          filteredIdlInstruction[0]?.discriminant
        );
      }
      if (filteredIdlInstruction.length > 0) {
        // Merge named account keys into the decoded instruction data.
        const instructionAccounts = filteredIdlInstruction[0]?.accounts;
        const mappedAccountKeys = mapMultisigAccountKeysToName(accountKeys, instructionAccounts);
        return {
          extensionInstructionName: extensionSerializer.instructionName,
          extensionInstructionData: { ...convertBNToNumberInObject(decodedShankData[0]), ...mappedAccountKeys },
        };
      }
      // No matching IDL instruction: return the decoded data without accounts.
      return {
        extensionInstructionName: extensionSerializer.instructionName,
        extensionInstructionData: convertBNToNumberInObject(decodedShankData[0]),
      };
    }
  }
  return null;
};
/** * Default Token 2022 Extension Serializer * @param {Idl} extensionIDL - The IDL of the transfer fee extension * @param {Uint8Array} dataBuffer - The data buffer containing the extension instruction data. * @param {boolean} [mapTypes=false] - Whether to map the data types to their names. * @param {string[]} [accountKeys=[]] - The account keys associated with the instruction. * @returns {ExtensionTypes | null} The serialized transfer fee extension instruction data. */
https://github.com/solana-fm/explorer-kit/blob/0b67fd478284659cdf46d0f9d34715ec45d6dc62/packages/explorerkit-translator/src/parsers/v2/instruction/token-v2.ts#L369-L416
0b67fd478284659cdf46d0f9d34715ec45d6dc62
ts-chan
github_2023
joeycumines
typescript
CircularBuffer.empty
// True when no items are currently stored in the buffer.
get empty(): boolean {
  return 0 === this.size;
}
// True if the buffer is empty.
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/buffer.ts#L18-L20
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
CircularBuffer.full
// True when the buffer holds as many items as it has capacity for.
get full(): boolean {
  return this.capacity === this.size;
}
// True if the buffer is full.
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/buffer.ts#L23-L25
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
CircularBuffer.length
// Number of items currently stored in the buffer.
get length(): number {
  const { size } = this;
  return size;
}
// The number of elements in the buffer.
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/buffer.ts#L28-L30
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
CircularBuffer.push
// Appends an item at the tail. Throws if the buffer is at capacity.
push(item: T): void {
  if (this.size >= this.capacity) {
    throw new Error('ts-chan: buffer full');
  }
  const slot = this.tail;
  this.buffer[slot] = item;
  // Wrap the tail index back to 0 once it reaches capacity.
  this.tail = slot + 1 === this.capacity ? 0 : slot + 1;
  this.size += 1;
}
// Adds an item to the buffer. Throws an error if full.
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/buffer.ts#L33-L40
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
CircularBuffer.shift
// Removes and returns the oldest item, or undefined when the buffer is empty.
shift(): T | undefined {
  if (this.size === 0) {
    return undefined;
  }
  const slot = this.head;
  const item = this.buffer[slot];
  // Drop the slot's reference so the item can be garbage collected.
  this.buffer[slot] = undefined!;
  this.head = slot + 1 === this.capacity ? 0 : slot + 1;
  this.size -= 1;
  return item;
}
// Removes and returns the oldest item from the buffer, or undefined if the buffer is empty.
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/buffer.ts#L43-L52
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
CircularBuffer.peek
// Returns the oldest item without removing it, or undefined when empty.
peek(): T | undefined {
  return this.empty ? undefined : this.buffer[this.head];
}
// Returns the oldest item without removing it, or undefined if empty.
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/buffer.ts#L55-L60
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
CircularBuffer.reset
// Resets indices and size without releasing item references.
// WARNING: prefer `clear` when garbage collection of stored items matters.
reset(): void {
  this.size = 0;
  this.tail = 0;
  this.head = 0;
}
// WARNING: Use with caution - prefer the `clear` method if GC is a concern.
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/buffer.ts#L64-L68
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
CircularBuffer.clear
// Empties the buffer, releasing references to all stored items
// (each shift() nulls out its slot), then resets the indices.
clear() {
  for (let remaining = this.size; remaining > 0; remaining--) {
    this.shift();
  }
  this.reset();
}
// Clears the buffer and removes references to all items.
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/buffer.ts#L71-L76
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Chan.capacity
/**
 * Maximum number of items the channel can buffer (0 when unbuffered).
 */
get capacity(): number {
  return this.#buffer?.capacity ?? 0;
}
/** * Returns the maximum number of items the channel can buffer. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L74-L79
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Chan.length
/**
 * Number of items currently sitting in the channel buffer (0 when unbuffered).
 */
get length(): number {
  return this.#buffer?.length ?? 0;
}
/** * Returns the number of items in the channel buffer. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L84-L89
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Chan.concurrency
/**
 * Net count of blocked operations: positive when senders are waiting,
 * negative when receivers are waiting.
 */
get concurrency(): number {
  const blockedSenders = this.#sends.length;
  const blockedReceivers = this.#recvs.length;
  return blockedSenders - blockedReceivers;
}
/** * Returns an integer representing the number of blocking operations. * Positive values indicate senders, while negative values indicate * receivers. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L96-L98
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Chan.setUnsafe
/**
 * Sets the {@link .unsafe} property and returns this, for chaining.
 */
setUnsafe(unsafe: boolean): this {
  return Object.assign(this, {unsafe});
}
/** * Sets the {@link .unsafe} property, and returns this. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L103-L106
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Chan.trySend
/**
 * Synchronous send: hands the value directly to a waiting receiver if one
 * exists, otherwise buffers it if there is room. Returns true on success,
 * false when no receiver is waiting and the buffer is full.
 *
 * @throws SendOnClosedChannelError when the channel is closed.
 */
trySend(value: T): boolean {
  if (!this.#open) {
    throw new SendOnClosedChannelError();
  }
  // A waiting receiver takes priority over the buffer.
  if (this.#recvs.length !== 0) {
    const waitingReceiver = this.#recvs.shift()!;
    waitingReceiver(value, true);
    return true;
  }
  // Make room by draining pending senders into the buffer first.
  this.#fillBuffer();
  if (this.#buffer === undefined || this.#buffer.full) {
    return false;
  }
  this.#buffer.push(value);
  return true;
}
/** * Performs a synchronous send operation on the channel, returning true if * it succeeds, or false if there are no waiting receivers, and the channel * is full. * * Will throw {@link SendOnClosedChannelError} if the channel is closed. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L115-L130
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Chan.send
async send(value: T, abort?: AbortSignal): Promise<void> { abort?.throwIfAborted(); let yieldGeneration: number | undefined; let yieldPromise: Promise<number> | undefined; if (!this.unsafe) { yieldGeneration = getYieldGeneration(); yieldPromise = yieldToMacrotaskQueue(); } try { if (this.trySend(value)) { return; } return await new Promise<void>((resolve, reject) => { let listener: (() => void) | undefined; const callback: SenderCallback<T> = (err: unknown, ok: boolean) => { if (abort !== undefined) { try { abort.removeEventListener('abort', listener!); } catch (e: unknown) { reject(e); throw e; } } if (!ok) { reject(err); throw err; } resolve(); return value; }; if (abort !== undefined) { listener = () => { try { try { abort!.removeEventListener('abort', listener!); } finally { this.removeSender(callback); } reject(abort.reason); } catch (e: unknown) { reject(e); } }; // shouldn't be necessary, unless (for some ungodly reason) some callback within trySend triggered the abort abort.throwIfAborted(); abort.addEventListener('abort', listener); } this.#sends.push(callback); }); } finally { if ( yieldGeneration !== undefined && getYieldGeneration() === yieldGeneration ) { await yieldPromise; } } }
/** * Sends a value to the channel, returning a promise that resolves when it * has been received, and rejects on error, or on abort signal. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L136-L195
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Chan.tryRecv
tryRecv(): IteratorResult<T, T | undefined> | undefined { this.#fillBuffer(); if (this.#buffer !== undefined && !this.#buffer.empty) { const result = this.#buffer.shift()!; this.#fillBuffer(); return {value: result}; } if (this.#sends.length !== 0) { const callback = this.#sends.shift()!; return {value: callback(undefined, true)}; } if (!this.#open) { return { value: this.#newDefaultValue?.(), done: true, }; } return undefined; }
/** * Like {@link trySend}, this performs a synchronous recv operation on the * channel, returning undefined if no value is available, or an iterator * result, which models the received value, and whether the channel is open. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L202-L220
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Chan.recv
async recv(abort?: AbortSignal): Promise<IteratorResult<T, T | undefined>> { abort?.throwIfAborted(); let yieldGeneration: number | undefined; let yieldPromise: Promise<number> | undefined; if (!this.unsafe) { yieldGeneration = getYieldGeneration(); yieldPromise = yieldToMacrotaskQueue(); } try { { const result = this.tryRecv(); if (result !== undefined) { return result; } } return await new Promise((resolve, reject) => { let listener: (() => void) | undefined; const callback: ReceiverCallback<T> = (value, ok) => { try { if (ok) { resolve({value}); } else { try { resolve({ value: this.#newDefaultValue?.(), done: true, }); } catch (e: unknown) { reject(e); throw e; } } } finally { if (abort !== undefined) { abort.removeEventListener('abort', listener!); } } }; if (abort !== undefined) { listener = () => { try { try { abort!.removeEventListener('abort', listener!); } finally { this.removeReceiver(callback); } reject(abort.reason); } catch (e: unknown) { reject(e); } }; // shouldn't be necessary, unless (for some ungodly reason) some callback within tryRecv triggered the abort abort.throwIfAborted(); abort.addEventListener('abort', listener); } this.#recvs.push(callback); }); } finally { if ( yieldGeneration !== undefined && getYieldGeneration() === yieldGeneration ) { await yieldPromise; } } }
/** * Receives a value from the channel, returning a promise that resolves with * an iterator (the value OR indicator that the channel is closed, possibly * with a default value), or rejects on error, or on abort signal. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L227-L294
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Chan.close
close(): void { if (!this.#open) { throw new CloseOfClosedChannelError(); } this.#open = false; let lastError: unknown; if (this.#recvs.length !== 0) { for (let i = 0; i < this.#recvs.length; i++) { const callback = this.#recvs[i]; this.#recvs[i] = undefined!; try { callback(this.#newDefaultValue?.(), false); } catch (e: unknown) { lastError = e ?? lastError ?? new Error('ts-chan: chan: recv: error closing channel'); } } this.#recvs.length = 0; } else { if (this.#buffer !== undefined) { while (!this.#buffer.full && this.#sends.length !== 0) { const callback = this.#sends.shift()!; let value: T; try { value = callback(undefined, true); } catch (e) { lastError = e ?? lastError ?? new Error('ts-chan: chan: send: error closing channel'); continue; } this.#buffer.push(value); } } if (this.#sends.length !== 0) { const err = new SendOnClosedChannelError(); for (let i = 0; i < this.#sends.length; i++) { const callback = this.#sends[i]; this.#sends[i] = undefined!; try { callback(err, false); } catch (e: unknown) { if (e !== err) { lastError = e ?? lastError ?? new Error('ts-chan: chan: send: error closing channel'); } } } this.#sends.length = 0; } } if (lastError !== undefined) { throw lastError; } }
/** * Closes the channel, preventing further sending of values. * * See also {@link Sender} and {@link Sender.close}, which this implements. * * - Once a channel is closed, no more values can be sent to it. * - If the channel is buffered and there are still values in the buffer when * the channel is closed, receivers will continue to receive those values * until the buffer is empty. * - Attempting to send to a closed channel will result in an error and * unblock any senders. * - If the channel is already closed, calling `close` again will throw a * {@link CloseOfClosedChannelError}. * - This method should be used to signal the end of data transmission or * prevent potential deadlocks. * * @throws {CloseOfClosedChannelError} When attempting to close a channel * that is already closed. * @throws {Error} When an error occurs while closing the channel, and no * other specific error is thrown. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L354-L418
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
ChanIterator.next
next(): IteratorResult<T> { switch (this.#outcome) { case undefined: { const result = this.#chan.tryRecv(); if (result !== undefined) { return result; } // note: not necessarily a permanent condition return {done: true, value: undefined}; } case 'Return': return {done: true, value: undefined}; case 'Throw': throw this.#error; } }
/** * Next iteration. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L494-L509
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
ChanIterator.return
return(): IteratorResult<T> { if (this.#outcome === undefined) { this.#outcome = 'Return'; } return {done: true, value: undefined}; }
/** * Ends the iterator, which is an idempotent operation. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L514-L519
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
ChanIterator.throw
throw(e?: unknown): IteratorResult<T> { if (this.#outcome === undefined) { this.#outcome = 'Throw'; this.#error = e; } return {done: true, value: undefined}; }
/** * Ends the iterator with an error, which is an idempotent operation. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L524-L530
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
ChanAsyncIterator.next
async next(): Promise<IteratorResult<T>> { try { return await this.#chan.recv(this.#abort.signal); } catch (e) { if (e === chanAsyncIteratorReturned) { return {done: true, value: undefined}; } throw e; } }
/** * Next iteration. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L563-L572
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
ChanAsyncIterator.return
async return(): Promise<IteratorResult<T>> { this.#abort.abort(chanAsyncIteratorReturned); return {done: true, value: undefined}; }
/** * Ends the iterator, which is an idempotent operation. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L577-L580
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
ChanAsyncIterator.throw
async throw(e?: unknown): Promise<IteratorResult<T>> { this.#abort.abort(e); return {done: true, value: undefined}; }
/** * Ends the iterator with an error, which is an idempotent operation. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/chan.ts#L585-L588
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
SelectFactory.clear
clear() { this.#values.clear(); if (this.#select !== undefined) { for (const c of this.#select.cases) { const s = c[selectState]; if (s.send instanceof SenderProxy) { s.send[proxyKey] = undefined; } else if (s.recv instanceof ReceiverProxy) { s.recv[proxyKey] = undefined; } else { throw new Error( 'ts-chan: select-factory: unexpected error that should never happen: invalid case' ); } } } }
/** * Clears references to values to send, receives and senders, but not the * select cases themselves. Use cases include avoiding retaining references * between iterations of a loop, if such references are not needed, or may * be problematic. * * WARNING: Must not be called concurrently with {@link Select.wait} (on the * underlying instance for this factory). Calling this method then calling * either {@link Select.wait} or {@link Select.poll} (prior to another * {@link with}) may result in an error. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select-factory.ts#L54-L70
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
SelectFactory.with
with<T extends readonly SelectFactoryCase<any>[] | []>( cases: T ): Select< { readonly [K in keyof T]: T[K] extends SelectFactoryCaseSender<infer U> ? SelectCaseSender<U> : T[K] extends SelectFactoryCaseReceiver<infer U> ? SelectCaseReceiver<U> : T[K] extends SelectFactoryCase<infer U> ? SelectCase<U> : never; } & { readonly length: T['length']; } > { // TODO: consider hooks to automatically clear values on selection, to avoid retaining references any longer than necessary if (this.#select === undefined) { // initial select - init this.#select = new Select(mapInitialCases(cases, this.#values)); return this.#select; } // subsequent select - set values + validate in a single iteration // (returns the existing select, which will pick up the values) if (this.#select.cases.length !== cases.length) { throw new Error( `ts-chan: select-factory: invalid number of cases: expected ${this.#select.cases.length} got ${cases.length}` ); } for (let i = 0; i < this.#select.cases.length; i++) { const v = cases[i]; const c = this.#select.cases[i][selectState]; if ('send' in v && v.send !== undefined) { if (!(c.send instanceof SenderProxy)) { throw new Error( `ts-chan: select-factory: invalid case at ${i}: unexpected sender: ${v.send}` ); } c.send[proxyKey] = v.send; this.#values.set(i, v.value); } else if ('recv' in v && v.recv !== undefined) { if (!(c.recv instanceof ReceiverProxy)) { throw new Error( `ts-chan: select-factory: invalid case at ${i}: unexpected receiver: ${v.recv}` ); } c.recv[proxyKey] = v.recv; } else { let d: unknown; try { d = JSON.stringify(v); } catch { d = v; } throw new Error(`ts-chan: select-factory: invalid case at ${i}: ${d}`); } } return this.#select; }
/** * With should be to configure and retrieve (or initialize) the underlying * {@link Select} instance. * * Must be called with the same number of cases each time, with each case * having the same direction. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select-factory.ts#L79-L141
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Select.promises
static promises< T extends readonly (SelectCase<any> | PromiseLike<any>)[] | [], >( cases: T ): Select< { readonly [K in keyof T]: T[K] extends SelectCaseSender<infer U> ? SelectCaseSender<U> : T[K] extends SelectCaseReceiver<infer U> ? SelectCaseReceiver<U> : T[K] extends SelectCasePromise<infer U> ? SelectCasePromise<U> : T[K] extends SelectCase<infer U> ? SelectCase<U> : T[K] extends PromiseLike<infer U> ? SelectCasePromise<Awaited<U>> : never; } & { readonly length: T['length']; } > { return new Select(cases as any); }
/** * Promises is a convenience method for creating a select instance with * promise cases, or a mix of both promises and other cases. * * Note that the behavior is identical to passing the same array to the * constructor. The constructor's typing is more strict, to simplify * implementations which encapsulate or construct select instances. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select.ts#L101-L123
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Select.cases
get cases(): { readonly [K in keyof T]: T[K]; } { return this.#cases; }
/** * Retrieves the cases associated with this select instance. * * Each case corresponds to an input case (including order). * After selecting a case, via {@link Select.poll} or {@link Select.wait}, * received values may be retrieved by calling {@link Select.recv} with the * corresponding case. * * @returns T * * @example * Accessing a (typed) received value: * ```ts * import {recv, Chan, Select} from 'ts-chan'; * * const ch1 = new Chan<number>(); * const ch2 = new Chan<string>(); * * void sendsToCh1ThenEventuallyClosesIt(); * void sendsToCh2(); * * const select = new Select([recv(ch1), recv(ch2)]); * for (let running = true; running;) { * const i = await select.wait(); * switch (i) { * case 0: { * const v = select.recv(select.cases[i]); * if (v.done) { * running = false; * break; * } * console.log(`rounded value: ${Math.round(v.value)}`); * break; * } * case 1: { * const v = select.recv(select.cases[i]); * if (v.done) { * throw new Error('ch2 unexpectedly closed'); * } * console.log(`uppercase string value: ${v.value.toUpperCase()}`); * break; * } * default: * throw new Error('unreachable'); * } * } * ``` */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select.ts#L173-L177
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Select.length
get length(): number { return this.#pending.length; }
/** * Retrieves the number of the cases that are currently pending. * * Will return the length of {@link cases}, less the number of _promise_ * cases that have been resolved and received (or ignored). */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select.ts#L185-L187
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Select.pending
get pending(): Array< T[number] extends SelectCasePromise<infer U> ? Exclude<SelectCasePromise<U>[typeof selectState]['pval'], undefined> : never > { const pending: any[] = []; // so they're in case order for (const c of this.#cases) { // note: cleared on consume if (c[selectState].wait !== undefined) { pending.push(c[selectState].pval); } } return pending; }
/** * Returns all the original values of all pending promise cases (cases that * haven't been consumed or ignored), in case order. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select.ts#L193-L207
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Select.setUnsafe
setUnsafe(unsafe: boolean): this { this.unsafe = unsafe; return this; }
/** * Sets the {@link .unsafe} property, and returns this. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select.ts#L212-L215
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Select.poll
poll(): number | undefined { this.#throwIfInUse(); // sanity check - stop should always have been called if (this.#semaphore.token !== undefined) { throw new Error( 'ts-chan: select: poll: unexpected error that should never happen: stop token not cleared' ); } // consume the last poll/wait, if it hasn't been consumed already this.#clearNext(); // note: set to false at the end if no case is ready, or if the case was a promise if (this.#reshuffle) { this.#pending = fisherYatesShuffle(this.#pending); } else { this.#reshuffle = true; } for (const pending of this.#pending) { // in all cases, a non-undefined ok means this case is up next if (pending.ok !== undefined) { if (pending.wait !== undefined) { // promise cases will be removed on recv, meaning we don't need to re-shuffle this.#reshuffle = false; } this.#next = pending.cidx; return this.#next; } if (pending.send !== undefined) { if (pending.cscb !== undefined) { throw new Error( 'ts-chan: select: poll: unexpected error that should never happen: cscb set' ); } this.#semaphore.token = {}; try { const scb = newLockedSenderCallback( pending.lscb, this.#semaphore.token ); if (!pending.send.addSender(scb)) { this.#next = pending.cidx; return this.#next; } pending.send.removeSender(scb); } finally { this.#semaphore.token = undefined; } } else if (pending.recv !== undefined) { if (pending.crcb !== undefined) { throw new Error( 'ts-chan: select: poll: unexpected error that should never happen: crcb set' ); } this.#semaphore.token = {}; try { const rcb = newLockedReceiverCallback( pending.lrcb, this.#semaphore.token ); if (!pending.recv.addReceiver(rcb)) { this.#next = pending.cidx; return this.#next; } pending.recv.removeReceiver(rcb); } finally { this.#semaphore.token = undefined; } } } this.#reshuffle = false; return undefined; }
/** * Poll returns the next case that is ready, or undefined if none are * ready. It must not be called concurrently with {@link Select.wait} or * {@link Select.recv}. * * This is effectively a non-blocking version of {@link Select.wait}, and * fills the same role as the `default` select case, in Go's select * statement. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select.ts#L226-L303
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Select.wait
async wait(abort?: AbortSignal): Promise<number> { abort?.throwIfAborted(); let yieldGeneration: number | undefined; let yieldPromise: Promise<number> | undefined; if (!this.unsafe) { yieldGeneration = getYieldGeneration(); yieldPromise = yieldToMacrotaskQueue(); } try { // need to call poll first - avoid accidentally buffering receives // (also consumes any this.#next value) { const i = this.poll(); if (i !== undefined) { return i; } } this.#waiting = true; try { // identifies misuse of callbacks + indicates if stop is allowed // stop will consume this token, ensuring it's only performed once // (the "select next communication" behavior doesn't apply to promises) const token: SelectSemaphoreToken = {stop: true}; let i: number | undefined; let err: unknown; let rejectOnAbort: Promise<void> | undefined; let abortListener: (() => void) | undefined; if (abort !== undefined) { rejectOnAbort = new Promise((resolve, reject) => { abortListener = () => { try { err ??= this.#stop(token); abort!.removeEventListener('abort', abortListener!); reject(abort.reason); } catch (e: unknown) { err ??= e; reject(e); } }; }); if (abortListener === undefined) { throw new Error( 'ts-chan: select: next: promise executor not called synchronously' ); } // shouldn't be necessary unless (for some ungodly reason) some callback within poll triggered the abort abort.throwIfAborted(); abort.addEventListener('abort', abortListener); } this.#semaphore.token = token; try { // WARNING: This implementation relies on all then functions being // called prior to allowing further calls to any of the methods. // (Due to the mechanism used to pass down the semaphore token.) 
let promise = Promise.race(this.#pending); if (rejectOnAbort !== undefined) { this.#buf2elem ??= [undefined, undefined]; this.#buf2elem[0] = promise; this.#buf2elem[1] = rejectOnAbort; promise = Promise.race(this.#buf2elem); } i = await promise; } finally { if (this.#buf2elem !== undefined) { this.#buf2elem[0] = undefined; this.#buf2elem[1] = undefined; } err ??= this.#stop(token); abort?.removeEventListener('abort', abortListener!); } if (err !== undefined) { throw err; } if ( !Number.isSafeInteger(i) || i < 0 || i >= this.#cases.length || this.#cases[i][selectState].pidx === undefined || this.#pending[this.#cases[i][selectState].pidx!] !== this.#cases[i][selectState] ) { throw new Error( `ts-chan: select: unexpected error that should never happen: invalid index: ${i}` ); } this.#next = i; return i; } finally { this.#waiting = false; } } finally { if ( yieldGeneration !== undefined && getYieldGeneration() === yieldGeneration ) { await yieldPromise; } } }
/** * Wait returns a promise that will resolve with the index of the next case * that is ready, or reject with the first error. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select.ts#L309-L417
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
Select.recv
recv<T>(v: SelectCase<T>): IteratorResult<T, T | undefined> { this.#throwIfInUse(); if ( v?.[selectState]?.cidx === undefined || this.#cases[v[selectState].cidx] !== v ) { throw new Error('ts-chan: select: case not found'); } let result: | (IteratorResult<T, T | undefined> & { err?: undefined; }) | { value: unknown; err: true; } | undefined; if ( v[selectState].cidx === this.#next && v[selectState].pidx !== undefined && this.#pending[v[selectState].pidx] === v[selectState] ) { if (v[selectState].recv !== undefined) { switch (v[selectState].ok) { case true: result = { value: v[selectState].next, }; break; case false: result = { value: v[selectState].next, done: true, }; break; } } else if (v[selectState].wait !== undefined) { switch (v[selectState].ok) { case true: // resolved result = { value: v[selectState].next, }; break; case false: // rejected result = { value: v[selectState].next, err: true, }; break; } } else { throw new Error('ts-chan: select: case not receivable'); } } if (result === undefined) { throw new Error('ts-chan: select: case not ready'); } this.#clearNext(); if (result.err) { throw result.value; } return result; }
/** * Consume the result of a ready case. */
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select.ts#L422-L493
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
mapPendingValues
const mapPendingValues = <T extends readonly SelectCase<any>[] | []>( cases: T, stop: Exclude<typeof stopForMapPendingValue, undefined>, selectSemaphore: Exclude<typeof selectSemaphoreForMapPendingValue, undefined> ): T => { if ( stopForMapPendingValue !== undefined || selectSemaphoreForMapPendingValue !== undefined ) { throw new Error( 'ts-chan: select: unexpected error that should never happen: stop vars set' ); } stopForMapPendingValue = stop; selectSemaphoreForMapPendingValue = selectSemaphore; try { return cases.map(mapPendingValue) as T; } finally { stopForMapPendingValue = undefined; selectSemaphoreForMapPendingValue = undefined; } };
// Converts any non-cases to the promise variant, returns a new array.
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select.ts#L566-L587
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
mapPendingValue
const mapPendingValue = <T extends SelectCase<any>>(v: T, i: number): T => { if ( stopForMapPendingValue === undefined || selectSemaphoreForMapPendingValue === undefined ) { throw new Error( 'ts-chan: select: unexpected error that should never happen: stop vars not set' ); } const stop = stopForMapPendingValue; const selectSemaphore = selectSemaphoreForMapPendingValue; if (!isSelectCase(v)) { v = wait(v as any) as T; } if (v[selectState].cidx !== undefined) { throw new Error('ts-chan: select: case reused'); } v[selectState].cidx = i; // note: pidx set on shuffle let pendingResolve: ((value: number) => void) | undefined; let pendingReject: ((reason: unknown) => void) | undefined; if (v[selectState].send !== undefined) { const s = v[selectState]; s.lscb = (token, err, ok) => { if (token !== selectSemaphore.token) { throw new Error( 'ts-chan: select: send: channel protocol misuse: callback called after remove' ); } // always this callback (instance - bound token) or already undefined s.cscb = undefined; if (!ok) { // failed to send - reject with error pendingReject?.(err); pendingReject = undefined; pendingResolve = undefined; // throw err, as dictated by the protocol throw err; } try { stop(token); const result = s.expr(); s.ok = true; pendingResolve?.(i); return result; } catch (e) { pendingReject?.(e); throw e; } finally { pendingResolve = undefined; pendingReject = undefined; } }; // kicks off send s.then = (onfulfilled, onrejected) => { return new Promise<number>((resolve, reject) => { if (selectSemaphore.token === undefined) { throw errThenCalledAfterStop; } if (s.cscb !== undefined) { throw new Error( 'ts-chan: select: send: unexpected error that should never happen: already added sender' ); } const scb = newLockedSenderCallback(s.lscb, selectSemaphore.token); pendingResolve = resolve; pendingReject = reject; try { if (s.send.addSender(scb)) { // added, all we can do is wait for the callback s.cscb = scb; return; } } catch (e) { pendingResolve = undefined; 
pendingReject = undefined; throw e; } // sanity check - scb should have been called synchronously if (pendingResolve !== undefined || pendingReject !== undefined) { pendingResolve = undefined; pendingReject = undefined; throw new Error( 'ts-chan: select: send: channel protocol misuse: addSender returned false but did not call the callback synchronously' ); } }).then(onfulfilled, onrejected); }; } else if (v[selectState].recv !== undefined) { const s = v[selectState]; s.lrcb = (token: SelectSemaphoreToken, val, ok) => { if (token !== selectSemaphore.token) { throw new Error( 'ts-chan: select: recv: channel protocol misuse: callback called after remove' ); } // always this callback (instance - bound token) or already undefined s.crcb = undefined; try { s.next = val; s.ok = ok; // after handling the data but before resolve - in case it throws (it calls external code) stop(token); pendingResolve?.(i); } catch (e) { pendingReject?.(e); throw e; } finally { pendingResolve = undefined; pendingReject = undefined; } }; // kicks off recv s.then = (onfulfilled, onrejected) => { return new Promise<number>((resolve, reject) => { if (selectSemaphore.token === undefined) { throw errThenCalledAfterStop; } if (s.crcb !== undefined) { throw new Error( 'ts-chan: select: recv: unexpected error that should never happen: already added receiver' ); } const rcb = newLockedReceiverCallback(s.lrcb, selectSemaphore.token); pendingResolve = resolve; pendingReject = reject; try { if (s.recv.addReceiver(rcb)) { // added, all we can do is wait for the callback s.crcb = rcb; return; } } catch (e) { pendingResolve = undefined; pendingReject = undefined; throw e; } // sanity check - rcb should have been called synchronously if (pendingResolve !== undefined || pendingReject !== undefined) { pendingResolve = undefined; pendingReject = undefined; throw new Error( 'ts-chan: select: recv: channel protocol misuse: addReceiver returned false but did not call the callback synchronously' ); } 
}).then(onfulfilled, onrejected); }; } else if ('pval' in v[selectState]) { const s = v[selectState]; s.wait = Promise.resolve(s.pval) .then(v => { s.ok = true; s.next = v; }) .catch(e => { s.ok = false; s.next = e; }); s.then = (onfulfilled, onrejected) => { if (selectSemaphore.token === undefined) { return Promise.reject(errThenCalledAfterStop); } const token = selectSemaphore.token; return s.wait .then(() => { stop(token); return i; }) .then(onfulfilled, onrejected); }; } else { let d: unknown; try { d = JSON.stringify(v); } catch { d = v; } throw new Error(`ts-chan: select: invalid case at ${i}: ${d}`); } return v; };
// Part of the implementation of {@link mapPendingValues}, should never be
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/src/select.ts#L591-L794
ab43685af09431c1ca391a11c27e44641f3f139e
ts-chan
github_2023
joeycumines
typescript
shuffleArray
function shuffleArray<T>(array: T[]): T[] { for (let i = array.length - 1; i > 0; i--) { const j = Math.floor(Math.random() * (i + 1)); [array[i], array[j]] = [array[j], array[i]]; } return array; }
// Helper function: Fisher-Yates Shuffle algorithm
https://github.com/joeycumines/ts-chan/blob/ab43685af09431c1ca391a11c27e44641f3f139e/test/select.test.ts#L467-L473
ab43685af09431c1ca391a11c27e44641f3f139e
nuxt-auth-utils
github_2023
atinux
typescript
createUserValidationError
function createUserValidationError(validateError?: any) { throw createError({ status: validateError?.statusCode || 400, message: validateError?.message || 'User Validation Error', data: validateError, }) }
// eslint-disable-next-line @typescript-eslint/no-explicit-any
https://github.com/atinux/nuxt-auth-utils/blob/05db8f05352c544c2b00514e6b83239f8d9dde98/src/runtime/server/lib/webauthn/register.ts#L140-L146
05db8f05352c544c2b00514e6b83239f8d9dde98
serverless-registry
github_2023
cloudflare
typescript
pushLayer
async function pushLayer(layerDigest: string, readableStream: ReadableStream, totalLayerSize: number) { const headers = new Headers({ authorization: cred, }); const layerExistsURL = `${proto}://${imageHost}/v2${imageRepositoryPath}/blobs/${layerDigest}`; const layerExistsResponse = await fetch(layerExistsURL, { headers, method: "HEAD", }); if (!layerExistsResponse.ok && layerExistsResponse.status !== 404) { throw new Error(`${layerExistsURL} responded ${layerExistsResponse.status}: ${await layerExistsResponse.text()}`); } if (layerExistsResponse.ok) { console.log(`${layerDigest} already exists...`); return; } const createUploadURL = `${proto}://${imageHost}/v2${imageRepositoryPath}/blobs/uploads/`; const createUploadResponse = await fetch(createUploadURL, { headers, method: "POST", }); if (!createUploadResponse.ok) { throw new Error( `${createUploadURL} responded ${createUploadResponse.status}: ${await createUploadResponse.text()}`, ); } const maxChunkLength = +(createUploadResponse.headers.get("oci-chunk-max-length") ?? 100 * 1024 * 1024); if (isNaN(maxChunkLength)) { throw new Error(`oci-chunk-max-length header is malformed (not a number)`); } const reader = readableStream.getReader(); const uploadId = createUploadResponse.headers.get("docker-upload-uuid"); if (uploadId === null) { throw new Error("Docker-Upload-UUID not defined in headers"); } function parseLocation(location: string) { if (location.startsWith("/")) { return `${proto}://${imageHost}${location}`; } return location; } let location = createUploadResponse.headers.get("location") ?? 
`/v2${imageRepositoryPath}/blobs/uploads/${uploadId}`; const maxToWrite = Math.min(maxChunkLength, totalLayerSize); let end = Math.min(maxChunkLength, totalLayerSize); let written = 0; let previousReadable: ReadableLimiter | undefined; let totalLayerSizeLeft = totalLayerSize; while (totalLayerSizeLeft > 0) { const range = `0-${Math.min(end, totalLayerSize) - 1}`; const current = new ReadableLimiter(reader as ReadableStreamDefaultReader, maxToWrite, previousReadable); const patchChunkUploadURL = parseLocation(location); // we have to do fetchNode because Bun doesn't allow setting custom Content-Length. // https://github.com/oven-sh/bun/issues/10507 const patchChunkResult = await fetchNode(patchChunkUploadURL, { method: "PATCH", body: current, headers: new Headers({ "range": range, "authorization": cred, "content-length": `${Math.min(totalLayerSizeLeft, maxToWrite)}`, }), }); if (!patchChunkResult.ok) { throw new Error( `uploading chunk ${patchChunkUploadURL} returned ${patchChunkResult.status}: ${await patchChunkResult.text()}`, ); } const rangeResponse = patchChunkResult.headers.get("range"); if (rangeResponse !== range) { throw new Error(`unexpected Range header ${rangeResponse}, expected ${range}`); } previousReadable = current; totalLayerSizeLeft -= previousReadable.written; written += previousReadable.written; end += previousReadable.written; location = patchChunkResult.headers.get("location") ?? location; if (totalLayerSizeLeft != 0) console.log(layerDigest + ":", totalLayerSizeLeft, "upload bytes left."); } const range = `0-${written - 1}`; const uploadURL = new URL(parseLocation(location)); uploadURL.searchParams.append("digest", layerDigest); const response = await fetch(uploadURL.toString(), { method: "PUT", headers: new Headers({ Range: range, Authorization: cred, }), }); if (!response.ok) { throw new Error(`${uploadURL.toString()} failed with ${response.status}: ${await response.text()}`); } console.log("Pushed", layerDigest); }
// pushLayer accepts the target digest, the stream to read from, and the total layer size.
https://github.com/cloudflare/serverless-registry/blob/e92b4c9d072c7170550db804941cb28294af010d/push/index.ts#L219-L323
e92b4c9d072c7170550db804941cb28294af010d
serverless-registry
github_2023
cloudflare
typescript
RegistryTokens.createPrivateAndPublicKey
static async createPrivateAndPublicKey(): Promise<[string, string]> { const key = (await crypto.subtle.generateKey({ name: "ECDSA", namedCurve: "P-256" }, true, [ "sign", "verify", ])) as CryptoKeyPair; const exportedPrivateKey = btoa(JSON.stringify(await crypto.subtle.exportKey("jwk", key.privateKey))); const exportedPublicKey = btoa(JSON.stringify(await crypto.subtle.exportKey("jwk", key.publicKey))); return [exportedPrivateKey, exportedPublicKey]; }
/** * Very util function that showcases how do we generate private and public keys * * @example * // Sample usage: * try { * const [privateKey, publicKey] = await RegistryTokens.createPrivateAndPublicKey(); * const registryTokens = await newRegistryTokens(publicKey); * const token = await registryTokens.createToken("some-account-id", ["pull", "push"], 30, privateKey, "https://hello.com"); * const result = await registryTokens.verifyToken(request, token); * console.log(JSON.stringify(result)); * } catch (err) { * console.log("Error generating keys:", err.message); * } */
https://github.com/cloudflare/serverless-registry/blob/e92b4c9d072c7170550db804941cb28294af010d/src/token.ts#L43-L51
e92b4c9d072c7170550db804941cb28294af010d
serverless-registry
github_2023
cloudflare
typescript
GarbageCollector.markForInsertion
async markForInsertion(namespace: string): Promise<string> { const uid = crypto.randomUUID(); // mark that there is an on-going insertion const deletion = await this.registry.put(`${namespace}/insertion/${uid}`, uid); if (deletion === null) throw new Error("unreachable"); // set last_update so GC is able to invalidate await this.registry.put(`${namespace}/insertion/last_update`, null, { customMetadata: { timestamp: `${Date.now()}-${crypto.randomUUID()}` }, }); return uid; }
// to start inserting data that might conflight with GC.
https://github.com/cloudflare/serverless-registry/blob/e92b4c9d072c7170550db804941cb28294af010d/src/registry/garbage-collector.ts#L92-L103
e92b4c9d072c7170550db804941cb28294af010d
serverless-registry
github_2023
cloudflare
typescript
appendStreamKnownLength
const appendStreamKnownLength = async (stream: ReadableStream, size: number) => { // This is the normal code-path, hopefully by hinting with headers on the POST call all clients respect this if ( (state.chunks.length === 0 || (state.chunks[state.chunks.length - 1].size === size && state.chunks[state.chunks.length - 1].type === "multi-part-chunk")) && size <= MAXIMUM_CHUNK && size >= MINIMUM_CHUNK ) { state.chunks.push({ type: "multi-part-chunk", size, uploadId: uuid, }); const part = await upload.uploadPart(state.parts.length + 1, stream); state.parts.push(part); return; } // This happens when maximum chunk's is surpassed, so we basically have to split this stream. // You can test very easy this branch of code by putting MAXIMUM_CHUNK == MINIMUM_CHUNK and docker pushing against the server. if (size > MAXIMUM_CHUNK) { for await (const [reader, chunkSize] of split(stream, size, MAXIMUM_CHUNK)) { await appendStreamKnownLength(reader, chunkSize); } return undefined; } const lastChunk = state.chunks.length ? state.chunks[state.chunks.length - 1] : undefined; // This is a bad scenario, we uploaded a chunk and we have to copy. if ( env.PUSH_COMPATIBILITY_MODE === "full" && lastChunk && (lastChunk.type === "small-chunk" || lastChunk.type === "multi-part-chunk-no-same-size") ) { // nullability: getChunkStream for small-chunk always returns stream const chunkStream = (await getChunkBlob(env, lastChunk))!; // pop as we're going to override last part state.chunks.pop(); state.parts.pop(); const blob = await readerToBlob(stream); const streamCombined = new Blob([chunkStream, blob]); await appendStreamKnownLength(limit(streamCombined.stream(), size + lastChunk.size), size + lastChunk.size); return; } // Only allow this branch when the last pushed chunk is multi-part-chunk // as we will upload the part directly. 
This is a normal workflow if the client is a good citizen if ( (lastChunk && lastChunk.size > size) || (size < MINIMUM_CHUNK && (!lastChunk || lastChunk.type === "multi-part-chunk")) ) { const path = getHelperR2Path(uuid); state.chunks.push({ type: size < MINIMUM_CHUNK ? "small-chunk" : "multi-part-chunk-no-same-size", size, uploadId: uuid, r2Path: path, }); if (env.PUSH_COMPATIBILITY_MODE === "full") { const [stream1, stream2] = limit(stream, size).tee(); const partTask = upload.uploadPart(state.parts.length + 1, stream1); // We can totally disable this, however we are risking that the client sends another small chunk. // Maybe instead we can throw range error const dateInOneHour = new Date(); dateInOneHour.setTime(dateInOneHour.getTime() + 60 * 60 * 1000); const headers = { // https://www.rfc-editor.org/rfc/rfc1123 date format // Objects will typically be removed from a bucket within 24 hours of the x-amz-expiration value. "x-amz-expiration": dateInOneHour.toUTCString(), } as const; const r2RegistryObjectTask = env.REGISTRY.put(path, stream2, { httpMetadata: new Headers(headers), customMetadata: headers, }); state.parts.push(await partTask); await r2RegistryObjectTask; return; } state.parts.push(await upload.uploadPart(state.parts.length + 1, stream)); return; } // we know here that size >= MINIMUM_CHUNK and size >= lastChunk.size, this is just super inefficient, maybe in the future just throw RangeError here... if (env.PUSH_COMPATIBILITY_MODE === "full" && lastChunk && size >= lastChunk.size) { console.warn( "The client is being a bad citizen by trying to send a new chunk bigger than the chunk it sent. If this is giving problems disable this codepath altogether", ); for await (const [chunk, chunkSize] of split(stream, size, lastChunk.size)) { await appendStreamKnownLength(chunk, chunkSize); } return undefined; } if (env.PUSH_COMPATIBILITY_MODE === "full") { throw new ServerError("unreachable", 500); } return new RangeError(stateHash, state); };
// This function tries to handle the following cases:
https://github.com/cloudflare/serverless-registry/blob/e92b4c9d072c7170550db804941cb28294af010d/src/registry/r2.ts#L517-L620
e92b4c9d072c7170550db804941cb28294af010d
amica
github_2023
semperai
typescript
AmicaLife.loadIdleTextPrompt
public async loadIdleTextPrompt(prompts: string[] | null) { if (prompts === null) { idleEvents.forEach((prompt) => this.mainEvents.enqueue({ events: prompt }), ); } else { if (prompts.length > 0) { this.mainEvents.clear(); prompts.forEach((prompt: string) => basedPrompt.idleTextPrompt.push(prompt), ); } } }
// Function for loaded idle text prompt
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L69-L82
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AmicaLife.insertFront
public insertFront(event: AmicaLifeEvents) { const newQueue = new Queue<AmicaLifeEvents>(); newQueue.enqueue(event); while (!this.mainEvents.isEmpty()) { newQueue.enqueue(this.mainEvents.dequeue()!); } this.mainEvents = newQueue; }
// Function to insert event to the front of the mainEvents Queue
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L85-L94
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AmicaLife.removeEvent
public removeEvent(eventName: string) { const newQueue = new Queue<AmicaLifeEvents>(); let found = false; while (!this.mainEvents.isEmpty()) { const event = this.mainEvents.dequeue(); if (event && event.events !== eventName) { newQueue.enqueue(event); } else { found = true; } } this.mainEvents = newQueue; }
// Function to remove a specific event from the mainEvents queue
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L97-L111
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AmicaLife.containsEvent
public containsEvent(eventName: string): boolean { let contains = false; this.mainEvents.forEach((event) => { if (event.events === eventName) { contains = true; } }); return contains; }
// Function to check if a specific event exists in the mainEvents queue
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L114-L124
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AmicaLife.receiveMessageFromUser
public receiveMessageFromUser(message: string) { if (message.toLowerCase().includes('news')) { console.log("Triggering news function call."); this.insertFront({events: "News"}); } // Re-enqueue subconcious event after get the user input (1 Subconcious events per idle cycle) (!this.containsEvent("Subconcious")) ? this.mainEvents.enqueue({ events: "Subconcious" }) : null; this.pause(); this.wakeFromSleep(); this.triggerMessage = true; }
// Function to check message from user
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L129-L141
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AmicaLife.processingIdle
public async processingIdle() { // Preventing duplicate processingIdle loop if (this.isProcessingIdleRunning) { return; } this.isProcessingIdleRunning = true; console.log("Starting Amica Life"); while (config("amica_life_enabled") === "true") { // Check if amica is in idle state trigger processingEvent loop if (!this.chat?.isAwake()) { this.processingEvent(); } await wait(50); } this.isProcessingIdleRunning = false; this.isProcessingEventRunning = false; this.triggerMessage = false; console.log("Stopping idle loop"); }
// }
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L152-L171
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AmicaLife.checkSleep
private async checkSleep() { if (!this.isSleep) { const chat = this.chat; if (!chat) { console.error("Chat instance is not available"); return; } const idleTime = chat.idleTime(); // If character being idle morethan 120 sec or 2 min, play handle sleep event if (!this.containsEvent("Sleep")) { if (idleTime > parseInt(config("time_to_sleep_sec"))) { this.insertFront({ events: "Sleep" }); } } } }
// Function to check for sleep event if idleTime > time_to_sleep add Sleep event to the front of amica queue
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L243-L258
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AmicaLife.checkPause
private async checkPause() { if (this.isPause) { console.log("Amica Life Paused"); await new Promise<void>((resolve) => { const checkPause = setInterval(() => { if (!this.isPause) { clearInterval(checkPause); resolve(console.log("Amica Life Initiated")); } }, 50); }); } }
// Function to pause the processingEvent loop is pauseFlag is true/false
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L261-L273
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AmicaLife.checkResume
private async checkResume(): Promise<boolean> { if (this.isPause === true && !this.isSleep && this.isSettingOff) { this.resume(); return true; } return false; }
// Function to resume the processingEvent loop from pause
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L276-L282
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AmicaLife.checkSettingOff
public checkSettingOff(off: boolean) { if (off) { this.isSettingOff = true; this.wakeFromSleep(); this.chat?.updateAwake(); // Update awake when user exit the setting page this.resume(); } else { this.isSettingOff = false; this.pause(); } }
// Function to pause/resume the loop when setting page is open/close
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L285-L295
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AmicaLife.updatedIdleTime
public updatedIdleTime() { const idleTimeSec = Math.min( parseInt(config("time_before_idle_sec")) * 1.25, 240, ); // updateConfig("time_before_idle_sec", idleTimeSec.toString()); // removed for staging //console.log(`Updated time before idle to ${idleTimeSec} seconds`); }
// Update time before idle increase by 1.25 times
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/amicaLife.ts#L300-L308
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
handleVRMAnimationEvent
async function handleVRMAnimationEvent(viewer: Viewer, amicaLife: AmicaLife) { let randomAnimation; do { randomAnimation = animationList[Math.floor(Math.random() * animationList.length)]; } while (basename(randomAnimation) === previousAnimation); // Store the current animation as the previous one for the next call previousAnimation = basename(randomAnimation); // removed for staging logs. //console.log("Handling idle event (animation):", previousAnimation); try { if (viewer) { const animation = await loadVRMAnimation(randomAnimation); if (!animation) { throw new Error("Loading animation failed"); } // @ts-ignore const duration = await viewer.model!.playAnimation(animation, previousAnimation); requestAnimationFrame(() => { viewer.resetCameraLerp(); }); // Set timeout for the duration of the animation setTimeout(() => { amicaLife.eventProcessing = false; console.timeEnd("processing_event VRMA"); }, duration * 1000); } } catch (error) { console.error("Error loading animation:", error); } }
// Handles the VRM animation event.
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/eventHandler.ts#L55-L85
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
handleTextEvent
async function handleTextEvent(chat: Chat, amicaLife: AmicaLife) { // Randomly select the idle text prompts const randomIndex = Math.floor( Math.random() * basedPrompt.idleTextPrompt.length, ); const randomTextPrompt = basedPrompt.idleTextPrompt[randomIndex]; // removed for staging logs. //console.log("Handling idle event (text):", randomTextPrompt); try { await chat.receiveMessageFromUser?.(randomTextPrompt, true); amicaLife.eventProcessing = false; console.timeEnd(`processing_event IdleTextPrompts`); } catch (error) { console.error( "Error occurred while sending a message through chat instance:", error, ); } }
// Handles text-based idle events.
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/amicaLife/eventHandler.ts#L89-L107
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
Chat.receiveMessageFromUser
public async receiveMessageFromUser(message: string, amicaLife: boolean) { if (message === null || message === "") { return; } console.time("performance_interrupting"); console.debug("interrupting..."); await this.interrupt(); console.timeEnd("performance_interrupting"); await wait(0); console.debug("wait complete"); if (!amicaLife) { console.log("receiveMessageFromUser", message); this.amicaLife?.receiveMessageFromUser(message); if (!/\[.*?\]/.test(message)) { message = `[neutral] ${message}`; } this.updateAwake(); this.bubbleMessage("user", message); } // make new stream const messages: Message[] = [ { role: "system", content: config("system_prompt") }, ...this.messageList!, { role: "user", content: amicaLife ? message : this.currentUserMessage }, ]; // console.debug('messages', messages); await this.makeAndHandleStream(messages); }
// this happens either from text or from voice / whisper completion
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/chat/chat.ts#L348-L382
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
getExtra
async function getExtra(messages: Message[]) { const headers: Record<string, string> = { "Content-Type": "application/json", }; const prompt = buildPrompt(messages); const stop_sequence: string[] = [`${config("name")}:`, ...`${config("koboldai_stop_sequence")}`.split("||")]; const res = await fetch(`${config("koboldai_url")}/api/extra/generate/stream`, { headers: headers, method: "POST", body: JSON.stringify({ prompt, stop_sequence }), }); const reader = res.body?.getReader(); if (res.status !== 200 || ! reader) { throw new Error(`KoboldAi chat error (${res.status})`); } const stream = new ReadableStream({ async start(controller: ReadableStreamDefaultController) { const decoder = new TextDecoder("utf-8"); try { let buffer = ""; while (true) { const { done, value } = await reader.read(); if (done) break; buffer += decoder.decode(value); let eolIndex; while ((eolIndex = buffer.indexOf('\n')) >= 0) { const line = buffer.substring(0, eolIndex).trim(); buffer = buffer.substring(eolIndex + 1); if (line.startsWith('data:')) { try { const json = JSON.parse(line.substring(5)); const messagePiece = json.token; if (messagePiece) { controller.enqueue(messagePiece); } } catch (error) { console.error("JSON parsing error:", error, "in line:", line); } } } } } catch (error) { console.error("Stream error:", error); controller.error(error); } finally { reader.releaseLock(); controller.close(); } }, async cancel() { await reader?.cancel(); reader.releaseLock(); } }); return stream; }
// koboldcpp / stream support
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/chat/koboldAiChat.ts#L14-L78
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
getNormal
async function getNormal(messages: Message[]) { const headers: Record<string, string> = { "Content-Type": "application/json", }; const prompt = buildPrompt(messages); const stop_sequence: string[] = [`${config("name")}:`, ...`${config("koboldai_stop_sequence")}`.split("||")]; const res = await fetch(`${config("koboldai_url")}/api/v1/generate`, { headers: headers, method: "POST", body: JSON.stringify({ prompt, stop_sequence }), }); const json = await res.json(); if (json.results.length === 0) { throw new Error(`KoboldAi result length 0`); } const text = json.results.map((row: {text: string}) => row.text).join(''); const stream = new ReadableStream({ async start(controller: ReadableStreamDefaultController) { try { text.split(' ').map((word: string) => word + ' ').forEach((word: string) => { controller.enqueue(word); }); } catch (error) { controller.error(error); } finally { controller.close(); } }, }); return stream; }
// koboldai / no stream support
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/chat/koboldAiChat.ts#L81-L120
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
userInputToSystem
const userInputToSystem = (input: string) => { const mapping: { [key: string]: string } = { ...Object.fromEntries(emotions .filter(e => e[0] === e[0].toUpperCase()) .map(e => [e.toLowerCase(), e])) }; return mapping[input.toLowerCase()] || input; };
// Convert user input to system format e.g. ["suspicious"] -> ["Sus"], ["sleep"] -> ["Sleep"]
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/chat/messages.ts#L33-L41
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
AutoBlink.setEnable
public setEnable(isAuto: boolean) { this._isAutoBlink = isAuto; // 目が閉じている場合、目が開くまでの時間を返す if (!this._isOpen) { return this._remainingTime; } return 0; }
/** * 自動瞬きをON/OFFする。 * * 目を閉じている(blinkが1の)時に感情表現を入れてしまうと不自然になるので、 * 目が開くまでの秒を返し、その時間待ってから感情表現を適用する。 * @param isAuto * @returns 目が開くまでの秒 */
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/emoteController/autoBlink.ts#L28-L37
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
scheduleAudioPlayback
const scheduleAudioPlayback = async ( newAudioData: Float32Array, nowTime: number | undefined, viewer: any ) => { const sampleRate = audioContext.sampleRate; const newBuffer = audioContext.createBuffer(1, newAudioData.length, sampleRate); newBuffer.copyToChannel(newAudioData, 0); const sourceNode = viewer.model?._lipSync?.audio.createBufferSource(); sourceNode.buffer = newBuffer; sourceNode.connect(viewer.model?._lipSync?.audio.destination!); sourceNode.connect(viewer.model?._lipSync?.analyser!); const startTime = Math.max(scheduledEndTimeRef.current, nowTime || 0); sourceNode.start(startTime); scheduledEndTimeRef.current = startTime + newBuffer.duration; if (sourceNodeRef.current && sourceNodeRef.current.buffer) { const currentEndTime = scheduledEndTimeRef.current; // Use the manual tracking of the end time if (currentEndTime <= nowTime!) { sourceNodeRef.current.disconnect(); } } sourceNodeRef.current = sourceNode!; };
// Audio source node
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/moshi/hooks/useAudioPlayback.ts#L7-L33
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
Model.loadAnimation
public async loadAnimation( animation: VRMAnimation | THREE.AnimationClip, ): Promise<void> { const { vrm, mixer } = this; if (vrm == null || mixer == null) { throw new Error("You have to load VRM first"); } const clip = animation instanceof THREE.AnimationClip ? animation : animation.createAnimationClip(vrm); mixer.stopAllAction(); this._currentAction = mixer.clipAction(clip); this._currentAction.play(); }
/** * VRMアニメーションを読み込む * * https://github.com/vrm-c/vrm-specification/blob/master/specification/VRMC_vrm_animation-1.0/README.ja.md */
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/vrmViewer/model.ts#L276-L291
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
Model.speak
public async speak(buffer: ArrayBuffer, screenplay: Screenplay) { this.emoteController?.playEmotion(screenplay.expression); await new Promise((resolve) => { this._lipSync?.playFromArrayBuffer(buffer, () => { resolve(true); }); }); }
/** * 音声を再生し、リップシンクを行う */
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/vrmViewer/model.ts#L416-L423
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
Viewer.loadSplat
public loadSplat(url: string) { if (!this.room) { this.room = new Room(); } return this.room.loadSplat(url).then(async () => { console.log("splat loaded"); if (!this.room?.splat) return; this.room.splat.position.set(0, 4, 0); this.room.splat.rotation.set(0, 0, Math.PI); this.scene!.add(this.room.splat); }); }
// but fun experiment. maybe some use somewhere for tiny splats ?
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/vrmViewer/viewer.ts#L858-L870
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
Viewer.regenerateBVHForModel
public async regenerateBVHForModel() { if (!this.modelMeshHelper) return; this.modelBVHGenerator!.generate(this.modelMeshHelper!.geometry); if (!this.modelMeshHelper!.geometry.boundsTree) { this.modelMeshHelper!.geometry.computeBoundsTree(); } else { this.modelMeshHelper!.geometry.boundsTree.refit(); } this.modelBVHHelper!.update(); }
// TODO investigate if we can get speedup using parallel bvh generation
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/vrmViewer/viewer.ts#L875-L887
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
Viewer.resize
public resize() { if (!this.renderer) return; const parentElement = this.renderer.domElement.parentElement; if (!parentElement) return; this.renderer.setPixelRatio(window.devicePixelRatio); this.renderer.setSize( parentElement.clientWidth, parentElement.clientHeight, ); this.camera!.aspect = parentElement.clientWidth / parentElement.clientHeight; this.camera!.updateProjectionMatrix(); }
/** * canvasの親要素を参照してサイズを変更する */
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/vrmViewer/viewer.ts#L934-L949
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
Viewer.resetCamera
public resetCamera() { const headNode = this.model?.vrm?.humanoid.getNormalizedBoneNode("head"); if (headNode) { const headWPos = headNode.getWorldPosition(new THREE.Vector3()); this.camera?.position.set( this.camera.position.x, headWPos.y, this.camera.position.z, ); this.cameraControls?.target.set(headWPos.x, headWPos.y, headWPos.z); this.cameraControls?.update(); } }
/** * VRMのheadノードを参照してカメラ位置を調整する */
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/vrmViewer/viewer.ts#L977-L990
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
Viewer.createBallAtPoint
public createBallAtPoint(point: THREE.Vector3, itype: number = 0) { return; const distance = point.distanceTo(this.camera?.position as THREE.Vector3); const s = 5; const h = distance * s - Math.floor(distance * s); const getAmicaColor = () => { return this.hslToRgb(h, 1, 0.5); }; const getRoomColor = () => { return this.hslToRgb(h, 0.1, 0.4); }; const color = itype == 0 ? getAmicaColor() : getRoomColor(); const ballMaterial = new THREE.MeshBasicMaterial({ color, }); const ballGeometry = new THREE.SphereGeometry(0.005, 16, 16); const ball = new THREE.Mesh(ballGeometry, ballMaterial); ball.position.copy(point); this.scene!.add(ball); setTimeout(() => { this.scene!.remove(ball); }, 10000); }
// itype: 0 = amica, 1 = room
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/vrmViewer/viewer.ts#L1037-L1064
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
Viewer.applyWind
public applyWind(dir: THREE.Vector3, strength: number) { this.model?.vrm?.springBoneManager?.joints.forEach((e) => { e.settings.gravityDir = dir; e.settings.gravityPower = strength; }); }
// thx @ke456-png :)
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/features/vrmViewer/viewer.ts#L1205-L1210
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
VRMLookAtSmoother.revertFirstPersonBoneQuat
public revertFirstPersonBoneQuat(): void { if (this.userTarget) { const head = this.humanoid.getNormalizedBoneNode("head")!; head.quaternion.copy(this._tempFirstPersonBoneQuat); } }
/** renderしたあとに叩いて頭の回転をもとに戻す */
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/lib/VRMLookAtSmootherLoaderPlugin/VRMLookAtSmoother.ts#L168-L173
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
getChatResponseStream
const getChatResponseStream = async (messages: Message[]) => { console.debug("getChatResponseStream", messages); const chatbotBackend = config("chatbot_backend"); switch (chatbotBackend) { case "chatgpt": return getOpenAiChatResponseStream(messages); case "llamacpp": return getLlamaCppChatResponseStream(messages); case "windowai": return getWindowAiChatResponseStream(messages); case "ollama": return getOllamaChatResponseStream(messages); case "koboldai": return getKoboldAiChatResponseStream(messages); case "openrouter": return getOpenRouterChatResponseStream(messages); default: return getEchoChatResponseStream(messages); } };
// Function to simulate fetching chat response stream based on the selected backend
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/utils/askLlm.ts#L39-L59
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
OptimizedGLTFLoader.register
public register(callback: ExtensionCallback): this { // Add to our extensions array this.extensions.push(callback); // Register with the underlying loader this.loader.register(callback as any); return this; }
/** * Register a custom extension or processor * @param callback Function that receives the GLTFParser * @returns The loader instance for chaining */
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/utils/gltfOptimizer.ts#L72-L80
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
createOffscreenCanvas
function createOffscreenCanvas(width: number, height: number): OffscreenCanvas | HTMLCanvasElement { if (typeof OffscreenCanvas !== 'undefined') { return new OffscreenCanvas(width, height); } const canvas = document.createElement('canvas'); canvas.width = width; canvas.height = height; return canvas; }
// Create an off-screen canvas for image processing
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/utils/textureDownscaler.ts#L40-L48
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916
amica
github_2023
semperai
typescript
scaleImage
function scaleImage(image: TexImageSource, newWidth: number, newHeight: number): Promise<ImageBitmap | HTMLCanvasElement> { const canvas = createOffscreenCanvas(newWidth, newHeight); const ctx = canvas.getContext('2d') as OffscreenCanvasRenderingContext2D; if (!ctx) { throw new Error('Failed to create 2D context'); } // Use better quality interpolation ctx.imageSmoothingEnabled = true; ctx.imageSmoothingQuality = 'high'; // Draw the image scaled down (ctx as any).drawImage(image, 0, 0, newWidth, newHeight); // Return as ImageBitmap if supported, otherwise return canvas if (typeof createImageBitmap !== 'undefined') { return createImageBitmap(canvas); } // return canvas as HTMLCanvasElement; return canvas as any; }
// Scale down an image to new dimensions
https://github.com/semperai/amica/blob/c5829dd25a4ad93cc26bc01b4c8520fd3ef51916/src/utils/textureDownscaler.ts#L51-L73
c5829dd25a4ad93cc26bc01b4c8520fd3ef51916