repo_name string | dataset string | owner string | lang string | func_name string | code string | docstring string | url string | sha string |
|---|---|---|---|---|---|---|---|---|
GPTPortal | github_2023 | Zaki-1052 | typescript | Steps.retrieve | retrieve(
threadId: string,
runId: string,
stepId: string,
options?: Core.RequestOptions,
): Core.APIPromise<RunStep> {
return this._client.get(`/threads/${threadId}/runs/${runId}/steps/${stepId}`, {
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Retrieves a run step.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/threads/runs/steps.ts#L13-L23 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | FileBatches.create | create(
vectorStoreId: string,
body: FileBatchCreateParams,
options?: Core.RequestOptions,
): Core.APIPromise<VectorStoreFileBatch> {
return this._client.post(`/vector_stores/${vectorStoreId}/file_batches`, {
body,
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Create a vector store file batch.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/file-batches.ts#L18-L28 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | FileBatches.retrieve | retrieve(
vectorStoreId: string,
batchId: string,
options?: Core.RequestOptions,
): Core.APIPromise<VectorStoreFileBatch> {
return this._client.get(`/vector_stores/${vectorStoreId}/file_batches/${batchId}`, {
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Retrieves a vector store file batch.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/file-batches.ts#L33-L42 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | FileBatches.cancel | cancel(
vectorStoreId: string,
batchId: string,
options?: Core.RequestOptions,
): Core.APIPromise<VectorStoreFileBatch> {
return this._client.post(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/cancel`, {
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Cancel a vector store file batch. This attempts to cancel the processing of
* files in this batch as soon as possible.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/file-batches.ts#L48-L57 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | FileBatches.createAndPoll | async createAndPoll(
vectorStoreId: string,
body: FileBatchCreateParams,
options?: Core.RequestOptions & { pollIntervalMs?: number },
): Promise<VectorStoreFileBatch> {
const batch = await this.create(vectorStoreId, body);
return await this.poll(vectorStoreId, batch.id, options);
} | /**
* Create a vector store batch and poll until all files have been processed.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/file-batches.ts#L62-L69 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | FileBatches.poll | async poll(
vectorStoreId: string,
batchId: string,
options?: Core.RequestOptions & { pollIntervalMs?: number },
): Promise<VectorStoreFileBatch> {
const headers: { [key: string]: string } = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' };
if (options?.pollIntervalMs) {
headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString();
}
while (true) {
const { data: batch, response } = await this.retrieve(vectorStoreId, batchId, {
...options,
headers,
}).withResponse();
switch (batch.status) {
case 'in_progress':
let sleepInterval = 5000;
if (options?.pollIntervalMs) {
sleepInterval = options.pollIntervalMs;
} else {
const headerInterval = response.headers.get('openai-poll-after-ms');
if (headerInterval) {
const headerIntervalMs = parseInt(headerInterval);
if (!isNaN(headerIntervalMs)) {
sleepInterval = headerIntervalMs;
}
}
}
await sleep(sleepInterval);
break;
case 'failed':
case 'cancelled':
case 'completed':
return batch;
}
}
} | /**
* Wait for the given file batch to be processed.
*
* Note: this will return even if one of the files failed to process, you need to
* check batch.file_counts.failed_count to handle this case.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/file-batches.ts#L107-L146 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | FileBatches.uploadAndPoll | async uploadAndPoll(
vectorStoreId: string,
{ files, fileIds = [] }: { files: Uploadable[]; fileIds?: string[] },
options?: Core.RequestOptions & { pollIntervalMs?: number; maxConcurrency?: number },
): Promise<VectorStoreFileBatch> {
if (files == null || files.length == 0) {
throw new Error(
`No \`files\` provided to process. If you've already uploaded files you should use \`.createAndPoll()\` instead`,
);
}
const configuredConcurrency = options?.maxConcurrency ?? 5;
// We cap the number of workers at the number of files (so we don't start any unnecessary workers)
const concurrencyLimit = Math.min(configuredConcurrency, files.length);
const client = this._client;
const fileIterator = files.values();
const allFileIds: string[] = [...fileIds];
// This code is based on this design. The libraries don't accommodate our environment limits.
// https://stackoverflow.com/questions/40639432/what-is-the-best-way-to-limit-concurrency-when-using-es6s-promise-all
async function processFiles(iterator: IterableIterator<Uploadable>) {
for (let item of iterator) {
const fileObj = await client.files.create({ file: item, purpose: 'assistants' }, options);
allFileIds.push(fileObj.id);
}
}
// Start workers to process results
const workers = Array(concurrencyLimit).fill(fileIterator).map(processFiles);
// Wait for all processing to complete.
await allSettledWithThrow(workers);
return await this.createAndPoll(vectorStoreId, {
file_ids: allFileIds,
});
} | /**
* Uploads the given files concurrently and then creates a vector store file batch.
*
* The concurrency limit is configurable using the `maxConcurrency` parameter.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/file-batches.ts#L153-L191 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | processFiles | async function processFiles(iterator: IterableIterator<Uploadable>) {
for (let item of iterator) {
const fileObj = await client.files.create({ file: item, purpose: 'assistants' }, options);
allFileIds.push(fileObj.id);
}
} | // This code is based on this design. The libraries don't accommodate our environment limits. | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/file-batches.ts#L175-L180 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Files.create | create(
vectorStoreId: string,
body: FileCreateParams,
options?: Core.RequestOptions,
): Core.APIPromise<VectorStoreFile> {
return this._client.post(`/vector_stores/${vectorStoreId}/files`, {
body,
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Create a vector store file by attaching a
* [File](https://platform.openai.com/docs/api-reference/files) to a
* [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object).
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/files.ts#L15-L25 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Files.retrieve | retrieve(
vectorStoreId: string,
fileId: string,
options?: Core.RequestOptions,
): Core.APIPromise<VectorStoreFile> {
return this._client.get(`/vector_stores/${vectorStoreId}/files/${fileId}`, {
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Retrieves a vector store file.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/files.ts#L30-L39 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Files.del | del(
vectorStoreId: string,
fileId: string,
options?: Core.RequestOptions,
): Core.APIPromise<VectorStoreFileDeleted> {
return this._client.delete(`/vector_stores/${vectorStoreId}/files/${fileId}`, {
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Delete a vector store file. This will remove the file from the vector store but
* the file itself will not be deleted. To delete the file, use the
* [delete file](https://platform.openai.com/docs/api-reference/files/delete)
* endpoint.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/files.ts#L74-L83 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Files.createAndPoll | async createAndPoll(
vectorStoreId: string,
body: FileCreateParams,
options?: Core.RequestOptions & { pollIntervalMs?: number },
): Promise<VectorStoreFile> {
const file = await this.create(vectorStoreId, body, options);
return await this.poll(vectorStoreId, file.id, options);
} | /**
* Attach a file to the given vector store and wait for it to be processed.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/files.ts#L88-L95 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Files.poll | async poll(
vectorStoreId: string,
fileId: string,
options?: Core.RequestOptions & { pollIntervalMs?: number },
): Promise<VectorStoreFile> {
const headers: { [key: string]: string } = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' };
if (options?.pollIntervalMs) {
headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString();
}
while (true) {
const fileResponse = await this.retrieve(vectorStoreId, fileId, {
...options,
headers,
}).withResponse();
const file = fileResponse.data;
switch (file.status) {
case 'in_progress':
let sleepInterval = 5000;
if (options?.pollIntervalMs) {
sleepInterval = options.pollIntervalMs;
} else {
const headerInterval = fileResponse.response.headers.get('openai-poll-after-ms');
if (headerInterval) {
const headerIntervalMs = parseInt(headerInterval);
if (!isNaN(headerIntervalMs)) {
sleepInterval = headerIntervalMs;
}
}
}
await sleep(sleepInterval);
break;
case 'failed':
case 'completed':
return file;
}
}
} | /**
* Wait for the vector store file to finish processing.
*
* Note: this will return even if the file failed to process, you need to check
* file.last_error and file.status to handle these cases
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/files.ts#L103-L142 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Files.upload | async upload(
vectorStoreId: string,
file: Uploadable,
options?: Core.RequestOptions,
): Promise<VectorStoreFile> {
const fileInfo = await this._client.files.create({ file: file, purpose: 'assistants' }, options);
return this.create(vectorStoreId, { file_id: fileInfo.id }, options);
} | /**
* Upload a file to the `files` API and then attach it to the given vector store.
*
* Note the file will be asynchronously processed (you can use the alternative
* polling helper method to wait for processing to complete).
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/files.ts#L150-L157 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Files.uploadAndPoll | async uploadAndPoll(
vectorStoreId: string,
file: Uploadable,
options?: Core.RequestOptions & { pollIntervalMs?: number },
): Promise<VectorStoreFile> {
const fileInfo = await this.upload(vectorStoreId, file, options);
return await this.poll(vectorStoreId, fileInfo.id, options);
} | /**
* Add a file to a vector store and poll until processing is complete.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/files.ts#L162-L169 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | VectorStores.create | create(body: VectorStoreCreateParams, options?: Core.RequestOptions): Core.APIPromise<VectorStore> {
return this._client.post('/vector_stores', {
body,
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Create a vector store.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/vector-stores.ts#L18-L24 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | VectorStores.retrieve | retrieve(vectorStoreId: string, options?: Core.RequestOptions): Core.APIPromise<VectorStore> {
return this._client.get(`/vector_stores/${vectorStoreId}`, {
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Retrieves a vector store.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/vector-stores.ts#L29-L34 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | VectorStores.update | update(
vectorStoreId: string,
body: VectorStoreUpdateParams,
options?: Core.RequestOptions,
): Core.APIPromise<VectorStore> {
return this._client.post(`/vector_stores/${vectorStoreId}`, {
body,
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Modifies a vector store.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/vector-stores.ts#L39-L49 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | VectorStores.del | del(vectorStoreId: string, options?: Core.RequestOptions): Core.APIPromise<VectorStoreDeleted> {
return this._client.delete(`/vector_stores/${vectorStoreId}`, {
...options,
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
});
} | /**
* Delete a vector store.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/beta/vector-stores/vector-stores.ts#L76-L81 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Jobs.create | create(body: JobCreateParams, options?: Core.RequestOptions): Core.APIPromise<FineTuningJob> {
return this._client.post('/fine_tuning/jobs', { body, ...options });
} | /**
* Creates a fine-tuning job which begins the process of creating a new model from
* a given dataset.
*
* Response includes details of the enqueued job including job status and the name
* of the fine-tuned models once complete.
*
* [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/fine-tuning/jobs/jobs.ts#L22-L24 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Jobs.retrieve | retrieve(fineTuningJobId: string, options?: Core.RequestOptions): Core.APIPromise<FineTuningJob> {
return this._client.get(`/fine_tuning/jobs/${fineTuningJobId}`, options);
} | /**
* Get info about a fine-tuning job.
*
* [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/fine-tuning/jobs/jobs.ts#L31-L33 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Jobs.cancel | cancel(fineTuningJobId: string, options?: Core.RequestOptions): Core.APIPromise<FineTuningJob> {
return this._client.post(`/fine_tuning/jobs/${fineTuningJobId}/cancel`, options);
} | /**
* Immediately cancel a fine-tune job.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/fine-tuning/jobs/jobs.ts#L56-L58 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Parts.create | create(
uploadId: string,
body: PartCreateParams,
options?: Core.RequestOptions,
): Core.APIPromise<UploadPart> {
return this._client.post(
`/uploads/${uploadId}/parts`,
Core.multipartFormRequestOptions({ body, ...options }),
);
} | /**
* Adds a
* [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an
* [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object.
* A Part represents a chunk of bytes from the file you are trying to upload.
*
* Each Part can be at most 64 MB, and you can add Parts until you hit the Upload
* maximum of 8 GB.
*
* It is possible to add multiple Parts in parallel. You can decide the intended
* order of the Parts when you
* [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete).
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/uploads/parts.ts#L21-L30 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Uploads.create | create(body: UploadCreateParams, options?: Core.RequestOptions): Core.APIPromise<Upload> {
return this._client.post('/uploads', { body, ...options });
} | /**
* Creates an intermediate
* [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object
* that you can add
* [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to.
* Currently, an Upload can accept at most 8 GB in total and expires after an hour
* after you create it.
*
* Once you complete the Upload, we will create a
* [File](https://platform.openai.com/docs/api-reference/files/object) object that
* contains all the parts you uploaded. This File is usable in the rest of our
* platform as a regular File object.
*
* For certain `purpose`s, the correct `mime_type` must be specified. Please refer
* to documentation for the supported MIME types for your use case:
*
* - [Assistants](https://platform.openai.com/docs/assistants/tools/file-search/supported-files)
*
* For guidance on the proper filename extensions for each purpose, please follow
* the documentation on
* [creating a File](https://platform.openai.com/docs/api-reference/files/create).
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/uploads/uploads.ts#L34-L36 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Uploads.cancel | cancel(uploadId: string, options?: Core.RequestOptions): Core.APIPromise<Upload> {
return this._client.post(`/uploads/${uploadId}/cancel`, options);
} | /**
* Cancels the Upload. No Parts may be added after an Upload is cancelled.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/uploads/uploads.ts#L41-L43 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
GPTPortal | github_2023 | Zaki-1052 | typescript | Uploads.complete | complete(
uploadId: string,
body: UploadCompleteParams,
options?: Core.RequestOptions,
): Core.APIPromise<Upload> {
return this._client.post(`/uploads/${uploadId}/complete`, { body, ...options });
} | /**
* Completes the
* [Upload](https://platform.openai.com/docs/api-reference/uploads/object).
*
* Within the returned Upload object, there is a nested
* [File](https://platform.openai.com/docs/api-reference/files/object) object that
* is ready to use in the rest of the platform.
*
* You can specify the order of the Parts by passing in an ordered list of the Part
* IDs.
*
* The number of bytes uploaded upon completion must match the number of bytes
* initially specified when creating the Upload object. No Parts may be added after
* an Upload is completed.
*/ | https://github.com/Zaki-1052/GPTPortal/blob/16ffa0c14672c637cf27249d8f0bf146e56c29b7/node_modules/openai/src/resources/uploads/uploads.ts#L60-L66 | 16ffa0c14672c637cf27249d8f0bf146e56c29b7 |
bilibili-cleaner | github_2023 | festoney8 | typescript | KeywordFilter.buildRegExp | private buildRegExp(): void {
this.mergedRegExp = []
const validNormalParts = [] // 普通字串、普通正则
const validBackrefParts = [] // 包含反向引用的正则
for (let word of this.keywordSet) {
word = word.trim()
if (word === '' || word === '//') {
continue
}
if (word.startsWith('/') && word.endsWith('/')) {
word = word.slice(1, -1)
} else {
word = word.replace(/[*+?^${}().|[\]\\]/g, '\\$&') // 转义
}
try {
new RegExp(word, 'ius') // check syntax
if (/\\\d|\\k</.test(word.replaceAll('\\\\', ''))) {
validBackrefParts.push(word) // check backreference
} else {
validNormalParts.push(word)
}
} catch {}
}
try {
if (validNormalParts.length) {
this.mergedRegExp.push(new RegExp(validNormalParts.join('|'), 'ius'))
}
for (const regex of validBackrefParts) {
this.mergedRegExp.push(new RegExp(regex, 'ius'))
}
} catch (err) {
error('keyword filter build RegExp error', err)
}
} | /** 将关键词或正则列表合并为正则 */ | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/filters/core/subFilters/keywordFilter.ts#L18-L52 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | CommentFilterCommon.checkRoot | async checkRoot(mode?: 'full' | 'incr') {
const timer = performance.now()
let revertAll = false
if (
!(
this.commentUsernameFilter.isEnable ||
this.commentContentFilter.isEnable ||
this.commentLevelFilter.isEnable ||
this.commentBotFilter.isEnable ||
this.commentCallBotFilter.isEnable ||
this.commentCallUserFilter.isEnable ||
this.commentCallUserNoReplyFilter.isEnable ||
this.commentCallUserOnlyFilter.isEnable ||
this.commentCallUserOnlyNoReplyFilter.isEnable
)
) {
revertAll = true
}
let rootComments: HTMLElement[] = []
if (ShadowInstance.shadowStore.has('BILI-COMMENT-THREAD-RENDERER')) {
rootComments = Array.from(ShadowInstance.shadowStore.get('BILI-COMMENT-THREAD-RENDERER')!).map(
(v) => v.host as HTMLElement,
)
if (mode === 'incr') {
rootComments = rootComments.filter((v) => !v.hasAttribute(settings.filterSign))
}
}
if (!rootComments.length) {
return
}
if (settings.enableDebugFilter) {
rootComments.forEach((v) => {
debug(
[
`CommentFilterCommon rootComments`,
`username: ${selectorFns.root.username(v)}`,
`content: ${selectorFns.root.content(v)}`,
`callUser: ${selectorFns.root.callUser(v)}`,
`callUserNoReply: ${selectorFns.root.callUserNoReply(v)}`,
`callUserOnly: ${selectorFns.root.callUserOnly(v)}`,
`callUserOnlyNoReply: ${selectorFns.root.callUserOnlyNoReply(v)}`,
`level: ${selectorFns.root.level(v)}`,
`isUp: ${selectorFns.root.isUp(v)}`,
`isPin: ${selectorFns.root.isPin(v)}`,
`isNote: ${selectorFns.root.isNote(v)}`,
`isLink: ${selectorFns.root.isLink(v)}`,
`isMe: ${selectorFns.root.isMe(v)}`,
].join('\n'),
)
})
}
if (isRootWhite || revertAll) {
rootComments.forEach((el) => showEle(el))
return
}
const blackPairs: SubFilterPair[] = []
this.commentUsernameFilter.isEnable && blackPairs.push([this.commentUsernameFilter, selectorFns.root.username])
this.commentContentFilter.isEnable && blackPairs.push([this.commentContentFilter, selectorFns.root.content])
this.commentLevelFilter.isEnable && blackPairs.push([this.commentLevelFilter, selectorFns.root.level])
this.commentBotFilter.isEnable && blackPairs.push([this.commentBotFilter, selectorFns.root.username])
this.commentCallBotFilter.isEnable && blackPairs.push([this.commentCallBotFilter, selectorFns.root.callBot])
this.commentCallUserFilter.isEnable && blackPairs.push([this.commentCallUserFilter, selectorFns.root.callUser])
this.commentCallUserNoReplyFilter.isEnable &&
blackPairs.push([this.commentCallUserNoReplyFilter, selectorFns.root.callUserNoReply])
this.commentCallUserOnlyFilter.isEnable &&
blackPairs.push([this.commentCallUserOnlyFilter, selectorFns.root.callUserOnly])
this.commentCallUserOnlyNoReplyFilter.isEnable &&
blackPairs.push([this.commentCallUserOnlyNoReplyFilter, selectorFns.root.callUserOnlyNoReply])
const whitePairs: SubFilterPair[] = []
this.commentIsUpFilter.isEnable && whitePairs.push([this.commentIsUpFilter, selectorFns.root.isUp])
this.commentIsPinFilter.isEnable && whitePairs.push([this.commentIsPinFilter, selectorFns.root.isPin])
this.commentIsNoteFilter.isEnable && whitePairs.push([this.commentIsNoteFilter, selectorFns.root.isNote])
this.commentIsLinkFilter.isEnable && whitePairs.push([this.commentIsLinkFilter, selectorFns.root.isLink])
this.commentIsMeFilter.isEnable && whitePairs.push([this.commentIsMeFilter, selectorFns.root.isMe])
const rootBlackCnt = await coreCheck(rootComments, true, blackPairs, whitePairs)
const time = (performance.now() - timer).toFixed(1)
debug(
`CommentFilterCommon hide ${rootBlackCnt} in ${rootComments.length} root comments, mode=${mode}, time=${time}`,
)
} | /**
* 检测一级评论
* @param mode full全量,incr增量
* @returns
*/ | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/filters/variety/comment/pages/common.ts#L294-L379 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | CommentFilterCommon.checkSub | async checkSub(mode?: 'full' | 'incr') {
const timer = performance.now()
let revertAll = false
if (
!(
this.commentUsernameFilter.isEnable ||
this.commentContentFilter.isEnable ||
this.commentLevelFilter.isEnable ||
this.commentBotFilter.isEnable ||
this.commentCallBotFilter.isEnable ||
this.commentCallUserFilter.isEnable ||
this.commentCallUserNoReplyFilter.isEnable ||
this.commentCallUserOnlyFilter.isEnable ||
this.commentCallUserOnlyNoReplyFilter.isEnable
)
) {
revertAll = true
}
let subComments: HTMLElement[] = []
if (ShadowInstance.shadowStore.has('BILI-COMMENT-REPLY-RENDERER')) {
subComments = Array.from(ShadowInstance.shadowStore.get('BILI-COMMENT-REPLY-RENDERER')!).map(
(v) => v.host as HTMLElement,
)
if (mode === 'incr') {
subComments = subComments.filter((v) => !v.hasAttribute(settings.filterSign))
}
}
if (!subComments.length) {
return
}
if (settings.enableDebugFilter) {
subComments.forEach((v) => {
debug(
[
`CommentFilterCommon subComments`,
`username: ${selectorFns.sub.username(v)}`,
`content: ${selectorFns.sub.content(v)}`,
`callUser: ${selectorFns.sub.callUser(v)}`,
`callUserNoReply: ${selectorFns.sub.callUserNoReply(v)}`,
`callUserOnly: ${selectorFns.sub.callUserOnly(v)}`,
`callUserOnlyNoReply: ${selectorFns.sub.callUserOnlyNoReply(v)}`,
`level: ${selectorFns.sub.level(v)}`,
`isUp: ${selectorFns.sub.isUp(v)}`,
`isLink: ${selectorFns.sub.isLink(v)}`,
`isMe: ${selectorFns.sub.isMe(v)}`,
].join('\n'),
)
})
}
if (isSubWhite || revertAll) {
subComments.forEach((el) => showEle(el))
return
}
const blackPairs: SubFilterPair[] = []
this.commentUsernameFilter.isEnable && blackPairs.push([this.commentUsernameFilter, selectorFns.sub.username])
this.commentContentFilter.isEnable && blackPairs.push([this.commentContentFilter, selectorFns.sub.content])
this.commentLevelFilter.isEnable && blackPairs.push([this.commentLevelFilter, selectorFns.sub.level])
this.commentBotFilter.isEnable && blackPairs.push([this.commentBotFilter, selectorFns.sub.username])
this.commentCallBotFilter.isEnable && blackPairs.push([this.commentCallBotFilter, selectorFns.sub.callBot])
this.commentCallUserFilter.isEnable && blackPairs.push([this.commentCallUserFilter, selectorFns.sub.callUser])
this.commentCallUserNoReplyFilter.isEnable &&
blackPairs.push([this.commentCallUserNoReplyFilter, selectorFns.sub.callUserNoReply])
this.commentCallUserOnlyFilter.isEnable &&
blackPairs.push([this.commentCallUserOnlyFilter, selectorFns.sub.callUserOnly])
this.commentCallUserOnlyNoReplyFilter.isEnable &&
blackPairs.push([this.commentCallUserOnlyNoReplyFilter, selectorFns.sub.callUserOnlyNoReply])
const whitePairs: SubFilterPair[] = []
this.commentIsUpFilter.isEnable && whitePairs.push([this.commentIsUpFilter, selectorFns.sub.isUp])
this.commentIsLinkFilter.isEnable && whitePairs.push([this.commentIsLinkFilter, selectorFns.sub.isLink])
this.commentIsMeFilter.isEnable && whitePairs.push([this.commentIsMeFilter, selectorFns.sub.isMe])
const subBlackCnt = await coreCheck(subComments, false, blackPairs, whitePairs)
const time = (performance.now() - timer).toFixed(1)
debug(
`CommentFilterCommon hide ${subBlackCnt} in ${subComments.length} sub comments, mode=${mode}, time=${time}`,
)
} | /**
* 检测二级评论
* @param mode full全量,incr增量
* @returns
*/ | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/filters/variety/comment/pages/common.ts#L386-L467 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | CommentFilterCommon.observe | observe() {
ShadowInstance.addShadowObserver(
'BILI-COMMENTS',
new MutationObserver(() => {
this.checkRoot('incr').then().catch()
}),
{
subtree: true,
childList: true,
},
)
ShadowInstance.addShadowObserver(
'BILI-COMMENT-REPLIES-RENDERER',
new MutationObserver(() => {
this.checkSub('full').then().catch()
}),
{
subtree: true,
childList: true,
},
)
} | /**
* 监听一级/二级评论container
* 使用同一Observer监视所有二级评论上级节点,所有变化只触发一次回调
*/ | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/filters/variety/comment/pages/common.ts#L486-L508 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | getVideoData | const getVideoData = (video: HTMLElement): any => {
// Popular / weekly pages: the data hangs off the element's Vue instance
if (!video.classList.contains('rank-item')) {
    return (video as any).__vue__?.videoData
}
// Ranking page: look the entry up in the parent list by its rank attribute
const rankAttr = video.getAttribute('data-rank')
if (!rankAttr) {
    return undefined
}
const rankNum = parseInt(rankAttr)
if (rankNum > 0) {
    return (video.closest('.rank-list-wrap') as any)?.__vue__?.list?.[rankNum - 1]
}
return undefined
} | // 视频列表信息提取 | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/filters/variety/video/pages/popular.ts#L64-L76 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | VideoFilterPopular.observe | observe() {
waitForEle(document, '#app', (node: HTMLElement): boolean => {
return node.id === 'app'
}).then((ele) => {
if (!ele) {
return
}
debug('VideoFilterPopular target appear')
this.target = ele
this.checkFull()
new MutationObserver(() => {
this.checkFull() // 始终全量
}).observe(this.target, { childList: true, subtree: true })
})
} | // } | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/filters/variety/video/pages/popular.ts#L219-L235 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | VideoFilterSearch.observe | observe() {
waitForEle(document, '.search-layout', (node: HTMLElement): boolean => {
return node.className.includes('search-layout')
}).then((ele) => {
if (!ele) {
return
}
debug('VideoFilterSearch target appear')
this.target = ele
this.checkFull()
new MutationObserver(() => {
this.checkFull() // 搜索页始终全量check
}).observe(this.target, { childList: true, subtree: true })
})
} | // } | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/filters/variety/video/pages/search.ts#L182-L198 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | VideoFilterSpace.observe | observe() {
waitForEle(document, '#app', (node: HTMLElement): boolean => {
return node.id === 'app'
}).then((ele) => {
if (!ele) {
return
}
debug('VideoFilterSpace target appear')
this.target = ele
this.checkFull()
new MutationObserver(() => {
this.checkFull() // 空间页始终全量check
}).observe(this.target, { childList: true, subtree: true })
})
} | // } | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/filters/variety/video/pages/space.ts#L161-L177 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | VideoFilterVideo.observe | observe() {
waitForEle(
document,
'#reco_list, .recommend-list-v1, .recommend-list-container',
(node: HTMLElement): boolean => {
return (
node.id === 'reco_list' ||
['recommend-list-v1', 'recommend-list-container'].includes(node.className)
)
},
).then((ele) => {
if (!ele) {
return
}
debug('VideoFilterVideo target appear')
this.target = ele
this.checkFull()
new MutationObserver(() => {
this.checkFull() // 播放页始终全量check
}).observe(this.target, { childList: true, subtree: true })
})
} | // } | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/filters/variety/video/pages/video.ts#L204-L227 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | unfold | const unfold = () => {
const dynFoldNodes = document.querySelectorAll('main .bili-dyn-list__item .bili-dyn-item-fold')
if (dynFoldNodes.length) {
dynFoldNodes.forEach((e) => {
e instanceof HTMLDivElement && e.click()
})
}
} | // 大量动态下,单次耗时10ms内 | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/rules/dynamic/groups/centerDyn.ts#L87-L94 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | bv2av | const bv2av = (url: string): string => {
    // Constants from the public BV->AV decoding algorithm (see bilibili-API-collect refs)
    const XOR_CODE = 23442827791579n
    const MASK_CODE = 2251799813685247n
    const BASE = 58n
    // Base-58 alphabet used by bilibili's BV encoding
    const data = 'FcwAPNKTMug3GV5Lj7EJnHpWsx4tb8haYeviqBz6rkCy12mUSDQX9RdoZf'
    // Decode a BV id string into its numeric AV id
    const dec = (bvid: string): number => {
        const bvidArr = Array.from<string>(bvid)
        // Undo the two character swaps applied during encoding
        ;[bvidArr[3], bvidArr[9]] = [bvidArr[9], bvidArr[3]]
        ;[bvidArr[4], bvidArr[7]] = [bvidArr[7], bvidArr[4]]
        // Drop the fixed 3-char "BV…" prefix
        bvidArr.splice(0, 3)
        // Interpret the remaining chars as a base-58 BigInt, then unmask and unxor
        const tmp = bvidArr.reduce((pre, bvidChar) => pre * BASE + BigInt(data.indexOf(bvidChar)), 0n)
        return Number((tmp & MASK_CODE) ^ XOR_CODE)
    }
    try {
        if (url.includes('bilibili.com/video/BV')) {
            const bvid = matchBvid(url)
            if (bvid) {
                // Keep the multi-part "p" query param and the reply anchor in the hash
                const urlObj = new URL(url)
                const params = new URLSearchParams(urlObj.search)
                let partNum = ''
                if (params.has('p')) {
                    partNum += `?p=${params.get('p')}`
                }
                const aid = dec(bvid)
                if (partNum || urlObj.hash) {
                    return `https://www.bilibili.com/video/av${aid}/${partNum}${urlObj.hash}`
                }
                return `https://www.bilibili.com/video/av${aid}`
            }
        }
        return url
    } catch (err) {
        // Any parse/decode failure: fall back to the original URL unchanged
        return url
    }
} | /**
* algo by bilibili-API-collect
* @see https://www.zhihu.com/question/381784377/answer/1099438784
* @see https://github.com/SocialSisterYi/bilibili-API-collect/issues/740
* @see https://socialsisteryi.github.io/bilibili-API-collect/docs/misc/bvid_desc.html
* @param url 网址
* @returns 输出纯数字av号
*/ | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/modules/rules/video/groups/basic.ts#L21-L57 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | wrapper | const wrapper = (loggingFunc: (..._args: any[]) => void | undefined, isEnable: boolean) => {
if (isEnable) {
return (...innerArgs: any[]) => {
currTime = performance.now()
const during: string = (currTime - lastTime).toFixed(1)
loggingFunc(`[bili-cleaner] ${during} / ${currTime.toFixed(0)} ms |`, ...innerArgs)
lastTime = currTime
}
}
return (..._args: any) => {}
} | /**
* 计时日志wrapper
* 输出格式: [bili-cleaner] 0.1 / 2.4 ms | XXXXXXXXXXXXXX
* 第一个时间为上一条日志到本条日志间隔, 第二个时间为页面开启总时长
* 使用 performance.now() 做精确计时
*
* @param loggingFunc console.log等带级别打印日志的函数
* @param isEnable 是否打印日志
* @returns 返回wrap后的日志函数
*/ | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/utils/logger.ts#L17-L27 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | Shadow.hook | private hook() {
        // eslint-disable-next-line @typescript-eslint/no-this-alias
        const self = this
        const origAttachShadow = Element.prototype.attachShadow
        // Wrap attachShadow so every newly created shadowRoot receives our styles
        // and observers before page code can use it
        Element.prototype.attachShadow = function (init) {
            const shadowRoot = origAttachShadow.call(this, init)
            const tag = this.tagName
            // Inject styles registered for this tag
            const styles = self.cssStore.get(tag)
            styles?.forEach((v) => {
                const style = document.createElement('style')
                style.textContent = v.css
                style.setAttribute('bili-cleaner-css', v.className)
                shadowRoot.appendChild(style)
            })
            // Record the shadowRoot so later style/observer registrations can reach it
            if (self.shadowStore.has(tag)) {
                self.shadowStore.get(tag)!.add(shadowRoot)
            } else {
                self.shadowStore.set(tag, new Set([shadowRoot]))
            }
            // Attach any observers already registered for this tag
            if (self.observerStore.has(tag)) {
                for (const [observer, config] of self.observerStore.get(tag)!) {
                    observer.observe(shadowRoot, config)
                }
            }
            return shadowRoot
        }
        // Site code sometimes rebuilds shadow content via shadowRoot.innerHTML, wiping our
        // injected <style> tags (e.g. BILI-RICH-TEXT, BILI-AVATAR); re-append them on every set
        const origShadowInnerHTML = Object.getOwnPropertyDescriptor(ShadowRoot.prototype, 'innerHTML')
        Object.defineProperty(ShadowRoot.prototype, 'innerHTML', {
            get() {
                return origShadowInnerHTML!.get!.call(this)
            },
            set(value) {
                const tagName = this.host.tagName
                if (tagName && self.cssStore.has(tagName)) {
                    const shadowStyles = self.cssStore.get(tagName)
                    shadowStyles?.forEach((v) => {
                        value += `<style bili-cleaner-css="${v.className}">${v.css}</style>`
                    })
                }
                origShadowInnerHTML!.set!.call(this, value)
            },
        })
} | /**
* hook attachShadow,创建shadowRoot时注入自定义样式,启用自定义监听
* 重载ShadowRoot.innerHTML,被调用时注入自定义样式
*/ | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/utils/shadow.ts#L58-L109 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | Shadow.addShadowStyle | addShadowStyle(tag: TagName, className: string, css: string) {
tag = tag.toUpperCase()
const curr = this.cssStore.get(tag)
if (curr) {
curr.add({ className: className, css: css })
} else {
this.cssStore.set(tag, new Set([{ className: className, css: css }]))
}
if (this.shadowStore.size) {
const nodes = this.shadowStore.get(tag)
nodes?.forEach((node) => {
const style = document.createElement('style')
style.textContent = css
style.setAttribute('bili-cleaner-css', className)
node.appendChild(style)
})
}
} | /**
* 新增需要在shadowDOM内注入的样式
* @param tag tagName
* @param className css类名
* @param css 样式
*/ | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/utils/shadow.ts#L117-L135 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | Shadow.removeShadowStyle | removeShadowStyle(tag: TagName, className: string) {
tag = tag.toUpperCase()
const curr = this.cssStore.get(tag)
if (curr) {
for (const value of curr) {
if (value.className === className) {
curr.delete(value)
break
}
}
}
if (this.shadowStore.size) {
const nodes = this.shadowStore.get(tag)
nodes?.forEach((node) => {
node.querySelectorAll(`style[bili-cleaner-css="${className}"]`).forEach((v) => v.remove())
})
}
} | /**
* 移除需要在shadowDOM内注入的样式
* @param tag tagName
* @param className css类名
*/ | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/utils/shadow.ts#L142-L160 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
bilibili-cleaner | github_2023 | festoney8 | typescript | Shadow.addShadowObserver | addShadowObserver(tag: TagName, observer: MutationObserver, config: MutationObserverInit) {
tag = tag.toUpperCase()
const curr = this.observerStore.get(tag)
if (curr) {
curr.add([observer, config])
} else {
this.observerStore.set(tag, new Set([[observer, config]]))
}
if (this.shadowStore.size) {
const nodes = this.shadowStore.get(tag)
nodes?.forEach((node) => {
observer.observe(node, config)
})
}
} | /**
* 新增shadowRoot内MutationObserver
* @param tag tagName
* @param observer MutationObserver
* @param config Observer配置
*/ | https://github.com/festoney8/bilibili-cleaner/blob/0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a/src/utils/shadow.ts#L168-L183 | 0b3b937154ce9d3d5e44be7fc422a5ede6a0b72a |
next-devtools | github_2023 | xinyao27 | typescript | doSearchSync | const doSearchSync = () => {
const res = onSearchSync?.(debouncedSearchTerm)
setOptions(transToGroupOption(res || [], groupBy))
} | /** sync search */ | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/client/src/components/ui/multiselect.tsx#L290-L293 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | doSearch | const doSearch = async () => {
setIsLoading(true)
const res = await onSearch?.(debouncedSearchTerm)
setOptions(transToGroupOption(res || [], groupBy))
setIsLoading(false)
} | /** async search */ | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/client/src/components/ui/multiselect.tsx#L313-L318 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | displayNameHandler | const displayNameHandler = (documentation: Documentation, componentDefinition: NodePath<ComponentNode>) => {
builtinHandlers.displayNameHandler(documentation, componentDefinition)
if (!documentation.get('displayName')) {
const variableDeclarator = componentDefinition.parentPath
if (variableDeclarator.node.type === 'VariableDeclarator') {
const componentName = variableDeclarator.node.id.type === 'Identifier' ? variableDeclarator.node.id.name : null
if (componentName) {
documentation.set('displayName', componentName)
}
}
}
} | // handle the case where displayName is empty | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/components.ts#L42-L53 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | isNextRouteFile | function isNextRouteFile(fileName: string): boolean {
return nextJsFilePattern.test(fileName) && NEXT_ROUTE_FILE_PATTERN.test(fileName)
} | // Helper function to check if a file is a Next.js route file | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/routes.ts#L37-L39 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | hasRouteFiles | async function hasRouteFiles(directory: string): Promise<boolean> {
const items = await fs.readdir(directory, { withFileTypes: true })
// Check if current directory has any route files
if (items.some((item) => !item.isDirectory() && isNextRouteFile(item.name))) {
return true
}
// Recursively check subdirectories
for (const item of items) {
if (
item.isDirectory() &&
!IGNORED_DIRECTORIES.has(item.name) &&
(await hasRouteFiles(join(directory, item.name)))
) {
return true
}
}
return false
} | // Helper function to check if directory or its children contain route files | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/routes.ts#L42-L62 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | createRouteNode | function createRouteNode(path: string, name: string, parentId: number, parentNode?: Route): Route {
return {
id: idCounter++,
route: `${parentNode?.id === 0 ? '' : parentNode?.route}/${name}`,
name,
parentNode: parentId,
path,
contents: [],
render: 'server',
}
} | // Create a new route node for the tree | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/routes.ts#L65-L75 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | handleDirectory | async function handleDirectory(
entity: fs.Dirent,
fullPath: string,
parentId: number,
parentNode?: Route,
): Promise<void> {
const isValidRoute = parentId === 0 || (await hasRouteFiles(fullPath))
if (isValidRoute) {
const newNode = createRouteNode(fullPath, entity.name, parentId, parentNode)
treeNodes.push(newNode)
await scanDirectoryContents(fullPath, newNode.id)
}
} | // Handle directory processing | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/routes.ts#L95-L108 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | handleRouteFile | async function handleRouteFile(fileName: string, fullPath: string, parentId: number): Promise<void> {
treeNodes[parentId].contents.push(fileName)
if (await detectClientDirective(fullPath)) {
treeNodes[parentId].render = 'client'
}
} | // Handle route file processing | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/routes.ts#L111-L116 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | getBasicMetadata | function getBasicMetadata() {
// title
const title = document.title
if (title) result.title = title
else result.missing.push('title')
// description
const description = document.querySelector('meta[name="description"]')?.getAttribute('content')
if (description) result.description = description
else result.missing.push('description')
// keywords
const keywords = document.querySelector('meta[name="keywords"]')?.getAttribute('content')
if (keywords) result.keywords = keywords.split(',').map((k) => k.trim())
// author
const author = document.querySelector('meta[name="author"]')?.getAttribute('content')
const authorLink = document.querySelector('link[rel="author"]')?.getAttribute('href')
if (author || authorLink) {
result.authors = [
{
name: author || '',
url: authorLink || '',
},
]
}
} | // Basic metadata | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/seo.ts#L19-L45 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | getOpenGraphData | function getOpenGraphData() {
const og: Record<string, any> = {}
// images is a array of objects -
const images: Record<string, string>[] = []
const ogTags = document.querySelectorAll('[property^="og:"]')
ogTags.forEach((tag) => {
const property = tag.getAttribute('property')?.replace('og:', '')
const content = tag.getAttribute('content')
if (property && content) {
if (property.startsWith('image')) {
const [, prop] = property.split(':')
const image: Record<string, string> = prop ? images.at(-1) || { url: content } : { url: content }
if (prop) {
image[prop] = content
}
if (!prop) {
images.push(image)
}
} else {
og[property] = content
}
}
})
if (images.length > 0) {
og.images = images
}
// music
const musicTags = document.querySelectorAll('[property^="music:"]')
if (musicTags.length > 0) {
musicTags.forEach((tag) => {
const property = tag.getAttribute('property')?.replace('music:', '')
const content = tag.getAttribute('content')
if (property) og[property] = content
})
}
// video
const videoTags = document.querySelectorAll('[property^="video:"]')
if (videoTags.length > 0) {
videoTags.forEach((tag) => {
const property = tag.getAttribute('property')?.replace('video:', '')
const content = tag.getAttribute('content')
if (property) og[property] = content
})
}
// article
const articleTags = document.querySelectorAll('[property^="article:"]')
if (articleTags.length > 0) {
articleTags.forEach((tag) => {
const property = tag.getAttribute('property')?.replace('article:', '')
const content = tag.getAttribute('content')
if (property) og[property] = content
})
}
// book
const bookTags = document.querySelectorAll('[property^="book:"]')
if (bookTags.length > 0) {
bookTags.forEach((tag) => {
const property = tag.getAttribute('property')?.replace('book:', '')
const content = tag.getAttribute('content')
if (property) og[property] = content
})
}
// profile
const profileTags = document.querySelectorAll('[property^="profile:"]')
if (profileTags.length > 0) {
profileTags.forEach((tag) => {
const property = tag.getAttribute('property')?.replace('profile:', '')
const content = tag.getAttribute('content')
if (property) og[property] = content
})
}
if (Object.keys(og).length > 0) {
result.openGraph = og
}
} | // Open Graph - https://ogp.me/ | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/seo.ts#L48-L129 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | getTwitterData | function getTwitterData() {
const twitter: Record<string, any> = {}
const twitterTags = document.querySelectorAll('[name^="twitter:"]')
const images: Record<string, string>[] = []
twitterTags.forEach((tag) => {
const name = tag.getAttribute('name')?.replace('twitter:', '')
const content = tag.getAttribute('content')
if (name && content) {
if (name.startsWith('image')) {
const [, prop] = name.split(':')
const image: Record<string, string> = prop ? images.at(-1) || { url: content } : { url: content }
if (prop) {
image[prop] = content
}
if (!prop) {
images.push(image)
}
} else {
twitter[name] = content
}
}
})
if (images.length > 0) {
twitter.images = images
}
if (Object.keys(twitter).length > 0) {
result.twitter = twitter
}
} | // Twitter Card | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/seo.ts#L132-L162 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | getIconsData | function getIconsData() {
const icons: Record<string, any> = {}
const iconLinks = document.querySelectorAll('link[rel*="icon"]')
iconLinks.forEach((link) => {
const rel = link.getAttribute('rel')
const href = link.getAttribute('href')
if (rel && href) {
if (rel === 'icon') {
icons.icon = href
} else if (rel === 'apple-touch-icon') {
icons.apple = href
}
}
})
if (Object.keys(icons).length > 0) {
result.icons = icons
}
} | // Icon data | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/seo.ts#L165-L184 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | getRobotsData | function getRobotsData() {
const robots = document.querySelector('meta[name="robots"]')?.getAttribute('content')
if (robots) {
const robotsObj: Record<string, boolean> = {}
robots.split(',').forEach((directive) => {
const trimmed = directive.trim()
if (trimmed.startsWith('no')) {
robotsObj[trimmed.replace('no', '')] = false
} else {
robotsObj[trimmed] = true
}
})
result.robots = robotsObj
}
} | // Robots | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/seo.ts#L187-L201 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | getVerificationData | function getVerificationData() {
const verification: Record<string, string | string[]> = {}
const verificationTags = document.querySelectorAll('meta[name$="-verification"]')
verificationTags.forEach((tag) => {
const name = tag.getAttribute('name')?.replace('-verification', '')
const content = tag.getAttribute('content')
if (name && content) {
verification[name] = content
}
})
if (Object.keys(verification).length > 0) {
result.verification = verification
}
} | // Verification | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/seo.ts#L204-L219 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | getFacebookData | function getFacebookData() {
const fbAppId = document.querySelector('meta[property="fb:app_id"]')?.getAttribute('content')
const fbAdmins = document.querySelector('meta[property="fb:admins"]')?.getAttribute('content')
if (fbAppId || fbAdmins) {
result.facebook = {} as Facebook
if (fbAppId) result.facebook.appId = fbAppId
if (fbAdmins) result.facebook.admins = [fbAdmins]
}
} | // Facebook | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/seo.ts#L222-L231 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | getJSONLDData | function getJSONLDData() {
const scripts = document.querySelectorAll('script[type="application/ld+json"]')
scripts.forEach((script) => {
const jsonRaw = (script.textContent as string) ?? '{}'
const json = JSON.parse(jsonRaw) as WithContext<any>
if (!result.jsonLd) result.jsonLd = []
result.jsonLd.push(json)
})
} | // JSON-LD | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/seo.ts#L234-L243 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
next-devtools | github_2023 | xinyao27 | typescript | getName | function getName() {
const jsonLd = result.jsonLd?.find((json) => json.name)
if (jsonLd?.name) {
result.name = jsonLd.name
return
}
const titleTag = document.querySelector('title')?.textContent
if (titleTag) {
result.name = titleTag
return
}
const h1Tag = document.querySelector('h1')?.textContent
if (h1Tag) {
result.name = h1Tag
return
}
const ogSiteName = document.querySelector('meta[property="og:site_name"]')?.getAttribute('content')
if (ogSiteName) {
result.name = ogSiteName
}
} | /**
* name
* Determine the name from multiple sources, including:
* - WebSite structured data - https://json-ld.org/
* - <title>
* - <h1>
* - og:size_name
*/ | https://github.com/xinyao27/next-devtools/blob/ec985015f48febab2e4e061ba8fd6d4238563f52/packages/core/src/features/seo.ts#L253-L273 | ec985015f48febab2e4e061ba8fd6d4238563f52 |
searchemoji | github_2023 | rotick | typescript | go | async function go () {
for (const l of locale) {
if (l.code !== 'en' && l.code !== 'zh-hans') {
await main(l.code)
}
}
} | // const badResult = ['da', 'fi', 'he', 'hu', 'id', 'it', 'th', 'tr'].map(lang => ({ code: lang })) | https://github.com/rotick/searchemoji/blob/be31747fb2868a9bdd61a7b0a1e4d5b7c8bc9d3f/scripts/generateLocale.ts#L45-L51 | be31747fb2868a9bdd61a7b0a1e4d5b7c8bc9d3f |
searchemoji | github_2023 | rotick | typescript | batch | function batch (arr: any[], size: number) {
const batches = []
/* eslint-disable @typescript-eslint/restrict-plus-operands */
for (let i = 0; i < arr.length; i += size) {
batches.push(arr.slice(i, i + size))
}
return batches
} | // async function main () { | https://github.com/rotick/searchemoji/blob/be31747fb2868a9bdd61a7b0a1e4d5b7c8bc9d3f/scripts/handle.ts#L41-L48 | be31747fb2868a9bdd61a7b0a1e4d5b7c8bc9d3f |
gratelets | github_2023 | lilnasy | typescript | idle | async function idle(page: Page) {
    // Load the test page that hydrates its component lazily
    await page.goto("http://localhost:4321/idle")
    // Server-rendered markup must be present before any interaction
    await expect(page.locator("#counter-message")).toHaveText("server rendered")
    // After interacting, the component is expected to have hydrated
    await page.click("body")
    await expect(page.locator("#counter-message")).toHaveText("hydrated")
} | // the nuances of how idle directive makes the component load cant be tested | https://github.com/lilnasy/gratelets/blob/e9de129a55e69c0a50bfa27ac218947f1d3e8611/tests-e2e/client-interaction.spec.ts#L23-L28 | e9de129a55e69c0a50bfa27ac218947f1d3e8611 |
three-pinata | github_2023 | dgreenheck | typescript | findIsolatedGeometry | function findIsolatedGeometry(fragment: Fragment): Fragment[] {
  // Initialize the union-find data structure
  const uf = new UnionFind(fragment.vertexCount);
  // Triangles for each submesh are stored separately
  const rootTriangles: Record<number, number[][]> = {};
  const N = fragment.vertices.length;
  const M = fragment.cutVertices.length;
  const adjacencyMap = new Map<number, number>();
  // Hash each vertex based on its position. If a vertex already exists
  // at that location, union this vertex with the existing vertex so they are
  // included in the same geometry group.
  fragment.vertices.forEach((vertex, index) => {
    const key = vertex.hash();
    const existingIndex = adjacencyMap.get(key);
    if (existingIndex === undefined) {
      adjacencyMap.set(key, index);
    } else {
      uf.union(existingIndex, index);
    }
  });
  // First, union each cut-face vertex with its coincident non-cut-face vertex
  // The union is performed so no cut-face vertex can be a root.
  // Cut-face vertex i lives at union-find index N + i.
  for (let i = 0; i < M; i++) {
    uf.union(fragment.vertexAdjacency[i], i + N);
  }
  // Group vertices by analyzing which vertices are connected via triangles
  // Analyze the triangles of each submesh separately
  const indices = fragment.triangles;
  for (let submeshIndex = 0; submeshIndex < indices.length; submeshIndex++) {
    for (let i = 0; i < indices[submeshIndex].length; i += 3) {
      const a = indices[submeshIndex][i];
      const b = indices[submeshIndex][i + 1];
      const c = indices[submeshIndex][i + 2];
      uf.union(a, b);
      uf.union(b, c);
      // Store triangles by root representative
      const root = uf.find(a);
      if (!rootTriangles[root]) {
        rootTriangles[root] = [[], []];
      }
      rootTriangles[root][submeshIndex].push(a, b, c);
    }
  }
  // New fragments created from geometry, mapped by root index
  const rootFragments: Record<number, Fragment> = {};
  // Maps an original vertex index to its index inside the new fragment
  const vertexMap: number[] = Array(fragment.vertexCount);
  // Iterate over each vertex and add it to correct mesh
  for (let i = 0; i < N; i++) {
    const root = uf.find(i);
    // If there is no fragment for this root yet, create it
    if (!rootFragments[root]) {
      rootFragments[root] = new Fragment();
    }
    rootFragments[root].vertices.push(fragment.vertices[i]);
    vertexMap[i] = rootFragments[root].vertices.length - 1;
  }
  // Do the same for the cut-face vertices
  for (let i = 0; i < M; i++) {
    const root = uf.find(i + N);
    rootFragments[root].cutVertices.push(fragment.cutVertices[i]);
    vertexMap[i + N] =
      rootFragments[root].vertices.length +
      rootFragments[root].cutVertices.length -
      1;
  }
  // Iterate over triangles and add to the correct mesh
  for (const key of Object.keys(rootTriangles)) {
    let i = Number(key);
    // Minor optimization here:
    // Access the parent directly rather than using find() since the paths
    // for all indices have been compressed in the last two for loops
    let root = uf.parent[i];
    for (
      let submeshIndex = 0;
      submeshIndex < fragment.triangles.length;
      submeshIndex++
    ) {
      for (const vertexIndex of rootTriangles[i][submeshIndex]) {
        const mappedIndex = vertexMap[vertexIndex];
        rootFragments[root].triangles[submeshIndex].push(mappedIndex);
      }
    }
  }
  return Object.values(rootFragments);
} | /**
* Uses the union-find algorithm to find isolated groups of geometry
* within a fragment that are not connected together. These groups
* are identified and split into separate fragments.
* @returns An array of fragments
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/Fracture.ts#L91-L191 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | fillCutFaces | function fillCutFaces(
topSlice: Fragment,
bottomSlice: Fragment,
sliceNormal: Vector3,
textureScale: Vector2,
textureOffset: Vector2,
convex: boolean,
): void {
  // Since the topSlice and bottomSlice both share the same cut face, we only need to calculate it
  // once. Then the same vertex/triangle data for the face will be used for both slices, except
  // with the normals reversed.
  // First need to weld the coincident vertices for the triangulation to work properly
  topSlice.weldCutFaceVertices();
  // Need at least 3 vertices to triangulate
  if (topSlice.cutVertices.length < 3) return;
  // Triangulate the cut face (constrained triangulation for non-convex fragments)
  const triangulator = convex
    ? new Triangulator(topSlice.cutVertices, sliceNormal)
    : new ConstrainedTriangulator(
        topSlice.cutVertices,
        topSlice.constraints,
        sliceNormal,
      );
  const triangles: number[] = triangulator.triangulate();
  // Update normal and UV for the cut face vertices
  for (let i = 0; i < topSlice.cutVertices.length; i++) {
    var vertex = topSlice.cutVertices[i];
    var point = triangulator.points[i];
    // UV coordinates are based off of the 2D coordinates used for triangulation
    // During triangulation, coordinates are normalized to [0,1], so need to multiply
    // by normalization scale factor to get back to the appropriate scale
    const uv = new Vector2(
      triangulator.normalizationScaleFactor * point.coords.x * textureScale.x +
        textureOffset.x,
      triangulator.normalizationScaleFactor * point.coords.y * textureScale.y +
        textureOffset.y,
    );
    // Update normals and UV coordinates for the cut vertices
    // (bottom face normal points opposite the slice normal)
    const topVertex = new MeshVertex(
      vertex.position.clone(),
      sliceNormal.clone(),
      uv.clone(),
    );
    const bottomVertex = new MeshVertex(
      vertex.position.clone(),
      sliceNormal.clone().negate(),
      uv.clone(),
    );
    topSlice.cutVertices[i] = topVertex;
    bottomSlice.cutVertices[i] = bottomVertex;
  }
  // push the new triangles to the top/bottom slices
  // (bottom slice winding is reversed so the face points the other way)
  let offsetTop = topSlice.vertices.length;
  let offsetBottom = bottomSlice.vertices.length;
  for (let i = 0; i < triangles.length; i += 3) {
    topSlice.addTriangle(
      offsetTop + triangles[i],
      offsetTop + triangles[i + 1],
      offsetTop + triangles[i + 2],
      SlicedMeshSubmesh.CutFace,
    );
    bottomSlice.addTriangle(
      offsetBottom + triangles[i],
      offsetBottom + triangles[i + 2],
      offsetBottom + triangles[i + 1],
      SlicedMeshSubmesh.CutFace,
    );
  }
} | /**
* Fills the cut faces for each sliced mesh. The `sliceNormal` is the normal for the plane and points
* in the direction of `topfragment`
* @param topSlice Fragment mesh data for slice above the slice plane
* @param bottomSlice Fragment mesh data for slice above the slice plane
* @param sliceNormal Normal of the slice plane (points towards the top slice)
* @param textureScale Scale factor to apply to UV coordinates
* @param textureOffset Offset to apply to UV coordinates
* @param convex Set to true if fragments are convex
* @returns
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/Slice.ts#L148-L227 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | splitTriangles | function splitTriangles(
fragment: Fragment,
topSlice: Fragment,
bottomSlice: Fragment,
sliceNormal: Vector3,
sliceOrigin: Vector3,
side: boolean[],
subMesh: SlicedMeshSubmesh,
): void {
  const triangles: number[] = fragment.triangles[subMesh];
  // Keep track of vertices that lie on the intersection plane
  let a: number;
  let b: number;
  let c: number;
  for (let i = 0; i < triangles.length; i += 3) {
    // Get vertex indexes for this triangle
    a = triangles[i];
    b = triangles[i + 1];
    c = triangles[i + 2];
    // Triangle is contained completely within mesh A
    if (side[a] && side[b] && side[c]) {
      topSlice.addMappedTriangle(a, b, c, subMesh);
    }
    // Triangle is contained completely within mesh B
    else if (!side[a] && !side[b] && !side[c]) {
      bottomSlice.addMappedTriangle(a, b, c, subMesh);
    }
    // Triangle is intersected by the slicing plane. Need to subdivide it.
    // Each branch rotates the vertices so the lone odd-side vertex is passed
    // last, preserving winding order; the final boolean tells splitTriangle
    // which side the leading pair is on.
    else {
      // In these cases, two vertices of the triangle are above the cut plane and one vertex is below
      if (side[b] && side[c] && !side[a]) {
        splitTriangle(
          b,
          c,
          a,
          sliceNormal,
          sliceOrigin,
          fragment,
          topSlice,
          bottomSlice,
          subMesh,
          true,
        );
      } else if (side[c] && side[a] && !side[b]) {
        splitTriangle(
          c,
          a,
          b,
          sliceNormal,
          sliceOrigin,
          fragment,
          topSlice,
          bottomSlice,
          subMesh,
          true,
        );
      } else if (side[a] && side[b] && !side[c]) {
        splitTriangle(
          a,
          b,
          c,
          sliceNormal,
          sliceOrigin,
          fragment,
          topSlice,
          bottomSlice,
          subMesh,
          true,
        );
      }
      // In these cases, two vertices of the triangle are below the cut plane and one vertex is above
      else if (!side[b] && !side[c] && side[a]) {
        splitTriangle(
          b,
          c,
          a,
          sliceNormal,
          sliceOrigin,
          fragment,
          topSlice,
          bottomSlice,
          subMesh,
          false,
        );
      } else if (!side[c] && !side[a] && side[b]) {
        splitTriangle(
          c,
          a,
          b,
          sliceNormal,
          sliceOrigin,
          fragment,
          topSlice,
          bottomSlice,
          subMesh,
          false,
        );
      } else if (!side[a] && !side[b] && side[c]) {
        splitTriangle(
          a,
          b,
          c,
          sliceNormal,
          sliceOrigin,
          fragment,
          topSlice,
          bottomSlice,
          subMesh,
          false,
        );
      }
    }
  }
} | /**
* Identifies triangles that are intersected by the slice plane and splits them in two
* @param fragment
* @param topSlice Fragment mesh data for slice above the slice plane
 * @param bottomSlice Fragment mesh data for slice below the slice plane
* @param sliceNormal The normal of the slice plane (points towards the top slice)
* @param sliceOrigin The origin of the slice plane
* @param side Array mapping each vertex to either the top/bottom slice
* @param subMesh Index of the sub mesh
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/Slice.ts#L239-L354 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | splitTriangle | function splitTriangle(
v1_idx: number,
v2_idx: number,
v3_idx: number,
sliceNormal: Vector3,
sliceOrigin: Vector3,
fragment: Fragment,
topSlice: Fragment,
bottomSlice: Fragment,
subMesh: SlicedMeshSubmesh,
v3BelowCutPlane: boolean,
): void {
// - `v1`, `v2`, `v3` are the indexes of the triangle relative to the original mesh data
// - `v1` and `v2` are on the side of the split plane that belongs to meshA
// - `v3` is on the side of the split plane that belongs to meshB
// - `vertices`, `normals`, `uv` are the original mesh data used for interpolation
//
// v3BelowCutPlane = true
// ======================
//
// v1 *_____________* v2 .
// \ / /|\ cutNormal
// \ / |
// ----*-------*---------*--
// v13 \ / v23 cutOrigin
// \ /
// \ /
// * v3 triangle normal out of screen
//
// v3BelowCutPlane = false
// =======================
//
// * v3 .
// / \ /|\ cutNormal
// v23 / \ v13 |
// -----*-----*----------*--
// / \ cut origin
// / \
// v2 *___________* v1 triangle normal out of screen
//
let v1: MeshVertex =
v1_idx < fragment.vertices.length
? fragment.vertices[v1_idx]
: fragment.cutVertices[v1_idx - fragment.vertices.length];
let v2: MeshVertex =
v2_idx < fragment.vertices.length
? fragment.vertices[v2_idx]
: fragment.cutVertices[v2_idx - fragment.vertices.length];
let v3: MeshVertex =
v3_idx < fragment.vertices.length
? fragment.vertices[v3_idx]
: fragment.cutVertices[v3_idx - fragment.vertices.length];
const v13 = linePlaneIntersection(
v1.position,
v3.position,
sliceNormal,
sliceOrigin,
);
const v23 = linePlaneIntersection(
v2.position,
v3.position,
sliceNormal,
sliceOrigin,
);
if (v13 && v23) {
// Interpolate normals and UV coordinates
const norm13 = new Vector3(
v1.normal.x + v13.s * (v3.normal.x - v1.normal.x),
v1.normal.y + v13.s * (v3.normal.y - v1.normal.y),
v1.normal.z + v13.s * (v3.normal.z - v1.normal.z),
).normalize();
const norm23 = new Vector3(
v2.normal.x + v23.s * (v3.normal.x - v2.normal.x),
v2.normal.y + v23.s * (v3.normal.y - v2.normal.y),
v2.normal.z + v23.s * (v3.normal.z - v2.normal.z),
).normalize();
const uv13 = new Vector2(
v1.uv.x + v13.s * (v3.uv.x - v1.uv.x),
v1.uv.y + v13.s * (v3.uv.y - v1.uv.y),
);
const uv23 = new Vector2(
v2.uv.x + v23.s * (v3.uv.x - v2.uv.x),
v2.uv.y + v23.s * (v3.uv.y - v2.uv.y),
);
// push vertices/normals/uv for the intersection points to each mesh
topSlice.addCutFaceVertex(v13.x, norm13, uv13);
topSlice.addCutFaceVertex(v23.x, norm23, uv23);
bottomSlice.addCutFaceVertex(v13.x, norm13, uv13);
bottomSlice.addCutFaceVertex(v23.x, norm23, uv23);
// Indices for the intersection vertices (for the original mesh data)
const index13_A: number = topSlice.vertices.length - 2;
const index23_A: number = topSlice.vertices.length - 1;
const index13_B: number = bottomSlice.vertices.length - 2;
const index23_B: number = bottomSlice.vertices.length - 1;
if (v3BelowCutPlane) {
// Triangle slice above the cutting plane is a quad, so divide into two triangles
topSlice.addTriangle(
index23_A,
index13_A,
topSlice.indexMap[v2_idx],
subMesh,
);
topSlice.addTriangle(
index13_A,
topSlice.indexMap[v1_idx],
topSlice.indexMap[v2_idx],
subMesh,
);
// One triangle must be added to mesh 2
bottomSlice.addTriangle(
bottomSlice.indexMap[v3_idx],
index13_B,
index23_B,
subMesh,
);
// When looking at the cut-face, the edges should wind counter-clockwise
topSlice.constraints.push(
new EdgeConstraint(
topSlice.cutVertices.length - 2,
topSlice.cutVertices.length - 1,
),
);
bottomSlice.constraints.push(
new EdgeConstraint(
bottomSlice.cutVertices.length - 1,
bottomSlice.cutVertices.length - 2,
),
);
} else {
// Triangle slice above the cutting plane is a simple triangle
topSlice.addTriangle(
index13_A,
index23_A,
topSlice.indexMap[v3_idx],
subMesh,
);
// Triangle slice below the cutting plane is a quad, so divide into two triangles
bottomSlice.addTriangle(
bottomSlice.indexMap[v1_idx],
bottomSlice.indexMap[v2_idx],
index13_B,
subMesh,
);
bottomSlice.addTriangle(
bottomSlice.indexMap[v2_idx],
index23_B,
index13_B,
subMesh,
);
// When looking at the cut-face, the edges should wind counter-clockwise
topSlice.constraints.push(
new EdgeConstraint(
topSlice.cutVertices.length - 1,
topSlice.cutVertices.length - 2,
),
);
bottomSlice.constraints.push(
new EdgeConstraint(
bottomSlice.cutVertices.length - 2,
bottomSlice.cutVertices.length - 1,
),
);
}
}
} | /**
* Splits triangle defined by the points (v1,v2,v3)
* @param v1_idx Index of first vertex in triangle
* @param v2_idx Index of second vertex in triangle
* @param v3_idx Index of third vertex in triangle
* @param sliceNormal The normal of the slice plane (points towards the top slice)
* @param sliceOrigin The origin of the slice plane
* @param fragment Original mesh data
* @param topSlice Mesh data for top slice
* @param bottomSlice Mesh data for bottom slice
* @param subMesh Index of the submesh that the triangle belongs to
* @param v3BelowCutPlane Boolean indicating whether v3 is above or below the slice plane.
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/Slice.ts#L369-L548 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | EdgeConstraint.constructor | constructor(
v1: number,
v2: number,
triangle1?: number,
triangle2?: number,
edge1?: number,
) {
this.v1 = v1;
this.v2 = v2;
this.t1 = triangle1 ?? -1;
this.t2 = triangle2 ?? -1;
this.t1Edge = edge1 ?? 0;
} | /**
* Creates a new edge constraint with the given end points
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/EdgeConstraint.ts#L33-L45 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | EdgeConstraint.equals | equals(other: EdgeConstraint): boolean {
return (
(this.v1 === other.v1 && this.v2 === other.v2) ||
(this.v1 === other.v2 && this.v2 === other.v1)
);
} | /**
* Determines whether the specified object is equal to the current object
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/EdgeConstraint.ts#L50-L55 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | EdgeConstraint.toString | toString(): string {
return `Edge: T${this.t1}->T${this.t2} (V${this.v1}->V${this.v2})`;
} | /**
* Returns a string that represents the current object
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/EdgeConstraint.ts#L60-L62 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Fragment.constructor | constructor(args: FragmentArgs | undefined = undefined) {
this.vertices = [];
this.cutVertices = [];
this.triangles = [[], []];
this.constraints = [];
this.indexMap = [];
this.bounds = new Box3();
this.vertexAdjacency = [];
if (!args) {
return;
}
const { positions, normals, uvs, indices } = args;
for (let i = 0; i < positions.length / 3; i++) {
const position = new Vector3(
positions[3 * i],
positions[3 * i + 1],
positions[3 * i + 2],
);
const normal = new Vector3(
normals[3 * i],
normals[3 * i + 1],
normals[3 * i + 2],
);
const uv = new Vector2(uvs[2 * i], uvs[2 * i + 1]);
this.vertices.push(new MeshVertex(position, normal, uv));
}
this.triangles = [Array.from(indices)];
this.calculateBounds();
} | /**
* Constructor for a Fragment object
* @param args The arguments for the Fragment object
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/Fragment.ts#L62-L97 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Fragment.triangleCount | get triangleCount(): number {
return (this.triangles[0].length + this.triangles[1].length) / 3;
} | /**
* Gets the total number of triangles across all sub meshes
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/Fragment.ts#L102-L104 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Fragment.vertexCount | get vertexCount(): number {
return this.vertices.length + this.cutVertices.length;
} | /**
* Gets the total number of vertices in the geometry
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/Fragment.ts#L109-L111 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Fragment.addCutFaceVertex | addCutFaceVertex(position: Vector3, normal: Vector3, uv: Vector2): void {
const vertex = new MeshVertex(position, normal, uv);
this.vertices.push(vertex);
this.cutVertices.push(vertex);
// Track which non-cut-face vertex this cut-face vertex is mapped to
this.vertexAdjacency.push(this.vertices.length - 1);
} | /**
* Adds a new cut face vertex
* @param position The vertex position
* @param normal The vertex normal
* @param uv The vertex UV coordinates
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/Fragment.ts#L119-L126 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Fragment.addMappedVertex | addMappedVertex(vertex: MeshVertex, sourceIndex: number): void {
this.vertices.push(vertex);
this.indexMap[sourceIndex] = this.vertices.length - 1;
} | /**
* Adds a new vertex to this mesh that is mapped to the source mesh
* @param vertex Vertex data
* @param sourceIndex Index of the vertex in the source mesh
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/Fragment.ts#L133-L136 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Fragment.addTriangle | addTriangle(
v1: number,
v2: number,
v3: number,
subMesh: SlicedMeshSubmesh,
): void {
this.triangles[subMesh].push(v1, v2, v3);
} | /**
* Adds a new triangle to this mesh. The arguments v1, v2, v3 are the indexes of the
* vertices relative to this mesh's list of vertices; no mapping is performed.
* @param v1 Index of the first vertex
* @param v2 Index of the second vertex
* @param v3 Index of the third vertex
* @param subMesh The sub-mesh to add the triangle to
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/Fragment.ts#L146-L153 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Fragment.addMappedTriangle | addMappedTriangle(
v1: number,
v2: number,
v3: number,
subMesh: SlicedMeshSubmesh,
): void {
this.triangles[subMesh].push(
this.indexMap[v1],
this.indexMap[v2],
this.indexMap[v3],
);
} | /**
* Adds a new triangle to this mesh. The arguments v1, v2, v3 are the indices of the
* vertices in the original mesh. These vertices are mapped to the indices in the sliced mesh.
* @param v1 Index of the first vertex
* @param v2 Index of the second vertex
* @param v3 Index of the third vertex
* @param subMesh The sub-mesh to add the triangle to
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/Fragment.ts#L163-L174 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Fragment.weldCutFaceVertices | weldCutFaceVertices(): void {
// Temporary array containing the unique (welded) vertices
// NOTE(review): no capacity pre-allocation is actually performed here —
// JS arrays grow dynamically, so it is unnecessary
const weldedVerts: MeshVertex[] = [];
// Need to update adjacency as well
const weldedVertsAdjacency: number[] = [];
// We also keep track of the index mapping between the skipped vertices
// and the index of the welded vertex so we can update the edges
const indexMap: number[] = new Array(this.cutVertices.length);
// Number of welded vertices in the temp array
let k = 0;
// Perform spatial hashing of vertices
const adjacencyMap = new Map<number, number>();
this.cutVertices.forEach((vertex, i) => {
const key = vertex.hash();
if (!adjacencyMap.has(key)) {
indexMap[i] = k;
adjacencyMap.set(key, k);
weldedVerts.push(this.cutVertices[i]);
weldedVertsAdjacency.push(this.vertexAdjacency[i]);
k++;
} else {
indexMap[i] = adjacencyMap.get(key)!;
}
});
// Update the edge constraints to point to the new welded vertices
for (let i = 0; i < this.constraints.length; i++) {
const edge = this.constraints[i];
edge.v1 = indexMap[edge.v1];
edge.v2 = indexMap[edge.v2];
}
// Update the cut vertices
this.cutVertices = weldedVerts;
this.vertexAdjacency = weldedVertsAdjacency;
} | /**
* Finds coincident vertices on the cut face and welds them together.
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/Fragment.ts#L179-L219 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Fragment.calculateBounds | calculateBounds() {
// Initialize min and max vectors with the first vertex in the array
let min = this.vertices[0].position.clone();
let max = min.clone();
// Iterate over the vertices to find the min and max x, y, and z
this.vertices.forEach((vertex) => {
min.x = Math.min(min.x, vertex.position.x);
min.y = Math.min(min.y, vertex.position.y);
min.z = Math.min(min.z, vertex.position.z);
max.x = Math.max(max.x, vertex.position.x);
max.y = Math.max(max.y, vertex.position.y);
max.z = Math.max(max.z, vertex.position.z);
});
this.bounds = new Box3(min, max);
} | /**
* Calculates the bounds of the mesh data
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/Fragment.ts#L224-L241 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | MeshVertex.hash | hash(): number {
// Use inverse so we can multiply instead of divide to save a few ops
const x = Math.floor(this.position.x * this.invTolerance);
const y = Math.floor(this.position.y * this.invTolerance);
const z = Math.floor(this.position.z * this.invTolerance);
const xy = 0.5 * ((x + y) * (x + y + 1)) + y; // Pairing x and y
return (0.5 * ((xy + z) * (xy + z + 1))) / 2 + z;
} | /**
 * Uses Cantor pairing to hash the vertex position into a unique integer.
 * The position is quantized by `this.invTolerance` for spatial hashing.
 * @returns The hash value for this vertex position
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/MeshVertex.ts#L29-L36 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | MeshVertex.equals | equals(other: MeshVertex): boolean {
return this.hash() === other.hash();
} | /**
* Returns true if this vertex and another vertex share the same position
* @param other
* @returns
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/MeshVertex.ts#L43-L45 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | TriangulationPoint.constructor | constructor(index: number, coords: Vector2) {
this.index = index;
this.coords = coords;
this.bin = 0;
} | /**
* Instantiates a new triangulation point
* @param index The index of the point in the original point list
* @param coords The 2D coordinates of the point in the triangulation plane
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/entities/TriangulationPoint.ts#L28-L32 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | getAdjacentVertex | function getAdjacentVertex(i: number, n: number): number {
if (i + 1 < n) {
return i + 1;
} else {
// If i == n, adjacent vertex is i == 1
return ((i + 1) % n) + 1;
}
} | // Helper function for getting an adjacent vertex, translated to TypeScript | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.test.ts#L7-L14 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.constructor | constructor(
inputPoints: MeshVertex[],
constraints: EdgeConstraint[],
normal: Vector3,
) {
super(inputPoints, normal);
this.constraints = constraints;
this.vertexTriangles = [];
} | /**
* Initializes the triangulator with the vertex data to be triangulated given a set of edge constraints
* @param inputPoints The of points to triangulate
* @param constraints The list of edge constraints which defines how the vertices in `inputPoints` are connected.
* @param normal The normal of the plane in which the `inputPoints` lie.
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L69-L77 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.triangulate | triangulate(): number[] {
// Need at least 3 vertices to triangulate
if (this.N < 3) {
return [];
}
this.addSuperTriangle();
this.normalizeCoordinates();
this.computeTriangulation();
if (this.constraints.length > 0) {
this.applyConstraints();
this.discardTrianglesViolatingConstraints();
}
this.discardTrianglesWithSuperTriangleVertices();
let triangles: number[] = [];
for (let i = 0; i < this.triangleCount; i++) {
// Add all triangles that don't contain a super-triangle vertex
if (!this.skipTriangle[i]) {
triangles.push(this.triangulation[i][V1]);
triangles.push(this.triangulation[i][V2]);
triangles.push(this.triangulation[i][V3]);
}
}
return triangles;
} | /**
* Calculates the triangulation
* @returns Returns an array containing the indices of the triangles, mapped to the list of points passed in during initialization.
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L83-L111 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.applyConstraints | applyConstraints(): void {
// Map each vertex to a triangle that contains it
this.vertexTriangles = new Array<number>(this.N + 3).fill(0);
for (let i = 0; i < this.triangulation.length; i++) {
this.vertexTriangles[this.triangulation[i][V1]] = i;
this.vertexTriangles[this.triangulation[i][V2]] = i;
this.vertexTriangles[this.triangulation[i][V3]] = i;
}
// Loop through each edge constraint
for (let constraint of this.constraints) {
// Ignore degenerate constraints
if (constraint.v1 === constraint.v2) continue;
// We find the edges of the triangulation that intersect the constraint edge and remove them
// For each intersecting edge, we identify the triangles that share that edge (which form a quad)
// The diagonal of this quad is flipped.
const intersectingEdges = this.findIntersectingEdges(
constraint,
this.vertexTriangles,
);
this.removeIntersectingEdges(constraint, intersectingEdges);
}
} | /**
* Applys the edge constraints to the triangulation
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L116-L139 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.findIntersectingEdges | findIntersectingEdges(
constraint: EdgeConstraint,
vertexTriangles: number[],
): EdgeConstraint[] {
const intersectingEdges: EdgeConstraint[] = [];
// Need to find the first edge that the constraint crosses.
const startEdge = this.findStartingEdge(vertexTriangles, constraint);
if (startEdge) {
intersectingEdges.push(startEdge);
} else {
return intersectingEdges;
}
// Search for all triangles that intersect the constraint. Stop when we find a triangle that contains v_j
let t = startEdge.t1;
let edgeIndex = startEdge.t1Edge;
let lastTriangle = t;
let finalTriangleFound = false;
while (!finalTriangleFound) {
// Cross the last intersecting edge and inspect the next triangle
lastTriangle = t;
t = this.triangulation[t][edgeIndex];
// Get coordinates of constraint end points and triangle vertices
const v_i = this.points[constraint.v1].coords;
const v_j = this.points[constraint.v2].coords;
const v1 = this.points[this.triangulation[t][V1]].coords;
const v2 = this.points[this.triangulation[t][V2]].coords;
const v3 = this.points[this.triangulation[t][V3]].coords;
// If triangle contains the endpoint of the constraint, the search is done
if (this.triangleContainsVertex(t, constraint.v2)) {
finalTriangleFound = true;
// Otherwise, the constraint must intersect one edge of this triangle. Ignore the edge that we entered from
} else if (
this.triangulation[t][E12] !== lastTriangle &&
linesIntersect(v_i, v_j, v1, v2)
) {
edgeIndex = E12;
var edge = new EdgeConstraint(
this.triangulation[t][V1],
this.triangulation[t][V2],
t,
this.triangulation[t][E12],
edgeIndex,
);
intersectingEdges.push(edge);
} else if (
this.triangulation[t][E23] !== lastTriangle &&
linesIntersect(v_i, v_j, v2, v3)
) {
edgeIndex = E23;
var edge = new EdgeConstraint(
this.triangulation[t][V2],
this.triangulation[t][V3],
t,
this.triangulation[t][E23],
edgeIndex,
);
intersectingEdges.push(edge);
} else if (
this.triangulation[t][E31] !== lastTriangle &&
linesIntersect(v_i, v_j, v3, v1)
) {
edgeIndex = E31;
var edge = new EdgeConstraint(
this.triangulation[t][V3],
this.triangulation[t][V1],
t,
this.triangulation[t][E31],
edgeIndex,
);
intersectingEdges.push(edge);
} else {
// Shouldn't reach this point
console.warn("Failed to find final triangle, exiting early.");
break;
}
}
return intersectingEdges;
} | /**
* Searches through the triangulation to find intersecting edges
* @param constraint
* @param vertexTriangles
* @returns Array of edges that are intersecting
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L147-L230 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.findStartingEdge | findStartingEdge(
vertexTriangles: number[],
constraint: EdgeConstraint,
): EdgeConstraint | null {
// Initialize out parameter to default value
let startingEdge = new EdgeConstraint(-1, -1);
let v_i = constraint.v1;
// Start the search with an initial triangle that contains v1
let tSearch = vertexTriangles[v_i];
// Circle v_i until we find a triangle that contains an edge which intersects the constraint edge
// This will be the starting triangle in the search for finding all triangles that intersect the constraint
let noCandidatesFound = false;
let intersectingEdgeIndex: number | null = null;
let tE12: number, tE23: number, tE31: number;
const visited = new Array<boolean>(this.triangulation.length);
while (!intersectingEdgeIndex && !noCandidatesFound) {
visited[tSearch] = true;
// Triangulation already contains the constraint so we ignore the constraint
if (this.triangleContainsConstraint(tSearch, constraint)) {
return null;
}
intersectingEdgeIndex = this.edgeConstraintIntersectsTriangle(
tSearch,
constraint,
);
// Check if the constraint intersects any edges of this triangle
if (intersectingEdgeIndex) {
break;
}
tE12 = this.triangulation[tSearch][E12];
tE23 = this.triangulation[tSearch][E23];
tE31 = this.triangulation[tSearch][E31];
// If constraint does not intersect this triangle, check adjacent
// triangles by crossing edges that have v1 as a vertex
// Avoid triangles that we have previously visited in the search
if (
tE12 !== OUT_OF_BOUNDS &&
!visited[tE12] &&
this.triangleContainsVertex(tE12, v_i)
) {
tSearch = tE12;
} else if (
tE23 !== OUT_OF_BOUNDS &&
!visited[tE23] &&
this.triangleContainsVertex(tE23, v_i)
) {
tSearch = tE23;
} else if (
tE31 !== OUT_OF_BOUNDS &&
!visited[tE31] &&
this.triangleContainsVertex(tE31, v_i)
) {
tSearch = tE31;
} else {
noCandidatesFound = true;
break;
}
}
if (intersectingEdgeIndex) {
const v_k =
this.triangulation[tSearch][this.edgeVertex1[intersectingEdgeIndex]];
const v_l =
this.triangulation[tSearch][this.edgeVertex2[intersectingEdgeIndex]];
const triangle2 = this.triangulation[tSearch][intersectingEdgeIndex];
startingEdge = new EdgeConstraint(
v_k,
v_l,
tSearch,
triangle2,
intersectingEdgeIndex,
);
return startingEdge;
}
return null;
} | /**
* Finds the starting edge for the search to find all edges that intersect the constraint
* @param vertexTriangles
* @param constraint The constraint being used to check for intersections
 * @returns The starting edge for the search, or null if the triangulation
 * already contains the constraint or no intersecting edge is found
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L239-L324 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.removeIntersectingEdges | removeIntersectingEdges(
constraint: EdgeConstraint,
intersectingEdges: EdgeConstraint[],
): void {
// Remove intersecting edges. Keep track of the new edges that we create
let newEdges: EdgeConstraint[] = [];
let edge: EdgeConstraint | undefined;
// Mark the number of times we have been through the loop. If no new edges
// have been added after all edges have been visited, stop the loop. Every
// time an edge is added to newEdges, reset the counter.
let counter = 0;
// Loop through all intersecting edges until they have been properly resolved
// or they have all been visited with no diagonal swaps.
while (
intersectingEdges.length > 0 &&
counter <= intersectingEdges.length
) {
edge = intersectingEdges.shift()!;
let quad = this.findQuadFromSharedEdge(edge.t1, edge.t1Edge);
if (quad) {
// If the quad is convex, we swap the diagonal (a quad is convex if the diagonals intersect)
// Otherwise push it back into the queue so we can swap the diagonal later on.
if (
linesIntersect(
this.points[quad.q4].coords,
this.points[quad.q3].coords,
this.points[quad.q1].coords,
this.points[quad.q2].coords,
)
) {
// Swap diagonals of the convex quads whose diagonals intersect the constraint
this.swapQuadDiagonal(
quad,
intersectingEdges,
newEdges,
this.constraints,
);
// The new diagonal is between Q3 and Q4
let newEdge = new EdgeConstraint(
quad.q3,
quad.q4,
quad.t1,
quad.t2,
E31,
);
// If the new diagonal still intersects the constraint edge v_i->v_j,
// put back on the list of intersecting edges
if (
linesIntersect(
this.points[constraint.v1].coords,
this.points[constraint.v2].coords,
this.points[quad.q3].coords,
this.points[quad.q4].coords,
)
) {
intersectingEdges.push(newEdge);
}
// Otherwise record in list of new edges
else {
counter = 0;
newEdges.push(newEdge);
}
} else {
intersectingEdges.push(edge);
}
}
counter++;
}
// If any new edges were formed due to a diagonal being swapped, restore the Delauney condition
// of the triangulation while respecting the constraints
if (newEdges.length > 0) {
this.restoreConstrainedDelauneyTriangulation(constraint, newEdges);
}
} | /// <param name="intersectingEdges">A queue containing the previously found edges that intersect the constraint</param> | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L332-L413 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.restoreConstrainedDelauneyTriangulation | restoreConstrainedDelauneyTriangulation(
constraint: EdgeConstraint,
newEdges: EdgeConstraint[],
): void {
// Iterate over the list of newly created edges and swap
// non-constraint diagonals until no more swaps take place
let swapOccurred = true;
let counter = 0;
while (swapOccurred) {
counter++;
swapOccurred = false;
for (let i = 0; i < newEdges.length; i++) {
const edge = newEdges[i];
// If newly added edge is equal to constraint, we don't want to flip this edge so skip it
if (edge.equals(constraint)) {
continue;
}
let quad = this.findQuadFromSharedEdge(edge.t1, edge.t1Edge);
if (quad) {
if (
this.swapTest(
this.points[quad.q1].coords,
this.points[quad.q2].coords,
this.points[quad.q3].coords,
this.points[quad.q4].coords,
)
) {
this.swapQuadDiagonal(quad, newEdges, this.constraints, null);
// Enqueue the new diagonal
const v_m = quad.q3;
const v_n = quad.q4;
newEdges[i] = new EdgeConstraint(v_m, v_n, quad.t1, quad.t2, E31);
swapOccurred = true;
}
}
}
}
} | /// <param name="newEdges">The list of new edges that were added</param> | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L420-L462 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.discardTrianglesViolatingConstraints | discardTrianglesViolatingConstraints(): void {
// Initialize to all triangles being skipped
this.skipTriangle.fill(true);
function hash(x: number, y: number) {
return ((x + y) * (x + y + 1)) / 2 + y;
}
// Identify the boundary edges
let boundaries = new Set<number>();
for (let i = 0; i < this.constraints.length; i++) {
const constraint = this.constraints[i];
boundaries.add(hash(constraint.v1, constraint.v2));
}
// Search frontier
let frontier: number[] = [];
let v1: number, v2: number, v3: number;
let boundaryE12: boolean, boundaryE23: boolean, boundaryE31: boolean;
const visited = new Array<boolean>(this.triangulation.length);
for (let i = 0; i < this.triangleCount; i++) {
if (visited[i]) continue;
v1 = this.triangulation[i][V1];
v2 = this.triangulation[i][V2];
v3 = this.triangulation[i][V3];
boundaryE12 = boundaries.has(hash(v1, v2));
boundaryE23 = boundaries.has(hash(v2, v3));
boundaryE31 = boundaries.has(hash(v3, v1));
// If this triangle has a boundary edge, start searching for adjacent triangles
if (!(boundaryE12 || boundaryE23 || boundaryE31)) continue;
this.skipTriangle[i] = false;
// Search along edges that are not boundary edges
frontier = [];
if (!boundaryE12) {
frontier.push(this.triangulation[i][E12]);
}
if (!boundaryE23) {
frontier.push(this.triangulation[i][E23]);
}
if (!boundaryE31) {
frontier.push(this.triangulation[i][E31]);
}
// Recursively search along all non-boundary edges, marking the
// adjacent triangles as "keep"
while (frontier.length > 0) {
const k = frontier.shift();
if (!k || k === OUT_OF_BOUNDS || visited[k]) {
continue;
}
this.skipTriangle[k] = false;
visited[k] = true;
v1 = this.triangulation[k][V1];
v2 = this.triangulation[k][V2];
v3 = this.triangulation[k][V3];
// Continue searching along non-boundary edges
if (!boundaries.has(hash(v1, v2))) {
frontier.push(this.triangulation[k][E12]);
}
if (!boundaries.has(hash(v2, v3))) {
frontier.push(this.triangulation[k][E23]);
}
if (!boundaries.has(hash(v3, v1))) {
frontier.push(this.triangulation[k][E31]);
}
}
}
} | /**
* Discards triangles that violate the any of the edge constraints
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L467-L542 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.triangleContainsConstraint | triangleContainsConstraint(t: number, constraint: EdgeConstraint): boolean {
if (t >= this.triangulation.length) return false;
return (
(this.triangulation[t][V1] === constraint.v1 ||
this.triangulation[t][V2] === constraint.v1 ||
this.triangulation[t][V3] === constraint.v1) &&
(this.triangulation[t][V1] === constraint.v2 ||
this.triangulation[t][V2] === constraint.v2 ||
this.triangulation[t][V3] === constraint.v2)
);
} | /// <returns>True if the triangle contains one or both of the endpoints of the constraint</returns> | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L550-L561 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.edgeConstraintIntersectsTriangle | edgeConstraintIntersectsTriangle(
t: number,
constraint: EdgeConstraint,
): number | null {
const v_i = this.points[constraint.v1].coords;
const v_j = this.points[constraint.v2].coords;
const v1 = this.points[this.triangulation[t][V1]].coords;
const v2 = this.points[this.triangulation[t][V2]].coords;
const v3 = this.points[this.triangulation[t][V3]].coords;
if (linesIntersect(v_i, v_j, v1, v2)) {
return E12;
} else if (linesIntersect(v_i, v_j, v2, v3)) {
return E23;
} else if (linesIntersect(v_i, v_j, v3, v1)) {
return E31;
} else {
return null;
}
} | /**
* Returns true if the edge constraint intersects an edge of triangle `t`
* @param t The triangle to test
* @param constraint The edge constraint
* @param intersectingEdgeIndex The index of the intersecting edge (E12, E23, E31)
* @returns Returns true if an intersection is found, otherwise false.
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L570-L589 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.findQuadFromSharedEdge | findQuadFromSharedEdge(t1: number, t1SharedEdge: number): Quad | null {
// q3
// *---------*---------*
// \ / \ /
// \ t2L / \ t2R /
// \ / \ /
// \ / t2 \ /
// q1 *---------* q2
// / \ t1 / \
// / \ / \
// / t1L \ / t1R \
// / \ / \
// *---------*---------*
// q4
let q1: number, q2: number, q3: number, q4: number;
let t1L: number, t1R: number, t2L: number, t2R: number;
// t2 is adjacent to t1 along t1Edge
let t2 = this.triangulation[t1][t1SharedEdge];
let t2SharedEdge = this.findSharedEdge(t2, t1);
if (t2SharedEdge) {
// Get the top 3 vertices of the quad from t2
if (t2SharedEdge === E12) {
q2 = this.triangulation[t2][V1];
q1 = this.triangulation[t2][V2];
q3 = this.triangulation[t2][V3];
} else if (t2SharedEdge === E23) {
q2 = this.triangulation[t2][V2];
q1 = this.triangulation[t2][V3];
q3 = this.triangulation[t2][V1];
} // (t2SharedEdge == E31)
else {
q2 = this.triangulation[t2][V3];
q1 = this.triangulation[t2][V1];
q3 = this.triangulation[t2][V2];
}
// q4 is the point in t1 opposite of the shared edge
q4 = this.triangulation[t1][this.oppositePoint[t1SharedEdge]];
// Get the adjacent triangles to make updating adjacency easier
t1L = this.triangulation[t1][this.previousEdge[t1SharedEdge]];
t1R = this.triangulation[t1][this.nextEdge[t1SharedEdge]];
t2L = this.triangulation[t2][this.nextEdge[t2SharedEdge]];
t2R = this.triangulation[t2][this.previousEdge[t2SharedEdge]];
return new Quad(q1, q2, q3, q4, t1, t2, t1L, t1R, t2L, t2R);
}
return null;
} | /**
*
* @param t1 Base triangle
* @param t1SharedEdge Edge index that is being intersected<
* @returns Returns the quad formed by triangle `t1` and the other triangle that shares the intersecting edge
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L597-L648 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.swapQuadDiagonal | swapQuadDiagonal(
quad: Quad,
edges1: EdgeConstraint[],
edges2: EdgeConstraint[],
edges3: EdgeConstraint[] | null,
): void {
// BEFORE
// q3
// *---------*---------*
// \ / \ /
// \ t2L / \ t2R /
// \ / \ /
// \ / t2 \ /
// q1 *---------* q2
// / \ t1 / \
// / \ / \
// / t1L \ / t1R \
// / \ / \
// *---------*---------*
// q4
// AFTER
// q3
// *---------*---------*
// \ /|\ /
// \ t2L / | \ t2R /
// \ / | \ /
// \ / | \ /
// q1 * t1 | t2 * q2
// / \ | / \
// / \ | / \
// / t1L \ | / t1R \
// / \|/ \
// *---------*---------*
// q4
const t1 = quad.t1;
const t2 = quad.t2;
const t1R = quad.t1R;
const t1L = quad.t1L;
const t2R = quad.t2R;
const t2L = quad.t2L;
// Perform the swap. As always, put the new vertex as the first vertex of the triangle
this.triangulation[t1][V1] = quad.q4;
this.triangulation[t1][V2] = quad.q1;
this.triangulation[t1][V3] = quad.q3;
this.triangulation[t2][V1] = quad.q4;
this.triangulation[t2][V2] = quad.q3;
this.triangulation[t2][V3] = quad.q2;
this.triangulation[t1][E12] = t1L;
this.triangulation[t1][E23] = t2L;
this.triangulation[t1][E31] = t2;
this.triangulation[t2][E12] = t1;
this.triangulation[t2][E23] = t2R;
this.triangulation[t2][E31] = t1R;
// Update adjacency for the adjacent triangles
this.updateAdjacency(t2L, t2, t1);
this.updateAdjacency(t1R, t1, t2);
// Now that triangles have moved, need to update edges as well
this.updateEdgesAfterSwap(edges1, t1, t2, t1L, t1R, t2L, t2R);
this.updateEdgesAfterSwap(edges2, t1, t2, t1L, t1R, t2L, t2R);
this.updateEdgesAfterSwap(edges3, t1, t2, t1L, t1R, t2L, t2R);
// Also need to update the vertexTriangles array since the vertices q1 and q2
// may have been referencing t2/t1 respectively and they are no longer.
this.vertexTriangles[quad.q1] = t1;
this.vertexTriangles[quad.q2] = t2;
} | /**
* Swaps the diagonal of the quadrilateral q0->q1->q2->q3 formed by t1 and t2
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L653-L726 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | ConstrainedTriangulator.updateEdgesAfterSwap | updateEdgesAfterSwap(
edges: EdgeConstraint[] | null,
t1: number,
t2: number,
t1L: number,
t1R: number,
t2L: number,
t2R: number,
) {
if (!edges) {
return;
}
// Update edges to reflect changes in triangles
for (let edge of edges) {
if (edge.t1 === t1 && edge.t2 === t1R) {
edge.t1 = t2;
edge.t2 = t1R;
edge.t1Edge = E31;
} else if (edge.t1 === t1 && edge.t2 === t1L) {
// Triangles stay the same
edge.t1Edge = E12;
} else if (edge.t1 === t1R && edge.t2 === t1) {
edge.t2 = t2;
} else if (edge.t1 === t1L && edge.t2 === t1) {
// Unchanged
} else if (edge.t1 === t2 && edge.t2 === t2R) {
// Triangles stay the same
edge.t1Edge = E23;
} else if (edge.t1 === t2 && edge.t2 === t2L) {
edge.t1 = t1;
edge.t2 = t2L;
edge.t1Edge = E23;
} else if (edge.t1 === t2R && edge.t2 === t2) {
// Unchanged
} else if (edge.t1 === t2L && edge.t2 === t2) {
edge.t2 = t1;
}
}
} | /**
* Update the edges
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/ConstrainedTriangulator.ts#L731-L770 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Triangulator.constructor | constructor(inputPoints: MeshVertex[], normal: Vector3) {
this.N = inputPoints.length;
if (this.N >= 3) {
this.triangleCount = 2 * this.N + 1;
this.triangulation = Array.from({ length: this.triangleCount }, () =>
new Array(6).fill(0),
);
this.skipTriangle = new Array<boolean>(this.triangleCount).fill(false);
this.points = new Array<TriangulationPoint>(this.N + 3); // Extra 3 points used to store super triangle
this.normal = normal.clone().normalize();
// Choose two points in the plane as one basis vector
let e1 = inputPoints[0].position
.clone()
.sub(inputPoints[1].position)
.normalize();
let e2 = this.normal.clone();
let e3 = new Vector3();
e3.crossVectors(e1, e2).normalize();
// To find the 2nd basis vector, find the largest component and swap with the smallest, negating the largest
// Project 3D vertex onto the 2D plane
for (let i = 0; i < this.N; i++) {
var position = inputPoints[i].position;
var coords = new Vector2(position.dot(e1), position.dot(e3));
this.points[i] = new TriangulationPoint(i, coords);
}
} else {
this.triangleCount = 0;
this.triangulation = [];
this.skipTriangle = [];
this.points = [];
this.normal = new Vector3();
}
} | /**
* Initializes the triangulator with the vertex data to be triangulated
*
* @param inputPoints The points to triangulate
* @param normal The normal of the triangulation plane
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/Triangulator.ts#L73-L109 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Triangulator.triangulate | triangulate(): number[] {
// Need at least 3 vertices to triangulate
if (this.N < 3) {
return [];
}
this.addSuperTriangle();
this.normalizeCoordinates();
this.computeTriangulation();
this.discardTrianglesWithSuperTriangleVertices();
const triangles: number[] = [];
for (let i = 0; i < this.triangleCount; i++) {
// Add all triangles that don't contain a super-triangle vertex
if (!this.skipTriangle[i]) {
triangles.push(
this.triangulation[i][V1],
this.triangulation[i][V2],
this.triangulation[i][V3],
);
}
}
return triangles;
} | /**
* Performs the triangulation
*
* @returns Returns an array containing the indices of the triangles, mapped to the list of points passed in during initialization
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/Triangulator.ts#L116-L140 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Triangulator.normalizeCoordinates | normalizeCoordinates() {
// 1) Normalize coordinates. Coordinates are scaled so they lie between 0 and 1
// The scaling should be uniform so relative positions of points are unchanged
let xMin = Number.MAX_VALUE;
let xMax = Number.MIN_VALUE;
let yMin = Number.MAX_VALUE;
let yMax = Number.MIN_VALUE;
// Find min/max points in the set
for (let i = 0; i < this.N; i++) {
xMin = Math.min(xMin, this.points[i].coords.x);
xMax = Math.max(xMax, this.points[i].coords.x);
yMin = Math.min(yMin, this.points[i].coords.y);
yMax = Math.max(yMax, this.points[i].coords.y);
}
// Normalization coefficient. Using same coefficient for both x & y
// ensures uniform scaling
const normalizationScaleFactor = Math.max(xMax - xMin, yMax - yMin);
// Normalize each point
for (let i = 0; i < this.N; i++) {
var point = this.points[i];
var normalizedPos = new Vector2(
(point.coords.x - xMin) / normalizationScaleFactor,
(point.coords.y - yMin) / normalizationScaleFactor,
);
this.points[i].coords = normalizedPos;
}
} | /**
* Uniformly scales the 2D coordinates of all the points between [0, 1]
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/Triangulator.ts#L145-L175 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Triangulator.sortPointsIntoBins | sortPointsIntoBins(): TriangulationPoint[] {
// Compute the number of bins along each axis
const n = Math.round(Math.pow(this.N, 0.25));
// Total bin count
const binCount = n * n;
// Assign bin numbers to each point by taking the normalized coordinates
// and dividing them into a n x n grid.
for (let k = 0; k < this.N; k++) {
var point = this.points[k];
const i = Math.floor(0.99 * n * point.coords.y);
const j = Math.floor(0.99 * n * point.coords.x);
point.bin = BinSort.getBinNumber(i, j, n);
}
return BinSort.sort<TriangulationPoint>(this.points, this.N, binCount);
} | /**
* Sorts the points into bins using an ordered grid
*
* @returns Returns the array of sorted points
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/Triangulator.ts#L182-L199 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
three-pinata | github_2023 | dgreenheck | typescript | Triangulator.computeTriangulation | computeTriangulation() {
let tSearch = 0; // Index of the current triangle being searched
let tLast = 0; // Index of the last triangle formed
let sortedPoints = this.sortPointsIntoBins();
// Loop through each point and insert it into the triangulation
for (let i = 0; i < this.N; i++) {
let point = sortedPoints[i];
// Insert new point into the triangulation. Start by finding the triangle that contains the point `p`
// Keep track of how many triangles we visited in case search fails and we get stuck in a loop
let counter = 0;
let pointInserted = false;
while (!pointInserted) {
if (counter++ > tLast || tSearch === OUT_OF_BOUNDS) {
break;
}
// Get coordinates of triangle vertices
let v1 = this.points[this.triangulation[tSearch][V1]].coords;
let v2 = this.points[this.triangulation[tSearch][V2]].coords;
let v3 = this.points[this.triangulation[tSearch][V3]].coords;
// Verify that point is on the correct side of each edge of the triangle.
// If a point is on the left side of an edge, move to the adjacent triangle and check again. The search
// continues until a containing triangle is found or the point is outside of all triangles
if (!isPointOnRightSideOfLine(v1, v2, point.coords)) {
tSearch = this.triangulation[tSearch][E12];
} else if (!isPointOnRightSideOfLine(v2, v3, point.coords)) {
tSearch = this.triangulation[tSearch][E23];
} else if (!isPointOnRightSideOfLine(v3, v1, point.coords)) {
tSearch = this.triangulation[tSearch][E31];
} else {
this.insertPointIntoTriangle(point, tSearch, tLast);
tLast += 2;
tSearch = tLast;
pointInserted = true;
}
}
}
} | /**
* Computes the triangulation of the point set.
* @returns Returns true if the triangulation was successful.
*/ | https://github.com/dgreenheck/three-pinata/blob/3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0/src/fracture/triangulators/Triangulator.ts#L205-L246 | 3d2f3acbfb4d237a2cee93f4118711aa7c5fa0b0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.