repo_name stringlengths 1 62 | dataset stringclasses 1 value | lang stringclasses 11 values | pr_id int64 1 20.1k | owner stringlengths 2 34 | reviewer stringlengths 2 39 | diff_hunk stringlengths 15 262k | code_review_comment stringlengths 1 99.6k |
|---|---|---|---|---|---|---|---|
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -0,0 +1,191 @@
+import copy
+import logging
+from dataclasses import dataclass
+from typing import Callable, Dict, List, Sequence, Tuple
+
+import torch
+from datasets import load_dataset
+from transformers import PreTrainedModel, PreTrainedTokenizer
+
+from pfl.data import ArtificialFederatedDataset
+from pfl.data.pytorch import PyTorchDataDataset
+from pfl.data.sampling import get_data_sampler
+
+logger = logging.getLogger(__name__)
+
+IGNORE_INDEX = -100
+PROMPT_DICT = {
+ "prompt_input":
+ ("Below is an instruction that describes a task, paired with an input that provides further context. "
+ "Write a response that appropriately completes the request.\n\n"
+ "### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:"
+ ),
+ "prompt_no_input":
+ ("Below is an instruction that describes a task. "
+ "Write a response that appropriately completes the request.\n\n"
+ "### Instruction:\n{instruction}\n\n### Response:"),
+}
+
+
+def _tokenize_alpaca(strings: Sequence[str],
+ tokenizer: PreTrainedTokenizer) -> Dict:
+ """Tokenize a list of strings."""
+ tokenized_list = [
+ tokenizer(
+ text,
+ return_tensors="pt",
+ padding="longest",
+ max_length=tokenizer.model_max_length,
+ truncation=True,
+ ) for text in strings
+ ]
+ input_ids = labels = [
+ tokenized.input_ids[0] for tokenized in tokenized_list
+ ]
+ input_ids_lens = labels_lens = [
+ tokenized.input_ids.ne(tokenizer.pad_token_id).sum().item()
+ for tokenized in tokenized_list
+ ]
+ return {
+ "input_ids": input_ids,
+ "labels": labels,
+ "input_ids_lens": input_ids_lens,
+ "labels_lens": labels_lens,
+ }
+
+
+def preprocess_alpaca(hf_dataset, tokenizer: PreTrainedTokenizer) -> Tuple:
+ """Preprocess the data by tokenizing."""
+ prompt_input, prompt_no_input = PROMPT_DICT["prompt_input"], PROMPT_DICT[
+ "prompt_no_input"]
+ sources = [
+ prompt_input.format_map(example) if example.get("input", "") != "" else
+ prompt_no_input.format_map(example) for example in hf_dataset
+ ]
+ targets = [
+ f"{example['output']}{tokenizer.eos_token}" for example in hf_dataset
+ ]
+
+ examples = [s + t for s, t in zip(sources, targets)]
+ examples_tokenized, sources_tokenized = (_tokenize_alpaca(
+ strings, tokenizer) for strings in (examples, sources))
+ input_ids = examples_tokenized["input_ids"]
+ labels = copy.deepcopy(input_ids)
+ for label, source_len in zip(labels, sources_tokenized["input_ids_lens"]):
+ label[:source_len] = IGNORE_INDEX
+ return input_ids, labels
+
+
+class AlpacaDataset(torch.utils.data.Dataset):
+ """Dataset for supervised fine-tuning."""
+
+ def __init__(self, input_ids: Sequence, labels: Sequence):
+ super().__init__()
+ self.input_ids = input_ids
+ self.labels = labels
+
+ def __len__(self):
+ return len(self.input_ids)
+
+ def __getitem__(self, i) -> Dict[str, torch.Tensor]:
+ return {"input_ids": self.input_ids[i], "labels": self.labels[i]}
+
+
+@dataclass
+class AlpacaDataCollator:
+ """Collate examples for supervised fine-tuning."""
+
+ tokenizer: PreTrainedTokenizer
+
+ def __call__(self, instances: Sequence[Dict]) -> Dict[str, torch.Tensor]:
+ input_ids, labels = tuple([instance[key] for instance in instances]
+ for key in ("input_ids", "labels"))
+ input_ids = torch.nn.utils.rnn.pad_sequence(
+ input_ids,
+ batch_first=True,
+ padding_value=self.tokenizer.pad_token_id)
+ labels = torch.nn.utils.rnn.pad_sequence(labels,
+ batch_first=True,
+ padding_value=IGNORE_INDEX)
+ return {
+ "input_ids": input_ids,
+ "labels": labels,
+ "attention_mask": input_ids.ne(self.tokenizer.pad_token_id),
+ }
+
+
+def make_iid_federated_dataset(input_ids: Sequence, labels: Sequence,
+ tokenizer: PreTrainedTokenizer,
+ user_dataset_len_sampler: Callable):
+ data_collator = AlpacaDataCollator(tokenizer)
+
+ def make_dataset_fn(indices: List):
+ user_input_ids, user_labels = [], []
+ for i in indices:
+ user_input_ids.append(input_ids[i])
+ user_labels.append(labels[i])
+ return PyTorchDataDataset(raw_data=AlpacaDataset(
+ user_input_ids, user_labels),
+ collate_fn=data_collator)
+
+ data_sampler = get_data_sampler("random", max_bound=len(input_ids))
+ return ArtificialFederatedDataset(make_dataset_fn, data_sampler,
+ user_dataset_len_sampler)
+
+
+def smart_tokenizer_and_embedding_resize(
+ num_new_tokens: int,
+ tokenizer: PreTrainedTokenizer,
+ model: PreTrainedModel,
+):
+ """Resize tokenizer and embedding.
+
+ Note: This is the unoptimized version that may make your embedding size not be divisible by 64.
+ """
+ logger.info(f"Resizing model's token embedding to {len(tokenizer)} "
+ f"with {num_new_tokens} new tokens.")
+ model.resize_token_embeddings(len(tokenizer))
+
+ if num_new_tokens > 0:
+ input_embeddings = model.get_input_embeddings().weight.data
+ output_embeddings = model.get_output_embeddings().weight.data
+
+ input_embeddings_avg = input_embeddings[:-num_new_tokens].mean(
+ dim=0, keepdim=True)
+ output_embeddings_avg = output_embeddings[:-num_new_tokens].mean(
+ dim=0, keepdim=True)
+
+ input_embeddings[-num_new_tokens:] = input_embeddings_avg
+ output_embeddings[-num_new_tokens:] = output_embeddings_avg
+
+
+def make_alpaca_iid_federated_datasets(
+ tokenizer: PreTrainedTokenizer,
+ user_dataset_len_sampler: Callable,
+):
+ hf_dataset = load_dataset("tatsu-lab/alpaca")["train"]
+
+ special_tokens_dict = {}
+ if tokenizer.pad_token is None:
+ special_tokens_dict["pad_token"] = "[PAD]" # noqa: S105
+ if tokenizer.eos_token is None:
+ special_tokens_dict["eos_token"] = "</s>" # noqa: S105
+ if tokenizer.bos_token is None:
+ special_tokens_dict["bos_token"] = "<s>" # noqa: S105
+ if tokenizer.unk_token is None:
+ special_tokens_dict["unk_token"] = "<unk>" # noqa: S105
+ num_new_tokens = tokenizer.add_special_tokens(special_tokens_dict)
+
+ input_ids, labels = preprocess_alpaca(hf_dataset, tokenizer)
+ federated_dataset = make_iid_federated_dataset(input_ids, labels,
+ tokenizer,
+ user_dataset_len_sampler)
+
+ def postprocessing_model_fn(model):
+ smart_tokenizer_and_embedding_resize(num_new_tokens, tokenizer, model)
+
+ metadata = {
+ 'num_new_tokens': num_new_tokens,
+ 'postprocessing_model_fn': postprocessing_model_fn,
+ }
+ return federated_dataset, None, None, metadata | federated or central eval dataset? |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -0,0 +1,191 @@
+import copy
+import logging
+from dataclasses import dataclass
+from typing import Callable, Dict, List, Sequence, Tuple
+
+import torch
+from datasets import load_dataset
+from transformers import PreTrainedModel, PreTrainedTokenizer
+
+from pfl.data import ArtificialFederatedDataset
+from pfl.data.pytorch import PyTorchDataDataset
+from pfl.data.sampling import get_data_sampler
+
+logger = logging.getLogger(__name__)
+
+IGNORE_INDEX = -100
+PROMPT_DICT = {
+ "prompt_input":
+ ("Below is an instruction that describes a task, paired with an input that provides further context. "
+ "Write a response that appropriately completes the request.\n\n"
+ "### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:"
+ ),
+ "prompt_no_input":
+ ("Below is an instruction that describes a task. "
+ "Write a response that appropriately completes the request.\n\n"
+ "### Instruction:\n{instruction}\n\n### Response:"),
+}
+
+
+def _tokenize_alpaca(strings: Sequence[str],
+ tokenizer: PreTrainedTokenizer) -> Dict:
+ """Tokenize a list of strings."""
+ tokenized_list = [
+ tokenizer(
+ text,
+ return_tensors="pt",
+ padding="longest",
+ max_length=tokenizer.model_max_length,
+ truncation=True,
+ ) for text in strings
+ ]
+ input_ids = labels = [
+ tokenized.input_ids[0] for tokenized in tokenized_list
+ ]
+ input_ids_lens = labels_lens = [
+ tokenized.input_ids.ne(tokenizer.pad_token_id).sum().item()
+ for tokenized in tokenized_list
+ ]
+ return {
+ "input_ids": input_ids,
+ "labels": labels,
+ "input_ids_lens": input_ids_lens,
+ "labels_lens": labels_lens,
+ }
+
+
+def preprocess_alpaca(hf_dataset, tokenizer: PreTrainedTokenizer) -> Tuple:
+ """Preprocess the data by tokenizing."""
+ prompt_input, prompt_no_input = PROMPT_DICT["prompt_input"], PROMPT_DICT[
+ "prompt_no_input"]
+ sources = [
+ prompt_input.format_map(example) if example.get("input", "") != "" else
+ prompt_no_input.format_map(example) for example in hf_dataset
+ ]
+ targets = [
+ f"{example['output']}{tokenizer.eos_token}" for example in hf_dataset
+ ]
+
+ examples = [s + t for s, t in zip(sources, targets)]
+ examples_tokenized, sources_tokenized = (_tokenize_alpaca(
+ strings, tokenizer) for strings in (examples, sources))
+ input_ids = examples_tokenized["input_ids"]
+ labels = copy.deepcopy(input_ids)
+ for label, source_len in zip(labels, sources_tokenized["input_ids_lens"]):
+ label[:source_len] = IGNORE_INDEX
+ return input_ids, labels
+
+
+class AlpacaDataset(torch.utils.data.Dataset):
+ """Dataset for supervised fine-tuning."""
+
+ def __init__(self, input_ids: Sequence, labels: Sequence):
+ super().__init__()
+ self.input_ids = input_ids
+ self.labels = labels
+
+ def __len__(self):
+ return len(self.input_ids)
+
+ def __getitem__(self, i) -> Dict[str, torch.Tensor]:
+ return {"input_ids": self.input_ids[i], "labels": self.labels[i]}
+
+
+@dataclass
+class AlpacaDataCollator:
+ """Collate examples for supervised fine-tuning."""
+
+ tokenizer: PreTrainedTokenizer
+
+ def __call__(self, instances: Sequence[Dict]) -> Dict[str, torch.Tensor]:
+ input_ids, labels = tuple([instance[key] for instance in instances]
+ for key in ("input_ids", "labels"))
+ input_ids = torch.nn.utils.rnn.pad_sequence(
+ input_ids,
+ batch_first=True,
+ padding_value=self.tokenizer.pad_token_id)
+ labels = torch.nn.utils.rnn.pad_sequence(labels,
+ batch_first=True,
+ padding_value=IGNORE_INDEX)
+ return {
+ "input_ids": input_ids,
+ "labels": labels,
+ "attention_mask": input_ids.ne(self.tokenizer.pad_token_id),
+ }
+
+
+def make_iid_federated_dataset(input_ids: Sequence, labels: Sequence,
+ tokenizer: PreTrainedTokenizer,
+ user_dataset_len_sampler: Callable):
+ data_collator = AlpacaDataCollator(tokenizer)
+
+ def make_dataset_fn(indices: List):
+ user_input_ids, user_labels = [], []
+ for i in indices:
+ user_input_ids.append(input_ids[i])
+ user_labels.append(labels[i])
+ return PyTorchDataDataset(raw_data=AlpacaDataset( | Looks like you already load and preprocess all data before constructing AlpacaDataset. PyTorchFederatedDataset may be much faster? |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -0,0 +1,191 @@
+import copy
+import logging
+from dataclasses import dataclass
+from typing import Callable, Dict, List, Sequence, Tuple
+
+import torch
+from datasets import load_dataset
+from transformers import PreTrainedModel, PreTrainedTokenizer
+
+from pfl.data import ArtificialFederatedDataset
+from pfl.data.pytorch import PyTorchDataDataset
+from pfl.data.sampling import get_data_sampler
+
+logger = logging.getLogger(__name__)
+
+IGNORE_INDEX = -100
+PROMPT_DICT = {
+ "prompt_input":
+ ("Below is an instruction that describes a task, paired with an input that provides further context. "
+ "Write a response that appropriately completes the request.\n\n"
+ "### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:"
+ ),
+ "prompt_no_input":
+ ("Below is an instruction that describes a task. "
+ "Write a response that appropriately completes the request.\n\n"
+ "### Instruction:\n{instruction}\n\n### Response:"),
+}
+
+
+def _tokenize_alpaca(strings: Sequence[str],
+ tokenizer: PreTrainedTokenizer) -> Dict:
+ """Tokenize a list of strings."""
+ tokenized_list = [
+ tokenizer(
+ text,
+ return_tensors="pt",
+ padding="longest",
+ max_length=tokenizer.model_max_length,
+ truncation=True,
+ ) for text in strings
+ ]
+ input_ids = labels = [
+ tokenized.input_ids[0] for tokenized in tokenized_list
+ ]
+ input_ids_lens = labels_lens = [
+ tokenized.input_ids.ne(tokenizer.pad_token_id).sum().item()
+ for tokenized in tokenized_list
+ ]
+ return {
+ "input_ids": input_ids,
+ "labels": labels,
+ "input_ids_lens": input_ids_lens,
+ "labels_lens": labels_lens,
+ }
+
+
+def preprocess_alpaca(hf_dataset, tokenizer: PreTrainedTokenizer) -> Tuple:
+ """Preprocess the data by tokenizing."""
+ prompt_input, prompt_no_input = PROMPT_DICT["prompt_input"], PROMPT_DICT[
+ "prompt_no_input"]
+ sources = [
+ prompt_input.format_map(example) if example.get("input", "") != "" else
+ prompt_no_input.format_map(example) for example in hf_dataset
+ ]
+ targets = [
+ f"{example['output']}{tokenizer.eos_token}" for example in hf_dataset
+ ]
+
+ examples = [s + t for s, t in zip(sources, targets)]
+ examples_tokenized, sources_tokenized = (_tokenize_alpaca(
+ strings, tokenizer) for strings in (examples, sources))
+ input_ids = examples_tokenized["input_ids"]
+ labels = copy.deepcopy(input_ids)
+ for label, source_len in zip(labels, sources_tokenized["input_ids_lens"]):
+ label[:source_len] = IGNORE_INDEX
+ return input_ids, labels
+
+
+class AlpacaDataset(torch.utils.data.Dataset):
+ """Dataset for supervised fine-tuning."""
+
+ def __init__(self, input_ids: Sequence, labels: Sequence):
+ super().__init__()
+ self.input_ids = input_ids
+ self.labels = labels
+
+ def __len__(self):
+ return len(self.input_ids)
+
+ def __getitem__(self, i) -> Dict[str, torch.Tensor]:
+ return {"input_ids": self.input_ids[i], "labels": self.labels[i]}
+
+
+@dataclass
+class AlpacaDataCollator:
+ """Collate examples for supervised fine-tuning."""
+
+ tokenizer: PreTrainedTokenizer
+
+ def __call__(self, instances: Sequence[Dict]) -> Dict[str, torch.Tensor]:
+ input_ids, labels = tuple([instance[key] for instance in instances]
+ for key in ("input_ids", "labels"))
+ input_ids = torch.nn.utils.rnn.pad_sequence(
+ input_ids,
+ batch_first=True,
+ padding_value=self.tokenizer.pad_token_id)
+ labels = torch.nn.utils.rnn.pad_sequence(labels,
+ batch_first=True,
+ padding_value=IGNORE_INDEX)
+ return {
+ "input_ids": input_ids,
+ "labels": labels,
+ "attention_mask": input_ids.ne(self.tokenizer.pad_token_id),
+ }
+
+
+def make_iid_federated_dataset(input_ids: Sequence, labels: Sequence,
+ tokenizer: PreTrainedTokenizer,
+ user_dataset_len_sampler: Callable):
+ data_collator = AlpacaDataCollator(tokenizer)
+
+ def make_dataset_fn(indices: List):
+ user_input_ids, user_labels = [], []
+ for i in indices:
+ user_input_ids.append(input_ids[i])
+ user_labels.append(labels[i])
+ return PyTorchDataDataset(raw_data=AlpacaDataset(
+ user_input_ids, user_labels),
+ collate_fn=data_collator)
+
+ data_sampler = get_data_sampler("random", max_bound=len(input_ids))
+ return ArtificialFederatedDataset(make_dataset_fn, data_sampler, | as discussed, ArtificialFederatedDataset is too easy. It makes FL training as close to central training as possible |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -218,6 +218,12 @@ def add_dnn_training_arguments(argument_parser):
default=None,
help='Batch size for local training of one user.')
+ argument_parser.add_argument( | Should we just rename "central_eval_batch_size" ? I don't see any reason why you would want separate local and central eval batch size. now this is a new argument for all examples, but only used for llm. |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -182,6 +182,7 @@ class NNTrainHyperParams(NNEvalHyperParams):
local_learning_rate: HyperParamClsOrFloat
local_max_grad_norm: Optional[HyperParamClsOrFloat] = None
local_num_steps: Optional[HyperParamClsOrInt] = None
+ grad_accumulation_steps: int = 1 | throw error in tf component somewhere that this is not supported yet |
pfl-research | github_2023 | python | 34 | apple | monachitnis | @@ -0,0 +1,140 @@
+import argparse
+import logging
+
+import numpy as np
+import torch
+import transformers
+from dataset.argument_parsing import add_dataset_arguments, get_datasets
+from model.hugging_face import causal_lm_metrics_fn, wrap_hugging_face_model
+from utils.argument_parsing import (
+ add_algorithm_arguments,
+ add_filepath_arguments,
+ add_seed_arguments,
+ get_algorithm,
+ maybe_inject_arguments_from_config,
+ parse_mechanism,
+)
+from utils.logging import init_logging
+
+from llm.argument_parsing import add_llm_arguments, parse_peft_config | where is this `llm` package from?
Since this is under `benchmarks/llm`, and not under `huggingface` subdir, is this example meant to be general purpose and extensible to other non-HF llms? |
pfl-research | github_2023 | python | 34 | apple | monachitnis | @@ -7,19 +8,35 @@
from pfl.model.pytorch import PyTorchModel
from ..base import FedProxFrameworkBridge
+from .common import clip_norm_and_update, get_train_step_args
def _proximal_train_step(pytorch_model, local_optimizer, raw_data,
- train_kwargs, global_weights, mu):
- local_optimizer.zero_grad()
- loss = pytorch_model.loss(*raw_data, **train_kwargs)
+ train_kwargs, **kwargs):
+ global_weights, mu = kwargs["global_weights"], kwargs["mu"] | why is this part of this PR? |
pfl-research | github_2023 | python | 34 | apple | monachitnis | @@ -305,6 +305,9 @@ def do_multiple_epochs_of(self, user_dataset: AbstractDatasetType,
num_epochs = (1 if train_params.local_num_epochs is None else
train_params.get('local_num_epochs'))
+ assert train_params.grad_accumulation_steps == 1, ( | change seems unrelated |
pfl-research | github_2023 | python | 34 | apple | martin-pelikan-apple | @@ -30,6 +33,43 @@ def _stats_tensors_to_device(item):
return item
+@dataclass
+class TrainStepArgs:
+ # Common args used by different local training algorithms in PyTorch
+ amp_context: Union[torch.amp.autocast, contextlib.AbstractContextManager]
+ grad_accumulation_steps: int
+ grad_scaler: Optional[torch.cuda.amp.GradScaler]
+ max_grad_norm: float | maybe `Optional[float]` since it can be `None`? |
pfl-research | github_2023 | python | 34 | apple | martin-pelikan-apple | @@ -7,19 +8,36 @@
from pfl.model.pytorch import PyTorchModel
from ..base import FedProxFrameworkBridge
+from .common import clip_norm_and_update, get_train_step_args
def _proximal_train_step(pytorch_model, local_optimizer, raw_data,
- train_kwargs, global_weights, mu):
- local_optimizer.zero_grad()
- loss = pytorch_model.loss(*raw_data, **train_kwargs)
+ train_kwargs, **kwargs):
+ global_weights, mu = kwargs["global_weights"], kwargs["mu"]
+ train_step_args = get_train_step_args(**kwargs)
- # Add proximal term (Definition 2)
- for name, param in pytorch_model.named_parameters():
- loss += mu / 2 * torch.norm(param - global_weights[name])**2
+ if train_step_args.optimizer_should_update:
+ local_optimizer.zero_grad()
- loss.backward()
- local_optimizer.step()
+ with train_step_args.amp_context:
+ if isinstance(raw_data, Dict):
+ loss = pytorch_model.loss(**{**raw_data, **train_kwargs})
+ else:
+ loss = pytorch_model.loss(*raw_data, **train_kwargs)
+
+ # Add proximal term (Definition 2)
+ for name, param in pytorch_model.named_parameters():
+ if param.requires_grad:
+ loss += mu / 2 * torch.norm(param - global_weights[name])**2
+
+ loss /= train_step_args.grad_accumulation_steps | I don't understand why we are dividing the loss here. |
pfl-research | github_2023 | python | 34 | apple | martin-pelikan-apple | @@ -284,6 +314,10 @@ def __next__(self):
underlying_data_iterator = (data for (data, _seed) in sampler_1)
seed_iterator = (seed for (_data, seed) in sampler_2)
+ if self._prefetch_factor > 0:
+ # Prefetching so that the following call won't be blocked
+ self._try_set_cohort_size(pytorch_ops.distributed.world_size) | Why do we set the cohort size to world size here? |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -33,6 +35,33 @@ def test_save_and_load_central_optimizer_impl(
lr=1.0)
check_save_and_load_central_optimizer_impl(pytorch_model_setup)
+ @pytest.mark.parametrize('grad_accumulation_steps', [1, 2, 3, 4])
+ def test_grad_accumulation(self, grad_accumulation_steps,
+ pytorch_model_setup, user_dataset):
+ local_learning_rate = 0.1
+ local_num_epochs = 5
+ mock_local_optimizer = torch.optim.SGD(
+ pytorch_model_setup.model._model.parameters(), local_learning_rate)
+ mock_local_optimizer.step = Mock()
+
+ def new_local_optimizer(*args, **kwargs):
+ return mock_local_optimizer
+
+ pytorch_model_setup.model.new_local_optimizer = new_local_optimizer
+ bridges.sgd_bridge().do_sgd(
+ pytorch_model_setup.model, user_dataset,
+ NNTrainHyperParams(
+ local_learning_rate=local_learning_rate,
+ local_num_epochs=local_num_epochs,
+ local_batch_size=1,
+ grad_accumulation_steps=grad_accumulation_steps))
+
+ total_steps = 2 * local_num_epochs
+ expected_optimizer_calls = (
+ total_steps // grad_accumulation_steps +
+ int(total_steps % grad_accumulation_steps != 0))
+ assert mock_local_optimizer.step.call_count == expected_optimizer_calls | Can you also add something like the expected gradient for the first call? Not really testing that they were accumulated here. |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -71,34 +72,73 @@ def metrics(self, x, y, eval=False):
:param central_optimizer:
A torch.optim.optimizer.Optimizer instance, which is used to apply the
central model updates to the variables.
+ :param central_learning_rate_scheduler: | Does this now conflict with https://github.com/apple/pfl-research/blob/main/benchmarks/utils/callback/pytorch.py#L21 ?
should we have both or move this to callback? |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -43,6 +43,35 @@ def get_default_device():
return default_device
+def setup_amp(amp_dtype: Optional[torch.dtype], grad_scaling: bool): | Docstring this |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -224,6 +224,12 @@ def add_dnn_training_arguments(argument_parser):
default=0.1,
help='Learning rate for training on the client.')
+ argument_parser.add_argument(
+ '--local_max_grad_norm', | This means it is a new parameter for all training setups, and we should make sure it is used in all train.py if so.
Otherwise someone might set local_max_grad_norm on cifar example, but nothing happens.
Or you can just add it for the llm example |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -0,0 +1,37 @@
+from typing import Tuple
+
+from transformers import PreTrainedModel, PreTrainedTokenizer
+
+from pfl.callback import ModelCheckpointingCallback
+from pfl.internal.ops import get_ops
+from pfl.metrics import Metrics
+from pfl.model.base import StatefulModel
+
+
+class HuggingFaceModelCheckpointingCallback(ModelCheckpointingCallback): | docstring |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -0,0 +1,176 @@
+"""
+Some of the code is adapted from: https://github.com/tatsu-lab/stanford_alpaca
+"""
+
+import copy
+import logging
+from dataclasses import dataclass
+from typing import Callable, Dict, List, Sequence, Tuple
+
+import torch
+from datasets import load_dataset
+from transformers import PreTrainedTokenizer
+
+from pfl.data.pytorch import PyTorchDataDataset
+from pfl.data.sampling import get_user_sampler
+
+from . import (
+ IGNORE_INDEX,
+ GetItemDataset,
+ HuggingFaceFederatedDataset,
+)
+
+logger = logging.getLogger(__name__)
+
+PROMPT_DICT = {
+ "prompt_input":
+ ("Below is an instruction that describes a task, paired with an input "
+ "that provides further context. "
+ "Write a response that appropriately completes the request.\n\n"
+ "### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:"
+ ),
+ "prompt_no_input":
+ ("Below is an instruction that describes a task. "
+ "Write a response that appropriately completes the request.\n\n"
+ "### Instruction:\n{instruction}\n\n### Response:"),
+}
+
+
+def _tokenize_alpaca(strings: Sequence[str],
+ tokenizer: PreTrainedTokenizer) -> Dict:
+ """Tokenize a list of strings."""
+ tokenized_list = [
+ tokenizer(
+ text,
+ return_tensors="pt",
+ padding="longest",
+ max_length=tokenizer.model_max_length,
+ truncation=True,
+ ) for text in strings
+ ]
+ input_ids = labels = [
+ tokenized.input_ids[0] for tokenized in tokenized_list
+ ]
+ input_ids_lens = labels_lens = [
+ tokenized.input_ids.ne(tokenizer.pad_token_id).sum().item()
+ for tokenized in tokenized_list
+ ]
+ return {
+ "input_ids": input_ids,
+ "labels": labels,
+ "input_ids_lens": input_ids_lens,
+ "labels_lens": labels_lens,
+ }
+
+
+def preprocess_alpaca(hf_dataset, tokenizer: PreTrainedTokenizer) -> Tuple:
+ """Preprocess the data by tokenizing."""
+ prompt_input, prompt_no_input = PROMPT_DICT["prompt_input"], PROMPT_DICT[
+ "prompt_no_input"]
+ sources = [
+ prompt_input.format_map(example) if example.get("input", "") != "" else
+ prompt_no_input.format_map(example) for example in hf_dataset
+ ]
+ targets = [
+ f"{example['output']}{tokenizer.eos_token}" for example in hf_dataset
+ ]
+
+ examples = [s + t for s, t in zip(sources, targets)]
+ examples_tokenized, sources_tokenized = (_tokenize_alpaca(
+ strings, tokenizer) for strings in (examples, sources))
+ input_ids = examples_tokenized["input_ids"]
+ labels = copy.deepcopy(input_ids)
+ for label, source_len in zip(labels, sources_tokenized["input_ids_lens"]):
+ label[:source_len] = IGNORE_INDEX
+ return input_ids, labels
+
+
+@dataclass
+class AlpacaDataCollator:
+ """Collate examples for supervised fine-tuning."""
+
+ tokenizer: PreTrainedTokenizer
+
+ def __call__(self, instances: Sequence[Dict]) -> Dict[str, torch.Tensor]:
+ input_ids, labels = tuple([instance[key] for instance in instances]
+ for key in ("input_ids", "labels"))
+ input_ids = torch.nn.utils.rnn.pad_sequence(
+ input_ids,
+ batch_first=True,
+ padding_value=self.tokenizer.pad_token_id)
+ labels = torch.nn.utils.rnn.pad_sequence(labels,
+ batch_first=True,
+ padding_value=IGNORE_INDEX)
+ return {
+ "input_ids": input_ids,
+ "labels": labels,
+ "attention_mask": input_ids.ne(self.tokenizer.pad_token_id),
+ }
+
+
+def iid_user_partition( | Missing docstrings on public functions |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -0,0 +1,30 @@
+import logging | Missing copyright headers. I thought we had checks for this? |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -0,0 +1,30 @@
+import logging
+from typing import Dict, List, Union
+
+import torch
+
+from pfl.data.pytorch import PyTorchFederatedDataset, PyTorchTensorDataset
+
+IGNORE_INDEX = -100
+logger = logging.getLogger(__name__)
+
+
+class GetItemDataset(torch.utils.data.Dataset):
+ """ Wraps a dataset that has __getitem__. """
+
+ def __init__(self, data: Union[Dict, List]):
+ super().__init__()
+ self.data = data
+
+ def __len__(self):
+ return len(self.data)
+
+ def __getitem__(self, i):
+ return self.data[i]
+
+
+class HuggingFaceFederatedDataset(PyTorchFederatedDataset): | Looks like there is nothing HuggingFace specific here, just that PyTorchFederatedDataset is now hard coded for tuples. will it be possible to just allow PyTorchFederatedDataset to handle dict datasets instead? |
pfl-research | github_2023 | python | 34 | apple | grananqvist | @@ -30,6 +33,88 @@ def _stats_tensors_to_device(item):
return item
+class GradAccumulationState:
+ """ Track gradient accumulation during local training. """
+
+ def __init__(self, train_params: Optional[NNTrainHyperParams],
+ user_data_length: Optional[int]):
+ if train_params is not None and user_data_length is not None:
+ # Get the total number of steps in local training
+ num_epochs = (1 if train_params.local_num_epochs is None else
+ train_params.get('local_num_epochs'))
+ local_batch_size = train_params.get('local_batch_size')
+ if train_params.get('local_num_steps') is not None:
+ num_steps = train_params.get('local_num_steps')
+ else:
+ num_steps = num_epochs
+ if local_batch_size is not None:
+ # Multiply by number of batches per epoch
+ num_steps *= (
+ user_data_length // local_batch_size +
+ int(user_data_length % local_batch_size != 0))
+ self._num_steps = num_steps
+ self._accumulation_steps = train_params.grad_accumulation_steps
+ else:
+ self._num_steps = None
+ self._accumulation_steps = 1
+ self._steps = 0
+
+ @property
+ def optimizer_should_update(self) -> bool:
+ """ Update every `grad_accumulation_steps` or is the last step """
+ return (self._steps % self._accumulation_steps == 0
+ or self._steps == self._num_steps)
+
+ @property
+ def accumulation_steps(self):
+ return self._accumulation_steps
+
+ def increment(self):
+ self._steps += 1
+
+
+@dataclass(frozen=True)
+class TrainStepArgs: | What about initializing this one in model.do_multiple_epochs_of and local step function will have TrainStepArgs as one of its arguments?
This way repeated code in bridges can be avoided and you do not need to remember to input these to do_multiple_epochs_of |
pfl-research | github_2023 | python | 37 | apple | congzheng-song | @@ -92,8 +91,8 @@ def add_mechanism_arguments(argument_parser):
argument_parser.add_argument(
'--local_privacy_mechanism',
choices=[
- 'none', 'gaussian', 'privunit', 'laplace', 'norm_clipping_only',
- 'separated', 'local_dp_separated'
+ 'none', 'gaussian', 'laplace', 'norm_clipping_only', 'separated', | do we still have separated and local_dp_seperated? these are also based on privunit right? |
pfl-research | github_2023 | others | 31 | apple | martin-pelikan-apple | @@ -54,7 +54,7 @@ In most cases, you can also use our setup script available in the ``pfl`` reposi
# arg1: Install for TF
# arg2: Install for PyTorch
# arg3: Install non-Python dependencies on Linux.
- ./pfl-research/build_scripts/install_horovod.sh true true false
+ ./pfl-research/build_scripts/install_horovod.sh "tf pytorch" false | The comment above indicates 3 arguments but there are really 2, with first being tf and/or pytorch. |
pfl-research | github_2023 | others | 20 | apple | martin-pelikan-apple | @@ -33,3 +33,19 @@ weight_by_samples: false
evaluation_frequency: 10
local_batch_size: 10
central_eval_batch_size: 10000
+
+#wandb_project_id: testing
+# This result in all algorithm parameters being added, even though
+# you only select 1 algorithm. Useful for reusing the same config
+# for multiple algorithms.
+add_all_arguments: true | I like this description more than the one in the flag description (with "sweeping"). It wasn't clear to me initially what this did before reading the code. Maybe we can clarify the description of the flag? |
pfl-research | github_2023 | python | 20 | apple | martin-pelikan-apple | @@ -137,19 +140,35 @@ def to_tensor(values: np.ndarray) -> torch.Tensor:
model_eval_params=model_eval_params,
frequency=arguments.evaluation_frequency),
StopwatchCallback(),
- ModelCheckpointingCallback('./checkpoints'),
+ # Uncomment to save central model checkpoints during training.
+ #ModelCheckpointingCallback('./checkpoints'), | Would it make sense to do this as a flag? It's not easy to notice this comment otherwise. Same for the other similar scenarios here. |
pfl-research | github_2023 | python | 20 | apple | congzheng-song | @@ -137,19 +140,35 @@ def to_tensor(values: np.ndarray) -> torch.Tensor:
model_eval_params=model_eval_params,
frequency=arguments.evaluation_frequency),
StopwatchCallback(),
- ModelCheckpointingCallback('./checkpoints'),
+ # Uncomment to save central model checkpoints during training.
+ #ModelCheckpointingCallback('./checkpoints'),
AggregateMetricsToDisk('./metrics.csv'),
CentralLRDecay(arguments.learning_rate,
0.02,
arguments.central_num_iterations,
30,
- linear_warmup=True)
+ linear_warmup=True),
+ TrackBestOverallMetrics(
+ higher_is_better_metric_names=['Central val | macro AP']),
]
if arguments.restore_model_path is not None:
model.load(arguments.restore_model_path)
logger.info(f'Restored model from {arguments.restore_model_path}')
+ callbacks.extend(algorithm_callbacks)
+
+ if arguments.wandb_project_id:
+ assert 'TASK_ID' in os.environ, "Wandb needs a task id" | `TASK_ID` is a bit generic. Is this for Wandb only? maybe `WANDB_TASK_ID`. |
pfl-research | github_2023 | python | 18 | apple | grananqvist | @@ -29,6 +26,7 @@
import h5py
import multiprocess as mp
import numpy as np
+import tensorflow_federated as tff # pytype: disable=import-error | this is not used anymore since #15 . you maybe missed that commit |
pfl-research | github_2023 | python | 18 | apple | grananqvist | @@ -102,7 +100,8 @@ def fetch_client_ids(database_filepath: str,
if split_name == "val":
# heldout is used in the raw sqlite database
split_name = "heldout"
- connection = sqlite3.connect(database_filepath)
+ connection =
+ .connect(database_filepath) | doesn't seem right |
pfl-research | github_2023 | python | 18 | apple | grananqvist | @@ -22,6 +20,7 @@ class Saveable(ABC):
want to be able to resume training after a crash.
"""
+ @abstractmethod | 👍 |
pfl-research | github_2023 | python | 18 | apple | nkrishnaswami | @@ -413,8 +419,9 @@ def parse_mechanism(mechanism_name,
mechanism = NormClippingOnly(order, clipping_bound)
else:
- assert False, "Please specify `mechanism_name`. If you don't want to \
- use any privacy, specify 'none'."
+ raise AssertionError( | Maybe a `ValueError` here, or all of these automated changes? |
pfl-research | github_2023 | python | 18 | apple | nkrishnaswami | @@ -49,7 +47,7 @@ def normal_cdf(x: float) -> LogFloat:
The CDF of a standard normal 𝒩(0,1).
The result is returned as a LogFloat, so that it is particularly accurate
in the left tail.
- """
+ """ # noqa: RUF002 | Since we've (you've :-)) had to `# noqa` it several times, do you want to put 𝒩 (and maybe a couple of Greek letters) into the `allowed_confusables` setting instead? |
pfl-research | github_2023 | python | 18 | apple | nkrishnaswami | @@ -162,13 +164,13 @@ def test_sample_dataset_len(self):
def make(indices):
return Dataset(indices)
- for l in [1, 10]:
- sample_len = lambda: l # pylint: disable=cell-var-from-loop
+ for length in [1, 10]:
+ sample_len = lambda length=length: length # pylint: disable=cell-var-from-loop | I like the longer loop variable name, but do you know why this lambda added a default-valued parameter? |
pfl-research | github_2023 | others | 19 | apple | grananqvist | @@ -0,0 +1,398 @@
+.. _fl_introduction:
+
+Federated learning with pfl
+===========================
+
+Federated learning (FL) allows training models in a distributed
+manner without storing data centrally on a server
+(`Konecny et al., 2015 <https://arxiv.org/abs/1511.03575>`_,
+`Konecny et al., 2016 <https://arxiv.org/abs/1610.02527>`_).
+
+This section discusses cross-device FL and how it can be implemented
+using ``pfl``. The section also provides examples for preparing the
+data and the model, which are important inputs to the algorithms
+themselves. The section does not provide an exhaustive list
+of algorithms implemented in ``pfl`` but rather a few simple examples
+to get started.
+
+For a more complete view, the official benchmarks are available in the ``benchmarks``
+directory, using a variety of realistic dataset-model combinations with
+and without differential privacy.
+
+Cross-device federated learning
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Stochastic gradient descent (SGD) is the standard algorithm
+for training neural networks. In a distributed setting, the
+training data are split between multiple servers in a data
+center that each have a subset of the data, and each server computes the
+gradient of the loss function with respect to the model parameters on
+its own subset of data.
+The sum of the gradients computed by each of the servers is the sum
+of the gradients over the union of the data on those servers.
+The model parameters are then updated by making a step in the
+direction of this gradient.
+
+The federated setting is similar in principle, with a small fraction
+of user devices taking the place of the servers in each iteration.
+However, in the federated setting the communication links are much
+slower, and the data can be unequally distributed amongst devices.
+The standard SGD algorithm in this setting is called `federated SGD`.
+
+`Federated averaging <https://arxiv.org/abs/1602.05629>`_ is a
+generalized form of federated training. Instead of each device computing
+a single gradient, each device performs multiple steps of SGD locally
+on its data, and reports the model differences back to the server.
+The server then averages the model differences from all
+devices in the cohort and uses the average in place of a gradient.
+In practice, `adaptive optimizers <https://arxiv.org/abs/2003.00295>`_
+are often incorporated into the local or central training.
+
+The number of devices participating in each iteration is referred
+to as cohort size (C). C is typically a small fraction of the overall
+population of devices.
+
+For practical and privacy reasons, user devices typically cannot maintain a state
+across FL rounds, although in some FL algorithms, devices are stateful. It is
+often assumed that in practice every user participates in the training at
+most once or once in a relatively long period of time.
+
+While FL on its own provides only limited privacy guarantees
+(`Boenisch et al., 2023 <https://arxiv.org/abs/2112.02918>`_;
+`Carlini et al., 2023 <https://arxiv.org/abs/2202.07646>`_;
+`Kariyappa et al., 2023 <https://arxiv.org/abs/2209.05578>`_),
+it can be combined with differential privacy (DP)
+(`Dwork et al., 2014 <https://www.cis.upenn.edu/~aaroth/Papers/privacybook.pdf>`_)
+and secure aggregation
+(`Bonawitz et al., 2016 <http://arxiv. org/abs/1611.04482>`_;
+`Talwar et al., 2023 <https://arxiv.org/abs/2307.15017>`_)
+to provide strong privacy guarantees for users (or clients) while training
+high quality models (`Abadi et al., 2016 <https://arxiv.org/abs/1607.00133>`_)
+For example, to incorporate user-level differential privacy using
+Gaussian noise, before sending the model differences back to the server, the differences are
+first clipped to make sure that the norm is upper bounded by a given clipping
+bound, and Gaussian noise is then added to each coordinate. The higher the
+noise relative to the clipping bound, the stronger the privacy guarantees.
+The clipped and randomized vector is then sent back to the server instead of
+the raw model differences.
+
+This document provides a high level example of how to initialize the key
+components to get the basic FL simulation running and a few pointers on how
+to change these components.
+
+Preparing data
+^^^^^^^^^^^^^^
+A federated dataset is a collection of smaller datasets that are each associated
+to a unique user. The federated dataset can be defined using :class:`.FederatedDataset`, which
+takes two key parameters: ``make_dataset_fn`` and ``user_sampler``. We discuss these two parameters next.
+
+The first parameter, ``make_dataset_fn``, is a function that returns the data
+of a particular user given the user ID. This is the place where you want to do any preprocessing.
+For example, imagine that there is one file that represents the data from each user:
+
+.. code-block:: console
+
+ $ cat user1.json
+ {"x": [[0, 0], [1, 0], [0, 1], [1, 1]], "y": [0, 0, 0, 1]}
+
+The data loading function in this case can be implemented as follows:
+
+.. code-block:: python
+
+ from pfl.data.dataset import Dataset
+
+ def make_dataset_fn(user_id):
+ data = json.load(open('{}.json'.format(user_id), 'r'))
+ features = np.array(data['x'])
+ labels = np.eye(2)[data['y']] # Make one-hot encodings
+ return Dataset(raw_data=[features, labels])
+
+In the above example, the raw data of the returned ``Dataset`` is a list of two entries. The first entry is the ``x`` argument and the second entry is the ``y`` argument. These arguments must match the ``loss`` and ``metric`` functions of the model.
+
+The expected order of the data inputs for other deep learning frameworks is described in their corresponding :ref:`models`.
+
+The second parameter of :class:`~pfl.data.federated_dataset.FederatedDataset`, ``user_sampler``, should also be a callable, and will return a sampled user identifier every call.
+``pfl`` implements two different sampling functions by default (available from the factory function :func:`~pfl.data.sampling.get_user_sampler`): random and minimize reuse.
+Random sampling generates each cohort with a uniform distribution.
+The minimize-reuse sampler maximizes the time between instances of reuse of the same user (see :class:`~pfl.data.sampling.MinimizeReuseUserSampler`).
+
+Although the random user sampler might seem the obvious choice because the cohorts in live FL deployments are typically
+selected at random, with a limited number of users available for the FL simulation, the minimize-reuse sampling may in fact have a more realistic behavior.
+
+.. code-block:: python
+
+ >>> from pfl.data.sampling import get_user_sampler
+ >>> user_ids = ['user1', 'user2', 'user3']
+ >>> sampler = get_user_sampler('minimize_reuse', user_ids)
+ >>> for _ in range(5):
+ >>> print('sampled ', sampler())
+ 'sampled user1'
+ 'sampled user2'
+ 'sampled user3'
+ 'sampled user1'
+ 'sampled user2'
+
+When you have defined a callable for the parameter ``make_dataset_fn`` and a callable for the parameter ``user_sampler``, the federated dataset can be created.
+
+.. code-block:: python
+
+ dataset = FederatedDataset(make_dataset_fn, sampler)
+
+
+The dataset can be iterated through, sampling a user dataset each call.
+
+.. code-block:: python
+
+ >>> next(dataset).raw_data
+ [array([[0, 0],
+ [1, 0],
+ [0, 1],
+ [1, 1]]),
+ array([[1., 0.],
+ [1., 0.],
+ [1., 0.],
+ [0., 1.]])]
+
+
+For more information on how to prepare datasets and federated datasets,
+please see the tutorial in TODO and benchmarks in TODO.
+
+Defining a model
+^^^^^^^^^^^^^^^^
+
+Below we define a simple PyTorch model that can be used for binary classification with
+10 input features, and it includes binary cross-entropy loss and accuracy metrics. Note that the
+``loss`` and ``metrics`` functions have two arguments, ``x`` and ``y``, which we discussed above
+when defining the dataset.
+
+.. code-block:: python
+
+ import torch
+ from pfl.model.pytorch import PyTorchModel
+
+ class TestModel(torch.nn.Module):
+
+ def __init__(self):
+ super().__init__()
+ self.linear = torch.nn.Linear(10, 1)
+ self.activation = torch.nn.Sigmoid()
+
+ def forward(self, x): # pylint: disable=arguments-differ
+ x = self.linear(x)
+ x = self.activation(x)
+ return x
+
+ def loss(self, x, y, eval=False):
+ self.eval() if eval else self.train()
+ bce_loss = torch.nn.BCELoss(reduction='sum')
+ return bce_loss(self(torch.FloatTensor(x)), torch.FloatTensor(y))
+
+ def metrics(self, x, y):
+ loss_value = self.loss(x, y, eval=True)
+ num_samples = len(y)
+ correct = ((self(x) > 0.5) == y).float().sum()
+ return {
+ 'loss': Weighted(loss_value, num_samples),
+ 'accuracy': Weighted(correct, num_samples)
+ }
+
+ pytorch_model = TestModel()
+ model = PyTorchModel(model=pytorch_model,
+ local_optimizer_create=torch.optim.SGD,
+ central_optimizer=torch.optim.SGD(
+ pytorch_model.parameters(), lr=1.0))
+
+FL algorithms in pfl
+^^^^^^^^^^^^^^^^^^^^
+
+Federated averaging
+"""""""""""""""""""
+To implement cross-device FL with federated averaging using ``pfl``, the key algorithm to use is
+:class:`.FederatedAveraging`:
+
+.. code-block:: python
+
+ from pfl.algorithm.federated_averaging import FederatedAveraging
+
+ algorithm = FederatedAveraging()
+
+Assuming we want to train a neural network, we can proceed by setting the key
+parameters for central and local training, and evaluation:
+
+.. code-block:: python
+
+ algorithm_params = NNAlgorithmParams(
+ central_num_iterations=central_num_epochs,
+ evaluation_frequency=10,
+ train_cohort_size=cohort_size,
+ val_cohort_size=val_cohort_size)
+
+ model_train_params = NNTrainHyperParams(
+ local_num_epochs=local_num_epochs,
+ local_learning_rate=local_learning_rate,
+ local_batch_size=None)
+
+ model_eval_params = NNEvalHyperParams(local_batch_size=None)
+
+Backend simulates an algorithm on the given federated dataset, which
+includes sampling the users, running local training, applying
+privacy mechanisms and applying postprocessors:
+
+.. code-block:: python
+
+ backend = SimulatedBackend(training_data=dataset,
+ val_data=val_dataset,
+ postprocessors=[])
+
+Callbacks can be provided that can be run at various stages of
+the algorithm. In the example shown below, the callbacks enable
+evaluating the model on the central dataset before the training begins
+and between central iterations, and saving aggregate metrics after
+each 100 iterations:
+
+.. code-block:: python
+
+ cb_eval = CentralEvaluationCallback(central_dataset,
+ model_eval_params)
+
+ cb_save = AggregateMetricsToDisk(
+ output_path=output_path,
+ frequency=100,
+ check_existing_file=False,
+ )
+
+The algorithm can then be run:
+
+.. code-block:: python
+
+ algorithm.run(
+ backend=backend,
+ model=model,
+ algorithm_params=algorithm_params,
+ model_train_params=model_train_params,
+ model_eval_params=model_eval_params,
+ callbacks=[cb_eval, cb_save])
+
+.. _Reptile-example:
+
+Reptile: FL with fine-tuning (personalization)
+""""""""""""""""""""""""""""""""""""""""""""""
+
+:class:`.Reptile`
+(`Nichol et al., 2018 <https://arxiv.org/abs/1803.02999>`_)
+combines federated averaging with fine-tuning where the
+model is fine tuned locally on each device prior to evaluation. Therefore,
+compared to traditional federated averaging, the evaluation should focus
+on metrics after running the local training. It is straightforward to switch
+the algorithm to enable fine-tuning (using the same parameters as in federated
+averaging):
+
+.. code-block:: python
+
+ from pfl.algorithm.reptile import Reptile
+
+ reptile = Reptile()
+
+ reptile.run(
+ backend=backend,
+ model=model,
+ algorithm_params=algorithm_params,
+ model_train_params=model_train_params,
+ model_eval_params=model_eval_params,
+ callbacks=[cb_eval, cb_save])
+
+
+.. _GBDT-example:
+
+Gradient Boosted Decision Trees
+"""""""""""""""""""""""""""""""
+
+This section presents an example of using ``pfl`` to train a gradient boosted
+decision tree (GBDT) model with a
+specialized training algorithm. In this case, the algorithm incrementally
+grows the trees.
+
+The parameters for GBDT algorithm are defined using :class:`.GBDTAlgorithmHyperParams`:
+
+.. code-block:: python
+
+ from pfl.tree.federated_gbdt import GBDTAlgorithmHyperParams
+ from pfl.tree.gbdt_model import GBDTModelHyperParams
+
+ gbdt_algorithm_params = GBDTAlgorithmHyperParams(
+ cohort_size=cohort_size,
+ val_cohort_size=val_cohort_size,
+ num_trees=20)
+ model_train_params = GBDTModelHyperParams()
+ model_eval_params = GBDTModelHyperParams()
+
+
+Two versions of GBDT models are implemented:
+:class:`.GBDTModelClassifier` implements GBDT for classification and
+:class:`.GBDTModelRegressor` implements GBDT for regression. Here is
+an example of creating a GBDT classifier model:
+
+.. code-block:: python
+
+ from pfl.tree.gbdt_model import GBDTModelClassifier
+
+ model = GBDTModelClassifier(num_features=num_features, max_depth=3)
+
+To initialize the GBDT training algorithm, it's necessary to provide details
+about the features. The code snippet below provides an example with 100 bool
+features and 10 floating point features from interval [0, 100] with 5
+equidistant boundaries to consider for tree splits:
+
+.. code-block:: python
+
+ from pfl.tree.tree_utils import Feature
+
+ features = []
+ for i in range(100):
+ features.append(Feature(2, (0, 1), bool, 1))
+ for i in range(10):
+ features.append(Feature(1, (0, 100), float, 5, 'equidistant')
+
+ gbdt_algorithm = FederatedGBDT(features=features)
+
+The algorithm can then be run similarly as in other examples:
+
+.. code-block:: python
+
+ gbdt_algorithm.run(algorithm_params=gbdt_algorithm_params,
+ backend=backend,
+ model=model,
+ model_train_params=model_train_params,
+ model_eval_params=model_eval_params,
+ callbacks=[cb_eval, cb_save])
+
+
+Implementing new FL algorithms in pfl
+"""""""""""""""""""""""""""""""""""""
+
+The above examples provide good starting points on how to implement
+new FL algorithms, although simpler versions can often be created
+by focusing on a single framework.
+
+Most new algorithms are likely
+to extend :class:`.FederatedAveraging`.
+If the new algorithm requires
+the users to store states, consider using :class:`.SCAFFOLD` as an example
+of how to initialize and update user states. If the new algorithm
+modifies the loss function (e.g. by adding a regularization term),
+:class:`.FedProx` is a good starting point.
+If the algorithm modifies the training loop in some way, :ref:`Reptile-example`
+provides a good example. Finally, :ref:`GBDT-example`
+provide examples of implementing algorithms that require specialized
+training and evaluation instead of the typical federated averaging.
+
+From FL to PFL: Incorporating Privacy
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+We discussed above that FL on its own does not guarantee privacy, and
+that is why we may want to incorporate differential privacy (DP) into FL.
+Private federated learning (PFL) is simply FL with
+DP, which can in practice be combined with secure aggregation.
+For more information on how to do incorporate DP into FL
+simulations using ``pfl``, please see TODO and benchmarks | TODO (dp tutorial?) |
pfl-research | github_2023 | python | 8 | apple | congzheng-song | @@ -942,6 +943,68 @@ def on_train_end(self, *, model: ModelType) -> None:
self._fp.close()
+class TrackBestOverallMetrics(TrainingProcessCallback): | This callback needs to be the last callback? Is there anyway to enforce that? |
pfl-research | github_2023 | python | 8 | apple | congzheng-song | @@ -942,6 +943,68 @@ def on_train_end(self, *, model: ModelType) -> None:
self._fp.close()
+class TrackBestOverallMetrics(TrainingProcessCallback):
+ """
+ Track the best value of given metrics over all iterations.
+
+ :param lower_is_better_metric_names:
+ A list of metric names to track. Whenever a metric with a name
+ in this list is encountered, the lowest value of that metric
+ seen through the history of all central iterations is returned.
+ :param higher_is_better_metric_names:
+ Same as ``lower_is_better_metric_names``, but for metrics where
+ a higher value is better.
+ """
+
+ def __init__(
+ self,
+ lower_is_better_metric_names: Optional[List[Union[
+ str, StringMetricName]]] = None,
+ higher_is_better_metric_names: Optional[List[Union[
+ str, StringMetricName]]] = None,
+ ):
+ self._lower_is_better_metric_names = lower_is_better_metric_names or []
+ self._higher_is_better_metric_names = higher_is_better_metric_names or []
+ self._init()
+
+ def _init(self):
+ self._best_lower_metrics: Dict = {}
+ self._best_higher_metrics: Dict = {}
+
+ def on_train_begin(self, *, model: ModelType) -> Metrics:
+ self._init() | This is called twice in `__init__` and `on_train_begin`. Are both calls needed? |
pfl-research | github_2023 | python | 8 | apple | martin-pelikan-apple | @@ -942,6 +943,91 @@ def on_train_end(self, *, model: ModelType) -> None:
self._fp.close()
+class TrackBestOverallMetrics(TrainingProcessCallback):
+ """
+ Track the best value of given metrics over all iterations.
+
+ :param lower_is_better_metric_names:
+ A list of metric names to track. Whenever a metric with a name
+ in this list is encountered, the lowest value of that metric
+ seen through the history of all central iterations is returned.
+ :param higher_is_better_metric_names:
+ Same as ``lower_is_better_metric_names``, but for metrics where
+ a higher value is better.
+ :param assert_metrics_found_within_frequency:
+ As a precaution, assert that all metrics referenced in
+ ``lower_is_better_metric_names`` and
+ ``higher_is_better_metric_names`` are found within this many
+ iterations. If you e.g. misspelled a metric name or put this
+ callback an order before the metric was generated, you will be
+ notified.
+ """
+
+ def __init__(self,
+ lower_is_better_metric_names: Optional[List[Union[
+ str, StringMetricName]]] = None,
+ higher_is_better_metric_names: Optional[List[Union[
+ str, StringMetricName]]] = None,
+ assert_metrics_found_within_frequency: int = 25):
+ self._lower_is_better_metric_names = lower_is_better_metric_names or []
+ self._higher_is_better_metric_names = higher_is_better_metric_names or []
+ self._assert_metrics_found_within_frequency = assert_metrics_found_within_frequency
+ self._init()
+
+ def _init(self):
+ self._best_lower_metrics: Dict = {}
+ self._best_higher_metrics: Dict = {}
+ self._found_metric_at_iteration = {
+ k: 0
+ for k in self._lower_is_better_metric_names +
+ self._higher_is_better_metric_names
+ }
+
+ def on_train_begin(self, *, model: ModelType) -> Metrics:
+ self._init()
+ return Metrics()
+
+ def _get_name_with_postfix(self,
+ original_metric_name: Union[str,
+ StringMetricName]):
+ if isinstance(original_metric_name, str):
+ original_metric_name = StringMetricName(original_metric_name)
+ return MetricNamePostfix(original_metric_name, 'best overall')
+
+ def after_central_iteration(
+ self, aggregate_metrics: Metrics, model: ModelType, *,
+ central_iteration: int) -> Tuple[bool, Metrics]:
+
+ best_overall_metrics = Metrics()
+ for (metric_names,
+ cmp_op) in [(self._lower_is_better_metric_names, min),
+ (self._higher_is_better_metric_names, max)]:
+ for k in metric_names:
+ if k in aggregate_metrics:
+ self._found_metric_at_iteration[k] = central_iteration
+ new_value = get_overall_value(aggregate_metrics[k])
+ if k not in self._best_lower_metrics:
+ self._best_lower_metrics[k] = new_value
+ else:
+ self._best_lower_metrics[k] = cmp_op(
+ self._best_lower_metrics[k], new_value)
+ # This will report best overall metrics at same frequency
+ # as the underlying metric values are appearing.
+ best_overall_metrics[self._get_name_with_postfix(
+ k)] = self._best_lower_metrics[k]
+ else:
+ if (central_iteration | Maybe it would be helpful to log a warning that the metric wasn't found regardless of assert_metrics_found_within_frequency? |
swift-embedded-examples | github_2023 | others | 67 | apple | rauhul | @@ -0,0 +1,31 @@
+name: Build Raspberry Pi Baremetal Examples
+
+on:
+ push:
+ branches: ["main"]
+ pull_request:
+ branches: ["main"]
+ schedule:
+ # Build on Mondays at 9am PST every week
+ - cron: '0 17 * * 1'
+
+jobs:
+ build-pico-sdk: | can you update this name |
swift-embedded-examples | github_2023 | others | 67 | apple | rauhul | @@ -0,0 +1,31 @@
+name: Build Raspberry Pi Baremetal Examples
+
+on:
+ push:
+ branches: ["main"]
+ pull_request:
+ branches: ["main"]
+ schedule:
+ # Build on Mondays at 9am PST every week
+ - cron: '0 17 * * 1'
+
+jobs:
+ build-pico-sdk:
+ runs-on: ubuntu-22.04
+ container: swiftlang/swift:nightly-main-jammy | Can you use a pinned version of swifts, see the other workflows for examples |
swift-embedded-examples | github_2023 | others | 73 | apple | rauhul | @@ -0,0 +1,36 @@
+{
+ "version": 1,
+ "author": "Juraj Michálek", | love the contribution, but could we exclude individual names? (in both json files) |
swift-embedded-examples | github_2023 | others | 37 | apple | rauhul | @@ -37,3 +37,24 @@ $ idf.py flash
```
- The LED should be blinking now.
+## Simulating
+
+The project can be simulated using Wokwi Simulator in [IDE](https://docs.wokwi.com/vscode/getting-started).
+
+- Build the UF2 image recognized by the simulator:
+
+```console
+idf.py uf2
+```
+
+- Open the directory with the project in the IDE.
+
+- Start the simulator.
+
+Alternatively the project can be launched with pre-built binary in web browser:
+
+[](https://wokwi.com/experimental/viewer?diagram=https://raw.githubusercontent.com/georgik/swift-embedded-examples/feature/wokwi/esp32-led-blink-sdk/diagram.json&firmware=https://github.com/georgik/swift-embedded-examples/releases/download/v0.1/embedded-swift-esp32-c6-led-blink-sdk.uf2.bin | Can we update these paths to use URLs in-tree? |
swift-embedded-examples | github_2023 | others | 63 | apple | rauhul | @@ -12,22 +12,85 @@ endif()
set(SWIFT_TARGET "armv6m-none-none-eabi") # default for rp2040
+list(APPEND CLANG_ARCH_ABI_FLAGS "-Xcc")
if(PICO_PLATFORM STREQUAL "rp2350-arm-s")
message(STATUS "PICO_PLATFORM is set to rp2350-arm-s, using armv7em")
set(SWIFT_TARGET "armv7em-none-none-eabi")
+ list(APPEND CLANG_ARCH_ABI_FLAGS "-mfloat-abi=soft") | I'd prefer if you factored the above `list(APPEND CLANG_ARCH_ABI_FLAGS "-Xcc")` back into each of these lines |
swift-embedded-examples | github_2023 | others | 63 | apple | etcwilde | @@ -10,24 +10,85 @@ else()
execute_process(COMMAND which swiftc OUTPUT_VARIABLE SWIFTC OUTPUT_STRIP_TRAILING_WHITESPACE)
endif()
-
set(SWIFT_TARGET "armv6m-none-none-eabi") # default for rp2040
if(PICO_PLATFORM STREQUAL "rp2350-arm-s")
message(STATUS "PICO_PLATFORM is set to rp2350-arm-s, using armv7em")
set(SWIFT_TARGET "armv7em-none-none-eabi")
+ list(APPEND CLANG_ARCH_ABI_FLAGS "-Xcc" "-mfloat-abi=soft")
+elseif(PICO_PLATFORM STREQUAL "rp2040")
+ message(STATUS "PICO_PLATFORM is set to RP2040, using armv6m")
+ list(APPEND CLANG_ARCH_ABI_FLAGS "-Xcc" "-mfloat-abi=soft")
elseif(PICO_PLATFORM STREQUAL "rp2350-riscv")
- # Untested, gives PICO-SDK errors when building
- message(WARNING "PICO_PLATFORM is set to rp2350-riscv, using riscv32 (untested). It is recommended to use rp2350-arm-s.")
+ message(STATUS "PICO_PLATFORM is set to rp2350-riscv, using riscv32.")
set(SWIFT_TARGET "riscv32-none-none-eabi")
+ list(APPEND CLANG_ARCH_ABI_FLAGS "-Xcc" "-march=rv32imac_zicsr_zifencei_zba_zbb_zbs_zbkb" "-Xcc" "-mabi=ilp32")
endif()
add_executable(swift-blinky)
+
+target_link_libraries(swift-blinky
+ pico_stdlib hardware_uart hardware_gpio
+)
+
+# Gather compile definitions from all dependencies
+
+set_property(GLOBAL PROPERTY visited_targets "")
+set_property(GLOBAL PROPERTY compilerdefs_list "")
+
+function(gather_compile_definitions_recursive target)
+ # Get the current value of visited_targets
+ get_property(visited_targets GLOBAL PROPERTY visited_targets)
+
+ # make sure we don't visit the same target twice
+ # and that we don't visit the special generator expressions
+ if (${target} MATCHES "\\$<" OR ${target} MATCHES "::@" OR ${target} IN_LIST visited_targets)
+ return()
+ endif()
+
+ # Append the target to visited_targets
+ list(APPEND visited_targets ${target})
+ set_property(GLOBAL PROPERTY visited_targets "${visited_targets}")
+
+ # Get the current value of compilerdefs_list
+ get_property(compilerdefs_list GLOBAL PROPERTY compilerdefs_list)
+
+ get_target_property(target_definitions ${target} INTERFACE_COMPILE_DEFINITIONS)
+ if (target_definitions)
+ # Append the target definitions to compilerdefs_list
+ list(APPEND compilerdefs_list ${target_definitions})
+ set_property(GLOBAL PROPERTY compilerdefs_list "${compilerdefs_list}")
+ endif()
+
+ get_target_property(target_linked_libs ${target} INTERFACE_LINK_LIBRARIES)
+ if (target_linked_libs)
+ foreach(linked_target ${target_linked_libs})
+ # Recursively gather compile definitions from dependencies
+ gather_compile_definitions_recursive(${linked_target})
+ endforeach()
+ endif()
+endfunction()
+
+gather_compile_definitions_recursive(swift-blinky)
+get_property(COMPILE_DEFINITIONS GLOBAL PROPERTY compilerdefs_list)
+
+# Parse compiler definitions into a format that swiftc can understand
+list(REMOVE_DUPLICATES COMPILE_DEFINITIONS)
+list(PREPEND COMPILE_DEFINITIONS "") # -Xcc -D
+string(REPLACE "$<TARGET_PROPERTY:PICO_TARGET_BINARY_TYPE>" "$<TARGET_PROPERTY:swift-blinky,PICO_TARGET_BINARY_TYPE>" COMPILE_DEFINITIONS "${COMPILE_DEFINITIONS}")
+string(REPLACE ";" " -Xcc -D" COMPILE_DEFINITIONS "${COMPILE_DEFINITIONS}")
+
+# Write the compiler definitions to a file (this way CMake will evaluate the compiler definitions)
+file(GENERATE OUTPUT ${CMAKE_BINARY_DIR}/swiftc_flags.txt CONTENT "${COMPILE_DEFINITIONS}") | Shouldn't need this. The `COMMAND` in a custom command expands generator expressions inline.
> Arguments to COMMAND may use [generator expressions](https://cmake.org/cmake/help/latest/manual/cmake-generator-expressions.7.html#manual:cmake-generator-expressions(7)).
https://cmake.org/cmake/help/latest/command/add_custom_command.html |
swift-embedded-examples | github_2023 | others | 63 | apple | etcwilde | @@ -11,11 +11,65 @@ execute_process(COMMAND which swiftc OUTPUT_VARIABLE SWIFTC OUTPUT_STRIP_TRAILIN
endif()
add_executable(swift-blinky)
+target_link_libraries(swift-blinky
+ pico_stdlib hardware_uart hardware_gpio pico_lwip_arch pico_cyw43_arch_none
+)
+
+# Gather compile definitions from all dependencies
+set_property(GLOBAL PROPERTY visited_targets "")
+set_property(GLOBAL PROPERTY compilerdefs_list "")
+
+function(gather_compile_definitions_recursive target)
+ # Get the current value of visited_targets
+ get_property(visited_targets GLOBAL PROPERTY visited_targets)
+
+ # make sure we don't visit the same target twice
+ # and that we don't visit the special generator expressions
+ if (${target} MATCHES "\\$<" OR ${target} MATCHES "::@" OR ${target} IN_LIST visited_targets)
+ return()
+ endif()
+
+ # Append the target to visited_targets
+ list(APPEND visited_targets ${target})
+ set_property(GLOBAL PROPERTY visited_targets "${visited_targets}")
+
+ # Get the current value of compilerdefs_list
+ get_property(compilerdefs_list GLOBAL PROPERTY compilerdefs_list)
+
+ get_target_property(target_definitions ${target} INTERFACE_COMPILE_DEFINITIONS)
+ if (target_definitions)
+ # Append the target definitions to compilerdefs_list
+ list(APPEND compilerdefs_list ${target_definitions})
+ set_property(GLOBAL PROPERTY compilerdefs_list "${compilerdefs_list}")
+ endif()
+
+ get_target_property(target_linked_libs ${target} INTERFACE_LINK_LIBRARIES)
+ if (target_linked_libs)
+ foreach(linked_target ${target_linked_libs})
+ # Recursively gather compile definitions from dependencies
+ gather_compile_definitions_recursive(${linked_target})
+ endforeach()
+ endif()
+endfunction()
+
+gather_compile_definitions_recursive(swift-blinky)
+get_property(COMPILE_DEFINITIONS GLOBAL PROPERTY compilerdefs_list)
+
+# Parse compiler definitions into a format that swiftc can understand
+list(REMOVE_DUPLICATES COMPILE_DEFINITIONS)
+list(PREPEND COMPILE_DEFINITIONS "") # -Xcc -D
+string(REPLACE "$<TARGET_PROPERTY:PICO_TARGET_BINARY_TYPE>" "$<TARGET_PROPERTY:swift-blinky,PICO_TARGET_BINARY_TYPE>" COMPILE_DEFINITIONS "${COMPILE_DEFINITIONS}")
+string(REPLACE ";" " -Xcc -D" COMPILE_DEFINITIONS "${COMPILE_DEFINITIONS}")
+
+# Write the compiler definitions to a file (this way CMake will evaluate the compiler definitions)
+file(GENERATE OUTPUT ${CMAKE_BINARY_DIR}/swiftc_flags.txt CONTENT "${COMPILE_DEFINITIONS}") | The `COMMAND` in a custom command expands generator expressions inline. |
swift-embedded-examples | github_2023 | others | 39 | apple | kubamracek | @@ -10,10 +10,22 @@
- Connect the Pico board via a USB cable to your Mac, and make sure it's in the USB Mass Storage firmware upload mode (either hold the BOOTSEL button while plugging the board, or make sure your Flash memory doesn't contain any valid firmware).
- Make sure you have a recent nightly Swift toolchain that has Embedded Swift support.
+- Before building, you need to install the python dependencies (ideally in a virtual environment):
+
+```console
+cd swift-embedded-examples
+mkdir pyenv
+cd pyenv
+python3 -m venv .
+source bin/activate
+cd ../Tools
+pip3 install -r requirements.txt
+```
+
- Build and copy the program in the UF2 format to the Mass Storage device to trigger flashing the program into memory (after which the device will reboot and run the firmware):
``` console
$ cd pico-blink
-$ TOOLCHAINS='<toolchain-name>' ./build.sh
+$ TOOLCHAINS='<toolchain-name>' ./build.sh # toolchain-name is typically `swift` | I don't think is actually true. I have multiple toolchains installed and if I set TOOLCHAINS=swift, I do *not* get the latest one chosen.
But point taken that the instructions are not really clear on what the TOOLCHAINS variable should be set to... |
swift-embedded-examples | github_2023 | others | 51 | apple | kubamracek | @@ -6,3 +6,4 @@
*/sdkconfig.old
*/managed_components
*/dependencies.lock
+nrfx-blink-sdk | this probably shouldn't be here, right? |
swift-embedded-examples | github_2023 | others | 34 | apple | rauhul | @@ -21,15 +21,15 @@ This repository is a set of demonstration projects of **Embedded Swift**. Embedd
| Name | Platform | Description | Photo |
| ---- | -------- | ----------- | ----- |
-| [stm32-blink](./stm32-blink) | STM32F746G-DISCO | Baremetal program that blinks an LED repeatedly. Does not use any vendor SDKs or external toolchains. | <img width="300" src="https://raw.githubusercontent.com/kubamracek/swift-evolution/branch/assets/stm32-blink.jpg"> |
-| [stm32-lcd-logo](./stm32-lcd-logo) | STM32F746G-DISCO | Sets up layers on the built-in LCD and animates them in a loop. Does not use any vendor SDKs or external toolchains. | <img width="300" src="https://raw.githubusercontent.com/kubamracek/swift-evolution/branch/assets/stm32-lcd-logo.jpg"> |
-| [stm32-neopixel](./stm32-neopixel) | STM32F746G-DISCO | Uses SPI to program a NeoPixel LED strip. Does not use any vendor SDKs or external toolchains. | <img width="300" src="https://raw.githubusercontent.com/kubamracek/swift-evolution/branch/assets/stm32-led.jpg"> |
-| [stm32-uart-echo](./stm32-uart-echo) | STM32F746G-DISCO | Uses UART to implement a simple "echo" firmware. Does not use any vendor SDKs or external toolchains. | <img width="300" src="https://raw.githubusercontent.com/kubamracek/swift-evolution/branch/assets/stm32-uart-echo.png">|
-| [pico-blink](./pico-blink) | Raspberry Pi Pico | Baremetal program that blinks an LED repeatedly. Does not use any vendor SDKs or external toolchains. | <img width="300" src="https://raw.githubusercontent.com/kubamracek/swift-evolution/branch/assets/pico-blink.jpg"> |
-| [pico-blink-sdk](./pico-blink-sdk) | Raspberry Pi Pico | Baremetal program that blinks an LED repeatedly. Demonstrates how to use code and libraries from the Pico SDK and add Swift code on top of it. | <img width="300" src="https://raw.githubusercontent.com/kubamracek/swift-evolution/branch/assets/pico-blink-sdl.jpg"> |
+| [stm32-blink](./stm32-blink) | STM32F746G-DISCO | Baremetal program that blinks an LED repeatedly. Does not use any vendor SDKs or external toolchains. | <img width="300" src="https://github.com/apple/swift-embedded-examples/assets/1186214/739e98fd-a438-4a64-a7aa-9dddee25034b"> | | I didn't know you could reference images like this. I _think_ I would prefer the images actually be in the repo for the sake of ease of updating, but this solution is nice because the clone size isn't affected. |
swift-embedded-examples | github_2023 | others | 25 | apple | rauhul | @@ -1,8 +1,28 @@
+ | Could we drop this whitespace? |
swift-embedded-examples | github_2023 | others | 25 | apple | rauhul | @@ -11,6 +31,8 @@ add_custom_command(
${SWIFTC}
-target riscv32-none-none-eabi
-Xfrontend -function-sections -enable-experimental-feature Embedded -wmo -parse-as-library -Osize
+ -Xcc -march=${march_flag} -Xcc -mabi=${mabi_flag}
+ -Xlinker -march=${march_flag} -Xlinker -mabi=${mabi_flag} | Do you mind explaining why the linker also needs this information? |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,3 @@
+cmake_minimum_required(VERSION 3.16) | Could you increase this to 3.29 and ensure everything still works? |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,39 @@
+# esp32-led-strip-sdk | I think this name is copy-pasted, could you update it? |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,39 @@
+# esp32-led-strip-sdk
+
+This example demonstrates how to integrate with the ESP-IDF SDK via CMake and how to use the standard GPIO library to control LED from Swift. This example is specifically made for the RISC-V MCUs from ESP32 (the Xtensa MCUs are not currently supported by Swift).
+
+ | These links appear to point out of tree, could you fix them? |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,39 @@
+# esp32-led-strip-sdk
+
+This example demonstrates how to integrate with the ESP-IDF SDK via CMake and how to use the standard GPIO library to control LED from Swift. This example is specifically made for the RISC-V MCUs from ESP32 (the Xtensa MCUs are not currently supported by Swift).
+
+
+
+
+## Requirements
+
+- Set up the [ESP-IDF](https://docs.espressif.com/projects/esp-idf/en/stable/esp32/) development environment. Follow the steps in the [ESP32-C6 "Get Started" guide](https://docs.espressif.com/projects/esp-idf/en/v5.2/esp32c6/get-started/index.html).
+ - Make sure you specifically set up development for the RISC-V ESP32-C6, and not the Xtensa based products.
+
+- Before trying to use Swift with the ESP-IDF SDK, make sure your environment works and can build the provided C/C++ sample projects, in particular:
+ - Try building and running the "get-started/blink" example from ESP-IDF written in C.
+
+## Building
+
+- Make sure you have a recent nightly Swift toolchain that has Embedded Swift support.
+- If needed, run export.sh to get access to the idf.py script from ESP-IDF.
+- Specify the nightly toolchain to be used via the `TOOLCHAINS` environment variable and the target board type by using `idf.py set-target`.
+``` console
+$ cd esp32-led-strip-sdk | copied folder name |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,39 @@
+# esp32-led-strip-sdk
+
+This example demonstrates how to integrate with the ESP-IDF SDK via CMake and how to use the standard GPIO library to control LED from Swift. This example is specifically made for the RISC-V MCUs from ESP32 (the Xtensa MCUs are not currently supported by Swift).
+
+
+
+
+## Requirements
+
+- Set up the [ESP-IDF](https://docs.espressif.com/projects/esp-idf/en/stable/esp32/) development environment. Follow the steps in the [ESP32-C6 "Get Started" guide](https://docs.espressif.com/projects/esp-idf/en/v5.2/esp32c6/get-started/index.html).
+ - Make sure you specifically set up development for the RISC-V ESP32-C6, and not the Xtensa based products.
+
+- Before trying to use Swift with the ESP-IDF SDK, make sure your environment works and can build the provided C/C++ sample projects, in particular:
+ - Try building and running the "get-started/blink" example from ESP-IDF written in C.
+
+## Building
+
+- Make sure you have a recent nightly Swift toolchain that has Embedded Swift support.
+- If needed, run export.sh to get access to the idf.py script from ESP-IDF.
+- Specify the nightly toolchain to be used via the `TOOLCHAINS` environment variable and the target board type by using `idf.py set-target`.
+``` console
+$ cd esp32-led-strip-sdk
+$ export TOOLCHAINS=...
+$ . <path-to-esp-idf>/export.sh
+$ idf.py set-target esp32c6
+$ idf.py build
+```
+
+## Running
+
+- Connect the Esp32-C6-Bug board(Or any other board with integrated LED on GPIO pin 8) over a USB cable to your Mac. Alternatively you can just connect external LED to GPIO pin 8 on any other board. | could you adjust the formatting here slightly: "...board (or any..." |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,31 @@
+//===----------------------------------------------------------------------===//
+//
+// This source file is part of the Swift open source project
+//
+// Modified by Karavaev Aleksei | Please remove this line all files should follow the same header |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,27 @@
+//===----------------------------------------------------------------------===//
+//
+// This source file is part of the Swift open source project
+//
+// Modified by Karavaev Aleksei | same here |
swift-embedded-examples | github_2023 | cpp | 21 | apple | rauhul | @@ -0,0 +1,17 @@
+//===----------------------------------------------------------------------===//
+//
+// This source file is part of the Swift open source project
+//
+// Copyright (c) 2023 Apple Inc. and the Swift project authors. | Could you make sure to use 2024 as the copyright date on these new additions |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,31 @@
+//===----------------------------------------------------------------------===//
+//
+// This source file is part of the Swift open source project
+//
+// Modified by Karavaev Aleksei
+// Copyright (c) 2023 Apple Inc. and the Swift project authors.
+// Licensed under Apache License v2.0 with Runtime Library Exception
+//
+// See https://swift.org/LICENSE.txt for license information
+//
+//===----------------------------------------------------------------------===//
+
+// A simple "overlay" to provide nicer APIs in Swift
+struct Led {
+ var ledPin: gpio_num_t
+ init(gpioPin: Int) {
+ ledPin = gpio_num_t(Int32(gpioPin))
+
+ guard gpio_reset_pin(ledPin) == ESP_OK else {
+ fatalError("cannot reset led")
+ }
+
+ guard gpio_set_direction(ledPin,GPIO_MODE_OUTPUT) == ESP_OK else { | nit: add a space after the comma |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,27 @@
+//===----------------------------------------------------------------------===//
+//
+// This source file is part of the Swift open source project
+//
+// Modified by Karavaev Aleksei
+// Copyright (c) 2023 Apple Inc. and the Swift project authors.
+// Licensed under Apache License v2.0 with Runtime Library Exception
+//
+// See https://swift.org/LICENSE.txt for license information
+//
+//===----------------------------------------------------------------------===//
+
+// The code will blink a led attached to GPIO8, to change the led pin change the Led(gpioPin: 8) to something else | nit: wrap to 80 characters |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,39 @@
+# esp32-led-strip-sdk
+
+This example demonstrates how to integrate with the ESP-IDF SDK via CMake and how to use the standard GPIO library to control LED from Swift. This example is specifically made for the RISC-V MCUs from ESP32 (the Xtensa MCUs are not currently supported by Swift).
+
+
+
+
+## Requirements
+
+- Set up the [ESP-IDF](https://docs.espressif.com/projects/esp-idf/en/stable/esp32/) development environment. Follow the steps in the [ESP32-C6 "Get Started" guide](https://docs.espressif.com/projects/esp-idf/en/v5.2/esp32c6/get-started/index.html).
+ - Make sure you specifically set up development for the RISC-V ESP32-C6, and not the Xtensa based products.
+
+- Before trying to use Swift with the ESP-IDF SDK, make sure your environment works and can build the provided C/C++ sample projects, in particular:
+ - Try building and running the "get-started/blink" example from ESP-IDF written in C.
+
+## Building
+
+- Make sure you have a recent nightly Swift toolchain that has Embedded Swift support.
+- If needed, run export.sh to get access to the idf.py script from ESP-IDF.
+- Specify the nightly toolchain to be used via the `TOOLCHAINS` environment variable and the target board type by using `idf.py set-target`.
+``` console
+$ cd esp32-led-strip-sdk
+$ export TOOLCHAINS=...
+$ . <path-to-esp-idf>/export.sh
+$ idf.py set-target esp32c6
+$ idf.py build
+```
+
+## Running
+
+- Connect the Esp32-C6-Bug board(Or any other board with integrated LED on GPIO pin 8) over a USB cable to your Mac. Alternatively you can just connect external LED to GPIO pin 8 on any other board.
+- Connect RX pin of USB-UART converter to TX0 pin of your board if you need serial ouput.(You may also need to connect GND converter pin to the GND pin of the board) | same here could you add a space after output and remove the parens "...output. You may..." |
swift-embedded-examples | github_2023 | others | 21 | apple | rauhul | @@ -0,0 +1,39 @@
+# esp32-led-blink-sdk
+
+This example demonstrates how to integrate with the ESP-IDF SDK via CMake and how to use the standard GPIO library to control LED from Swift. This example is specifically made for the RISC-V MCUs from ESP32 (the Xtensa MCUs are not currently supported by Swift).
+
+ | last nit: could you change this path to `assets/images/...` |
swift-embedded-examples | github_2023 | cpp | 11 | apple | rauhul | @@ -0,0 +1,15 @@
+//===----------------------------------------------------------------------===//
+//
+// This source file is part of the Swift open source project
+//
+// Copyright (c) 2023 Apple Inc. and the Swift project authors. | Could you update the year to 2024 for the files added? |
hackingBuddyGPT | github_2023 | others | 107 | ipa-lab | andreashappe | @@ -8,8 +8,12 @@ conn.port=2222
# exchange with the user for your target VM
conn.username='bob'
+#To just use keyauth only, use '' with no space for conn.password
+#Otherwise, insert the password for instance here
conn.password='secret'
-
+#To just use username and password auth only, use '' with no space for conn.keyfilename
+#Otherwise, insert the filepath for the keyfile here
+conn.keyfilename='/home/bob/.ssh/sshkey.rsa' | hm, maybe by default we should use `''` so that the behavior does not change? |
hackingBuddyGPT | github_2023 | python | 79 | ipa-lab | andreashappe | @@ -1,4 +1,5 @@
import abc
+import enum | not used? |
hackingBuddyGPT | github_2023 | others | 79 | ipa-lab | andreashappe | @@ -56,21 +62,17 @@ where = ["src"]
[tool.pytest.ini_options]
pythonpath = "src"
-addopts = [
- "--import-mode=importlib",
-]
+addopts = ["--import-mode=importlib"]
[project.optional-dependencies]
-testing = [
- 'pytest',
- 'pytest-mock'
-]
+testing = ['pytest', 'pytest-mock']
dev = [
'ruff',
]
[project.scripts]
wintermute = "hackingBuddyGPT.cli.wintermute:main"
hackingBuddyGPT = "hackingBuddyGPT.cli.wintermute:main"
+wintermuteViewer = "hackingBuddyGPT.cli.viewer:main" | does this exist anymore? |
hackingBuddyGPT | github_2023 | python | 78 | ipa-lab | andreashappe | @@ -55,9 +55,8 @@ def set_initial_state(self, initial_state:AgentWorldview):
def set_template(self, template:str):
self._template = Template(filename=template)
self._template_size = self.llm.count_tokens(self._template.source)
-
- def perform_round(self, turn:int) -> bool:
- got_root : bool = False
+ def perform_round(self, turn: int) -> bool:##fix code | please remove the comment |
hackingBuddyGPT | github_2023 | python | 78 | ipa-lab | andreashappe | @@ -39,13 +38,27 @@ def perform_round(self, turn):
cmd = llm_util.cmd_output_fixer(answer.result)
with self.console.status("[bold green]Executing that command..."):
- self.console.print(Panel(answer.result, title="[bold cyan]Got command from LLM:"))
- result, got_root = self.get_capability(cmd.split(" ", 1)[0])(cmd)
+ self.console.print(Panel(answer.result, title="[bold cyan]Got command from LLM:"))
+
+ # Assuming cmd is of the form "username password"
+ parts = cmd.split(" ", 1)
+ if len(parts) == 2:
+ username, password = parts
+ ##here fix!
+ result, got_root = self.get_capability("test_credential")(username, password)
+ else:
+ # Handle other cases or log error
+ result = "Command format error. Expected 'username password'."
+ got_root = False
- # log and output the command and its result
- self.log_db.add_log_query(self._run_id, turn, cmd, result, answer)
- self._sliding_history.add_command(cmd, result)
- self.console.print(Panel(result, title=f"[bold cyan]{cmd}"))
+ #self.log_db.add_log_query(self._run_id, cmd, result, answer) | this looks wrong, wouldn't this only call test_credentials and not execute_command anymore? |
hackingBuddyGPT | github_2023 | python | 78 | ipa-lab | andreashappe | @@ -11,10 +11,12 @@ def __init__(self, connection_string: str = parameter(desc="sqlite3 database con
def init(self):
self.connect()
self.setup_db()
-
+
def connect(self):
- self.db = sqlite3.connect(self.connection_string)
+# self.db = sqlite3.connect(self.connection_string, timeout=10) # Set timeout to 10 seconds
+ self.db = sqlite3.connect(self.connection_string, check_same_thread=False, timeout=10) | this is weird. I am trying to understand the error case. Are you running two instances of hackingBuddyGPT in parallel? This would not be supported by the sqlite3 database (that we are currently using). I think the clean solution would rather be to use a separate sqlite database per run (using the `log_db` parameter) or using a database such as postgres/mysql that supports concurrent access |
hackingBuddyGPT | github_2023 | python | 89 | ipa-lab | andreashappe | @@ -36,9 +40,22 @@ def get_response(self, prompt, *, retry: int = 0, **kwargs) -> LLMResult:
headers = {"Authorization": f"Bearer {self.api_key}"}
data = {'model': self.model, 'messages': [{'role': 'user', 'content': prompt}]}
+ # Log the request payload
+ #
+ # Uncomment the following to log debug output
+ # logging.debug(f"Request payload: {data}") | could you make this configurable (a `debug-requests` flag instead of commenting logging in and out)? |
hackingBuddyGPT | github_2023 | others | 85 | ipa-lab | andreashappe | @@ -173,16 +173,41 @@ $ cp .env.example .env
$ vi .env
# if you start wintermute without parameters, it will list all available use cases
-$ python wintermute.py
-usage: wintermute.py [-h] {linux_privesc,minimal_linux_privesc,windows privesc} ...
-wintermute.py: error: the following arguments are required: {linux_privesc,windows privesc}
+$ python src/hackingBuddyGPT/cli/wintermute.py
+usage: wintermute.py [-h]
+ {LinuxPrivesc,WindowsPrivesc,ExPrivEscLinux,ExPrivEscLinuxTemplated,ExPrivEscLinuxHintFile,ExPrivEscLinuxLSE,MinimalWebTesting,WebTestingWithExplanation,SimpleWebAPITesting,SimpleWebAPIDocumentation}
+ ...
+wintermute.py: error: the following arguments are required: {LinuxPrivesc,WindowsPrivesc,ExPrivEscLinux,ExPrivEscLinuxTemplated,ExPrivEscLinuxHintFile,ExPrivEscLinuxLSE,MinimalWebTesting,WebTestingWithExplanation,SimpleWebAPITesting,SimpleWebAPIDocumentation}
+```
+
+## Provide a Target Machine over SSH
+
+The next important part is having a machine that we can run our agent against. In our case, the target machine will be situated at `192.168.122.151`.
+
+We are using vulnerable Linux systems running in Virtual Machines for this. Never run this against real systems.
+
+{% callout title="We also provide vulnerable machines!" %} | I don't believe that this works, callout is a feature of our documentation site template.. I don't think github supports this :-/ |
hackingBuddyGPT | github_2023 | others | 85 | ipa-lab | andreashappe | @@ -173,16 +173,41 @@ $ cp .env.example .env
$ vi .env
# if you start wintermute without parameters, it will list all available use cases
-$ python wintermute.py
-usage: wintermute.py [-h] {linux_privesc,minimal_linux_privesc,windows privesc} ...
-wintermute.py: error: the following arguments are required: {linux_privesc,windows privesc}
+$ python src/hackingBuddyGPT/cli/wintermute.py
+usage: wintermute.py [-h]
+ {LinuxPrivesc,WindowsPrivesc,ExPrivEscLinux,ExPrivEscLinuxTemplated,ExPrivEscLinuxHintFile,ExPrivEscLinuxLSE,MinimalWebTesting,WebTestingWithExplanation,SimpleWebAPITesting,SimpleWebAPIDocumentation}
+ ...
+wintermute.py: error: the following arguments are required: {LinuxPrivesc,WindowsPrivesc,ExPrivEscLinux,ExPrivEscLinuxTemplated,ExPrivEscLinuxHintFile,ExPrivEscLinuxLSE,MinimalWebTesting,WebTestingWithExplanation,SimpleWebAPITesting,SimpleWebAPIDocumentation}
+```
+
+## Provide a Target Machine over SSH
+
+The next important part is having a machine that we can run our agent against. In our case, the target machine will be situated at `192.168.122.151`.
+
+We are using vulnerable Linux systems running in Virtual Machines for this. Never run this against real systems.
+
+{% callout title="We also provide vulnerable machines!" %}
+We are using virtual machines from our [Linux Privilege-Escalation Benchmark](https://github.com/ipa-lab/benchmark-privesc-linux) project. Feel free to use them for your own research!
+{% /callout %}
+
+## Run the Hacking Agent
+Finally we can run hackingBuddyGPT against our provided test VM. Enjoy!
+
+{% callout type="warning" title="Don't be evil!" %} | same here, callout might not work |
hackingBuddyGPT | github_2023 | python | 71 | ipa-lab | andreashappe | @@ -31,32 +40,116 @@ def client(self) -> openai.OpenAI:
def instructor(self) -> instructor.Instructor:
return instructor.from_openai(self.client)
- def get_response(self, prompt, *, capabilities=None, **kwargs) -> LLMResult:
+ def get_response(self, prompt, *, capabilities: Dict[str, Capability]=None, **kwargs) -> LLMResult: | type information for prompt missing
|
hackingBuddyGPT | github_2023 | python | 47 | ipa-lab | andreashappe | @@ -26,7 +26,7 @@
@dataclass
class MinimalWebTesting(RoundBasedUseCase):
llm: OpenAILib
- host: str = parameter(desc="The host to test", default="http://localhost")
+ host: str = parameter(desc="The host to test", default="https://api.restful-api.dev/objects") | using localhost would be nice, so to not attack third-parties |
hackingBuddyGPT | github_2023 | python | 47 | ipa-lab | andreashappe | @@ -77,6 +77,8 @@ def perform_round(self, turn: int):
result = response.execute()
self.console.print(Panel(result, title="tool"))
self._prompt_history.append(tool_message(result, tool_call_id))
+ for i in self._prompt_history: | this looks like forgotten debug prints, maybe remove? |
lilAvatarUtils | github_2023 | csharp | 24 | lilxyzw | lilxyzw | @@ -161,7 +161,7 @@ internal static void LabelFieldWithSelection(Rect rect, Object obj, bool hilight
else style = EditorStyles.label;
GUIContent content = EditorGUIUtility.ObjectContent(obj, obj.GetType());
content.tooltip = AssetDatabase.GetAssetPath(obj);
- if(!string.IsNullOrEmpty(content.tooltip)) content.text = Path.GetFileName(content.tooltip);
+ if(!string.IsNullOrEmpty(content.tooltip) && !AvatarUtilsWindow.isMaterialsGUITabOpen) content.text = Path.GetFileName(content.tooltip); | isMaterialsGUITabOpenを追加せず、単にオブジェクトがShaderである場合にcontent.textを変更しないようにするだけでいいと思います |
Syntax.js | github_2023 | javascript | 47 | williamtroup | github-advanced-security[bot] | @@ -600,154 +764,154 @@
}
function renderElementStringPatternVariables(e, t, n) {
if (t !== null) {
- const i = t.length;
- for (let r = 0; r < i; r++) {
- const i = t[r];
- const o = i.split("\n");
- const a = o.length;
- const s = a === 1 ? "string" : "multi-line-string";
- for (let t = 0; t < a; t++) {
+ const r = t.length;
+ for (let i = 0; i < r; i++) {
+ const r = t[i];
+ const o = r.split("\n");
+ const s = o.length;
+ const l = s === 1 ? "string" : "multi-line-string";
+ for (let t = 0; t < s; t++) {
const n = o[t];
- const i = `$S{${_cached_Strings_Count.toString()}}`;
- _cached_Strings[i] = `<span class="${s}">${n}</span>`;
+ const r = `$S{${_cached_Strings_Count.toString()}}`;
+ _cached_Strings[r] = `<span class="${l}">${n}</span>`;
_cached_Strings_Count++;
- e = e.replace(n, i);
+ e = e.replace(n, r);
}
- fireCustomTriggerEvent(n.events.onStringRender, i);
+ Trigger.customEvent(n.events.onStringRender, r);
}
}
return e;
}
function renderElementKeywords(e, t, n) {
- const i = Data.getDefaultStringOrArray(t.keywords, []);
- const r = i.length;
+ const r = Default.getStringOrArray(t.keywords, []);
+ const i = r.length;
const o = t.caseSensitive;
- const a = getKeywordCasing(t.keywordsCasing);
- Data.String.sortArrayOfStringByLength(i);
- for (let s = 0; s < r; s++) {
- const r = i[s];
- const l = getDisplayTextTestCasing(r, a);
- const u = `KW${_cached_Keywords_Count.toString()};`;
- let c = null;
- const g = o ? "g" : "gi";
- const d = new RegExp(getWordRegEx(r, t), g);
+ const s = getKeywordCasing(t.keywordsCasing);
+ Str.sortArrayOfStringByLength(r);
+ for (let l = 0; l < i; l++) {
+ const i = r[l];
+ const a = getDisplayTextTestCasing(i, s);
+ const c = `KW${_cached_Keywords_Count.toString()};`;
+ let u = null;
+ const d = o ? "g" : "gi";
+ const g = new RegExp(getWordRegEx(i, t), d);
if (n.highlightKeywords) {
if (Is.definedFunction(n.events.onKeywordClicked)) {
- c = `<span class="keyword-clickable">${l}</span>`;
- e = e.replace(d, u);
+ u = `<span class="keyword-clickable">${a}</span>`;
+ e = e.replace(g, c);
} else {
- c = `<span class="keyword">${l}</span>`;
- e = e.replace(d, u);
+ u = `<span class="keyword">${a}</span>`;
+ e = e.replace(g, c);
}
} else {
if (Is.definedFunction(n.events.onKeywordClicked)) {
- c = `<span class="no-highlight-keyword-clickable">${l}</span>`;
- e = e.replace(d, u);
+ u = `<span class="no-highlight-keyword-clickable">${a}</span>`;
+ e = e.replace(g, c);
}
}
- _cached_Keywords[u] = c;
+ _cached_Keywords[c] = u;
_cached_Keywords_Count++;
- fireCustomTriggerEvent(n.events.onKeywordRender, r);
+ Trigger.customEvent(n.events.onKeywordRender, i);
}
return e;
}
function replaceMarkUpKeywords(e, t, n) {
- const i = Data.getDefaultStringOrArray(t.keywords, []);
- const r = t.caseSensitive;
+ const r = Default.getStringOrArray(t.keywords, []);
+ const i = t.caseSensitive;
const o = getKeywordCasing(t.keywordsCasing);
- const a = /(<([^>]+)>)/gi;
- const s = r ? "g" : "gi";
- let l = a.exec(e);
- while (l) {
- if (l.index === a.lastIndex) {
- a.lastIndex++;
+ const s = /(<([^>]+)>)/gi;
+ const l = i ? "g" : "gi";
+ let a = s.exec(e);
+ while (a) {
+ if (a.index === s.lastIndex) {
+ s.lastIndex++;
}
- let r = l[0];
- r = r.replace("</", "").replace("<", "").replace(">", "");
- r = r.split(" ")[0];
- if (i.indexOf(r) > -1) {
- const i = `KW${_cached_Keywords_Count.toString()};`;
- const a = new RegExp(getWordRegEx(r, t), s);
- let l = null;
- let u = getDisplayTextTestCasing(r, o);
+ let i = a[0];
+ i = i.replace("</", "").replace("<", "").replace(">", ""); | ## Incomplete string escaping or encoding
This replaces only the first occurrence of ">".
[Show more details](https://github.com/williamtroup/Syntax.js/security/code-scanning/18) |
Syntax.js | github_2023 | javascript | 46 | williamtroup | github-advanced-security[bot] | @@ -0,0 +1,1198 @@
+var __getOwnPropNames = Object.getOwnPropertyNames;
+
+var __esm = (e, t) => function n() {
+ return e && (t = (0, e[__getOwnPropNames(e)[0]])(e = 0)), t;
+};
+
+var __commonJS = (e, t) => function n() {
+ return t || (0, e[__getOwnPropNames(e)[0]])((t = {
+ exports: {}
+ }).exports, t), t.exports;
+};
+
+var Constants;
+
+var init_constant = __esm({
+ "src/ts/constant.ts"() {
+ "use strict";
+ (e => {
+ e.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE = "data-syntax-language";
+ e.SYNTAX_JS_ATTRIBUTE_NAME_OPTIONS = "data-syntax-options";
+ e.SYNTAX_JS_ATTRIBUTE_NAME_BUTTONS = "data-syntax-buttons";
+ e.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS = "data-syntax-tab-contents";
+ })(Constants || (Constants = {}));
+ }
+});
+
+var init_enum = __esm({
+ "src/ts/enum.ts"() {
+ "use strict";
+ }
+});
+
+var Is;
+
+var init_is = __esm({
+ "src/ts/is.ts"() {
+ "use strict";
+ init_enum();
+ (e => {
+ function t(e) {
+ return e !== null && e !== void 0 && e.toString() !== "";
+ }
+ e.defined = t;
+ function n(e) {
+ return t(e) && typeof e === "object";
+ }
+ e.definedObject = n;
+ function i(e) {
+ return t(e) && typeof e === "boolean";
+ }
+ e.definedBoolean = i;
+ function r(e) {
+ return t(e) && typeof e === "string";
+ }
+ e.definedString = r;
+ function o(e) {
+ return t(e) && typeof e === "function";
+ }
+ e.definedFunction = o;
+ function a(e) {
+ return t(e) && typeof e === "number";
+ }
+ e.definedNumber = a;
+ function s(e) {
+ return n(e) && e instanceof Array;
+ }
+ e.definedArray = s;
+ })(Is || (Is = {}));
+ }
+});
+
+var Data;
+
+var init_data = __esm({
+ "src/ts/data.ts"() {
+ "use strict";
+ init_enum();
+ init_is();
+ (e => {
+ let t;
+ (e => {
+ function t() {
+ const e = [];
+ for (let t = 0; t < 32; t++) {
+ if (t === 8 || t === 12 || t === 16 || t === 20) {
+ e.push("-");
+ }
+ const n = Math.floor(Math.random() * 16).toString(16);
+ e.push(n);
+ }
+ return e.join("");
+ }
+ e.newGuid = t;
+ function n(e, t) {
+ let n = e;
+ while (n.length < t) {
+ n = `0${n}`;
+ }
+ return n;
+ }
+ e.padNumber = n;
+ function i(e) {
+ e = e.replace(/</g, "<");
+ e = e.replace(/>/g, ">");
+ return e;
+ }
+ e.encodeMarkUpCharacters = i;
+ function r(e) {
+ e.sort((function(e, t) {
+ return t.length - e.length;
+ }));
+ }
+ e.sortArrayOfStringByLength = r;
+ })(t = e.String || (e.String = {}));
+ function n(e, t) {
+ return typeof e === "string" ? e : t;
+ }
+ e.getDefaultAnyString = n;
+ function i(e, t) {
+ return Is.definedString(e) ? e : t;
+ }
+ e.getDefaultString = i;
+ function r(e, t) {
+ return Is.definedBoolean(e) ? e : t;
+ }
+ e.getDefaultBoolean = r;
+ function o(e, t) {
+ return Is.definedNumber(e) ? e : t;
+ }
+ e.getDefaultNumber = o;
+ function a(e, t) {
+ return Is.definedFunction(e) ? e : t;
+ }
+ e.getDefaultFunction = a;
+ function s(e, t) {
+ return Is.definedArray(e) ? e : t;
+ }
+ e.getDefaultArray = s;
+ function l(e, t) {
+ return Is.definedObject(e) ? e : t;
+ }
+ e.getDefaultObject = l;
+ function u(e, t) {
+ let n = t;
+ if (Is.definedString(e)) {
+ const i = e.toString().split(" ");
+ if (i.length === 0) {
+ e = t;
+ } else {
+ n = i;
+ }
+ } else {
+ n = s(e, t);
+ }
+ return n;
+ }
+ e.getDefaultStringOrArray = u;
+ function c(e) {
+ const t = JSON.stringify(e);
+ const n = JSON.parse(t);
+ return n;
+ }
+ e.getClonedObject = c;
+ })(Data || (Data = {}));
+ }
+});
+
+var DomElement;
+
+var init_dom = __esm({
+ "src/ts/dom.ts"() {
+ "use strict";
+ init_enum();
+ init_is();
+ (e => {
+ function t(e, t = "") {
+ const n = e.toLowerCase();
+ const i = n === "text";
+ let r = i ? document.createTextNode("") : document.createElement(n);
+ if (Is.defined(t)) {
+ r.className = t;
+ }
+ return r;
+ }
+ e.create = t;
+ function n(e, n, i) {
+ if (!i.allowHtmlInTextDisplay) {
+ const i = t("div");
+ i.innerHTML = n;
+ e.innerText = i.innerText;
+ } else {
+ e.innerHTML = n;
+ }
+ }
+ e.setNodeText = n;
+ function i(e) {
+ var t = document.createRange();
+ t.selectNode(e);
+ window.getSelection().removeAllRanges();
+ window.getSelection().addRange(t);
+ }
+ e.selectTextInElement = i;
+ })(DomElement || (DomElement = {}));
+ }
+});
+
+var require_syntax = __commonJS({
+ "src/syntax.ts"(exports, module) {
+ init_constant();
+ init_data();
+ init_is();
+ init_enum();
+ init_dom();
+ (() => {
+ let _configuration = {};
+ let _aliases_Rules = {};
+ let _elements = [];
+ let _elements_Original = {};
+ let _cached_Keywords = {};
+ let _cached_Keywords_Count = 0;
+ let _cached_Values = {};
+ let _cached_Values_Count = 0;
+ let _cached_Attributes = {};
+ let _cached_Attributes_Count = 0;
+ let _cached_Strings = {};
+ let _cached_Strings_Count = 0;
+ let _cached_Comments = {};
+ let _cached_Comments_Count = 0;
+ let _languages = {};
+ function render() {
+ const e = _configuration.highlightAllDomElementTypes;
+ const t = e.length;
+ for (let n = 0; n < t; n++) {
+ const t = document.getElementsByTagName(e[n]);
+ const i = [].slice.call(t);
+ const r = i.length;
+ if (r > 0) {
+ fireCustomTriggerEvent(_configuration.events.onBeforeRender);
+ }
+ for (let e = 0; e < r; e++) {
+ const t = i[e];
+ let n = false;
+ if (t.hasAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE) && t.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE).toLowerCase() === "tabbed") {
+ const e = [].slice.call(t.children);
+ const i = e.length;
+ const r = [];
+ const o = [];
+ t.removeAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE);
+ t.className = t.className === "" ? "syntax-highlight" : `${t.className} syntax-highlight`;
+ t.innerHTML = "";
+ const a = DomElement.create("div", "code custom-scroll-bars");
+ t.appendChild(a);
+ const s = DomElement.create("div", "tabs");
+ a.appendChild(s);
+ for (let t = 0; t < i; t++) {
+ const i = renderElement(e[t], a);
+ if (!i.rendered) {
+ n = true;
+ } else {
+ renderTab(s, r, o, i, t, i.tabBindingOptions, i.syntaxLanguage);
+ }
+ }
+ } else {
+ if (!renderElement(t).rendered) {
+ n = true;
+ }
+ }
+ if (n) {
+ break;
+ }
+ }
+ if (r > 0) {
+ fireCustomTriggerEvent(_configuration.events.onAfterRender);
+ }
+ }
+ }
// Builds the clickable tab button for one rendered code block and wires the
// click behavior that toggles the whole tab group.  Positional parameters
// (originally minified): tabs container, shared tab-button array, shared
// tab-panel array, renderElement() result, tab index, parsed tab binding
// options (may be undefined), and the tab's language name.
function renderTab(tabsContainer, tabButtons, tabPanels, renderResult, tabIndex, tabBindingOptions, syntaxLanguage) {
    const button = DomElement.create("button", "tab");
    tabsContainer.appendChild(button);
    DomElement.setNodeText(button, renderResult.tabTitle, _configuration);

    // Track the button and its panel so a click can update the entire group.
    tabButtons.push(button);
    tabPanels.push(renderResult.tabContents);

    button.onclick = () => {
        if (button.className === "tab-active") {
            return; // Already the selected tab - nothing to do.
        }
        // Deactivate every tab and hide every panel ...
        tabButtons.forEach(otherButton => {
            otherButton.className = "tab";
        });
        tabPanels.forEach(panel => {
            panel.style.display = "none";
        });
        // ... then activate this one.
        button.className = "tab-active";
        renderResult.tabContents.style.display = "flex";
        if (Is.definedObject(tabBindingOptions)) {
            fireCustomTriggerEvent(tabBindingOptions.events.onOpen, syntaxLanguage);
        }
    };

    // Only the first tab starts out visible and selected.
    if (tabIndex > 0) {
        renderResult.tabContents.style.display = "none";
    } else {
        button.className = "tab-active";
    }
}
+ // Renders syntax highlighting for a single element.  Returns a result object:
+ //   rendered          - false when rendering failed (missing language
+ //                       attribute, unsupported language, unparsable options in
+ //                       safe mode, or no code inside the element),
+ //   syntaxLanguage    - value of the data-syntax-language attribute,
+ //   tabTitle          - title to display when the element is a tab child,
+ //   tabBindingOptions - parsed data-syntax-tab-contents options (if any),
+ //   tabContents       - the "tab-contents" div that received the output.
+ // Parameters: e = the element to highlight; t = optional existing code
+ // container (supplied when e is a child of a "tabbed" container, built here
+ // when e stands alone).
+ function renderElement(e, t = null) {
+ const n = {};
+ n.rendered = true;
+ if (Is.defined(e) && e.hasAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE) && (!e.hasAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS) || Is.defined(t))) {
+ n.syntaxLanguage = e.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE);
+ if (Is.definedString(n.syntaxLanguage)) {
+ // "unknown" is accepted even without registered rules: it renders with no
+ // highlighting, only mark-up character encoding (see below).
+ const i = getLanguage(n.syntaxLanguage);
+ if (Is.defined(i) || n.syntaxLanguage.toLowerCase() === "unknown") {
+ // Parse the per-element JSON options and custom-button definitions.
+ const r = getObjectFromString(e.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_OPTIONS));
+ const o = getObjectFromString(e.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_BUTTONS));
+ if (r.parsed) {
+ if (e.innerHTML.trim() !== "") {
+ // a = raw code HTML; s = resolved binding options;
+ // l = "code came from a <pre> child" flag; u = tab description text.
+ let a = e.innerHTML;
+ const s = getBindingOptions(r.object);
+ let l = false;
+ let u = null;
+ fireCustomTriggerEvent(s.events.onBeforeRenderComplete, e);
+ if (e.children.length > 0 && e.children[0].nodeName.toLowerCase() === "pre") {
+ a = e.children[0].innerHTML;
+ l = true;
+ }
+ // c = trimmed copy of the code, handed to the copy/print/custom buttons.
+ const c = a.trim();
+ let g = null;
+ let d = null;
+ let f = e.id;
+ if (!Is.definedString(f)) {
+ f = Data.String.newGuid();
+ }
+ // Remember the original HTML keyed by id (used to restore the element later).
+ _elements_Original[f] = e.innerHTML;
+ e.removeAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE);
+ e.removeAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_OPTIONS);
+ e.id = f;
+ if (!Is.defined(t)) {
+ // Standalone element: build its own code container shell.
+ e.className = e.className === "" ? "syntax-highlight" : `${e.className} syntax-highlight`;
+ e.innerHTML = "";
+ t = DomElement.create("div", "code custom-scroll-bars");
+ e.appendChild(t);
+ } else {
+ // Tab child: pick the tab title from explicit tab options when present,
+ // otherwise fall back to the friendly language name.
+ if (e.hasAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS) && e.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS).toLowerCase() !== "true") {
+ const t = getObjectFromString(e.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS));
+ if (t.parsed && Is.definedObject(t.object)) {
+ n.tabBindingOptions = getBindingTabContentOptions(t.object);
+ u = n.tabBindingOptions.description;
+ if (Is.definedString(n.tabBindingOptions.title)) {
+ n.tabTitle = n.tabBindingOptions.title;
+ }
+ }
+ } else {
+ n.tabTitle = getFriendlyLanguageName(n.syntaxLanguage);
+ }
+ }
+ n.tabContents = DomElement.create("div", "tab-contents");
+ t.appendChild(n.tabContents);
+ if (Is.definedString(u)) {
+ d = DomElement.create("div", "description");
+ n.tabContents.appendChild(d);
+ DomElement.setNodeText(d, u, _configuration);
+ }
+ if (s.showLineNumbers) {
+ g = DomElement.create("div", "numbers");
+ n.tabContents.appendChild(g);
+ }
+ const m = DomElement.create("div", "syntax");
+ n.tabContents.appendChild(m);
+ renderElementButtons(m, s, n.syntaxLanguage, o, c);
+ if (n.syntaxLanguage.toLowerCase() !== "unknown") {
+ a = renderHTML(a, i, s);
+ } else {
+ a = Data.String.encodeMarkUpCharacters(a);
+ }
+ renderElementCompletedHTML(d, g, m, a, s, l);
+ fireCustomTriggerEvent(s.events.onRenderComplete, e);
+ // NOTE(review): n.tabContents is assigned unconditionally above, so the
+ // first branch here appears unreachable - confirm before relying on it.
+ if (!Is.defined(n.tabContents)) {
+ renderSyntaxCustomTriggers(e, s);
+ } else {
+ renderSyntaxCustomTriggers(n.tabContents, s);
+ }
+ _elements.push(e);
+ // Reset the per-element placeholder caches for the next render.
+ _cached_Keywords = {};
+ _cached_Keywords_Count = 0;
+ _cached_Values = {};
+ _cached_Values_Count = 0;
+ _cached_Attributes = {};
+ _cached_Attributes_Count = 0;
+ _cached_Strings = {};
+ _cached_Strings_Count = 0;
+ _cached_Comments = {};
+ _cached_Comments_Count = 0;
+ } else {
+ n.rendered = logError(_configuration.text.noCodeAvailableToRenderErrorText);
+ }
+ } else {
+ // Unparsable options only fail the render when safe mode is on.
+ n.rendered = !_configuration.safeMode;
+ }
+ } else {
+ n.rendered = logError(_configuration.text.languageNotSupportedErrorText.replace("{{language}}", n.syntaxLanguage));
+ }
+ } else {
+ n.rendered = logError(_configuration.text.attributeNotSetErrorText.replace("{{attribute_name}}", Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE));
+ }
+ }
+ return n;
+ }
// Wires the user-supplied click callbacks to every clickable token class in
// the rendered output, covering both the highlighted and the "no-highlight"
// variants of each token type.
function renderSyntaxCustomTriggers(container, bindingOptions) {
    const wiring = [
        [bindingOptions.events.onKeywordClicked, "keyword-clickable"],
        [bindingOptions.events.onKeywordClicked, "no-highlight-keyword-clickable"],
        [bindingOptions.events.onValueClicked, "value-clickable"],
        [bindingOptions.events.onValueClicked, "no-highlight-value-clickable"],
        [bindingOptions.events.onAttributeClicked, "attribute-clickable"],
        [bindingOptions.events.onAttributeClicked, "no-highlight-attribute-clickable"]
    ];
    for (const [handler, className] of wiring) {
        renderElementClickEvents(container, handler, className);
    }
}
// Runs the full highlighting pipeline over one code block's inner HTML:
//   1. encode mark-up characters (non-markup languages only),
//   2. stash comments and string literals behind "$C{n}"/"$S{n}" placeholders,
//   3. wrap keywords / values / attributes in placeholders,
//   4. re-encode, then expand every placeholder back into its <span> markup.
// Parameters: html = the code's inner HTML, language = the language rules,
// bindingOptions = the resolved per-element options.  Returns the final HTML.
function renderHTML(html, language, bindingOptions) {
    if (!language.isMarkUp) {
        html = Data.String.encodeMarkUpCharacters(html);
    }
    if (bindingOptions.highlightComments) {
        html = renderElementMultiLineCommentVariables(html, language, bindingOptions);
        html = renderElementCommentVariables(html, language, bindingOptions);
    }
    if (bindingOptions.highlightStrings) {
        html = renderElementStringPatternVariables(html, html.match(/"((?:\\.|[^"\\])*)"/g), bindingOptions);
        // Skipped when the language uses ' as its comment token (e.g. VB-style),
        // since those runs were already consumed as comments above.
        if (language.comment !== "'") {
            // BUG FIX: the single-quote pattern previously excluded '"' instead of
            // "'" inside its character class (a copy/paste of the double-quote
            // pattern above), which let one match greedily span several adjacent
            // single-quoted strings (e.g. 'a' + 'b' matched as a single literal).
            html = renderElementStringPatternVariables(html, html.match(/'((?:\\.|[^'\\])*)'/g), bindingOptions);
        }
    }
    if (!language.isMarkUp) {
        html = renderElementKeywords(html, language, bindingOptions);
    } else {
        html = replaceMarkUpKeywords(html, language, bindingOptions);
    }
    html = renderElementValues(html, language, bindingOptions);
    if (language.isMarkUp) {
        html = renderElementAttributes(html, language, bindingOptions);
    }
    html = Data.String.encodeMarkUpCharacters(html);
    if (bindingOptions.highlightComments) {
        html = renderElementCommentsFromVariables(html, language);
    }
    if (bindingOptions.highlightStrings) {
        html = renderElementStringQuotesFromVariables(html);
    }
    html = renderElementVariables(html, _cached_Keywords);
    html = renderElementVariables(html, _cached_Values);
    if (language.isMarkUp) {
        html = renderElementVariables(html, _cached_Attributes);
    }
    return html;
}
+ // Builds the toolbar for one code block: user-defined custom buttons, the
+ // copy and print buttons, the language label, and - when there are more
+ // buttons than t.maximumButtons - an opener button that shows/hides them.
+ // Parameters: e = the "syntax" div, t = binding options, n = language name,
+ // i = parsed custom-button definitions, r = the trimmed code text.
+ function renderElementButtons(e, t, n, i, r) {
+ if (t.showLanguageLabel || t.showCopyButton || t.showPrintButton || i.parsed) {
+ const o = DomElement.create("div", "buttons");
+ const a = [];
+ e.appendChild(o);
+ if (i.parsed && Is.definedArray(i.object)) {
+ // Custom buttons: only definitions with both text and an onClick handler count.
+ const e = i.object;
+ const n = e.length;
+ for (let i = 0; i < n; i++) {
+ const n = e[i];
+ if (Is.defined(n.text) && Is.definedFunction(n.events.onClick)) {
+ renderElementButton(n, a, o, r, t);
+ }
+ }
+ }
+ if (t.showCopyButton) {
+ const e = DomElement.create("button", "button");
+ e.style.display = t.buttonsVisible ? "inline-block" : "none";
+ o.appendChild(e);
+ DomElement.setNodeText(e, _configuration.text.copyButtonText, _configuration);
+ e.onclick = function() {
+ // Copies the raw (trimmed) code text, not the highlighted markup.
+ navigator.clipboard.writeText(r);
+ fireCustomTriggerEvent(t.events.onCopy, r);
+ };
+ a.push(e);
+ }
+ if (t.showPrintButton) {
+ const i = DomElement.create("button", "button");
+ i.style.display = t.buttonsVisible ? "inline-block" : "none";
+ o.appendChild(i);
+ DomElement.setNodeText(i, _configuration.text.printButtonText, _configuration);
+ i.onclick = function() {
+ // Opens a popup, clones the syntax div (dropping its first child - the
+ // buttons bar built above) and prints it as a minimal standalone page.
+ // NOTE(review): document.write is deprecated in modern browsers - confirm
+ // the supported-browser matrix before relying on it.
+ const i = window.open("", "PRINT", "height=400,width=600");
+ const r = e.cloneNode(true);
+ const o = DomElement.create("div");
+ r.removeChild(r.children[0]);
+ o.innerHTML = getFriendlyLanguageName(n);
+ i.document.write("<html>");
+ i.document.write("<head>");
+ i.document.write("<title>");
+ i.document.write(o.innerHTML);
+ i.document.write("</title>");
+ i.document.write("</head>");
+ i.document.write("<body>");
+ i.document.write("<code>");
+ i.document.write("<pre>");
+ i.document.write(r.innerHTML);
+ i.document.write("</pre>");
+ i.document.write("</code>");
+ i.document.write("</body>");
+ i.document.write("</html>");
+ i.document.close();
+ i.focus();
+ i.print();
+ i.close();
+ fireCustomTriggerEvent(t.events.onPrint, r.innerHTML);
+ };
+ a.push(i);
+ }
+ if (t.showLanguageLabel) {
+ const e = DomElement.create("div", "language-label");
+ o.appendChild(e);
+ DomElement.setNodeText(e, getFriendlyLanguageName(n, t.languageLabelCasing), _configuration);
+ }
+ const s = a.length;
+ if (s > t.maximumButtons) {
+ // Too many buttons: prepend a toggle that shows/hides the whole set.
+ const e = DomElement.create("button", "button button-opener");
+ e.innerText = t.buttonsVisible ? _configuration.text.buttonsCloserText : _configuration.text.buttonsOpenerText;
+ o.insertBefore(e, o.children[0]);
+ e.onclick = function() {
+ // The toggle's current label tells us whether the buttons are open.
+ const n = e.innerText === _configuration.text.buttonsCloserText;
+ for (let e = 0; e < s; e++) {
+ a[e].style.display = n ? "none" : "inline-block";
+ }
+ e.innerText = n ? _configuration.text.buttonsOpenerText : _configuration.text.buttonsCloserText;
+ if (n) {
+ fireCustomTriggerEvent(t.events.onButtonsClosed);
+ } else {
+ fireCustomTriggerEvent(t.events.onButtonsOpened);
+ }
+ };
+ } else if (!t.buttonsVisible && s <= t.maximumButtons) {
+ // NOTE(review): with no opener available, this forces the buttons visible
+ // even though buttonsVisible is false - confirm this override is intended.
+ for (let e = 0; e < s; e++) {
+ a[e].style.display = "inline-block";
+ }
+ }
+ }
+ }
// Creates one custom toolbar button from a user-supplied definition and
// registers it in the shared button list.  Positional parameters (originally
// minified): button definition, shared button array, buttons container, the
// trimmed code text handed to the click handler, and the binding options.
function renderElementButton(buttonSetting, trackedButtons, container, codeText, bindingOptions) {
    const button = DomElement.create("button", "button");
    button.style.display = bindingOptions.buttonsVisible ? "inline-block" : "none";
    container.appendChild(button);
    DomElement.setNodeText(button, buttonSetting.text, _configuration);

    // Hand the raw code text to the user's click handler.
    button.onclick = () => {
        buttonSetting.events.onClick(codeText);
    };

    // Optional extra class on top of the standard "button" class.
    if (Is.defined(buttonSetting.className)) {
        button.className += " " + buttonSetting.className;
    }
    trackedButtons.push(button);
}
// Replaces every single-line comment (from the language's comment token to the
// end of its line) with a "$C{n}" placeholder so later highlighting passes
// cannot touch the comment text; the real <span> markup is cached in
// _cached_Comments and restored by a later pass.  Returns the updated HTML.
function renderElementCommentVariables(html, language, bindingOptions) {
    const commentToken = language.comment;
    if (Is.definedString(commentToken)) {
        // BUG FIX: the token was previously interpolated into the RegExp
        // unescaped, so a comment token containing regex metacharacters
        // (e.g. "*" or "(") would throw or match incorrectly.  Escape all
        // special characters before building the pattern.
        const escapedToken = commentToken.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
        const matches = html.match(new RegExp(`${escapedToken}.*`, "g"));
        if (matches !== null) {
            for (const comment of matches) {
                const placeholder = `$C{${_cached_Comments_Count.toString()}}`;
                _cached_Comments[placeholder] = `<span class="comment">${comment}</span>`;
                _cached_Comments_Count++;
                // Replace the first occurrence only, in match order, so duplicate
                // comments each receive their own placeholder.
                html = html.replace(comment, placeholder);
                fireCustomTriggerEvent(bindingOptions.events.onCommentRender, comment);
            }
        }
    }
    return html;
}
+ // Replaces every multi-line comment (delimited by the language's
+ // multiLineComment pair [open, close]) with per-line "$C{n}" placeholders so
+ // later passes cannot alter the comment text; the real <span> markup is
+ // cached in _cached_Comments and restored by a later pass.  Parameters:
+ // e = code HTML, t = language rules, n = binding options.  Returns the
+ // updated HTML.
+ function renderElementMultiLineCommentVariables(e, t, n) {
+ const i = t.multiLineComment;
+ if (Is.definedArray(i) && i.length === 2) {
+ // t (shadowed) = index of the next opening delimiter; r = index of the
+ // matching closing delimiter.  The scan ends when either indexOf misses.
+ let t = 0;
+ let r = 0;
+ while (t >= 0 && r >= 0) {
+ t = e.indexOf(i[0], r);
+ if (t > -1) {
+ r = e.indexOf(i[1], t + i[0].length);
+ if (r > -1) {
+ // o = full comment including both delimiters, cached one line at a time
+ // so the line-number column stays aligned.
+ const o = e.substring(t, r + i[1].length);
+ const a = o.split("\n");
+ const s = a.length;
+ const l = s === 1 ? "comment" : "multi-line-comment";
+ for (let t = 0; t < s; t++) {
+ const n = `$C{${_cached_Comments_Count.toString()}}`;
+ const i = a[t];
+ _cached_Comments[n] = `<span class="${l}">${i}</span>`;
+ _cached_Comments_Count++;
+ // NOTE(review): replace() shortens e while r still indexes the
+ // pre-replacement string; later searches resume from that stale offset,
+ // which looks like it could skip a closely following comment - confirm.
+ e = e.replace(i, n);
+ }
+ fireCustomTriggerEvent(n.events.onCommentRender, o);
+ }
+ }
+ }
+ }
+ return e;
+ }
// Swaps every matched string literal for "$S{n}" placeholders so the keyword
// and value passes cannot alter text inside strings; the real <span> markup is
// cached in _cached_Strings and restored later.  Multi-line strings are cached
// one line at a time so the line-number column stays aligned.  Positional
// parameters (originally minified): the code HTML, the regex match array
// (may be null), and the binding options.  Returns the updated HTML.
function renderElementStringPatternVariables(html, matches, bindingOptions) {
    if (matches !== null) {
        for (const match of matches) {
            const matchLines = match.split("\n");
            const className = matchLines.length === 1 ? "string" : "multi-line-string";
            for (const line of matchLines) {
                const placeholder = `$S{${_cached_Strings_Count.toString()}}`;
                _cached_Strings[placeholder] = `<span class="${className}">${line}</span>`;
                _cached_Strings_Count++;
                // First occurrence only, in match order.
                html = html.replace(line, placeholder);
            }
            // Notify with the complete original match, not the per-line pieces.
            fireCustomTriggerEvent(bindingOptions.events.onStringRender, match);
        }
    }
    return html;
}
+ // Replaces each language keyword with a "KWn;" placeholder whose cached
+ // value is the <span> markup that a later pass substitutes back in.
+ // Keywords are sorted longest-first so longer keywords win over their
+ // prefixes.  Parameters: e = code HTML, t = language rules, n = binding
+ // options.  Returns the updated HTML.
+ function renderElementKeywords(e, t, n) {
+ const i = Data.getDefaultStringOrArray(t.keywords, []);
+ const r = i.length;
+ const o = t.caseSensitive;
+ const a = getKeywordCasing(t.keywordsCasing);
+ Data.String.sortArrayOfStringByLength(i);
+ for (let s = 0; s < r; s++) {
+ // r (shadowed) = the keyword; l = its display text after casing rules;
+ // u = this keyword's placeholder token; c = the cached replacement markup.
+ const r = i[s];
+ const l = getDisplayTextTestCasing(r, a);
+ const u = `KW${_cached_Keywords_Count.toString()};`;
+ let c = null;
+ const g = o ? "g" : "gi";
+ const d = new RegExp(getWordRegEx(r, t), g);
+ if (n.highlightKeywords) {
+ // Clickable keywords get a distinct class so click events can be wired up.
+ if (Is.definedFunction(n.events.onKeywordClicked)) {
+ c = `<span class="keyword-clickable">${l}</span>`;
+ e = e.replace(d, u);
+ } else {
+ c = `<span class="keyword">${l}</span>`;
+ e = e.replace(d, u);
+ }
+ } else {
+ // Highlighting off: only wrap when a click handler still needs a target.
+ if (Is.definedFunction(n.events.onKeywordClicked)) {
+ c = `<span class="no-highlight-keyword-clickable">${l}</span>`;
+ e = e.replace(d, u);
+ }
+ }
+ // NOTE(review): when neither branch replaced anything, c stays null but is
+ // still cached and onKeywordRender still fires - confirm this is intended.
+ _cached_Keywords[u] = c;
+ _cached_Keywords_Count++;
+ fireCustomTriggerEvent(n.events.onKeywordRender, r);
+ }
+ return e;
+ }
+ function replaceMarkUpKeywords(e, t, n) {
+ const i = Data.getDefaultStringOrArray(t.keywords, []);
+ const r = t.caseSensitive;
+ const o = getKeywordCasing(t.keywordsCasing);
+ const a = /(<([^>]+)>)/gi;
+ const s = r ? "g" : "gi";
+ let l = a.exec(e);
+ while (l) {
+ if (l.index === a.lastIndex) {
+ a.lastIndex++;
+ }
+ let r = l[0];
+ r = r.replace("</", "").replace("<", "").replace(">", ""); | ## Incomplete string escaping or encoding
This replaces only the first occurrence of ">", because `String.prototype.replace` with a plain-string pattern substitutes a single match. To strip every "</", "<", and ">" token, use a global regular expression (e.g. `r.replace(/<\/|<|>/g, "")`) or `String.prototype.replaceAll`.
[Show more details](https://github.com/williamtroup/Syntax.js/security/code-scanning/9)
Syntax.js | github_2023 | javascript | 46 | williamtroup | github-advanced-security[bot] | @@ -1,980 +1,1146 @@
-/*! Syntax.js v2.6.0 | (c) Bunoon 2024 | MIT License */
-(function() {
- var _parameter_Document = null, _parameter_Navigator = null, _parameter_Window = null, _parameter_Math = null, _parameter_Json = null, _public = {}, _configuration = {}, _string = {empty:"", space:" ", newLine:"\n"}, _aliases_Rules = {}, _elements_Type = {}, _elements = [], _elements_Original = {}, _cached_Keywords = {}, _cached_Keywords_Count = 0, _cached_Values = {}, _cached_Values_Count = 0, _cached_Attributes = {}, _cached_Attributes_Count = 0, _cached_Strings = {}, _cached_Strings_Count =
- 0, _cached_Comments = {}, _cached_Comments_Count = 0, _languages = {}, _languages_Unknown = "unknown", _languages_Tabbed = "tabbed", _attribute_Name_Language = "data-syntax-language", _attribute_Name_Options = "data-syntax-options", _attribute_Name_Buttons = "data-syntax-buttons", _attribute_Name_TabContents = "data-syntax-tab-contents";
- function render() {
- var tagTypes = _configuration.highlightAllDomElementTypes, tagTypesLength = tagTypes.length;
- for (var tagTypeIndex = 0; tagTypeIndex < tagTypesLength; tagTypeIndex++) {
- var domElements = _parameter_Document.getElementsByTagName(tagTypes[tagTypeIndex]), elements = [].slice.call(domElements), elementsLength = elements.length;
- if (elementsLength > 0) {
- fireCustomTrigger(_configuration.onBeforeRender);
- }
- for (var elementIndex = 0; elementIndex < elementsLength; elementIndex++) {
- var element = elements[elementIndex], elementBreak = false;
- if (element.hasAttribute(_attribute_Name_Language) && element.getAttribute(_attribute_Name_Language).toLowerCase() === _languages_Tabbed) {
- var divElements = [].slice.call(element.children), divElementsLength = divElements.length, tabElements = [], tabContentElements = [];
- element.removeAttribute(_attribute_Name_Language);
- element.className = element.className === _string.empty ? "syntax-highlight" : element.className + " syntax-highlight";
- element.innerHTML = _string.empty;
- var codeContainer = createElement("div", "code custom-scroll-bars");
- element.appendChild(codeContainer);
- var tabs = createElement("div", "tabs");
- codeContainer.appendChild(tabs);
- for (var divElementIndex = 0; divElementIndex < divElementsLength; divElementIndex++) {
- var renderResult = renderElement(divElements[divElementIndex], codeContainer);
- if (!renderResult.rendered) {
- elementBreak = true;
+"use strict";
+
var Constants;

(ns => {
    // Names of the data-* attributes Syntax.js reads from the elements it highlights.
    Object.assign(ns, {
        SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE: "data-syntax-language",
        SYNTAX_JS_ATTRIBUTE_NAME_OPTIONS: "data-syntax-options",
        SYNTAX_JS_ATTRIBUTE_NAME_BUTTONS: "data-syntax-buttons",
        SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS: "data-syntax-tab-contents"
    });
})(Constants || (Constants = {}));
+
var Is;

(ns => {
    // A value is "defined" when it is neither null nor undefined and its string
    // form is non-empty.  NOTE: this means "" and [] count as NOT defined, while
    // 0 and false count as defined.
    const hasValue = value => value !== null && value !== void 0 && value.toString() !== "";

    // Builds a checker that also requires a specific typeof result.
    const isType = expected => value => hasValue(value) && typeof value === expected;

    ns.defined = hasValue;
    ns.definedObject = isType("object");
    ns.definedBoolean = isType("boolean");
    ns.definedString = isType("string");
    ns.definedFunction = isType("function");
    ns.definedNumber = isType("number");
    // Arrays must pass the object check first, so [] (empty string form) fails.
    ns.definedArray = value => ns.definedObject(value) && value instanceof Array;
})(Is || (Is = {}));
+
+var Data;
+
+(e => {
+ let t;
+ (e => {
+ function t() {
+ const e = [];
+ for (let t = 0; t < 32; t++) {
+ if (t === 8 || t === 12 || t === 16 || t === 20) {
+ e.push("-");
+ }
+ const n = Math.floor(Math.random() * 16).toString(16);
+ e.push(n);
+ }
+ return e.join("");
+ }
+ e.newGuid = t;
+ function n(e, t) {
+ let n = e;
+ while (n.length < t) {
+ n = `0${n}`;
+ }
+ return n;
+ }
+ e.padNumber = n;
+ function i(e) {
+ e = e.replace(/</g, "<");
+ e = e.replace(/>/g, ">");
+ return e;
+ }
+ e.encodeMarkUpCharacters = i;
+ function o(e) {
+ e.sort((function(e, t) {
+ return t.length - e.length;
+ }));
+ }
+ e.sortArrayOfStringByLength = o;
+ })(t = e.String || (e.String = {}));
+ function n(e, t) {
+ return typeof e === "string" ? e : t;
+ }
+ e.getDefaultAnyString = n;
+ function i(e, t) {
+ return Is.definedString(e) ? e : t;
+ }
+ e.getDefaultString = i;
+ function o(e, t) {
+ return Is.definedBoolean(e) ? e : t;
+ }
+ e.getDefaultBoolean = o;
+ function r(e, t) {
+ return Is.definedNumber(e) ? e : t;
+ }
+ e.getDefaultNumber = r;
+ function a(e, t) {
+ return Is.definedFunction(e) ? e : t;
+ }
+ e.getDefaultFunction = a;
+ function l(e, t) {
+ return Is.definedArray(e) ? e : t;
+ }
+ e.getDefaultArray = l;
+ function s(e, t) {
+ return Is.definedObject(e) ? e : t;
+ }
+ e.getDefaultObject = s;
+ function u(e, t) {
+ let n = t;
+ if (Is.definedString(e)) {
+ const i = e.toString().split(" ");
+ if (i.length === 0) {
+ e = t;
} else {
- renderTab(tabs, tabElements, tabContentElements, renderResult, divElementIndex, renderResult.tabBindingOptions, renderResult.syntaxLanguage);
+ n = i;
}
- }
} else {
- if (!renderElement(element).rendered) {
- elementBreak = true;
- }
- }
- if (elementBreak) {
- break;
- }
- }
- if (elementsLength > 0) {
- fireCustomTrigger(_configuration.onAfterRender);
- }
- }
- }
- function renderTab(tabs, tabElements, tabContentElements, renderResult, divElementIndex, tabBindingOptions, syntaxLanguage) {
- var tab = createElement("button", "tab");
- tabs.appendChild(tab);
- setNodeText(tab, renderResult.tabTitle);
- tabElements.push(tab);
- tabContentElements.push(renderResult.tabContents);
- tab.onclick = function() {
- if (tab.className !== "tab-active") {
- var tabElementsLength = tabElements.length, tabContentElementsLength = tabContentElements.length;
- for (var tabElementsIndex = 0; tabElementsIndex < tabElementsLength; tabElementsIndex++) {
- tabElements[tabElementsIndex].className = "tab";
- }
- for (var tabContentElementsIndex = 0; tabContentElementsIndex < tabContentElementsLength; tabContentElementsIndex++) {
- tabContentElements[tabContentElementsIndex].style.display = "none";
- }
- tab.className = "tab-active";
- renderResult.tabContents.style.display = "flex";
- if (isDefinedObject(tabBindingOptions)) {
- fireCustomTrigger(tabBindingOptions.onOpen, syntaxLanguage);
- }
- }
- };
- if (divElementIndex > 0) {
- renderResult.tabContents.style.display = "none";
- } else {
- tab.className = "tab-active";
- }
- }
- function renderElement(element, codeContainer) {
- var result = true, tabTitle = null, tabContents = null, tabBindingOptions = null, syntaxLanguage = null;
- if (isDefined(element) && element.hasAttribute(_attribute_Name_Language) && (!element.hasAttribute(_attribute_Name_TabContents) || isDefined(codeContainer))) {
- syntaxLanguage = element.getAttribute(_attribute_Name_Language);
- if (isDefinedString(syntaxLanguage)) {
- var language = getLanguage(syntaxLanguage);
- if (isDefined(language) || syntaxLanguage.toLowerCase() === _languages_Unknown) {
- var syntaxOptionsParsed = getObjectFromString(element.getAttribute(_attribute_Name_Options)), syntaxButtonsParsed = getObjectFromString(element.getAttribute(_attribute_Name_Buttons));
- if (syntaxOptionsParsed.parsed) {
- if (element.innerHTML.trim() !== _string.empty) {
- var innerHTML = element.innerHTML, syntaxOptions = getBindingOptions(syntaxOptionsParsed.result), isPreFormatted = false, descriptionText = null;
- fireCustomTrigger(syntaxOptions.onBeforeRenderComplete, element);
- if (element.children.length > 0 && element.children[0].nodeName.toLowerCase() === "pre") {
- innerHTML = element.children[0].innerHTML;
- isPreFormatted = true;
- }
- var innerHTMLCopy = innerHTML.trim(), numbers = null, description = null, elementId = element.id;
- if (!isDefinedString(elementId)) {
- elementId = newGuid();
- }
- _elements_Original[elementId] = element.innerHTML;
- element.removeAttribute(_attribute_Name_Language);
- element.removeAttribute(_attribute_Name_Options);
- element.id = elementId;
- if (!isDefined(codeContainer)) {
- element.className = element.className === _string.empty ? "syntax-highlight" : element.className + " syntax-highlight";
- element.innerHTML = _string.empty;
- codeContainer = createElement("div", "code custom-scroll-bars");
- element.appendChild(codeContainer);
- } else {
- if (element.hasAttribute(_attribute_Name_TabContents) && element.getAttribute(_attribute_Name_TabContents).toLowerCase() !== "true") {
- var syntaxTabOptions = getObjectFromString(element.getAttribute(_attribute_Name_TabContents));
- if (syntaxTabOptions.parsed && isDefinedObject(syntaxTabOptions.result)) {
- tabBindingOptions = getBindingTabContentOptions(syntaxTabOptions.result);
- descriptionText = tabBindingOptions.description;
- if (isDefinedString(tabBindingOptions.title)) {
- tabTitle = tabBindingOptions.title;
+ n = l(e, t);
+ }
+ return n;
+ }
+ e.getDefaultStringOrArray = u;
+ function c(e) {
+ const t = JSON.stringify(e);
+ const n = JSON.parse(t);
+ return n;
+ }
+ e.getClonedObject = c;
+})(Data || (Data = {}));
+
+// Small DOM helpers shared by the renderer: element creation, text assignment
+// (with optional HTML stripping), and in-page text selection.
+var DomElement;
+
+(e => {
+ // create(type, className): returns a new element of the given tag name, or a
+ // text node when type is "text".  className is applied when non-empty.
+ function t(e, t = "") {
+ const n = e.toLowerCase();
+ const i = n === "text";
+ let o = i ? document.createTextNode("") : document.createElement(n);
+ if (Is.defined(t)) {
+ o.className = t;
+ }
+ return o;
+ }
+ e.create = t;
+ // setNodeText(node, text, configuration): when allowHtmlInTextDisplay is off,
+ // the text is round-tripped through a detached div's innerHTML/innerText so
+ // any markup is flattened to plain text before display.
+ function n(e, n, i) {
+ if (!i.allowHtmlInTextDisplay) {
+ const i = t("div");
+ i.innerHTML = n;
+ e.innerText = i.innerText;
+ } else {
+ e.innerHTML = n;
+ }
+ }
+ e.setNodeText = n;
+ // selectTextInElement(node): replaces the window's current selection with a
+ // range spanning the given node (used by "select all"-style actions).
+ function i(e) {
+ var t = document.createRange();
+ t.selectNode(e);
+ window.getSelection().removeAllRanges();
+ window.getSelection().addRange(t);
+ }
+ e.selectTextInElement = i;
+})(DomElement || (DomElement = {}));
+
+(() => {
+ let _configuration = {};
+ let _aliases_Rules = {};
+ let _elements = [];
+ let _elements_Original = {};
+ let _cached_Keywords = {};
+ let _cached_Keywords_Count = 0;
+ let _cached_Values = {};
+ let _cached_Values_Count = 0;
+ let _cached_Attributes = {};
+ let _cached_Attributes_Count = 0;
+ let _cached_Strings = {};
+ let _cached_Strings_Count = 0;
+ let _cached_Comments = {};
+ let _cached_Comments_Count = 0;
+ let _languages = {};
+ // Entry point for page-wide highlighting (same flow as the earlier copy of
+ // render() in this dump, with different minified letters): scans each
+ // configured tag type, fires onBeforeRender/onAfterRender around each batch,
+ // and renders "tabbed" containers as tab groups and other elements in place.
+ // The element loop for a tag type stops at the first element that fails.
+ function render() {
+ const e = _configuration.highlightAllDomElementTypes;
+ const t = e.length;
+ for (let n = 0; n < t; n++) {
+ // Snapshot the live HTMLCollection before mutating the DOM.
+ const t = document.getElementsByTagName(e[n]);
+ const i = [].slice.call(t);
+ const o = i.length;
+ if (o > 0) {
+ fireCustomTriggerEvent(_configuration.events.onBeforeRender);
+ }
+ for (let e = 0; e < o; e++) {
+ const t = i[e];
+ // Set to true when renderElement() reports a failure.
+ let n = false;
+ if (t.hasAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE) && t.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE).toLowerCase() === "tabbed") {
+ const e = [].slice.call(t.children);
+ const i = e.length;
+ const o = [];
+ const r = [];
+ t.removeAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE);
+ t.className = t.className === "" ? "syntax-highlight" : `${t.className} syntax-highlight`;
+ t.innerHTML = "";
+ const a = DomElement.create("div", "code custom-scroll-bars");
+ t.appendChild(a);
+ const l = DomElement.create("div", "tabs");
+ a.appendChild(l);
+ for (let t = 0; t < i; t++) {
+ const i = renderElement(e[t], a);
+ if (!i.rendered) {
+ n = true;
+ } else {
+ // o collects the tab buttons, r the tab panels (paired by index).
+ renderTab(l, o, r, i, t, i.tabBindingOptions, i.syntaxLanguage);
+ }
+ }
+ } else {
+ if (!renderElement(t).rendered) {
+ n = true;
+ }
+ }
+ if (n) {
+ break;
+ }
+ }
+ if (o > 0) {
+ fireCustomTriggerEvent(_configuration.events.onAfterRender);
+ }
+ }
+ }
+ // Builds the clickable tab button for one rendered code block.  Parameters
+ // (minified): e = tabs container, t = shared tab-button array, n = shared
+ // tab-panel array, i = renderElement() result, o = tab index,
+ // r = tab binding options (may be undefined), a = the tab's language name.
+ function renderTab(e, t, n, i, o, r, a) {
+ const l = DomElement.create("button", "tab");
+ e.appendChild(l);
+ DomElement.setNodeText(l, i.tabTitle, _configuration);
+ // Track the button and its panel so a click can update the whole group.
+ t.push(l);
+ n.push(i.tabContents);
+ l.onclick = function() {
+ if (l.className !== "tab-active") {
+ // Deactivate every tab and hide every panel, then activate this one.
+ const e = t.length;
+ const o = n.length;
+ for (let n = 0; n < e; n++) {
+ t[n].className = "tab";
+ }
+ for (let e = 0; e < o; e++) {
+ n[e].style.display = "none";
+ }
+ l.className = "tab-active";
+ i.tabContents.style.display = "flex";
+ if (Is.definedObject(r)) {
+ fireCustomTriggerEvent(r.events.onOpen, a);
+ }
+ }
+ };
+ // Only the first tab starts out visible and selected.
+ if (o > 0) {
+ i.tabContents.style.display = "none";
+ } else {
+ l.className = "tab-active";
+ }
+ }
+ function renderElement(e, t = null) {
+ const n = {};
+ n.rendered = true;
+ if (Is.defined(e) && e.hasAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE) && (!e.hasAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS) || Is.defined(t))) {
+ n.syntaxLanguage = e.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE);
+ if (Is.definedString(n.syntaxLanguage)) {
+ const i = getLanguage(n.syntaxLanguage);
+ if (Is.defined(i) || n.syntaxLanguage.toLowerCase() === "unknown") {
+ const o = getObjectFromString(e.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_OPTIONS));
+ const r = getObjectFromString(e.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_BUTTONS));
+ if (o.parsed) {
+ if (e.innerHTML.trim() !== "") {
+ let a = e.innerHTML;
+ const l = getBindingOptions(o.object);
+ let s = false;
+ let u = null;
+ fireCustomTriggerEvent(l.events.onBeforeRenderComplete, e);
+ if (e.children.length > 0 && e.children[0].nodeName.toLowerCase() === "pre") {
+ a = e.children[0].innerHTML;
+ s = true;
+ }
+ const c = a.trim();
+ let g = null;
+ let d = null;
+ let f = e.id;
+ if (!Is.definedString(f)) {
+ f = Data.String.newGuid();
+ }
+ _elements_Original[f] = e.innerHTML;
+ e.removeAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE);
+ e.removeAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_OPTIONS);
+ e.id = f;
+ if (!Is.defined(t)) {
+ e.className = e.className === "" ? "syntax-highlight" : `${e.className} syntax-highlight`;
+ e.innerHTML = "";
+ t = DomElement.create("div", "code custom-scroll-bars");
+ e.appendChild(t);
+ } else {
+ if (e.hasAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS) && e.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS).toLowerCase() !== "true") {
+ const t = getObjectFromString(e.getAttribute(Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS));
+ if (t.parsed && Is.definedObject(t.object)) {
+ n.tabBindingOptions = getBindingTabContentOptions(t.object);
+ u = n.tabBindingOptions.description;
+ if (Is.definedString(n.tabBindingOptions.title)) {
+ n.tabTitle = n.tabBindingOptions.title;
+ }
+ }
+ } else {
+ n.tabTitle = getFriendlyLanguageName(n.syntaxLanguage);
+ }
+ }
+ n.tabContents = DomElement.create("div", "tab-contents");
+ t.appendChild(n.tabContents);
+ if (Is.definedString(u)) {
+ d = DomElement.create("div", "description");
+ n.tabContents.appendChild(d);
+ DomElement.setNodeText(d, u, _configuration);
+ }
+ if (l.showLineNumbers) {
+ g = DomElement.create("div", "numbers");
+ n.tabContents.appendChild(g);
+ }
+ const m = DomElement.create("div", "syntax");
+ n.tabContents.appendChild(m);
+ renderElementButtons(m, l, n.syntaxLanguage, r, c);
+ if (n.syntaxLanguage.toLowerCase() !== "unknown") {
+ a = renderHTML(a, i, l);
+ } else {
+ a = Data.String.encodeMarkUpCharacters(a);
+ }
+ renderElementCompletedHTML(d, g, m, a, l, s);
+ fireCustomTriggerEvent(l.events.onRenderComplete, e);
+ if (!Is.defined(n.tabContents)) {
+ renderSyntaxCustomTriggers(e, l);
+ } else {
+ renderSyntaxCustomTriggers(n.tabContents, l);
+ }
+ _elements.push(e);
+ _cached_Keywords = {};
+ _cached_Keywords_Count = 0;
+ _cached_Values = {};
+ _cached_Values_Count = 0;
+ _cached_Attributes = {};
+ _cached_Attributes_Count = 0;
+ _cached_Strings = {};
+ _cached_Strings_Count = 0;
+ _cached_Comments = {};
+ _cached_Comments_Count = 0;
+ } else {
+ n.rendered = logError(_configuration.text.noCodeAvailableToRenderErrorText);
+ }
+ } else {
+ n.rendered = !_configuration.safeMode;
}
- }
} else {
- tabTitle = getFriendlyLanguageName(syntaxLanguage);
+ n.rendered = logError(_configuration.text.languageNotSupportedErrorText.replace("{{language}}", n.syntaxLanguage));
}
- }
- tabContents = createElement("div", "tab-contents");
- codeContainer.appendChild(tabContents);
- if (isDefinedString(descriptionText)) {
- description = createElement("div", "description");
- tabContents.appendChild(description);
- setNodeText(description, descriptionText);
- }
- if (syntaxOptions.showLineNumbers) {
- numbers = createElement("div", "numbers");
- tabContents.appendChild(numbers);
- }
- var syntax = createElement("div", "syntax");
- tabContents.appendChild(syntax);
- renderElementButtons(syntax, syntaxOptions, syntaxLanguage, syntaxButtonsParsed, innerHTMLCopy);
- if (syntaxLanguage.toLowerCase() !== _languages_Unknown) {
- innerHTML = renderHTML(innerHTML, language, syntaxOptions);
- } else {
- innerHTML = encodeMarkUpCharacters(innerHTML);
- }
- renderElementCompletedHTML(element, description, numbers, syntax, innerHTML, syntaxOptions, isPreFormatted);
- fireCustomTrigger(syntaxOptions.onRenderComplete, element);
- _elements.push(element);
- _cached_Keywords = {};
- _cached_Keywords_Count = 0;
- _cached_Values = {};
- _cached_Values_Count = 0;
- _cached_Attributes = {};
- _cached_Attributes_Count = 0;
- _cached_Strings = {};
- _cached_Strings_Count = 0;
- _cached_Comments = {};
- _cached_Comments_Count = 0;
} else {
- result = logError(_configuration.noCodeAvailableToRenderErrorText);
+ n.rendered = logError(_configuration.text.attributeNotSetErrorText.replace("{{attribute_name}}", Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE));
}
- } else {
- result = !_configuration.safeMode;
- }
+ }
+ return n;
+ }
+ // Wires the user-supplied click callbacks (from the binding options t) to
+ // every clickable token class inside container e, covering both the
+ // highlighted and the "no-highlight" variant of each token type.
+ function renderSyntaxCustomTriggers(e, t) {
+ renderElementClickEvents(e, t.events.onKeywordClicked, "keyword-clickable");
+ renderElementClickEvents(e, t.events.onKeywordClicked, "no-highlight-keyword-clickable");
+ renderElementClickEvents(e, t.events.onValueClicked, "value-clickable");
+ renderElementClickEvents(e, t.events.onValueClicked, "no-highlight-value-clickable");
+ renderElementClickEvents(e, t.events.onAttributeClicked, "attribute-clickable");
+ renderElementClickEvents(e, t.events.onAttributeClicked, "no-highlight-attribute-clickable");
+ }
+ function renderHTML(e, t, n) {
+ if (!t.isMarkUp) {
+ e = Data.String.encodeMarkUpCharacters(e);
+ }
+ if (n.highlightComments) {
+ e = renderElementMultiLineCommentVariables(e, t, n);
+ e = renderElementCommentVariables(e, t, n);
+ }
+ if (n.highlightStrings) {
+ e = renderElementStringPatternVariables(e, e.match(/"((?:\\.|[^"\\])*)"/g), n);
+ if (t.comment !== "'") {
+ e = renderElementStringPatternVariables(e, e.match(/'((?:\\.|[^"\\])*)'/g), n);
+ }
+ }
+ if (!t.isMarkUp) {
+ e = renderElementKeywords(e, t, n);
} else {
- result = logError(_configuration.languageNotSupportedErrorText.replace("{{language}}", syntaxLanguage));
- }
- } else {
- result = logError(_configuration.attributeNotSetErrorText.replace("{{attribute_name}}", _attribute_Name_Language));
- }
- }
- return {rendered:result, tabContents:tabContents, tabTitle:tabTitle, tabBindingOptions:tabBindingOptions, syntaxLanguage:syntaxLanguage};
- }
- function renderHTML(innerHTML, language, syntaxOptions) {
- if (!language.isMarkUp) {
- innerHTML = encodeMarkUpCharacters(innerHTML);
- }
- if (syntaxOptions.highlightComments) {
- innerHTML = renderElementMultiLineCommentVariables(innerHTML, language, syntaxOptions);
- innerHTML = renderElementCommentVariables(innerHTML, language, syntaxOptions);
- }
- if (syntaxOptions.highlightStrings) {
- innerHTML = renderElementStringPatternVariables(innerHTML, innerHTML.match(/"((?:\\.|[^"\\])*)"/g), syntaxOptions);
- if (language.comment !== "'") {
- innerHTML = renderElementStringPatternVariables(innerHTML, innerHTML.match(/'((?:\\.|[^"\\])*)'/g), syntaxOptions);
- }
- }
- if (!language.isMarkUp) {
- innerHTML = renderElementKeywords(innerHTML, language, syntaxOptions);
- } else {
- innerHTML = replaceMarkUpKeywords(innerHTML, language, syntaxOptions);
- }
- innerHTML = renderElementValues(innerHTML, language, syntaxOptions);
- if (language.isMarkUp) {
- innerHTML = renderElementAttributes(innerHTML, language, syntaxOptions);
- }
- innerHTML = encodeMarkUpCharacters(innerHTML);
- if (syntaxOptions.highlightComments) {
- innerHTML = renderElementCommentsFromVariables(innerHTML, language);
- }
- if (syntaxOptions.highlightStrings) {
- innerHTML = renderElementStringQuotesFromVariables(innerHTML);
- }
- innerHTML = renderElementVariables(innerHTML, _cached_Keywords);
- innerHTML = renderElementVariables(innerHTML, _cached_Values);
- if (language.isMarkUp) {
- innerHTML = renderElementVariables(innerHTML, _cached_Attributes);
- }
- return innerHTML;
- }
- function renderElementButtons(syntax, syntaxOptions, syntaxLanguage, syntaxButtonsParsed, innerHTMLCopy) {
- if (syntaxOptions.showLanguageLabel || syntaxOptions.showCopyButton || syntaxOptions.showPrintButton || syntaxButtonsParsed.parsed) {
- var buttons = createElement("div", "buttons"), buttonsElements = [];
- syntax.appendChild(buttons);
- if (syntaxButtonsParsed.parsed && isDefinedArray(syntaxButtonsParsed.result)) {
- var customButtons = syntaxButtonsParsed.result, customButtonsLength = customButtons.length;
- for (var customButtonsIndex = 0; customButtonsIndex < customButtonsLength; customButtonsIndex++) {
- var customButton = customButtons[customButtonsIndex];
- if (isDefined(customButton.text) && isDefinedFunction(customButton.onClick)) {
- renderElementButton(customButton, buttonsElements, buttons, innerHTMLCopy, syntaxOptions);
- }
- }
- }
- if (syntaxOptions.showCopyButton) {
- var copyButton = createElement("button", "button");
- copyButton.style.display = syntaxOptions.buttonsVisible ? "inline-block" : "none";
- buttons.appendChild(copyButton);
- setNodeText(copyButton, _configuration.copyButtonText);
- copyButton.onclick = function() {
- _parameter_Navigator.clipboard.writeText(innerHTMLCopy);
- fireCustomTrigger(syntaxOptions.onCopy, innerHTMLCopy);
- };
- buttonsElements.push(copyButton);
- }
- if (syntaxOptions.showPrintButton) {
- var printButton = createElement("button", "button");
- printButton.style.display = syntaxOptions.buttonsVisible ? "inline-block" : "none";
- buttons.appendChild(printButton);
- setNodeText(printButton, _configuration.printButtonText);
- printButton.onclick = function() {
- var newWindow = window.open(_string.empty, "PRINT", "height=400,width=600"), newElementForPrint = syntax.cloneNode(true), newTitleElement = createElement("div");
- newElementForPrint.removeChild(newElementForPrint.children[0]);
- newTitleElement.innerHTML = getFriendlyLanguageName(syntaxLanguage);
- newWindow.document.write("<html>");
- newWindow.document.write("<head>");
- newWindow.document.write("<title>");
- newWindow.document.write(newTitleElement.innerHTML);
- newWindow.document.write("</title>");
- newWindow.document.write("</head>");
- newWindow.document.write("<body>");
- newWindow.document.write("<code>");
- newWindow.document.write("<pre>");
- newWindow.document.write(newElementForPrint.innerHTML);
- newWindow.document.write("</pre>");
- newWindow.document.write("</code>");
- newWindow.document.write("</body>");
- newWindow.document.write("</html>");
- newWindow.document.close();
- newWindow.focus();
- newWindow.print();
- newWindow.close();
- fireCustomTrigger(syntaxOptions.onPrint, newElementForPrint.innerHTML);
+ e = replaceMarkUpKeywords(e, t, n);
+ }
+ e = renderElementValues(e, t, n);
+ if (t.isMarkUp) {
+ e = renderElementAttributes(e, t, n);
+ }
+ e = Data.String.encodeMarkUpCharacters(e);
+ if (n.highlightComments) {
+ e = renderElementCommentsFromVariables(e, t);
+ }
+ if (n.highlightStrings) {
+ e = renderElementStringQuotesFromVariables(e);
+ }
+ e = renderElementVariables(e, _cached_Keywords);
+ e = renderElementVariables(e, _cached_Values);
+ if (t.isMarkUp) {
+ e = renderElementVariables(e, _cached_Attributes);
+ }
+ return e;
+ }
+ function renderElementButtons(e, t, n, i, o) {
+ if (t.showLanguageLabel || t.showCopyButton || t.showPrintButton || i.parsed) {
+ const r = DomElement.create("div", "buttons");
+ const a = [];
+ e.appendChild(r);
+ if (i.parsed && Is.definedArray(i.object)) {
+ const e = i.object;
+ const n = e.length;
+ for (let i = 0; i < n; i++) {
+ const n = e[i];
+ if (Is.defined(n.text) && Is.definedFunction(n.events.onClick)) {
+ renderElementButton(n, a, r, o, t);
+ }
+ }
+ }
+ if (t.showCopyButton) {
+ const e = DomElement.create("button", "button");
+ e.style.display = t.buttonsVisible ? "inline-block" : "none";
+ r.appendChild(e);
+ DomElement.setNodeText(e, _configuration.text.copyButtonText, _configuration);
+ e.onclick = function() {
+ navigator.clipboard.writeText(o);
+ fireCustomTriggerEvent(t.events.onCopy, o);
+ };
+ a.push(e);
+ }
+ if (t.showPrintButton) {
+ const i = DomElement.create("button", "button");
+ i.style.display = t.buttonsVisible ? "inline-block" : "none";
+ r.appendChild(i);
+ DomElement.setNodeText(i, _configuration.text.printButtonText, _configuration);
+ i.onclick = function() {
+ const i = window.open("", "PRINT", "height=400,width=600");
+ const o = e.cloneNode(true);
+ const r = DomElement.create("div");
+ o.removeChild(o.children[0]);
+ r.innerHTML = getFriendlyLanguageName(n);
+ i.document.write("<html>");
+ i.document.write("<head>");
+ i.document.write("<title>");
+ i.document.write(r.innerHTML);
+ i.document.write("</title>");
+ i.document.write("</head>");
+ i.document.write("<body>");
+ i.document.write("<code>");
+ i.document.write("<pre>");
+ i.document.write(o.innerHTML);
+ i.document.write("</pre>");
+ i.document.write("</code>");
+ i.document.write("</body>");
+ i.document.write("</html>");
+ i.document.close();
+ i.focus();
+ i.print();
+ i.close();
+ fireCustomTriggerEvent(t.events.onPrint, o.innerHTML);
+ };
+ a.push(i);
+ }
+ if (t.showLanguageLabel) {
+ const e = DomElement.create("div", "language-label");
+ r.appendChild(e);
+ DomElement.setNodeText(e, getFriendlyLanguageName(n, t.languageLabelCasing), _configuration);
+ }
+ const l = a.length;
+ if (l > t.maximumButtons) {
+ const e = DomElement.create("button", "button button-opener");
+ e.innerText = t.buttonsVisible ? _configuration.text.buttonsCloserText : _configuration.text.buttonsOpenerText;
+ r.insertBefore(e, r.children[0]);
+ e.onclick = function() {
+ const n = e.innerText === _configuration.text.buttonsCloserText;
+ for (let e = 0; e < l; e++) {
+ a[e].style.display = n ? "none" : "inline-block";
+ }
+ e.innerText = n ? _configuration.text.buttonsOpenerText : _configuration.text.buttonsCloserText;
+ if (n) {
+ fireCustomTriggerEvent(t.events.onButtonsClosed);
+ } else {
+ fireCustomTriggerEvent(t.events.onButtonsOpened);
+ }
+ };
+ } else if (!t.buttonsVisible && l <= t.maximumButtons) {
+ for (let e = 0; e < l; e++) {
+ a[e].style.display = "inline-block";
+ }
+ }
+ }
+ }
+ function renderElementButton(e, t, n, i, o) {
+ const r = DomElement.create("button", "button");
+ r.style.display = o.buttonsVisible ? "inline-block" : "none";
+ n.appendChild(r);
+ DomElement.setNodeText(r, e.text, _configuration);
+ r.onclick = function() {
+ e.events.onClick(i);
};
- buttonsElements.push(printButton);
- }
- if (syntaxOptions.showLanguageLabel) {
- var languageLabel = createElement("div", "language-label");
- buttons.appendChild(languageLabel);
- setNodeText(languageLabel, getFriendlyLanguageName(syntaxLanguage, syntaxOptions.languageLabelCasing));
- }
- var buttonsElementsLength = buttonsElements.length;
- if (buttonsElementsLength > syntaxOptions.maximumButtons) {
- var openButton = createElement("button", "button button-opener");
- openButton.innerText = syntaxOptions.buttonsVisible ? _configuration.buttonsCloserText : _configuration.buttonsOpenerText;
- buttons.insertBefore(openButton, buttons.children[0]);
- openButton.onclick = function() {
- var areButtonsVisible = openButton.innerText === _configuration.buttonsCloserText;
- for (var buttonsElementIndex = 0; buttonsElementIndex < buttonsElementsLength; buttonsElementIndex++) {
- buttonsElements[buttonsElementIndex].style.display = areButtonsVisible ? "none" : "inline-block";
- }
- openButton.innerText = areButtonsVisible ? _configuration.buttonsOpenerText : _configuration.buttonsCloserText;
- if (areButtonsVisible) {
- fireCustomTrigger(syntaxOptions.onButtonsClosed);
- } else {
- fireCustomTrigger(syntaxOptions.onButtonsOpened);
- }
+ if (Is.defined(e.className)) {
+ r.className += " " + e.className;
+ }
+ t.push(r);
+ }
+ function renderElementCommentVariables(e, t, n) {
+ const i = t.comment;
+ if (Is.definedString(i)) {
+ const t = e.match(new RegExp(`${i}.*`, "g"));
+ if (t !== null) {
+ const i = t.length;
+ for (let o = 0; o < i; o++) {
+ const i = t[o];
+ const r = `$C{${_cached_Comments_Count.toString()}}`;
+ _cached_Comments[r] = `<span class="comment">${i}</span>`;
+ _cached_Comments_Count++;
+ e = e.replace(i, r);
+ fireCustomTriggerEvent(n.events.onCommentRender, i);
+ }
+ }
+ }
+ return e;
+ }
+ function renderElementMultiLineCommentVariables(e, t, n) {
+ const i = t.multiLineComment;
+ if (Is.definedArray(i) && i.length === 2) {
+ let t = 0;
+ let o = 0;
+ while (t >= 0 && o >= 0) {
+ t = e.indexOf(i[0], o);
+ if (t > -1) {
+ o = e.indexOf(i[1], t + i[0].length);
+ if (o > -1) {
+ const r = e.substring(t, o + i[1].length);
+ const a = r.split("\n");
+ const l = a.length;
+ const s = l === 1 ? "comment" : "multi-line-comment";
+ for (let t = 0; t < l; t++) {
+ const n = `$C{${_cached_Comments_Count.toString()}}`;
+ const i = a[t];
+ _cached_Comments[n] = `<span class="${s}">${i}</span>`;
+ _cached_Comments_Count++;
+ e = e.replace(i, n);
+ }
+ fireCustomTriggerEvent(n.events.onCommentRender, r);
+ }
+ }
+ }
+ }
+ return e;
+ }
+ function renderElementStringPatternVariables(e, t, n) {
+ if (t !== null) {
+ const i = t.length;
+ for (let o = 0; o < i; o++) {
+ const i = t[o];
+ const r = i.split("\n");
+ const a = r.length;
+ const l = a === 1 ? "string" : "multi-line-string";
+ for (let t = 0; t < a; t++) {
+ const n = r[t];
+ const i = `$S{${_cached_Strings_Count.toString()}}`;
+ _cached_Strings[i] = `<span class="${l}">${n}</span>`;
+ _cached_Strings_Count++;
+ e = e.replace(n, i);
+ }
+ fireCustomTriggerEvent(n.events.onStringRender, i);
+ }
+ }
+ return e;
+ }
+ function renderElementKeywords(e, t, n) {
+ const i = Data.getDefaultStringOrArray(t.keywords, []);
+ const o = i.length;
+ const r = t.caseSensitive;
+ const a = getKeywordCasing(t.keywordsCasing);
+ Data.String.sortArrayOfStringByLength(i);
+ for (let l = 0; l < o; l++) {
+ const o = i[l];
+ const s = getDisplayTextTestCasing(o, a);
+ const u = `KW${_cached_Keywords_Count.toString()};`;
+ let c = null;
+ const g = r ? "g" : "gi";
+ const d = new RegExp(getWordRegEx(o, t), g);
+ if (n.highlightKeywords) {
+ if (Is.definedFunction(n.events.onKeywordClicked)) {
+ c = `<span class="keyword-clickable">${s}</span>`;
+ e = e.replace(d, u);
+ } else {
+ c = `<span class="keyword">${s}</span>`;
+ e = e.replace(d, u);
+ }
+ } else {
+ if (Is.definedFunction(n.events.onKeywordClicked)) {
+ c = `<span class="no-highlight-keyword-clickable">${s}</span>`;
+ e = e.replace(d, u);
+ }
+ }
+ _cached_Keywords[u] = c;
+ _cached_Keywords_Count++;
+ fireCustomTriggerEvent(n.events.onKeywordRender, o);
+ }
+ return e;
+ }
+ function replaceMarkUpKeywords(e, t, n) {
+ const i = Data.getDefaultStringOrArray(t.keywords, []);
+ const o = t.caseSensitive;
+ const r = getKeywordCasing(t.keywordsCasing);
+ const a = /(<([^>]+)>)/gi;
+ const l = o ? "g" : "gi";
+ let s = a.exec(e);
+ while (s) {
+ if (s.index === a.lastIndex) {
+ a.lastIndex++;
+ }
+ let o = s[0];
+ o = o.replace("</", "").replace("<", "").replace(">", ""); | ## Incomplete string escaping or encoding
This replaces only the first occurrence of ">".
[Show more details](https://github.com/williamtroup/Syntax.js/security/code-scanning/11) |
Syntax.js | github_2023 | typescript | 46 | williamtroup | github-advanced-security[bot] | @@ -0,0 +1,1425 @@
+/**
+ * Syntax.js
+ *
+ * A lightweight, and easy-to-use, JavaScript library for code syntax highlighting!
+ *
+ * @file syntax.js
+ * @version v3.0.0
+ * @author Bunoon
+ * @license MIT License
+ * @copyright Bunoon 2024
+ */
+
+
+import {
+ type BindingOptions,
+ type CustomButton,
+ type BindingTabContentOptionEvents,
+ type BindingTabContentOptions,
+ type Configuration,
+ type SyntaxLanguage,
+ type ConfigurationText,
+ type ConfigurationEvents,
+ type BindingOptionEvents} from "./ts/type";
+
+import { PublicApi } from "./ts/api";
+import { Constants } from "./ts/constant";
+import { Data } from "./ts/data";
+import { Is } from "./ts/is";
+import { Char, Language, TextCasing } from "./ts/enum";
+import { DomElement } from "./ts/dom";
+
+
+type StringToJson = {
+ parsed: boolean;
+ object: any;
+};
+
+type RenderElementResult = {
+ rendered: boolean;
+ tabContents: HTMLElement;
+ tabTitle: string;
+ tabBindingOptions: BindingTabContentOptions,
+ syntaxLanguage: string
+};
+
+
+( () => {
+ // Variables: Configuration
+ let _configuration: Configuration = {} as Configuration;
+
+ // Variables: Aliases
+ let _aliases_Rules: Record<string, string> = {} as Record<string, string>;
+
+ // Variables: Elements
+ let _elements: HTMLElement[] = [];
+ let _elements_Original: Record<string, string> = {} as Record<string, string>;
+
+ // Variables: Temporary Caching
+ let _cached_Keywords: Record<string, string> = {} as Record<string, string>;
+ let _cached_Keywords_Count: number = 0;
+ let _cached_Values: Record<string, string> = {} as Record<string, string>;
+ let _cached_Values_Count: number = 0;
+ let _cached_Attributes: Record<string, string> = {} as Record<string, string>;
+ let _cached_Attributes_Count: number = 0;
+ let _cached_Strings: Record<string, string> = {} as Record<string, string>;
+ let _cached_Strings_Count: number = 0;
+ let _cached_Comments: Record<string, string> = {} as Record<string, string>;
+ let _cached_Comments_Count: number = 0;
+
+ // Variables: Languages
+ let _languages: Record<string, SyntaxLanguage> = {} as Record<string, SyntaxLanguage>;
+
+
+ /*
+ * ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ * Rendering
+ * ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ */
+
+ function render() : void {
+ const tagTypes: string[] = _configuration.highlightAllDomElementTypes as string[];
+ const tagTypesLength: number = tagTypes.length;
+
+ for ( let tagTypeIndex: number = 0; tagTypeIndex < tagTypesLength; tagTypeIndex++ ) {
+ const domElements: HTMLCollectionOf<Element> = document.getElementsByTagName( tagTypes[ tagTypeIndex ] );
+ const elements: HTMLElement[] = [].slice.call( domElements );
+ const elementsLength: number = elements.length;
+
+ if ( elementsLength > 0 ) {
+ fireCustomTriggerEvent( _configuration.events!.onBeforeRender! );
+ }
+
+ for ( let elementIndex: number = 0; elementIndex < elementsLength; elementIndex++ ) {
+ const element: HTMLElement = elements[ elementIndex ];
+ let elementBreak: boolean = false;
+
+ if ( element.hasAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE ) && element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE )!.toLowerCase() === Language.tabbed ) {
+ const divElements: HTMLElement[] = [].slice.call( element.children );
+ const divElementsLength: number = divElements.length;
+ const tabElements: HTMLElement[] = [];
+ const tabContentElements: HTMLElement[] = [];
+
+ element.removeAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE );
+ element.className = element.className === Char.empty ? "syntax-highlight" : `${element.className} syntax-highlight`;
+ element.innerHTML = Char.empty;
+
+ const codeContainer: HTMLElement = DomElement.create( "div", "code custom-scroll-bars" );
+ element.appendChild( codeContainer );
+
+ const tabs: HTMLElement = DomElement.create( "div", "tabs" );
+ codeContainer.appendChild( tabs );
+
+ for ( let divElementIndex: number = 0; divElementIndex < divElementsLength; divElementIndex++ ) {
+ const renderResult: RenderElementResult = renderElement( divElements[ divElementIndex ], codeContainer );
+
+ if ( !renderResult.rendered ) {
+ elementBreak = true;
+
+ } else {
+ renderTab( tabs, tabElements, tabContentElements, renderResult, divElementIndex, renderResult.tabBindingOptions, renderResult.syntaxLanguage );
+ }
+ }
+
+ } else {
+ if ( !renderElement( element ).rendered ) {
+ elementBreak = true;
+ }
+ }
+
+ if ( elementBreak ) {
+ break;
+ }
+ }
+
+ if ( elementsLength > 0 ) {
+ fireCustomTriggerEvent( _configuration.events!.onAfterRender! );
+ }
+ }
+ }
+
+ function renderTab( tabs: HTMLElement, tabElements: HTMLElement[], tabContentElements: HTMLElement[], renderResult: RenderElementResult, divElementIndex: number, tabBindingOptions: BindingTabContentOptions, syntaxLanguage: string ) : void {
+ const tab: HTMLElement = DomElement.create( "button", "tab" );
+ tabs.appendChild( tab );
+
+ DomElement.setNodeText( tab, renderResult.tabTitle, _configuration );
+
+ tabElements.push( tab );
+ tabContentElements.push( renderResult.tabContents );
+
+ tab.onclick = function() {
+ if ( tab.className !== "tab-active" ) {
+ const tabElementsLength: number = tabElements.length;
+ const tabContentElementsLength: number = tabContentElements.length;
+
+ for ( let tabElementsIndex: number = 0; tabElementsIndex < tabElementsLength; tabElementsIndex++ ) {
+ tabElements[ tabElementsIndex ].className = "tab";
+ }
+
+ for ( let tabContentElementsIndex: number = 0; tabContentElementsIndex < tabContentElementsLength; tabContentElementsIndex++ ) {
+ tabContentElements[ tabContentElementsIndex ].style.display = "none";
+ }
+
+ tab.className = "tab-active";
+ renderResult.tabContents.style.display = "flex";
+
+ if ( Is.definedObject( tabBindingOptions ) ) {
+ fireCustomTriggerEvent( tabBindingOptions.events!.onOpen!, syntaxLanguage );
+ }
+ }
+ };
+
+ if ( divElementIndex > 0 ) {
+ renderResult.tabContents.style.display = "none";
+ } else {
+ tab.className = "tab-active";
+ }
+ }
+
+ function renderElement( element: HTMLElement, codeContainer: HTMLElement = null! ) : RenderElementResult {
+ const result: RenderElementResult = {} as RenderElementResult;
+ result.rendered = true;
+
+ if ( Is.defined( element ) && element.hasAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE ) && ( !element.hasAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS ) || Is.defined( codeContainer ) ) ) {
+ result.syntaxLanguage = element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE )!;
+
+ if ( Is.definedString( result.syntaxLanguage ) ) {
+ const language: SyntaxLanguage = getLanguage( result.syntaxLanguage );
+
+ if ( Is.defined( language ) || result.syntaxLanguage.toLowerCase() === Language.unknown ) {
+ const syntaxOptionsParsed: StringToJson = getObjectFromString( element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_OPTIONS ) );
+ const syntaxButtonsParsed: StringToJson = getObjectFromString( element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_BUTTONS ) );
+
+ if ( syntaxOptionsParsed.parsed ) {
+ if ( element.innerHTML.trim() !== Char.empty ) {
+ let innerHTML: string = element.innerHTML;
+ const syntaxOptions: BindingOptions = getBindingOptions( syntaxOptionsParsed.object );
+ let isPreFormatted: boolean = false;
+ let descriptionText: string = null!;
+
+ fireCustomTriggerEvent( syntaxOptions.events!.onBeforeRenderComplete!, element );
+
+ if ( element.children.length > 0 && element.children[ 0 ].nodeName.toLowerCase() === "pre" ) {
+ innerHTML = element.children[ 0 ].innerHTML;
+ isPreFormatted = true;
+ }
+
+ const innerHTMLCopy: string = innerHTML.trim();
+ let numbers: HTMLElement = null!;
+ let description: HTMLElement = null!;
+ let elementId: string = element.id;
+
+ if ( !Is.definedString( elementId ) ) {
+ elementId = Data.String.newGuid();
+ }
+
+ _elements_Original[ elementId ] = element.innerHTML;
+
+ element.removeAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE );
+ element.removeAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_OPTIONS );
+ element.id = elementId;
+
+ if ( !Is.defined( codeContainer ) ) {
+ element.className = element.className === Char.empty ? "syntax-highlight" : `${element.className} syntax-highlight`;
+ element.innerHTML = Char.empty;
+
+ codeContainer = DomElement.create( "div", "code custom-scroll-bars" );
+ element.appendChild( codeContainer );
+
+ } else {
+ if ( element.hasAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS ) && element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS )!.toLowerCase() !== "true" ) {
+ const syntaxTabOptions: StringToJson = getObjectFromString( element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS ) );
+
+ if ( syntaxTabOptions.parsed && Is.definedObject( syntaxTabOptions.object ) ) {
+ result.tabBindingOptions = getBindingTabContentOptions( syntaxTabOptions.object );
+ descriptionText = result.tabBindingOptions.description!;
+
+ if ( Is.definedString( result.tabBindingOptions.title ) ) {
+ result.tabTitle = result.tabBindingOptions.title!;
+ }
+ }
+
+ } else {
+ result.tabTitle = getFriendlyLanguageName( result.syntaxLanguage );
+ }
+ }
+
+ result.tabContents = DomElement.create( "div", "tab-contents" );
+ codeContainer.appendChild( result.tabContents );
+
+ if ( Is.definedString( descriptionText ) ) {
+ description = DomElement.create( "div", "description" );
+ result.tabContents.appendChild( description );
+
+ DomElement.setNodeText( description, descriptionText, _configuration );
+ }
+
+ if ( syntaxOptions.showLineNumbers ) {
+ numbers = DomElement.create( "div", "numbers" );
+ result.tabContents.appendChild( numbers );
+ }
+
+ const syntax: HTMLElement = DomElement.create( "div", "syntax" );
+ result.tabContents.appendChild( syntax );
+
+ renderElementButtons( syntax, syntaxOptions, result.syntaxLanguage, syntaxButtonsParsed, innerHTMLCopy );
+
+ if ( result.syntaxLanguage.toLowerCase() !== Language.unknown ) {
+ innerHTML = renderHTML( innerHTML, language, syntaxOptions );
+ } else {
+ innerHTML = Data.String.encodeMarkUpCharacters( innerHTML );
+ }
+
+ renderElementCompletedHTML( description, numbers, syntax, innerHTML, syntaxOptions, isPreFormatted );
+ fireCustomTriggerEvent( syntaxOptions.events!.onRenderComplete!, element );
+
+ if ( !Is.defined( result.tabContents ) ) {
+ renderSyntaxCustomTriggers( element, syntaxOptions );
+ } else {
+ renderSyntaxCustomTriggers( result.tabContents, syntaxOptions );
+ }
+
+ _elements.push( element );
+
+ _cached_Keywords = {};
+ _cached_Keywords_Count = 0;
+ _cached_Values = {};
+ _cached_Values_Count = 0;
+ _cached_Attributes = {};
+ _cached_Attributes_Count = 0;
+ _cached_Strings = {};
+ _cached_Strings_Count = 0;
+ _cached_Comments = {};
+ _cached_Comments_Count = 0;
+
+ } else {
+ result.rendered = logError( _configuration.text!.noCodeAvailableToRenderErrorText! );
+ }
+
+ } else {
+ result.rendered = !_configuration.safeMode;
+ }
+
+ } else {
+ result.rendered = logError( _configuration.text!.languageNotSupportedErrorText!.replace( "{{language}}", result.syntaxLanguage ) );
+ }
+
+ } else {
+ result.rendered = logError( _configuration.text!.attributeNotSetErrorText!.replace( "{{attribute_name}}", Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE ) );
+ }
+ }
+
+ return result;
+ }
+
+ function renderSyntaxCustomTriggers( element: HTMLElement, syntaxOptions: BindingOptions ) : void {
+ renderElementClickEvents( element, syntaxOptions.events!.onKeywordClicked!, "keyword-clickable" );
+ renderElementClickEvents( element, syntaxOptions.events!.onKeywordClicked!, "no-highlight-keyword-clickable" );
+ renderElementClickEvents( element, syntaxOptions.events!.onValueClicked!, "value-clickable" );
+ renderElementClickEvents( element, syntaxOptions.events!.onValueClicked!, "no-highlight-value-clickable" );
+ renderElementClickEvents( element, syntaxOptions.events!.onAttributeClicked!, "attribute-clickable" );
+ renderElementClickEvents( element, syntaxOptions.events!.onAttributeClicked!, "no-highlight-attribute-clickable" );
+ }
+
+ function renderHTML( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
+ if ( !language.isMarkUp ) {
+ innerHTML = Data.String.encodeMarkUpCharacters( innerHTML );
+ }
+
+ if ( syntaxOptions.highlightComments ) {
+ innerHTML = renderElementMultiLineCommentVariables( innerHTML, language, syntaxOptions );
+ innerHTML = renderElementCommentVariables( innerHTML, language, syntaxOptions );
+ }
+
+ if ( syntaxOptions.highlightStrings ) {
+ innerHTML = renderElementStringPatternVariables( innerHTML, innerHTML.match( /"((?:\\.|[^"\\])*)"/g )!, syntaxOptions );
+
+ if ( language.comment !== "'" ) {
+ innerHTML = renderElementStringPatternVariables( innerHTML, innerHTML.match( /'((?:\\.|[^"\\])*)'/g )!, syntaxOptions );
+ }
+ }
+
+ if ( !language.isMarkUp ) {
+ innerHTML = renderElementKeywords( innerHTML, language, syntaxOptions );
+ } else {
+ innerHTML = replaceMarkUpKeywords( innerHTML, language, syntaxOptions );
+ }
+
+ innerHTML = renderElementValues( innerHTML, language, syntaxOptions );
+
+ if ( language.isMarkUp ) {
+ innerHTML = renderElementAttributes( innerHTML, language, syntaxOptions );
+ }
+
+ innerHTML = Data.String.encodeMarkUpCharacters( innerHTML );
+
+ if ( syntaxOptions.highlightComments ) {
+ innerHTML = renderElementCommentsFromVariables( innerHTML, language );
+ }
+
+ if ( syntaxOptions.highlightStrings ) {
+ innerHTML = renderElementStringQuotesFromVariables( innerHTML );
+ }
+
+ innerHTML = renderElementVariables( innerHTML, _cached_Keywords );
+ innerHTML = renderElementVariables( innerHTML, _cached_Values );
+
+ if ( language.isMarkUp ) {
+ innerHTML = renderElementVariables( innerHTML, _cached_Attributes );
+ }
+
+ return innerHTML;
+ }
+
+ function renderElementButtons( syntax: HTMLElement, syntaxOptions: BindingOptions, syntaxLanguage: string, syntaxButtonsParsed: StringToJson, innerHTMLCopy: string ) : void {
+ if ( syntaxOptions.showLanguageLabel || syntaxOptions.showCopyButton || syntaxOptions.showPrintButton || syntaxButtonsParsed.parsed ) {
+ const buttons: HTMLElement = DomElement.create( "div", "buttons" );
+ const buttonsElements: HTMLElement[] = [];
+
+ syntax.appendChild( buttons );
+
+ if ( syntaxButtonsParsed.parsed && Is.definedArray( syntaxButtonsParsed.object ) ) {
+ const customButtons: CustomButton[] = syntaxButtonsParsed.object;
+ const customButtonsLength: number = customButtons.length;
+
+ for ( let customButtonsIndex: number = 0; customButtonsIndex < customButtonsLength; customButtonsIndex++ ) {
+ const customButton: CustomButton = customButtons[ customButtonsIndex ];
+
+ if ( Is.defined( customButton.text ) && Is.definedFunction( customButton.events!.onClick! ) ) {
+ renderElementButton( customButton, buttonsElements, buttons, innerHTMLCopy, syntaxOptions );
+ }
+ }
+ }
+
+ if ( syntaxOptions.showCopyButton ) {
+ const copyButton: HTMLButtonElement = DomElement.create( "button", "button" ) as HTMLButtonElement;
+ copyButton.style.display = syntaxOptions.buttonsVisible ? "inline-block" : "none";
+ buttons.appendChild( copyButton );
+
+ DomElement.setNodeText( copyButton, _configuration.text!.copyButtonText!, _configuration );
+
+ copyButton.onclick = function() {
+ navigator.clipboard.writeText( innerHTMLCopy );
+
+ fireCustomTriggerEvent( syntaxOptions.events!.onCopy!, innerHTMLCopy );
+ };
+
+ buttonsElements.push( copyButton );
+ }
+
+ if ( syntaxOptions.showPrintButton ) {
+ const printButton: HTMLButtonElement = DomElement.create( "button", "button" ) as HTMLButtonElement;
+ printButton.style.display = syntaxOptions.buttonsVisible ? "inline-block" : "none";
+ buttons.appendChild( printButton );
+
+ DomElement.setNodeText( printButton, _configuration.text!.printButtonText!, _configuration );
+
+ printButton.onclick = function() {
+ const newWindow: WindowProxy = window.open( Char.empty, "PRINT", "height=400,width=600" )!;
+ const newElementForPrint: HTMLElement = syntax.cloneNode( true ) as HTMLElement;
+ const newTitleElement: HTMLElement = DomElement.create( "div" );
+
+ newElementForPrint.removeChild( newElementForPrint.children[ 0 ] );
+ newTitleElement.innerHTML = getFriendlyLanguageName( syntaxLanguage );
+
+ newWindow.document.write( "<html>" );
+ newWindow.document.write( "<head>" );
+ newWindow.document.write( "<title>" );
+ newWindow.document.write( newTitleElement.innerHTML );
+ newWindow.document.write( "</title>" );
+ newWindow.document.write( "</head>" );
+ newWindow.document.write( "<body>" );
+ newWindow.document.write( "<code>" );
+ newWindow.document.write( "<pre>" );
+ newWindow.document.write( newElementForPrint.innerHTML );
+ newWindow.document.write( "</pre>" );
+ newWindow.document.write( "</code>" );
+ newWindow.document.write( "</body>" );
+ newWindow.document.write( "</html>" );
+
+ newWindow.document.close();
+ newWindow.focus();
+ newWindow.print();
+ newWindow.close();
+
+ fireCustomTriggerEvent( syntaxOptions.events!.onPrint!, newElementForPrint.innerHTML );
+ };
+
+ buttonsElements.push( printButton );
+ }
+
+ if ( syntaxOptions.showLanguageLabel ) {
+ const languageLabel: HTMLElement = DomElement.create( "div", "language-label" );
+ buttons.appendChild( languageLabel );
+
+ DomElement.setNodeText( languageLabel, getFriendlyLanguageName( syntaxLanguage, syntaxOptions.languageLabelCasing! ), _configuration );
+ }
+
+ const buttonsElementsLength: number = buttonsElements.length;
+
+ if ( buttonsElementsLength > syntaxOptions.maximumButtons! ) {
+ const openButton: HTMLButtonElement = DomElement.create( "button", "button button-opener" ) as HTMLButtonElement;
+ openButton.innerText = syntaxOptions.buttonsVisible ? _configuration.text!.buttonsCloserText! : _configuration.text!.buttonsOpenerText!;
+ buttons.insertBefore( openButton, buttons.children[ 0 ] );
+
+ openButton.onclick = function() {
+ const areButtonsVisible: boolean = openButton.innerText === _configuration.text!.buttonsCloserText;
+
+ for ( let buttonsElementIndex: number = 0; buttonsElementIndex < buttonsElementsLength; buttonsElementIndex++ ) {
+ buttonsElements[ buttonsElementIndex ].style.display = areButtonsVisible ? "none" : "inline-block";
+ }
+
+ openButton.innerText = areButtonsVisible ? _configuration.text!.buttonsOpenerText! : _configuration.text!.buttonsCloserText!;
+
+ if ( areButtonsVisible ) {
+ fireCustomTriggerEvent( syntaxOptions.events!.onButtonsClosed! );
+ } else {
+ fireCustomTriggerEvent( syntaxOptions.events!.onButtonsOpened! );
+ }
+ };
+
+ } else if ( !syntaxOptions.buttonsVisible && buttonsElementsLength <= syntaxOptions.maximumButtons! ) {
+ for ( let buttonsElementIndex: number = 0; buttonsElementIndex < buttonsElementsLength; buttonsElementIndex++ ) {
+ buttonsElements[ buttonsElementIndex ].style.display = "inline-block";
+ }
+ }
+ }
+ }
+
+ function renderElementButton( customButton: CustomButton, buttonsElements: HTMLElement[], buttons: HTMLElement, innerHTMLCopy: string, syntaxOptions: BindingOptions ) : void {
+ const newCustomButton: HTMLButtonElement = DomElement.create( "button", "button" ) as HTMLButtonElement;
+ newCustomButton.style.display = syntaxOptions.buttonsVisible ? "inline-block" : "none";
+ buttons.appendChild( newCustomButton );
+
+ DomElement.setNodeText( newCustomButton, customButton.text!, _configuration );
+
+ newCustomButton.onclick = function() {
+ customButton.events!.onClick!( innerHTMLCopy );
+ };
+
+ if ( Is.defined( customButton.className ) ) {
+ newCustomButton.className += Char.space + customButton.className;
+ }
+
+ buttonsElements.push( newCustomButton );
+ }
+
+ function renderElementCommentVariables( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
+ const lookup: string = language.comment!;
+
+ if ( Is.definedString( lookup ) ) {
+ const patternItems: RegExpMatchArray = innerHTML.match( new RegExp( `${lookup}.*`, "g" ) )!;
+
+ if ( patternItems !== null ) {
+ const patternItemsLength: number = patternItems.length;
+
+ for ( let patternItemsIndex: number = 0; patternItemsIndex < patternItemsLength; patternItemsIndex++ ) {
+ const comment: string = patternItems[ patternItemsIndex ];
+ const commentVariable: string = `\$C{${_cached_Comments_Count.toString()}}`;
+
+ _cached_Comments[ commentVariable ] = `<span class=\"comment\">${comment}</span>`;
+ _cached_Comments_Count++;
+
+ innerHTML = innerHTML.replace( comment, commentVariable );
+
+ fireCustomTriggerEvent( syntaxOptions.events!.onCommentRender!, comment );
+ }
+ }
+ }
+
+ return innerHTML;
+ }
+
    function renderElementMultiLineCommentVariables( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
        // Replaces multi-line comment blocks (delimited by the language's [open, close]
        // token pair) with cached placeholder variables ($C{n}) — one placeholder per
        // comment line — so later highlighting passes skip them. Fires onCommentRender
        // once per complete comment block.
        const multiLineComment: string[] = language.multiLineComment as string[];

        // Only languages declaring exactly an open and a close delimiter are handled.
        if ( Is.definedArray( multiLineComment ) && multiLineComment.length === 2 ) {
            let startIndex: number = 0;
            let endIndex: number = 0;

            // Scan forward for successive open/close pairs; either delimiter not being
            // found (index -1) terminates the loop.
            while ( startIndex >= 0 && endIndex >= 0 ) {
                startIndex = innerHTML.indexOf( multiLineComment[ 0 ], endIndex );

                if ( startIndex > -1 ) {
                    endIndex = innerHTML.indexOf( multiLineComment[ 1 ], startIndex + multiLineComment[ 0 ].length );

                    if ( endIndex > -1 ) {
                        const comment: string = innerHTML.substring( startIndex, endIndex + multiLineComment[ 1 ].length );
                        const commentLines: string[] = comment.split( Char.newLine );
                        const commentLinesLength: number = commentLines.length;
                        // A block that fits on one line is styled as a plain comment.
                        const commentCssClass: string = commentLinesLength === 1 ? "comment" : "multi-line-comment";

                        for ( let commentLineIndex: number = 0; commentLineIndex < commentLinesLength; commentLineIndex++ ) {
                            const commentVariable: string = `\$C{${_cached_Comments_Count.toString()}}`;
                            const commentLine: string = commentLines[ commentLineIndex ];

                            _cached_Comments[ commentVariable ] = `<span class=\"${commentCssClass}\">${commentLine}</span>`;
                            _cached_Comments_Count++;

                            // NOTE(review): this shortens innerHTML, but endIndex was computed
                            // on the pre-replacement text — the next indexOf may overshoot a
                            // comment starting immediately after this one; confirm intended.
                            innerHTML = innerHTML.replace( commentLine, commentVariable );
                        }

                        fireCustomTriggerEvent( syntaxOptions.events!.onCommentRender!, comment );
                    }
                }
            }
        }

        return innerHTML;
    }
+
+ function renderElementStringPatternVariables( innerHTML: string, patternItems: string[], syntaxOptions: BindingOptions ) : string {
+ if ( patternItems !== null ) {
+ const patternItemsLength: number = patternItems.length;
+
+ for ( let patternItemsIndex: number = 0; patternItemsIndex < patternItemsLength; patternItemsIndex++ ) {
+ const string: string = patternItems[ patternItemsIndex ];
+ const stringLines: string[] = string.split( Char.newLine );
+ const stringLinesLength: number = stringLines.length;
+ const stringCssClass: string = stringLinesLength === 1 ? "string" : "multi-line-string";
+
+ for ( let stringLineIndex: number = 0; stringLineIndex < stringLinesLength; stringLineIndex++ ) {
+ const stringLine: string = stringLines[ stringLineIndex ];
+ const stringVariable: string = `\$S{${_cached_Strings_Count.toString()}}`;
+
+ _cached_Strings[ stringVariable ] = `<span class=\"${stringCssClass}\">${stringLine}</span>`;
+ _cached_Strings_Count++;
+
+ innerHTML = innerHTML.replace( stringLine, stringVariable );
+ }
+
+ fireCustomTriggerEvent( syntaxOptions.events!.onStringRender!, string );
+ }
+ }
+
+ return innerHTML;
+ }
+
    function renderElementKeywords( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
        // Replaces each language keyword with a cached placeholder (KWn;) whose
        // cached markup is a styled <span>; the placeholder survives the later HTML
        // encoding pass and is swapped back afterwards. Fires onKeywordRender per keyword.
        const keywords: string[] = Data.getDefaultStringOrArray( language.keywords, [] );
        const keywordsLength: number = keywords.length;
        const caseSensitive: boolean = language.caseSensitive!;
        const keywordsCasing: string = getKeywordCasing( language.keywordsCasing! );

        // Longest keywords first, so shorter keywords cannot clobber substrings of
        // longer ones — presumably sorts descending by length; TODO confirm.
        Data.String.sortArrayOfStringByLength( keywords );

        for ( let keywordIndex: number = 0; keywordIndex < keywordsLength; keywordIndex++ ) {
            const keyword: string = keywords[ keywordIndex ];
            const keywordDisplay: string = getDisplayTextTestCasing( keyword, keywordsCasing );
            const keywordVariable: string = `KW${_cached_Keywords_Count.toString()};`;
            let keywordReplacement: string = null!;
            const regExFlags: string = caseSensitive ? "g" : "gi";
            const regEx: RegExp = new RegExp( getWordRegEx( keyword, language ), regExFlags );

            // CSS class depends on whether highlighting is on and whether a click
            // handler is registered (clickable variants get their own class).
            if ( syntaxOptions.highlightKeywords ) {
                if ( Is.definedFunction( syntaxOptions.events!.onKeywordClicked ) ) {
                    keywordReplacement = `<span class=\"keyword-clickable\">${keywordDisplay}</span>`;
                    innerHTML = innerHTML.replace( regEx, keywordVariable );
                } else {
                    keywordReplacement = `<span class=\"keyword\">${keywordDisplay}</span>`;
                    innerHTML = innerHTML.replace( regEx, keywordVariable );
                }

            } else {
                if ( Is.definedFunction( syntaxOptions.events!.onKeywordClicked ) ) {
                    keywordReplacement = `<span class=\"no-highlight-keyword-clickable\">${keywordDisplay}</span>`;
                    innerHTML = innerHTML.replace( regEx, keywordVariable );
                }
            }

            // When highlighting is off and no handler exists, no replacement happened
            // and a null entry is cached — the placeholder never appears in the HTML,
            // so the null entry is inert.
            _cached_Keywords[ keywordVariable ] = keywordReplacement;
            _cached_Keywords_Count++;

            fireCustomTriggerEvent( syntaxOptions.events!.onKeywordRender!, keyword );
        }

        return innerHTML;
    }
+
+ function replaceMarkUpKeywords( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
+ const keywords: string[] = Data.getDefaultStringOrArray( language.keywords, [] );
+ const caseSensitive: boolean = language.caseSensitive!;
+ const keywordsCasing: string = getKeywordCasing( language.keywordsCasing! );
+
+ const regEx: RegExp = /(<([^>]+)>)/ig;
+ const regExFlags: string = caseSensitive ? "g" : "gi";
+ let regExResult: RegExpExecArray = regEx.exec( innerHTML )!;
+
+ while ( regExResult ) {
+ if ( regExResult.index === regEx.lastIndex ) {
+ regEx.lastIndex++;
+ }
+
+ let tag: string = regExResult[ 0 ];
+ tag = tag.replace( "</", Char.empty ).replace( "<", Char.empty ).replace( ">", Char.empty ); | ## Incomplete string escaping or encoding
Each chained `String.replace` call with a string pattern replaces only the first occurrence, so `">"` (and `"<"`) are stripped incompletely whenever they appear more than once in the matched tag — use a global regex or the regex capture group instead.
[Show more details](https://github.com/williamtroup/Syntax.js/security/code-scanning/13) |
Syntax.js | github_2023 | typescript | 46 | williamtroup | github-advanced-security[bot] | @@ -0,0 +1,1425 @@
+/**
+ * Syntax.js
+ *
+ * A lightweight, and easy-to-use, JavaScript library for code syntax highlighting!
+ *
+ * @file syntax.js
+ * @version v3.0.0
+ * @author Bunoon
+ * @license MIT License
+ * @copyright Bunoon 2024
+ */
+
+
+import {
+ type BindingOptions,
+ type CustomButton,
+ type BindingTabContentOptionEvents,
+ type BindingTabContentOptions,
+ type Configuration,
+ type SyntaxLanguage,
+ type ConfigurationText,
+ type ConfigurationEvents,
+ type BindingOptionEvents} from "./ts/type";
+
+import { PublicApi } from "./ts/api";
+import { Constants } from "./ts/constant";
+import { Data } from "./ts/data";
+import { Is } from "./ts/is";
+import { Char, Language, TextCasing } from "./ts/enum";
+import { DomElement } from "./ts/dom";
+
+
// Result of converting a JSON-like attribute string into an object.
type StringToJson = {
    parsed: boolean;    // whether the conversion succeeded
    object: any;        // the resulting object — contents depend on the attribute parsed
};

// Outcome of rendering one code element, carrying the data needed to build a
// tab for it when it sits inside a "tabbed" container.
type RenderElementResult = {
    rendered: boolean;  // whether rendering completed — set via logError() on failure, which presumably returns false; verify
    tabContents: HTMLElement;   // container holding the rendered syntax for this tab
    tabTitle: string;   // label shown on the tab button
    tabBindingOptions: BindingTabContentOptions,    // per-tab options parsed from the element's attribute
    syntaxLanguage: string  // raw language name read from the element's language attribute
};
+
+
+( () => {
    // Variables: Configuration
    // Merged library configuration (text, events, highlight settings, …).
    let _configuration: Configuration = {} as Configuration;

    // Variables: Aliases
    // Language alias lookup — presumably alias name -> canonical name; TODO confirm direction.
    let _aliases_Rules: Record<string, string> = {} as Record<string, string>;

    // Variables: Elements
    // Every element rendered so far, plus each element's pre-render innerHTML keyed by id.
    let _elements: HTMLElement[] = [];
    let _elements_Original: Record<string, string> = {} as Record<string, string>;

    // Variables: Temporary Caching
    // Placeholder variable -> rendered <span> markup, populated while a single
    // element renders and reset when it completes (see renderElement()). The
    // counters generate unique placeholder names.
    let _cached_Keywords: Record<string, string> = {} as Record<string, string>;
    let _cached_Keywords_Count: number = 0;
    let _cached_Values: Record<string, string> = {} as Record<string, string>;
    let _cached_Values_Count: number = 0;
    let _cached_Attributes: Record<string, string> = {} as Record<string, string>;
    let _cached_Attributes_Count: number = 0;
    let _cached_Strings: Record<string, string> = {} as Record<string, string>;
    let _cached_Strings_Count: number = 0;
    let _cached_Comments: Record<string, string> = {} as Record<string, string>;
    let _cached_Comments_Count: number = 0;

    // Variables: Languages
    // Registered language definitions, keyed by language name.
    let _languages: Record<string, SyntaxLanguage> = {} as Record<string, SyntaxLanguage>;
+
+
+ /*
+ * ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ * Rendering
+ * ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ */
+
    function render() : void {
        // Entry point: scans the document for every configured element tag type and
        // renders each element that carries the language attribute. Elements marked
        // as "tabbed" render each child as a tab; anything else renders standalone.
        // onBeforeRender / onAfterRender fire once per tag type with matches.
        const tagTypes: string[] = _configuration.highlightAllDomElementTypes as string[];
        const tagTypesLength: number = tagTypes.length;

        for ( let tagTypeIndex: number = 0; tagTypeIndex < tagTypesLength; tagTypeIndex++ ) {
            const domElements: HTMLCollectionOf<Element> = document.getElementsByTagName( tagTypes[ tagTypeIndex ] );
            // Snapshot the live collection — rendering mutates the DOM.
            const elements: HTMLElement[] = [].slice.call( domElements );
            const elementsLength: number = elements.length;

            if ( elementsLength > 0 ) {
                fireCustomTriggerEvent( _configuration.events!.onBeforeRender! );
            }

            for ( let elementIndex: number = 0; elementIndex < elementsLength; elementIndex++ ) {
                const element: HTMLElement = elements[ elementIndex ];
                // A failed render stops processing further elements of this tag type.
                let elementBreak: boolean = false;

                if ( element.hasAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE ) && element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE )!.toLowerCase() === Language.tabbed ) {
                    // Tabbed container: each child <div> becomes one tab + contents.
                    const divElements: HTMLElement[] = [].slice.call( element.children );
                    const divElementsLength: number = divElements.length;
                    const tabElements: HTMLElement[] = [];
                    const tabContentElements: HTMLElement[] = [];

                    element.removeAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE );
                    element.className = element.className === Char.empty ? "syntax-highlight" : `${element.className} syntax-highlight`;
                    element.innerHTML = Char.empty;

                    const codeContainer: HTMLElement = DomElement.create( "div", "code custom-scroll-bars" );
                    element.appendChild( codeContainer );

                    const tabs: HTMLElement = DomElement.create( "div", "tabs" );
                    codeContainer.appendChild( tabs );

                    for ( let divElementIndex: number = 0; divElementIndex < divElementsLength; divElementIndex++ ) {
                        const renderResult: RenderElementResult = renderElement( divElements[ divElementIndex ], codeContainer );

                        if ( !renderResult.rendered ) {
                            elementBreak = true;

                        } else {
                            renderTab( tabs, tabElements, tabContentElements, renderResult, divElementIndex, renderResult.tabBindingOptions, renderResult.syntaxLanguage );
                        }
                    }

                } else {
                    // Standalone element: renderElement builds its own container.
                    if ( !renderElement( element ).rendered ) {
                        elementBreak = true;
                    }
                }

                if ( elementBreak ) {
                    break;
                }
            }

            if ( elementsLength > 0 ) {
                fireCustomTriggerEvent( _configuration.events!.onAfterRender! );
            }
        }
    }
+
+ function renderTab( tabs: HTMLElement, tabElements: HTMLElement[], tabContentElements: HTMLElement[], renderResult: RenderElementResult, divElementIndex: number, tabBindingOptions: BindingTabContentOptions, syntaxLanguage: string ) : void {
+ const tab: HTMLElement = DomElement.create( "button", "tab" );
+ tabs.appendChild( tab );
+
+ DomElement.setNodeText( tab, renderResult.tabTitle, _configuration );
+
+ tabElements.push( tab );
+ tabContentElements.push( renderResult.tabContents );
+
+ tab.onclick = function() {
+ if ( tab.className !== "tab-active" ) {
+ const tabElementsLength: number = tabElements.length;
+ const tabContentElementsLength: number = tabContentElements.length;
+
+ for ( let tabElementsIndex: number = 0; tabElementsIndex < tabElementsLength; tabElementsIndex++ ) {
+ tabElements[ tabElementsIndex ].className = "tab";
+ }
+
+ for ( let tabContentElementsIndex: number = 0; tabContentElementsIndex < tabContentElementsLength; tabContentElementsIndex++ ) {
+ tabContentElements[ tabContentElementsIndex ].style.display = "none";
+ }
+
+ tab.className = "tab-active";
+ renderResult.tabContents.style.display = "flex";
+
+ if ( Is.definedObject( tabBindingOptions ) ) {
+ fireCustomTriggerEvent( tabBindingOptions.events!.onOpen!, syntaxLanguage );
+ }
+ }
+ };
+
+ if ( divElementIndex > 0 ) {
+ renderResult.tabContents.style.display = "none";
+ } else {
+ tab.className = "tab-active";
+ }
+ }
+
    function renderElement( element: HTMLElement, codeContainer: HTMLElement = null! ) : RenderElementResult {
        // Renders one code element: parses its option/button attributes, builds the
        // surrounding DOM (buttons, description, line numbers, syntax container),
        // highlights the code and resets the per-element placeholder caches.
        // codeContainer is supplied when rendering inside a tabbed container;
        // otherwise a new container is created on the element itself.
        const result: RenderElementResult = {} as RenderElementResult;
        result.rendered = true;

        // Tab-contents elements are only rendered through a tabbed parent (which
        // supplies codeContainer) — never standalone.
        if ( Is.defined( element ) && element.hasAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE ) && ( !element.hasAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS ) || Is.defined( codeContainer ) ) ) {
            result.syntaxLanguage = element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE )!;

            if ( Is.definedString( result.syntaxLanguage ) ) {
                const language: SyntaxLanguage = getLanguage( result.syntaxLanguage );

                // "unknown" is a supported pseudo-language: rendered with encoding only.
                if ( Is.defined( language ) || result.syntaxLanguage.toLowerCase() === Language.unknown ) {
                    const syntaxOptionsParsed: StringToJson = getObjectFromString( element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_OPTIONS ) );
                    const syntaxButtonsParsed: StringToJson = getObjectFromString( element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_BUTTONS ) );

                    if ( syntaxOptionsParsed.parsed ) {
                        if ( element.innerHTML.trim() !== Char.empty ) {
                            let innerHTML: string = element.innerHTML;
                            const syntaxOptions: BindingOptions = getBindingOptions( syntaxOptionsParsed.object );
                            let isPreFormatted: boolean = false;
                            let descriptionText: string = null!;

                            fireCustomTriggerEvent( syntaxOptions.events!.onBeforeRenderComplete!, element );

                            // A leading <pre> child supplies the code and marks it pre-formatted.
                            if ( element.children.length > 0 && element.children[ 0 ].nodeName.toLowerCase() === "pre" ) {
                                innerHTML = element.children[ 0 ].innerHTML;
                                isPreFormatted = true;
                            }

                            // Untouched copy handed to copy/print/custom buttons.
                            const innerHTMLCopy: string = innerHTML.trim();
                            let numbers: HTMLElement = null!;
                            let description: HTMLElement = null!;
                            let elementId: string = element.id;

                            if ( !Is.definedString( elementId ) ) {
                                elementId = Data.String.newGuid();
                            }

                            // Keep the pre-render HTML so the element can be restored later.
                            _elements_Original[ elementId ] = element.innerHTML;

                            element.removeAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE );
                            element.removeAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_OPTIONS );
                            element.id = elementId;

                            if ( !Is.defined( codeContainer ) ) {
                                // Standalone element: build its own container and clear it.
                                element.className = element.className === Char.empty ? "syntax-highlight" : `${element.className} syntax-highlight`;
                                element.innerHTML = Char.empty;

                                codeContainer = DomElement.create( "div", "code custom-scroll-bars" );
                                element.appendChild( codeContainer );

                            } else {
                                // Tabbed child: derive the tab title/description from the
                                // tab-contents attribute when it holds options, otherwise
                                // fall back to the friendly language name.
                                if ( element.hasAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS ) && element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS )!.toLowerCase() !== "true" ) {
                                    const syntaxTabOptions: StringToJson = getObjectFromString( element.getAttribute( Constants.SYNTAX_JS_ATTRIBUTE_NAME_TAB_CONTENTS ) );

                                    if ( syntaxTabOptions.parsed && Is.definedObject( syntaxTabOptions.object ) ) {
                                        result.tabBindingOptions = getBindingTabContentOptions( syntaxTabOptions.object );
                                        descriptionText = result.tabBindingOptions.description!;

                                        if ( Is.definedString( result.tabBindingOptions.title ) ) {
                                            result.tabTitle = result.tabBindingOptions.title!;
                                        }
                                    }

                                } else {
                                    result.tabTitle = getFriendlyLanguageName( result.syntaxLanguage );
                                }
                            }

                            result.tabContents = DomElement.create( "div", "tab-contents" );
                            codeContainer.appendChild( result.tabContents );

                            if ( Is.definedString( descriptionText ) ) {
                                description = DomElement.create( "div", "description" );
                                result.tabContents.appendChild( description );

                                DomElement.setNodeText( description, descriptionText, _configuration );
                            }

                            if ( syntaxOptions.showLineNumbers ) {
                                numbers = DomElement.create( "div", "numbers" );
                                result.tabContents.appendChild( numbers );
                            }

                            const syntax: HTMLElement = DomElement.create( "div", "syntax" );
                            result.tabContents.appendChild( syntax );

                            renderElementButtons( syntax, syntaxOptions, result.syntaxLanguage, syntaxButtonsParsed, innerHTMLCopy );

                            // "unknown" skips highlighting entirely — encode and show as-is.
                            if ( result.syntaxLanguage.toLowerCase() !== Language.unknown ) {
                                innerHTML = renderHTML( innerHTML, language, syntaxOptions );
                            } else {
                                innerHTML = Data.String.encodeMarkUpCharacters( innerHTML );
                            }

                            renderElementCompletedHTML( description, numbers, syntax, innerHTML, syntaxOptions, isPreFormatted );
                            fireCustomTriggerEvent( syntaxOptions.events!.onRenderComplete!, element );

                            if ( !Is.defined( result.tabContents ) ) {
                                renderSyntaxCustomTriggers( element, syntaxOptions );
                            } else {
                                renderSyntaxCustomTriggers( result.tabContents, syntaxOptions );
                            }

                            _elements.push( element );

                            // Placeholder caches are per-element; reset for the next render.
                            _cached_Keywords = {};
                            _cached_Keywords_Count = 0;
                            _cached_Values = {};
                            _cached_Values_Count = 0;
                            _cached_Attributes = {};
                            _cached_Attributes_Count = 0;
                            _cached_Strings = {};
                            _cached_Strings_Count = 0;
                            _cached_Comments = {};
                            _cached_Comments_Count = 0;

                        } else {
                            result.rendered = logError( _configuration.text!.noCodeAvailableToRenderErrorText! );
                        }

                    } else {
                        // Unparsable options: in safe mode the element is skipped but
                        // rendering of later elements continues.
                        result.rendered = !_configuration.safeMode;
                    }

                } else {
                    result.rendered = logError( _configuration.text!.languageNotSupportedErrorText!.replace( "{{language}}", result.syntaxLanguage ) );
                }

            } else {
                result.rendered = logError( _configuration.text!.attributeNotSetErrorText!.replace( "{{attribute_name}}", Constants.SYNTAX_JS_ATTRIBUTE_NAME_LANGUAGE ) );
            }
        }

        return result;
    }
+
+ function renderSyntaxCustomTriggers( element: HTMLElement, syntaxOptions: BindingOptions ) : void {
+ renderElementClickEvents( element, syntaxOptions.events!.onKeywordClicked!, "keyword-clickable" );
+ renderElementClickEvents( element, syntaxOptions.events!.onKeywordClicked!, "no-highlight-keyword-clickable" );
+ renderElementClickEvents( element, syntaxOptions.events!.onValueClicked!, "value-clickable" );
+ renderElementClickEvents( element, syntaxOptions.events!.onValueClicked!, "no-highlight-value-clickable" );
+ renderElementClickEvents( element, syntaxOptions.events!.onAttributeClicked!, "attribute-clickable" );
+ renderElementClickEvents( element, syntaxOptions.events!.onAttributeClicked!, "no-highlight-attribute-clickable" );
+ }
+
    function renderHTML( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
        // Highlighting pipeline. Pass order matters: comments and strings are swapped
        // for placeholders first (so keyword/value passes cannot touch them), then
        // keywords/values/attributes are replaced, then mark-up characters are encoded,
        // and finally every placeholder is swapped back for its cached <span> markup.
        if ( !language.isMarkUp ) {
            innerHTML = Data.String.encodeMarkUpCharacters( innerHTML );
        }

        if ( syntaxOptions.highlightComments ) {
            // Multi-line first, so single-line tokens inside a block are not matched twice.
            innerHTML = renderElementMultiLineCommentVariables( innerHTML, language, syntaxOptions );
            innerHTML = renderElementCommentVariables( innerHTML, language, syntaxOptions );
        }

        if ( syntaxOptions.highlightStrings ) {
            // Double-quoted literals (with escape support); single-quoted ones too,
            // unless the language uses ' as its comment token.
            innerHTML = renderElementStringPatternVariables( innerHTML, innerHTML.match( /"((?:\\.|[^"\\])*)"/g )!, syntaxOptions );

            if ( language.comment !== "'" ) {
                innerHTML = renderElementStringPatternVariables( innerHTML, innerHTML.match( /'((?:\\.|[^"\\])*)'/g )!, syntaxOptions );
            }
        }

        // Mark-up languages match keywords as tags; everything else as plain words.
        if ( !language.isMarkUp ) {
            innerHTML = renderElementKeywords( innerHTML, language, syntaxOptions );
        } else {
            innerHTML = replaceMarkUpKeywords( innerHTML, language, syntaxOptions );
        }

        innerHTML = renderElementValues( innerHTML, language, syntaxOptions );

        if ( language.isMarkUp ) {
            innerHTML = renderElementAttributes( innerHTML, language, syntaxOptions );
        }

        innerHTML = Data.String.encodeMarkUpCharacters( innerHTML );

        // Re-insert cached markup now that encoding is done.
        if ( syntaxOptions.highlightComments ) {
            innerHTML = renderElementCommentsFromVariables( innerHTML, language );
        }

        if ( syntaxOptions.highlightStrings ) {
            innerHTML = renderElementStringQuotesFromVariables( innerHTML );
        }

        innerHTML = renderElementVariables( innerHTML, _cached_Keywords );
        innerHTML = renderElementVariables( innerHTML, _cached_Values );

        if ( language.isMarkUp ) {
            innerHTML = renderElementVariables( innerHTML, _cached_Attributes );
        }

        return innerHTML;
    }
+
    function renderElementButtons( syntax: HTMLElement, syntaxOptions: BindingOptions, syntaxLanguage: string, syntaxButtonsParsed: StringToJson, innerHTMLCopy: string ) : void {
        // Builds the button strip above the code: custom buttons, copy, print and the
        // language label. When the button count exceeds maximumButtons an opener
        // toggle shows/hides them. innerHTMLCopy is the raw (pre-highlight) code
        // handed to copy/print/custom handlers.
        if ( syntaxOptions.showLanguageLabel || syntaxOptions.showCopyButton || syntaxOptions.showPrintButton || syntaxButtonsParsed.parsed ) {
            const buttons: HTMLElement = DomElement.create( "div", "buttons" );
            const buttonsElements: HTMLElement[] = [];

            syntax.appendChild( buttons );

            // Custom buttons come from the buttons attribute; each needs text and a
            // click handler to be rendered.
            if ( syntaxButtonsParsed.parsed && Is.definedArray( syntaxButtonsParsed.object ) ) {
                const customButtons: CustomButton[] = syntaxButtonsParsed.object;
                const customButtonsLength: number = customButtons.length;

                for ( let customButtonsIndex: number = 0; customButtonsIndex < customButtonsLength; customButtonsIndex++ ) {
                    const customButton: CustomButton = customButtons[ customButtonsIndex ];

                    if ( Is.defined( customButton.text ) && Is.definedFunction( customButton.events!.onClick! ) ) {
                        renderElementButton( customButton, buttonsElements, buttons, innerHTMLCopy, syntaxOptions );
                    }
                }
            }

            if ( syntaxOptions.showCopyButton ) {
                const copyButton: HTMLButtonElement = DomElement.create( "button", "button" ) as HTMLButtonElement;
                copyButton.style.display = syntaxOptions.buttonsVisible ? "inline-block" : "none";
                buttons.appendChild( copyButton );

                DomElement.setNodeText( copyButton, _configuration.text!.copyButtonText!, _configuration );

                copyButton.onclick = function() {
                    // Copies the original, un-highlighted code to the clipboard.
                    navigator.clipboard.writeText( innerHTMLCopy );

                    fireCustomTriggerEvent( syntaxOptions.events!.onCopy!, innerHTMLCopy );
                };

                buttonsElements.push( copyButton );
            }

            if ( syntaxOptions.showPrintButton ) {
                const printButton: HTMLButtonElement = DomElement.create( "button", "button" ) as HTMLButtonElement;
                printButton.style.display = syntaxOptions.buttonsVisible ? "inline-block" : "none";
                buttons.appendChild( printButton );

                DomElement.setNodeText( printButton, _configuration.text!.printButtonText!, _configuration );

                printButton.onclick = function() {
                    // Opens a bare window containing a clone of the syntax (minus this
                    // button strip, removed below), prints it and closes the window.
                    const newWindow: WindowProxy = window.open( Char.empty, "PRINT", "height=400,width=600" )!;
                    const newElementForPrint: HTMLElement = syntax.cloneNode( true ) as HTMLElement;
                    const newTitleElement: HTMLElement = DomElement.create( "div" );

                    // First child of the clone is the buttons strip — strip it from print output.
                    newElementForPrint.removeChild( newElementForPrint.children[ 0 ] );
                    newTitleElement.innerHTML = getFriendlyLanguageName( syntaxLanguage );

                    newWindow.document.write( "<html>" );
                    newWindow.document.write( "<head>" );
                    newWindow.document.write( "<title>" );
                    newWindow.document.write( newTitleElement.innerHTML );
                    newWindow.document.write( "</title>" );
                    newWindow.document.write( "</head>" );
                    newWindow.document.write( "<body>" );
                    newWindow.document.write( "<code>" );
                    newWindow.document.write( "<pre>" );
                    newWindow.document.write( newElementForPrint.innerHTML );
                    newWindow.document.write( "</pre>" );
                    newWindow.document.write( "</code>" );
                    newWindow.document.write( "</body>" );
                    newWindow.document.write( "</html>" );

                    newWindow.document.close();
                    newWindow.focus();
                    newWindow.print();
                    newWindow.close();

                    fireCustomTriggerEvent( syntaxOptions.events!.onPrint!, newElementForPrint.innerHTML );
                };

                buttonsElements.push( printButton );
            }

            // The label is not pushed to buttonsElements, so the opener never hides it.
            if ( syntaxOptions.showLanguageLabel ) {
                const languageLabel: HTMLElement = DomElement.create( "div", "language-label" );
                buttons.appendChild( languageLabel );

                DomElement.setNodeText( languageLabel, getFriendlyLanguageName( syntaxLanguage, syntaxOptions.languageLabelCasing! ), _configuration );
            }

            const buttonsElementsLength: number = buttonsElements.length;

            if ( buttonsElementsLength > syntaxOptions.maximumButtons! ) {
                // Too many buttons: prepend an opener whose label doubles as the
                // open/closed state flag.
                const openButton: HTMLButtonElement = DomElement.create( "button", "button button-opener" ) as HTMLButtonElement;
                openButton.innerText = syntaxOptions.buttonsVisible ? _configuration.text!.buttonsCloserText! : _configuration.text!.buttonsOpenerText!;
                buttons.insertBefore( openButton, buttons.children[ 0 ] );

                openButton.onclick = function() {
                    const areButtonsVisible: boolean = openButton.innerText === _configuration.text!.buttonsCloserText;

                    for ( let buttonsElementIndex: number = 0; buttonsElementIndex < buttonsElementsLength; buttonsElementIndex++ ) {
                        buttonsElements[ buttonsElementIndex ].style.display = areButtonsVisible ? "none" : "inline-block";
                    }

                    openButton.innerText = areButtonsVisible ? _configuration.text!.buttonsOpenerText! : _configuration.text!.buttonsCloserText!;

                    if ( areButtonsVisible ) {
                        fireCustomTriggerEvent( syntaxOptions.events!.onButtonsClosed! );
                    } else {
                        fireCustomTriggerEvent( syntaxOptions.events!.onButtonsOpened! );
                    }
                };

            } else if ( !syntaxOptions.buttonsVisible && buttonsElementsLength <= syntaxOptions.maximumButtons! ) {
                // Few enough buttons that no opener is needed: force them visible even
                // though buttonsVisible created them hidden.
                for ( let buttonsElementIndex: number = 0; buttonsElementIndex < buttonsElementsLength; buttonsElementIndex++ ) {
                    buttonsElements[ buttonsElementIndex ].style.display = "inline-block";
                }
            }
        }
    }
+
    function renderElementButton( customButton: CustomButton, buttonsElements: HTMLElement[], buttons: HTMLElement, innerHTMLCopy: string, syntaxOptions: BindingOptions ) : void {
        // Builds one user-supplied button, wires its click handler to receive the
        // original (pre-highlight) code text, and registers it in buttonsElements so
        // the opener/closer logic can show or hide it later.
        const newCustomButton: HTMLButtonElement = DomElement.create( "button", "button" ) as HTMLButtonElement;
        newCustomButton.style.display = syntaxOptions.buttonsVisible ? "inline-block" : "none";
        buttons.appendChild( newCustomButton );

        DomElement.setNodeText( newCustomButton, customButton.text!, _configuration );

        newCustomButton.onclick = function() {
            customButton.events!.onClick!( innerHTMLCopy );
        };

        // Optional extra CSS class appended after the base "button" class.
        if ( Is.defined( customButton.className ) ) {
            newCustomButton.className += Char.space + customButton.className;
        }

        buttonsElements.push( newCustomButton );
    }
+
    function renderElementCommentVariables( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
        // Replaces each single-line comment (the language's comment token through to
        // end-of-line) with a cached placeholder ($C{n}) so later highlighting passes
        // cannot alter comment text. Fires onCommentRender per comment found.
        const lookup: string = language.comment!;

        if ( Is.definedString( lookup ) ) {
            // NOTE(review): the token is interpolated into the RegExp unescaped — a
            // comment token containing regex metacharacters would mis-match; confirm
            // all registered tokens are metacharacter-free.
            const patternItems: RegExpMatchArray = innerHTML.match( new RegExp( `${lookup}.*`, "g" ) )!;

            if ( patternItems !== null ) {
                const patternItemsLength: number = patternItems.length;

                for ( let patternItemsIndex: number = 0; patternItemsIndex < patternItemsLength; patternItemsIndex++ ) {
                    const comment: string = patternItems[ patternItemsIndex ];
                    const commentVariable: string = `\$C{${_cached_Comments_Count.toString()}}`;

                    // Matches arrive in document order, so replacing the first
                    // occurrence each time handles duplicate comment text correctly.
                    _cached_Comments[ commentVariable ] = `<span class=\"comment\">${comment}</span>`;
                    _cached_Comments_Count++;

                    innerHTML = innerHTML.replace( comment, commentVariable );

                    fireCustomTriggerEvent( syntaxOptions.events!.onCommentRender!, comment );
                }
            }
        }

        return innerHTML;
    }
+
    function renderElementMultiLineCommentVariables( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
        // Replaces multi-line comment blocks (delimited by the language's [open, close]
        // token pair) with cached placeholder variables ($C{n}), one per comment line,
        // so later highlighting passes skip them. Fires onCommentRender per block.
        const multiLineComment: string[] = language.multiLineComment as string[];

        // Only languages declaring exactly an open and a close delimiter are handled.
        if ( Is.definedArray( multiLineComment ) && multiLineComment.length === 2 ) {
            let startIndex: number = 0;
            let endIndex: number = 0;

            // Scan forward for successive open/close pairs; either delimiter missing
            // (index -1) terminates the loop.
            while ( startIndex >= 0 && endIndex >= 0 ) {
                startIndex = innerHTML.indexOf( multiLineComment[ 0 ], endIndex );

                if ( startIndex > -1 ) {
                    endIndex = innerHTML.indexOf( multiLineComment[ 1 ], startIndex + multiLineComment[ 0 ].length );

                    if ( endIndex > -1 ) {
                        const comment: string = innerHTML.substring( startIndex, endIndex + multiLineComment[ 1 ].length );
                        const commentLines: string[] = comment.split( Char.newLine );
                        const commentLinesLength: number = commentLines.length;
                        // A block that fits on one line is styled as a plain comment.
                        const commentCssClass: string = commentLinesLength === 1 ? "comment" : "multi-line-comment";

                        for ( let commentLineIndex: number = 0; commentLineIndex < commentLinesLength; commentLineIndex++ ) {
                            const commentVariable: string = `\$C{${_cached_Comments_Count.toString()}}`;
                            const commentLine: string = commentLines[ commentLineIndex ];

                            _cached_Comments[ commentVariable ] = `<span class=\"${commentCssClass}\">${commentLine}</span>`;
                            _cached_Comments_Count++;

                            // NOTE(review): this shortens innerHTML, but endIndex was computed
                            // on the pre-replacement text — the next indexOf may overshoot a
                            // comment starting immediately after this one; confirm intended.
                            innerHTML = innerHTML.replace( commentLine, commentVariable );
                        }

                        fireCustomTriggerEvent( syntaxOptions.events!.onCommentRender!, comment );
                    }
                }
            }
        }

        return innerHTML;
    }
+
    function renderElementStringPatternVariables( innerHTML: string, patternItems: string[], syntaxOptions: BindingOptions ) : string {
        // Replaces each matched string literal with cached placeholders ($S{n}), one
        // per line of the literal, so later highlighting passes leave string contents
        // untouched. patternItems comes from a String.match() call and may be null.
        // Fires onStringRender once per literal.
        if ( patternItems !== null ) {
            const patternItemsLength: number = patternItems.length;

            for ( let patternItemsIndex: number = 0; patternItemsIndex < patternItemsLength; patternItemsIndex++ ) {
                const string: string = patternItems[ patternItemsIndex ];
                const stringLines: string[] = string.split( Char.newLine );
                const stringLinesLength: number = stringLines.length;
                // Literals spanning several lines get a dedicated CSS class.
                const stringCssClass: string = stringLinesLength === 1 ? "string" : "multi-line-string";

                for ( let stringLineIndex: number = 0; stringLineIndex < stringLinesLength; stringLineIndex++ ) {
                    const stringLine: string = stringLines[ stringLineIndex ];
                    const stringVariable: string = `\$S{${_cached_Strings_Count.toString()}}`;

                    _cached_Strings[ stringVariable ] = `<span class=\"${stringCssClass}\">${stringLine}</span>`;
                    _cached_Strings_Count++;

                    innerHTML = innerHTML.replace( stringLine, stringVariable );
                }

                fireCustomTriggerEvent( syntaxOptions.events!.onStringRender!, string );
            }
        }

        return innerHTML;
    }
+
+ function renderElementKeywords( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
+ const keywords: string[] = Data.getDefaultStringOrArray( language.keywords, [] );
+ const keywordsLength: number = keywords.length;
+ const caseSensitive: boolean = language.caseSensitive!;
+ const keywordsCasing: string = getKeywordCasing( language.keywordsCasing! );
+
+ Data.String.sortArrayOfStringByLength( keywords );
+
+ for ( let keywordIndex: number = 0; keywordIndex < keywordsLength; keywordIndex++ ) {
+ const keyword: string = keywords[ keywordIndex ];
+ const keywordDisplay: string = getDisplayTextTestCasing( keyword, keywordsCasing );
+ const keywordVariable: string = `KW${_cached_Keywords_Count.toString()};`;
+ let keywordReplacement: string = null!;
+ const regExFlags: string = caseSensitive ? "g" : "gi";
+ const regEx: RegExp = new RegExp( getWordRegEx( keyword, language ), regExFlags );
+
+ if ( syntaxOptions.highlightKeywords ) {
+ if ( Is.definedFunction( syntaxOptions.events!.onKeywordClicked ) ) {
+ keywordReplacement = `<span class=\"keyword-clickable\">${keywordDisplay}</span>`;
+ innerHTML = innerHTML.replace( regEx, keywordVariable );
+ } else {
+ keywordReplacement = `<span class=\"keyword\">${keywordDisplay}</span>`;
+ innerHTML = innerHTML.replace( regEx, keywordVariable );
+ }
+
+ } else {
+ if ( Is.definedFunction( syntaxOptions.events!.onKeywordClicked ) ) {
+ keywordReplacement = `<span class=\"no-highlight-keyword-clickable\">${keywordDisplay}</span>`;
+ innerHTML = innerHTML.replace( regEx, keywordVariable );
+ }
+ }
+
+ _cached_Keywords[ keywordVariable ] = keywordReplacement;
+ _cached_Keywords_Count++;
+
+ fireCustomTriggerEvent( syntaxOptions.events!.onKeywordRender!, keyword );
+ }
+
+ return innerHTML;
+ }
+
+ function replaceMarkUpKeywords( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
+ const keywords: string[] = Data.getDefaultStringOrArray( language.keywords, [] );
+ const caseSensitive: boolean = language.caseSensitive!;
+ const keywordsCasing: string = getKeywordCasing( language.keywordsCasing! );
+
+ const regEx: RegExp = /(<([^>]+)>)/ig;
+ const regExFlags: string = caseSensitive ? "g" : "gi";
+ let regExResult: RegExpExecArray = regEx.exec( innerHTML )!;
+
+ while ( regExResult ) {
+ if ( regExResult.index === regEx.lastIndex ) {
+ regEx.lastIndex++;
+ }
+
+ let tag: string = regExResult[ 0 ];
+ tag = tag.replace( "</", Char.empty ).replace( "<", Char.empty ).replace( ">", Char.empty );
+ tag = tag.split( Char.space )[ 0 ];
+
+ if ( keywords.indexOf( tag ) > -1 ) {
+ const keywordVariable: string = `KW${_cached_Keywords_Count.toString()};`;
+ const regExReplace: RegExp = new RegExp( getWordRegEx( tag, language ), regExFlags );
+ let keywordReplacement: string = null!;
+ let replacementTagDisplay: string = getDisplayTextTestCasing( tag, keywordsCasing );
+
+ if ( syntaxOptions.highlightKeywords ) {
+ if ( Is.definedFunction( syntaxOptions.events!.onKeywordClicked ) ) {
+ keywordReplacement = `<span class=\"keyword-clickable\">${replacementTagDisplay}</span>`;
+ } else {
+ keywordReplacement = `<span class=\"keyword\">${replacementTagDisplay}</span>`;
+ }
+
+ } else {
+ if ( Is.definedFunction( syntaxOptions.events!.onKeywordClicked ) ) {
+ keywordReplacement = `<span class=\"no-highlight-keyword-clickable\">${replacementTagDisplay}</span>`;
+ }
+ }
+
+ innerHTML = innerHTML.replace( regExReplace, keywordVariable );
+
+ _cached_Keywords[ keywordVariable ] = keywordReplacement;
+ _cached_Keywords_Count++;
+ }
+
+ regExResult = regEx.exec( innerHTML )!;
+ }
+
+ return innerHTML;
+ }
+
+ function renderElementValues( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
+ const values: string[] = Data.getDefaultStringOrArray( language.values, [] );
+ const valuesLength: number = values.length;
+ const caseSensitive: boolean = language.caseSensitive!;
+
+ Data.String.sortArrayOfStringByLength( values );
+
+ for ( let valueIndex: number = 0; valueIndex < valuesLength; valueIndex++ ) {
+ const value: string = values[ valueIndex ];
+ const valueVariable: string = `VAL${_cached_Values_Count.toString()};`;
+ let valueReplacement: string = null!;
+ const regExFlags: string = caseSensitive ? "g" : "gi";
+ const regEx: RegExp = new RegExp( getWordRegEx( value, language ), regExFlags );
+
+ if ( syntaxOptions.highlightValues ) {
+ if ( Is.definedFunction( syntaxOptions.events!.onValueClicked! ) ) {
+ valueReplacement = `<span class=\"value-clickable\">${value}</span>`;
+ innerHTML = innerHTML.replace( regEx, valueVariable );
+ } else {
+ valueReplacement = `<span class=\"value\">${value}</span>`;
+ innerHTML = innerHTML.replace( regEx, valueVariable );
+ }
+
+ } else {
+ if ( Is.definedFunction( syntaxOptions.events!.onValueClicked! ) ) {
+ valueReplacement = `<span class=\"no-highlight-value-clickable\">${value}</span>`;
+ innerHTML = innerHTML.replace( regEx, valueVariable );
+ }
+ }
+
+ _cached_Values[ valueVariable ] = valueReplacement;
+ _cached_Values_Count++;
+
+ fireCustomTriggerEvent( syntaxOptions.events!.onValueRender!, value );
+ }
+
+ return innerHTML;
+ }
+
+ function renderElementAttributes( innerHTML: string, language: SyntaxLanguage, syntaxOptions: BindingOptions ) : string {
+ const attributes: string[] = Data.getDefaultStringOrArray( language.attributes, [] );
+ const attributesLength: number = attributes.length;
+ const caseSensitive: boolean = language.caseSensitive!;
+
+ Data.String.sortArrayOfStringByLength( attributes );
+
+ for ( let attributeIndex: number = 0; attributeIndex < attributesLength; attributeIndex++ ) {
+ const attribute: string = attributes[ attributeIndex ];
+ const attributeVariable: string = `ATTR${_cached_Attributes_Count.toString()};`;
+ let attributeReplacement: string = null!;
+ let regExFlags: string = caseSensitive ? "g" : "gi";
+ const regEx: RegExp = new RegExp( getWordRegEx( attribute, language ), regExFlags );
+
+ if ( syntaxOptions.highlightAttributes ) {
+ if ( Is.definedFunction( syntaxOptions.events!.onAttributeClicked ) ) {
+ attributeReplacement = `<span class=\"attribute-clickable\">${attribute}</span>`;
+ innerHTML = innerHTML.replace( regEx, attributeVariable );
+ } else {
+ attributeReplacement = `<span class=\"attribute\">${attribute}</span>`;
+ innerHTML = innerHTML.replace( regEx, attributeVariable );
+ }
+
+ } else {
+ if ( Is.definedFunction( syntaxOptions.events!.onAttributeClicked ) ) {
+ attributeReplacement = `<span class=\"no-highlight-attribute-clickable\">${attribute}</span>`;
+ innerHTML = innerHTML.replace( regEx, attributeVariable );
+ }
+ }
+
+ _cached_Attributes[ attributeVariable ] = attributeReplacement;
+ _cached_Attributes_Count++;
+
+ fireCustomTriggerEvent( syntaxOptions.events!.onAttributeRender!, attribute );
+ }
+
+ return innerHTML;
+ }
+
+ function renderElementStringQuotesFromVariables( innerHTML: string ) : string {
+ for ( let quoteVariable in _cached_Strings ) {
+ if ( _cached_Strings.hasOwnProperty( quoteVariable ) ) {
+ innerHTML = innerHTML.replace( quoteVariable, _cached_Strings[ quoteVariable ] );
+ }
+ }
+
+ return innerHTML;
+ }
+
+ function renderElementCommentsFromVariables( innerHTML: string, language: SyntaxLanguage ) : string {
+ const multiLineComment: string[] = language.multiLineComment as string[];
+ let start: string = null!;
+ let end: string = null!;
+
+ if ( Is.definedArray( multiLineComment ) && multiLineComment.length === 2 ) {
+ start = Data.String.encodeMarkUpCharacters( multiLineComment[ 0 ] );
+ end = Data.String.encodeMarkUpCharacters( multiLineComment[ 1 ] );
+ }
+
+ for ( let commentVariable in _cached_Comments ) {
+ if ( _cached_Comments.hasOwnProperty( commentVariable ) ) {
+ let replacement: string = _cached_Comments[ commentVariable ];
+
+ if ( language.isMarkUp && Is.definedString( start ) && Is.definedString( end ) ) {
+ replacement = replacement.replace( multiLineComment[ 0 ], start );
+ replacement = replacement.replace( multiLineComment[ 1 ], end );
+ }
+
+ innerHTML = innerHTML.replace( commentVariable, replacement );
+ }
+ }
+
+ return innerHTML;
+ }
+
+ function renderElementVariables( innerHTML: string, variables: Record<string, string> ) : string {
+ for ( let variable in variables ) {
+ if ( variables.hasOwnProperty( variable ) ) {
+ const regExHtmlReplace: RegExp = new RegExp( variable, "g" );
+
+ innerHTML = innerHTML.replace( regExHtmlReplace, variables[ variable ] );
+ }
+ }
+
+ return innerHTML;
+ }
+
+ function renderElementCompletedHTML( description: HTMLElement, numbers: HTMLElement, syntax: HTMLElement, innerHTML: string, syntaxOptions: BindingOptions, isPreFormatted: boolean ) : void {
+ const lines: string[] = innerHTML.split( Char.newLine );
+ const linesLength: number = lines.length;
+ const linesLengthStringLength: number = linesLength.toString().length;
+ let numberContainer: HTMLElement = numbers;
+ let codeContainer: HTMLElement = syntax;
+ let replaceWhitespace: string = null!;
+ let lineNumber: number = 1;
+ let lastLineWasBlank: boolean = false;
+
+ if ( isPreFormatted ) {
+ codeContainer = DomElement.create( "pre" );
+ syntax.appendChild( codeContainer );
+
+ if ( Is.defined( numbers ) ) {
+ numberContainer = DomElement.create( "pre" );
+ numbers.appendChild( numberContainer );
+ }
+ }
+
+ if ( syntaxOptions.doubleClickToSelectAll ) {
+ if ( Is.defined( description ) ) {
+ description.ondblclick = function() {
+ DomElement.selectTextInElement( codeContainer );
+ };
+ }
+
+ if ( Is.defined( numbers ) ) {
+ numbers.ondblclick = function() {
+ DomElement.selectTextInElement( codeContainer );
+ };
+ }
+
+ syntax.ondblclick = function() {
+ DomElement.selectTextInElement( codeContainer );
+ };
+ }
+
+ for ( let lineIndex: number = 0; lineIndex < linesLength; lineIndex++ ) {
+ let line: string = lines[ lineIndex ];
+
+ if ( line.trim() !== Char.empty && replaceWhitespace === null ) {
+ replaceWhitespace = line.substring( 0, line.match( /^\s*/ )![ 0 ].length );
+ }
+
+ if ( ( lineIndex !== 0 && lineIndex !== linesLength - 1 ) || line.trim() !== Char.empty ) {
+ if ( line.trim() !== Char.empty || !syntaxOptions.removeBlankLines ) {
+ const isBlank: boolean = line.trim() === Char.empty;
+
+ if ( isBlank && !lastLineWasBlank || !syntaxOptions.removeDuplicateBlankLines || !isBlank ) {
+ lastLineWasBlank = isBlank;
+
+ if ( Is.defined( numberContainer ) ) {
+ const numberCode: HTMLElement = DomElement.create( "p" );
+
+ if ( syntaxOptions.padLineNumbers ) {
+ numberCode.innerText = Data.String.padNumber( lineNumber.toString(), linesLengthStringLength );
+ } else {
+ numberCode.innerText = lineNumber.toString();
+ }
+
+ numberContainer.appendChild( numberCode );
+ lineNumber++;
+ }
+
+ if ( replaceWhitespace !== null ) {
+ line = line.replace( replaceWhitespace, Char.empty );
+
+ if ( !isPreFormatted ) {
+ const remainingStartWhitespaceCount: number = line.match( /^\s*/ )![ 0 ].length;
+ const remainingStartWhitespace: string = line.substring( 0, remainingStartWhitespaceCount );
+ const whitespaceReplacement: string = Array( remainingStartWhitespaceCount ).join( " " );
+
+ line = line.replace( remainingStartWhitespace, whitespaceReplacement );
+ }
+ }
+
+ const syntaxCode: HTMLElement = DomElement.create( "p" );
+ syntaxCode.innerHTML = line.trim() === Char.empty ? "<br>" : line;
+ codeContainer.appendChild( syntaxCode );
+ }
+ }
+ }
+ }
+ }
+
+ function renderElementClickEvents( element: HTMLElement, customTrigger: Function, className: string ) : void {
+ if ( Is.definedFunction( customTrigger ) ) {
+ const domElements: HTMLCollectionOf<Element> = element.getElementsByClassName( className );
+ const elements: HTMLElement[] = [].slice.call( domElements );
+ const elementsLength: number = elements.length;
+
+ for ( let elementIndex: number = 0; elementIndex < elementsLength; elementIndex++ ) {
+ renderElementClickEvent( elements[ elementIndex ], customTrigger );
+ }
+ }
+ }
+
+ function renderElementClickEvent( element: HTMLElement, customTrigger: Function ) : void {
+ const text: string = element.innerText;
+
+ element.onclick = function() {
+ customTrigger( text );
+ };
+ }
+
+ function getFriendlyLanguageName( syntaxLanguage: string, languageLabelCasing: string = null! ) : string {
+ let result: string = null!;
+ const language: SyntaxLanguage = getLanguage( syntaxLanguage );
+
+ if ( Is.defined( language ) && Is.definedString( language.friendlyName ) ) {
+ result = language.friendlyName!;
+ } else {
+ result = syntaxLanguage;
+ }
+
+ result = getDisplayTextTestCasing( result, languageLabelCasing );
+
+ return result;
+ }
+
+ function getLanguage( syntaxLanguage: string ) : SyntaxLanguage {
+ let result: SyntaxLanguage = null!;
+ let language: string = syntaxLanguage.toLowerCase();
+
+ if ( _languages.hasOwnProperty( language ) ) {
+ result = _languages[ language ];
+ } else {
+
+ if ( _aliases_Rules.hasOwnProperty( language ) ) {
+ language = _aliases_Rules[ language ];
+
+ if ( _languages.hasOwnProperty( language ) ) {
+ result = _languages[ language ];
+ }
+ }
+ }
+
+ return result;
+ }
+
+ function getKeywordCasing( keywordsCasing: string ) : string {
+ if ( Is.definedString( keywordsCasing ) ) {
+ keywordsCasing = keywordsCasing.toLowerCase().trim();
+ }
+
+ return keywordsCasing;
+ }
+
+ function getDisplayTextTestCasing( keyword: string, keywordsCasing: string ) : string {
+ if ( keywordsCasing === TextCasing.uppercase ) {
+ keyword = keyword.toUpperCase();
+ } else if ( keywordsCasing === TextCasing.lowercase ) {
+ keyword = keyword.toLowerCase();
+ }
+
+ return keyword;
+ }
+
+ function getWordRegEx( word: string, language: SyntaxLanguage ) : string {
+ let result: string = `(?<=^|[^-])\\b${word}\\b(?=[^-]|\$)`; | ## Useless regular-expression character escape
The escape sequence '\$' is equivalent to just '$', so the sequence may still represent a meta-character when it is used in a [regular expression](1).
The escape sequence '\$' is equivalent to just '$', so the sequence may still represent a meta-character when it is used in a [regular expression](2).
The escape sequence '\$' is equivalent to just '$', so the sequence may still represent a meta-character when it is used in a [regular expression](3).
The escape sequence '\$' is equivalent to just '$', so the sequence may still represent a meta-character when it is used in a [regular expression](4).
[Show more details](https://github.com/williamtroup/Syntax.js/security/code-scanning/15) |
Syntax.js | github_2023 | javascript | 23 | williamtroup | github-advanced-security[bot] | @@ -286,10 +302,67 @@
}
return innerHTML;
}
+ function replaceMarkUpKeywords(innerHTML, language, syntaxOptions) {
+ var keywords = getDefaultStringOrArray(language.keywords, []);
+ var caseSensitive = language.caseSensitive;
+ var keywordsCasing = language.keywordsCasing;
+ if (isDefinedString(keywordsCasing)) {
+ keywordsCasing = keywordsCasing.toLowerCase().trim();
+ }
+ var regEx = /(<([^>]+)>)/ig;
+ var replacements = {};
+ var replacementsNumber = 1;
+ var regExFlags = caseSensitive ? "g" : "gi";
+ var regExResult = regEx.exec(innerHTML);
+ for (; regExResult;) {
+ if (regExResult.index === regEx.lastIndex) {
+ regEx.lastIndex++;
+ }
+ var tag = regExResult[0];
+ tag = tag.replace("</", _string.empty).replace("<", _string.empty).replace(">", _string.empty); | ## Incomplete string escaping or encoding
This replaces only the first occurrence of ">".
[Show more details](https://github.com/williamtroup/Syntax.js/security/code-scanning/5) |
Syntax.js | github_2023 | javascript | 23 | williamtroup | github-advanced-security[bot] | @@ -420,11 +440,84 @@
return innerHTML;
}
+ function replaceMarkUpKeywords( innerHTML, language, syntaxOptions ) {
+ var keywords = getDefaultStringOrArray( language.keywords, [] ),
+ caseSensitive = language.caseSensitive,
+ keywordsCasing = language.keywordsCasing;
+
+ if ( isDefinedString( keywordsCasing ) ) {
+ keywordsCasing = keywordsCasing.toLowerCase().trim();
+ }
+
+ var regEx = /(<([^>]+)>)/ig,
+ replacements = {},
+ replacementsNumber = 1,
+ regExFlags = caseSensitive ? "g" : "gi",
+ regExResult = regEx.exec( innerHTML );
+
+ while ( regExResult ) {
+ if ( regExResult.index === regEx.lastIndex ) {
+ regEx.lastIndex++;
+ }
+
+ var tag = regExResult[ 0 ];
+ tag = tag.replace( "</", _string.empty ).replace( "<", _string.empty ).replace( ">", _string.empty ); | ## Incomplete string escaping or encoding
This replaces only the first occurrence of ">".
[Show more details](https://github.com/williamtroup/Syntax.js/security/code-scanning/7) |
Syntax.js | github_2023 | javascript | 19 | williamtroup | github-advanced-security[bot] | @@ -133,11 +133,13 @@
printButton.onclick = function() {
var newWindow = window.open(_string.empty, "PRINT", "height=400,width=600");
var newElementForPrint = syntax.cloneNode(true);
+ var newTitleElement = createElement("div");
newElementForPrint.removeChild(newElementForPrint.children[0]);
+ newTitleElement.innerHTML = getFriendlyLanguageName(syntaxLanguage);
newWindow.document.write("<html>");
newWindow.document.write("<head>");
newWindow.document.write("<title>");
- newWindow.document.write(getFriendlyLanguageName(syntaxLanguage));
+ newWindow.document.write(newTitleElement.innerHTML); | ## DOM text reinterpreted as HTML
[DOM text](1) is reinterpreted as HTML without escaping meta-characters.
[Show more details](https://github.com/williamtroup/Syntax.js/security/code-scanning/3) |
Syntax.js | github_2023 | javascript | 19 | williamtroup | github-advanced-security[bot] | @@ -224,14 +224,16 @@
printButton.onclick = function() {
var newWindow = window.open( _string.empty, "PRINT", "height=400,width=600" ),
- newElementForPrint = syntax.cloneNode( true );
+ newElementForPrint = syntax.cloneNode( true ),
+ newTitleElement = createElement( "div" );
newElementForPrint.removeChild( newElementForPrint.children[ 0 ] );
+ newTitleElement.innerHTML = getFriendlyLanguageName( syntaxLanguage );
newWindow.document.write( "<html>" );
newWindow.document.write( "<head>" );
newWindow.document.write( "<title>" );
- newWindow.document.write( getFriendlyLanguageName( syntaxLanguage ) );
+ newWindow.document.write( newTitleElement.innerHTML ); | ## DOM text reinterpreted as HTML
[DOM text](1) is reinterpreted as HTML without escaping meta-characters.
[Show more details](https://github.com/williamtroup/Syntax.js/security/code-scanning/4) |
gitops-bridge | github_2023 | others | 39 | gitops-bridge-dev | csantanapr | @@ -0,0 +1,11 @@
+# Pulumi Typescript GitOps Bridge
+
+### How to Start:
+
+1. Create applicable stack files you need
+2. Update configuration values as you need
+3. Add any extra resources you may need in your given environment
+4. Update GitOps Config to output any extra values you may need to your GitOps Controller
+5. Add an Environment Variable for `GITHUB_TOKEN` in your deployment env (local, Github Actions, AWS Code Pipeline, etc;)
+6. `pulumi up` | It would be good to add more details on what you get with this, like the example creates vpc, iam resources, eks cluster, then installs argo on it.
|
gitops-bridge | github_2023 | others | 39 | gitops-bridge-dev | csantanapr | @@ -0,0 +1,11 @@
+# Pulumi Typescript GitOps Bridge | Can you put the example in an `eks` directory?
`argocd/iac/pulumi/eks/typescript/README.md` |
gitops-bridge | github_2023 | others | 5 | gitops-bridge-dev | allamand | @@ -0,0 +1,33 @@
+output "configure_kubectl" {
+ description = "Configure kubectl: make sure you're logged in with the correct AWS profile and run the following command to update your kubeconfig"
+ value = <<-EOT
+ export KUBECONFIG="/tmp/${module.eks.cluster_name}"
+ aws eks --region ${local.region} update-kubeconfig --name ${module.eks.cluster_name}
+ EOT
+}
+
+output "configure_argocd" {
+ description = "Terminal Setup"
+ value = <<-EOT | Not sure the << EOT is working, this is my output:
```
terraform output
access_argocd = <<EOT
export KUBECONFIG="/tmp/ex-private-git"
aws eks --region eu-west-1 update-kubeconfig --name ex-private-git
echo "ArgoCD URL: https://$(kubectl get svc -n argocd argo-cd-argocd-server -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')"
echo "ArgoCD Username: admin"
echo "ArgoCD Password: $(kubectl get secrets argocd-initial-admin-secret -n argocd --template="{{index .data.password | base64decode}}")"
EOT
configure_argocd = <<EOT
export KUBECONFIG="/tmp/ex-private-git"
aws eks --region eu-west-1 update-kubeconfig --name ex-private-git
export ARGOCD_OPTS="--port-forward --port-forward-namespace argocd --grpc-web"
kubectl config set-context --current --namespace argocd
argocd login --port-forward --username admin --password $(argocd admin initial-password | head -1)
echo "ArgoCD Username: admin"
echo "ArgoCD Password: $(kubectl get secrets argocd-initial-admin-secret -n argocd --template="{{index .data.password | base64decode}}")"
echo Port Forward: http://localhost:8080
kubectl port-forward -n argocd svc/argo-cd-argocd-server 8080:80
EOT
configure_kubectl = <<EOT
export KUBECONFIG="/tmp/ex-private-git"
aws eks --region eu-west-1 update-kubeconfig --name ex-private-git
EOT
``` |
gitops-bridge | github_2023 | others | 5 | gitops-bridge-dev | allamand | @@ -0,0 +1,122 @@
+---
+apiVersion: v1
+kind: Secret
+metadata:
+ name: git-addons
+ namespace: argocd
+ labels:
+ argocd.argoproj.io/secret-type: repo-creds
+stringData:
+ type: git
+ url: git@github.com:gitops-bridge-dev
+ sshPrivateKey: |
+ -----BEGIN OPENSSH PRIVATE KEY----- | whose key is it ? |
gitops-bridge | github_2023 | others | 5 | gitops-bridge-dev | allamand | @@ -0,0 +1,299 @@
+provider "aws" {
+ region = local.region
+}
+data "aws_caller_identity" "current" {}
+data "aws_availability_zones" "available" {}
+
+provider "helm" {
+ kubernetes {
+ host = module.eks.cluster_endpoint
+ cluster_ca_certificate = base64decode(module.eks.cluster_certificate_authority_data)
+
+ exec {
+ api_version = "client.authentication.k8s.io/v1beta1"
+ command = "aws"
+ # This requires the awscli to be installed locally where Terraform is executed
+ args = ["eks", "get-token", "--cluster-name", module.eks.cluster_name, "--region", local.region]
+ }
+ }
+}
+
+provider "kubectl" {
+ host = module.eks.cluster_endpoint
+ cluster_ca_certificate = base64decode(module.eks.cluster_certificate_authority_data)
+ exec {
+ api_version = "client.authentication.k8s.io/v1beta1"
+ args = ["eks", "get-token", "--cluster-name", module.eks.cluster_name, "--region", local.region]
+ command = "aws"
+ }
+ load_config_file = false
+ apply_retry_count = 15
+}
+
+provider "kubernetes" {
+ host = module.eks.cluster_endpoint
+ cluster_ca_certificate = base64decode(module.eks.cluster_certificate_authority_data)
+
+ exec {
+ api_version = "client.authentication.k8s.io/v1beta1"
+ command = "aws"
+ # This requires the awscli to be installed locally where Terraform is executed
+ args = ["eks", "get-token", "--cluster-name", module.eks.cluster_name, "--region", local.region]
+ }
+}
+
+locals {
+ name = "ex-${replace(basename(path.cwd), "_", "-")}"
+ environment = "dev"
+ region = "us-west-2"
+ cluster_version = "1.27"
+
+ git_private_ssh_key = "~/.ssh/id_rsa" # Update with the git ssh key to be used by ArgoCD
+
+ gitops_addons_org = "git@github.com:gitops-bridge-dev"
+ gitops_addons_repo = "gitops-bridge-argocd-control-plane-template"
+ gitops_addon_path = "bootstrap/control-plane/addons"
+ gitops_addon_revision = "HEAD"
+
+ gitops_workloads_org = "git@github.com:argoproj"
+ gitops_workloads_repo = "argocd-example-apps"
+ gitops_workloads_path = "helm-guestbook"
+ gitops_workloads_revision = "HEAD"
+
+ aws_addons = {
+ enable_cert_manager = true
+ #enable_aws_efs_csi_driver = true
+ #enable_aws_fsx_csi_driver = true
+ #enable_aws_cloudwatch_metrics = true
+ #enable_aws_privateca_issuer = true
+ #enable_cluster_autoscaler = true
+ #enable_external_dns = true
+ #enable_external_secrets = true
+ #enable_aws_load_balancer_controller = true
+ #enable_fargate_fluentbit = true
+ #enable_aws_for_fluentbit = true
+ #enable_aws_node_termination_handler = true
+ #enable_karpenter = true
+ #enable_velero = true
+ #enable_aws_gateway_api_controller = true
+ #enable_aws_ebs_csi_resources = true # generate gp2 and gp3 storage classes for ebs-csi
+ #enable_aws_secrets_store_csi_driver_provider = true
+ }
+ oss_addons = {
+ #enable_argo_rollouts = true
+ #enable_argo_workflows = true
+ #enable_cluster_proportional_autoscaler = true
+ #enable_gatekeeper = true
+ #enable_gpu_operator = true
+ #enable_ingress_nginx = true
+ #enable_kyverno = true
+ #enable_kube_prometheus_stack = true
+ enable_metrics_server = true
+ #enable_prometheus_adapter = true
+ #enable_secrets_store_csi_driver = true
+ #enable_vpa = true
+ #enable_foo = true # you can add any addon here, make sure to update the gitops repo with the corresponding application set
+ }
+ addons = merge(local.aws_addons, local.oss_addons, { kubernetes_version = local.cluster_version })
+
+ addons_metadata = merge(
+ module.eks_blueprints_addons.gitops_metadata,
+ {
+ aws_cluster_name = module.eks.cluster_name
+ aws_region = local.region
+ aws_account_id = data.aws_caller_identity.current.account_id
+ aws_vpc_id = module.vpc.vpc_id
+ },
+ {
+ gitops_bridge_repo_url = "${local.gitops_addons_org}/${local.gitops_addons_repo}"
+ gitops_bridge_repo_revision = local.gitops_addon_revision
+ }
+ )
+
+ argocd_bootstrap_app_of_apps = {
+ addons = templatefile("${path.module}/bootstrap/addons.yaml", {
+ repoURL = "${local.gitops_addons_org}/${local.gitops_addons_repo}"
+ path = local.gitops_addon_path
+ targetRevision = local.gitops_addon_revision
+ })
+ workloads = templatefile("${path.module}/bootstrap/workloads.yaml", {
+ repoURL = "${local.gitops_workloads_org}/${local.gitops_workloads_repo}"
+ path = local.gitops_workloads_path
+ targetRevision = local.gitops_workloads_revision
+ })
+ }
+
+ vpc_cidr = "10.0.0.0/16"
+ azs = slice(data.aws_availability_zones.available.names, 0, 3)
+
+ tags = {
+ Blueprint = local.name
+ GithubRepo = "github.com/csantanapr/terraform-gitops-bridge"
+ }
+}
+
+################################################################################
+# GitOps Bridge: Private ssh keys for git
+################################################################################
+resource "kubernetes_namespace" "argocd" {
+ depends_on = [ module.eks_blueprints_addons ]
+ metadata {
+ name = "argocd"
+ }
+}
+resource "kubernetes_secret" "git_secrets" {
+ depends_on = [ kubernetes_namespace.argocd ]
+ for_each = {
+ git-addons = {
+ type = "git"
+ url = local.gitops_addons_org
+ sshPrivateKey = file(pathexpand(local.git_private_ssh_key))
+ }
+ git-workloads = {
+ type = "git"
+ url = local.gitops_addons_org | should be `gitops_workloads_org` |
gitops-bridge | github_2023 | others | 5 | gitops-bridge-dev | allamand | @@ -0,0 +1,299 @@
+provider "aws" {
+ region = local.region
+}
+data "aws_caller_identity" "current" {}
+data "aws_availability_zones" "available" {}
+
+provider "helm" {
+ kubernetes {
+ host = module.eks.cluster_endpoint
+ cluster_ca_certificate = base64decode(module.eks.cluster_certificate_authority_data)
+
+ exec {
+ api_version = "client.authentication.k8s.io/v1beta1"
+ command = "aws"
+ # This requires the awscli to be installed locally where Terraform is executed
+ args = ["eks", "get-token", "--cluster-name", module.eks.cluster_name, "--region", local.region]
+ }
+ }
+}
+
+provider "kubectl" {
+ host = module.eks.cluster_endpoint
+ cluster_ca_certificate = base64decode(module.eks.cluster_certificate_authority_data)
+ exec {
+ api_version = "client.authentication.k8s.io/v1beta1"
+ args = ["eks", "get-token", "--cluster-name", module.eks.cluster_name, "--region", local.region]
+ command = "aws"
+ }
+ load_config_file = false
+ apply_retry_count = 15
+}
+
+provider "kubernetes" {
+ host = module.eks.cluster_endpoint
+ cluster_ca_certificate = base64decode(module.eks.cluster_certificate_authority_data)
+
+ exec {
+ api_version = "client.authentication.k8s.io/v1beta1"
+ command = "aws"
+ # This requires the awscli to be installed locally where Terraform is executed
+ args = ["eks", "get-token", "--cluster-name", module.eks.cluster_name, "--region", local.region]
+ }
+}
+
+locals {
+ name = "ex-${replace(basename(path.cwd), "_", "-")}"
+ environment = "dev"
+ region = "us-west-2"
+ cluster_version = "1.27"
+
+ git_private_ssh_key = "~/.ssh/id_rsa" # Update with the git ssh key to be used by ArgoCD
+
+ gitops_addons_org = "git@github.com:gitops-bridge-dev"
+ gitops_addons_repo = "gitops-bridge-argocd-control-plane-template"
+ gitops_addon_path = "bootstrap/control-plane/addons"
+ gitops_addon_revision = "HEAD"
+
+ gitops_workloads_org = "git@github.com:argoproj"
+ gitops_workloads_repo = "argocd-example-apps"
+ gitops_workloads_path = "helm-guestbook"
+ gitops_workloads_revision = "HEAD"
+
+ aws_addons = {
+ enable_cert_manager = true
+ #enable_aws_efs_csi_driver = true
+ #enable_aws_fsx_csi_driver = true
+ #enable_aws_cloudwatch_metrics = true
+ #enable_aws_privateca_issuer = true
+ #enable_cluster_autoscaler = true
+ #enable_external_dns = true
+ #enable_external_secrets = true
+ #enable_aws_load_balancer_controller = true
+ #enable_fargate_fluentbit = true
+ #enable_aws_for_fluentbit = true
+ #enable_aws_node_termination_handler = true
+ #enable_karpenter = true
+ #enable_velero = true
+ #enable_aws_gateway_api_controller = true
+ #enable_aws_ebs_csi_resources = true # generate gp2 and gp3 storage classes for ebs-csi
+ #enable_aws_secrets_store_csi_driver_provider = true
+ }
+ oss_addons = {
+ #enable_argo_rollouts = true
+ #enable_argo_workflows = true
+ #enable_cluster_proportional_autoscaler = true
+ #enable_gatekeeper = true
+ #enable_gpu_operator = true
+ #enable_ingress_nginx = true
+ #enable_kyverno = true
+ #enable_kube_prometheus_stack = true
+ enable_metrics_server = true
+ #enable_prometheus_adapter = true
+ #enable_secrets_store_csi_driver = true
+ #enable_vpa = true
+ #enable_foo = true # you can add any addon here, make sure to update the gitops repo with the corresponding application set
+ }
+ addons = merge(local.aws_addons, local.oss_addons, { kubernetes_version = local.cluster_version })
+
+ addons_metadata = merge(
+ module.eks_blueprints_addons.gitops_metadata,
+ {
+ aws_cluster_name = module.eks.cluster_name
+ aws_region = local.region
+ aws_account_id = data.aws_caller_identity.current.account_id
+ aws_vpc_id = module.vpc.vpc_id
+ },
+ {
+ gitops_bridge_repo_url = "${local.gitops_addons_org}/${local.gitops_addons_repo}"
+ gitops_bridge_repo_revision = local.gitops_addon_revision
+ }
+ )
+
+ argocd_bootstrap_app_of_apps = {
+ addons = templatefile("${path.module}/bootstrap/addons.yaml", {
+ repoURL = "${local.gitops_addons_org}/${local.gitops_addons_repo}"
+ path = local.gitops_addon_path
+ targetRevision = local.gitops_addon_revision
+ })
+ workloads = templatefile("${path.module}/bootstrap/workloads.yaml", {
+ repoURL = "${local.gitops_workloads_org}/${local.gitops_workloads_repo}"
+ path = local.gitops_workloads_path
+ targetRevision = local.gitops_workloads_revision
+ })
+ }
+
+ vpc_cidr = "10.0.0.0/16"
+ azs = slice(data.aws_availability_zones.available.names, 0, 3)
+
+ tags = {
+ Blueprint = local.name
+ GithubRepo = "github.com/csantanapr/terraform-gitops-bridge"
+ }
+}
+
+################################################################################
+# GitOps Bridge: Private ssh keys for git
+################################################################################
+resource "kubernetes_namespace" "argocd" { | I've got some issues with several terraform apply complaining that the namespace already exists.. |
rustc_plugin | github_2023 | others | 25 | cognitive-engineering-lab | willcrichton | @@ -202,69 +202,54 @@ impl<'tcx> PlaceExt<'tcx> for Place<'tcx> {
def_id: DefId,
) -> HashMap<RegionVid, Vec<(Place<'tcx>, Mutability)>> {
let ty = self.ty(body.local_decls(), tcx).ty;
- let mut region_collector = CollectRegions {
+ let mut region_collector = RegionVisitor::<RegionMemberCollector>::new(
tcx,
def_id,
- local: self.local,
- place_stack: self.projection.to_vec(),
- ty_stack: Vec::new(),
- regions: HashMap::default(),
- places: None,
- types: None,
- stop_at: if
+ *self,
+ if
/*shallow*/
false {
StoppingCondition::AfterRefs
} else {
StoppingCondition::None
},
- };
+ );
region_collector.visit_ty(ty);
- region_collector.regions
+ region_collector.into_inner().0
}
fn interior_places(
&self,
tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
def_id: DefId,
- ) -> Vec<Place<'tcx>> {
+ ) -> HashSet<Place<'tcx>> { | You can use the `PlaceSet` alias I define elsewhere. |
rustc_plugin | github_2023 | others | 4 | cognitive-engineering-lab | willcrichton | @@ -60,6 +85,49 @@ impl<In, Out> Default for CopyCache<In, Out> {
}
}
+/// This cache alters the [`Self::get`] method signature to return
+/// an [`Option`] of a reference. In particular the method will return [`None`]
+/// if it is called *with the same key* while computing a construction function
+/// for that key.
+pub struct RecursionBreakingCache<In, Out>(RefCell<HashMap<In, Option<Pin<Box<Out>>>>>); | Rather than duplicating half the logic and type structure between `Cache` and `RecursionBreakingCache`, I would just add a method `get_opt` to `Cache` that has the logic you added, and then change `get` to be `get_opt().unwrap()`. |
rustc_plugin | github_2023 | others | 4 | cognitive-engineering-lab | willcrichton | @@ -1,56 +1,147 @@
-//! Data structure for memoizing computations.
-
-use std::{cell::RefCell, hash::Hash, mem, pin::Pin};
+//! Data structures for memoizing computations.
+//!
+//! Contruct new caches using [`Default::default`], then construct/retrieve
+//! elements with [`get`](Cache::get). `get` should only ever be used with one,
+//! `compute` function[^inconsistent].
+//!
+//! In terms of choice,
+//! - [`CopyCache`] should be used for expensive computations that create cheap
+//! (i.e. small) values.
+//! - [`Cache`] should be used for expensive computations that create expensive
+//! (i.e. large) values.
+//!
+//! Both types of caches implement **recursion breaking**. In general because
+//! caches are supposed to be used as simple `&` (no `mut`) the reference may be
+//! freely copied, including into the `compute` closure. What this means is that
+//! a `compute` may call [`get`](Cache::get) on the cache again. This is usually
+//! safe and can be used to compute data structures that recursively depend on
+//! one another, dynamic-programming style. However if a `get` on a key `k`
+//! itself calls `get` again on the same `k` this will either create an infinite
+//! recursion or an inconsistent cache[^inconsistent].
+//!
+//! Consider a simple example where we compute the Fibonacci Series with a
+//! [`CopyCache`]:
+//!
+//! ```rs
+//! let cache = CopyCache::default();
+//! let next_fib = |this| {
+//! if this <= 1 { return this; }
+//! let fib_1 = cache.get(this - 1, next_fib);
+//! let fib_2 = cache.get(this - 2, next_fib);
+//! fib_1 + fib_2
+//! };
+//! let fib_5 = cache.get(5, next_fib);
+//! ```
+//!
+//! This use of recursive [`get`](CopyCache::get) calls is perfectly legal.
+//! However if we made an error and called `chache.get(this, ...)` (forgetting
+//! the decrement) we would have created an inadvertend infinite recursion.
+//!
+//! To avoid this scenario both caches are implemented to detect when a
+//! recursive call as described is performed and `get` will panic. If your code
+//! uses recursive construction and would like to handle this case gracefully
+//! use [`get_maybe_recursive`](Cache::get_maybe_recursive) instead wich returns
+//! `None` from `get(k)` *iff* `k` this call (potentially transitively)
+//! originates from another `get(k)` call.
+//!
+//! [^inconsistent]: For any given cache value `get` should only ever be used
+//! with one, referentially transparent `compute` function. Essentially this
+//! means running `compute(k)` should always return the same value
+//! *independent of the state of it's environment*. Violation of this rule
+//! can introduces non-determinism in your program.
+use std::{cell::RefCell, hash::Hash, pin::Pin};
use rustc_data_structures::fx::FxHashMap as HashMap;
/// Cache for non-copyable types.
-pub struct Cache<In, Out>(RefCell<HashMap<In, Pin<Box<Out>>>>);
+pub struct Cache<In, Out>(RefCell<HashMap<In, Option<Pin<Box<Out>>>>>);
impl<In, Out> Cache<In, Out>
where
In: Hash + Eq + Clone,
{
+ /// Size of the cache
+ pub fn len(&self) -> usize {
+ self.0.borrow().len()
+ }
/// Returns the cached value for the given key, or runs `compute` if
/// the value is not in cache.
+ ///
+ /// # Panics
+ ///
+ /// Returns `None` if this is a recursive invocation of `get` for key `key`.
pub fn get<'a>(&'a self, key: In, compute: impl FnOnce(In) -> Out) -> &'a Out {
+ self.get_maybe_recursive(key, compute).unwrap_or_else(recursion_panic)
+ }
+ /// Returns the cached value for the given key, or runs `compute` if
+ /// the value is not in cache.
+ ///
+ /// Returns `None` if this is a recursive invocation of `get` for key `key`.
+ pub fn get_maybe_recursive<'a>(&'a self, key: In, compute: impl FnOnce(In) -> Out) -> Option<&'a Out> {
if !self.0.borrow().contains_key(&key) {
+ self.0.borrow_mut().insert(key.clone(), None);
let out = Box::pin(compute(key.clone()));
- self.0.borrow_mut().insert(key.clone(), out);
+ self.0.borrow_mut().insert(key.clone(), Some(out));
}
let cache = self.0.borrow();
- let entry_pin = cache.get(&key).unwrap();
- let entry_ref = entry_pin.as_ref().get_ref();
+ // Important here to first `unwrap` the `Option` created by `get`, then
+ // propagate the potential option stored in the map.
+ let entry = cache.get(&key).expect("invariant broken").as_ref()?;
// SAFETY: because the entry is pinned, it cannot move and this pointer will
// only be invalidated if Cache is dropped. The returned reference has a lifetime
// equal to Cache, so Cache cannot be dropped before this reference goes out of scope.
- unsafe { mem::transmute::<&'_ Out, &'a Out>(entry_ref) }
+ Some(unsafe { std::mem::transmute::<&'_ Out, &'a Out>(&**entry) })
}
}
+fn recursion_panic<A>() -> A { | `fn recursion_panic() -> !` would be more direct. |
rustc_plugin | github_2023 | others | 4 | cognitive-engineering-lab | willcrichton | @@ -1,56 +1,147 @@
-//! Data structure for memoizing computations.
-
-use std::{cell::RefCell, hash::Hash, mem, pin::Pin};
+//! Data structures for memoizing computations.
+//!
+//! Contruct new caches using [`Default::default`], then construct/retrieve
+//! elements with [`get`](Cache::get). `get` should only ever be used with one,
+//! `compute` function[^inconsistent].
+//!
+//! In terms of choice,
+//! - [`CopyCache`] should be used for expensive computations that create cheap
+//! (i.e. small) values.
+//! - [`Cache`] should be used for expensive computations that create expensive
+//! (i.e. large) values.
+//!
+//! Both types of caches implement **recursion breaking**. In general because
+//! caches are supposed to be used as simple `&` (no `mut`) the reference may be
+//! freely copied, including into the `compute` closure. What this means is that
+//! a `compute` may call [`get`](Cache::get) on the cache again. This is usually
+//! safe and can be used to compute data structures that recursively depend on
+//! one another, dynamic-programming style. However if a `get` on a key `k`
+//! itself calls `get` again on the same `k` this will either create an infinite
+//! recursion or an inconsistent cache[^inconsistent].
+//!
+//! Consider a simple example where we compute the Fibonacci Series with a
+//! [`CopyCache`]:
+//!
+//! ```rs
+//! let cache = CopyCache::default();
+//! let next_fib = |this| {
+//! if this <= 1 { return this; }
+//! let fib_1 = cache.get(this - 1, next_fib);
+//! let fib_2 = cache.get(this - 2, next_fib);
+//! fib_1 + fib_2
+//! };
+//! let fib_5 = cache.get(5, next_fib);
+//! ```
+//!
+//! This use of recursive [`get`](CopyCache::get) calls is perfectly legal.
+//! However if we made an error and called `chache.get(this, ...)` (forgetting
+//! the decrement) we would have created an inadvertend infinite recursion.
+//!
+//! To avoid this scenario both caches are implemented to detect when a
+//! recursive call as described is performed and `get` will panic. If your code
+//! uses recursive construction and would like to handle this case gracefully
+//! use [`get_maybe_recursive`](Cache::get_maybe_recursive) instead wich returns
+//! `None` from `get(k)` *iff* `k` this call (potentially transitively)
+//! originates from another `get(k)` call.
+//!
+//! [^inconsistent]: For any given cache value `get` should only ever be used
+//! with one, referentially transparent `compute` function. Essentially this
+//! means running `compute(k)` should always return the same value
+//! *independent of the state of it's environment*. Violation of this rule
+//! can introduces non-determinism in your program.
+use std::{cell::RefCell, hash::Hash, pin::Pin};
use rustc_data_structures::fx::FxHashMap as HashMap;
/// Cache for non-copyable types.
-pub struct Cache<In, Out>(RefCell<HashMap<In, Pin<Box<Out>>>>);
+pub struct Cache<In, Out>(RefCell<HashMap<In, Option<Pin<Box<Out>>>>>);
impl<In, Out> Cache<In, Out>
where
In: Hash + Eq + Clone,
{
+ /// Size of the cache
+ pub fn len(&self) -> usize {
+ self.0.borrow().len()
+ }
/// Returns the cached value for the given key, or runs `compute` if
/// the value is not in cache.
+ ///
+ /// # Panics
+ ///
+ /// Returns `None` if this is a recursive invocation of `get` for key `key`.
pub fn get<'a>(&'a self, key: In, compute: impl FnOnce(In) -> Out) -> &'a Out {
+ self.get_maybe_recursive(key, compute).unwrap_or_else(recursion_panic)
+ }
+ /// Returns the cached value for the given key, or runs `compute` if
+ /// the value is not in cache.
+ ///
+ /// Returns `None` if this is a recursive invocation of `get` for key `key`.
+ pub fn get_maybe_recursive<'a>(&'a self, key: In, compute: impl FnOnce(In) -> Out) -> Option<&'a Out> {
if !self.0.borrow().contains_key(&key) {
+ self.0.borrow_mut().insert(key.clone(), None);
let out = Box::pin(compute(key.clone()));
- self.0.borrow_mut().insert(key.clone(), out);
+ self.0.borrow_mut().insert(key.clone(), Some(out));
}
let cache = self.0.borrow();
- let entry_pin = cache.get(&key).unwrap();
- let entry_ref = entry_pin.as_ref().get_ref();
+ // Important here to first `unwrap` the `Option` created by `get`, then
+ // propagate the potential option stored in the map.
+ let entry = cache.get(&key).expect("invariant broken").as_ref()?;
// SAFETY: because the entry is pinned, it cannot move and this pointer will
// only be invalidated if Cache is dropped. The returned reference has a lifetime
// equal to Cache, so Cache cannot be dropped before this reference goes out of scope.
- unsafe { mem::transmute::<&'_ Out, &'a Out>(entry_ref) }
+ Some(unsafe { std::mem::transmute::<&'_ Out, &'a Out>(&**entry) })
}
}
+fn recursion_panic<A>() -> A {
+ panic!("Recursion detected! The computation of a value tried to retrieve the same from the cache. Using `get_maybe_recursive` to handle this case gracefully.")
+}
+
impl<In, Out> Default for Cache<In, Out> {
fn default() -> Self {
Cache(RefCell::new(HashMap::default()))
}
}
/// Cache for copyable types.
-pub struct CopyCache<In, Out>(RefCell<HashMap<In, Out>>);
+pub struct CopyCache<In, Out>(RefCell<HashMap<In, Option<Out>>>);
impl<In, Out> CopyCache<In, Out>
where
In: Hash + Eq + Clone,
Out: Copy,
{
+ /// Size of the cache
+ pub fn len(&self) -> usize {
+ self.0.borrow().len()
+ }
/// Returns the cached value for the given key, or runs `compute` if
/// the value is not in cache.
+ ///
+ /// # Panics
+ ///
+ /// Returns `None` if this is a recursive invocation of `get` for key `key`.
pub fn get(&self, key: In, compute: impl FnOnce(In) -> Out) -> Out {
- let mut cache = self.0.borrow_mut();
- *cache
- .entry(key.clone())
- .or_insert_with(move || compute(key))
+ self.get_maybe_recursive(key, compute).unwrap_or_else(recursion_panic)
+ }
+
+ /// Returns the cached value for the given key, or runs `compute` if
+ /// the value is not in cache.
+ ///
+ /// Returns `None` if this is a recursive invocation of `get` for key `key`.
+ pub fn get_maybe_recursive(&self, key: In, compute: impl FnOnce(In) -> Out) -> Option<Out> {
+ if !self.0.borrow().contains_key(&key) {
+ self.0.borrow_mut().insert(key.clone(), None);
+ let out = compute(key.clone());
+ self.0.borrow_mut().insert(key.clone(), Some(out));
+ }
+
+ *self.0.borrow_mut()
+ .get(&key)
+ .expect("invariant proken") | typo |
rustc_plugin | github_2023 | others | 4 | cognitive-engineering-lab | willcrichton | @@ -1,56 +1,157 @@
-//! Data structure for memoizing computations.
-
-use std::{cell::RefCell, hash::Hash, mem, pin::Pin};
+//! Data structures for memoizing computations.
+//!
+//! Contruct new caches using [`Default::default`], then construct/retrieve
+//! elements with [`get`](Cache::get). `get` should only ever be used with one,
+//! `compute` function[^inconsistent].
+//!
+//! In terms of choice,
+//! - [`CopyCache`] should be used for expensive computations that create cheap
+//! (i.e. small) values.
+//! - [`Cache`] should be used for expensive computations that create expensive
+//! (i.e. large) values.
+//!
+//! Both types of caches implement **recursion breaking**. In general because
+//! caches are supposed to be used as simple `&` (no `mut`) the reference may be
+//! freely copied, including into the `compute` closure. What this means is that
+//! a `compute` may call [`get`](Cache::get) on the cache again. This is usually
+//! safe and can be used to compute data structures that recursively depend on
+//! one another, dynamic-programming style. However if a `get` on a key `k`
+//! itself calls `get` again on the same `k` this will either create an infinite
+//! recursion or an inconsistent cache[^inconsistent].
+//!
+//! Consider a simple example where we compute the Fibonacci Series with a
+//! [`CopyCache`]:
+//!
+//! ```rs
+//! let cache = CopyCache::default();
+//! let next_fib = |this| {
+//! if this <= 1 { return this; }
+//! let fib_1 = cache.get(this - 1, next_fib);
+//! let fib_2 = cache.get(this - 2, next_fib);
+//! fib_1 + fib_2
+//! };
+//! let fib_5 = cache.get(5, next_fib);
+//! ```
+//!
+//! This use of recursive [`get`](CopyCache::get) calls is perfectly legal.
+//! However if we made an error and called `chache.get(this, ...)` (forgetting
+//! the decrement) we would have created an inadvertend infinite recursion.
+//!
+//! To avoid this scenario both caches are implemented to detect when a
+//! recursive call as described is performed and `get` will panic. If your code
+//! uses recursive construction and would like to handle this case gracefully
+//! use [`get_maybe_recursive`](Cache::get_maybe_recursive) instead wich returns
+//! `None` from `get(k)` *iff* `k` this call (potentially transitively)
+//! originates from another `get(k)` call.
+//!
+//! [^inconsistent]: For any given cache value `get` should only ever be used
+//! with one, referentially transparent `compute` function. Essentially this
+//! means running `compute(k)` should always return the same value
+//! *independent of the state of it's environment*. Violation of this rule
+//! can introduces non-determinism in your program.
+use std::{cell::RefCell, hash::Hash, pin::Pin};
use rustc_data_structures::fx::FxHashMap as HashMap;
/// Cache for non-copyable types.
-pub struct Cache<In, Out>(RefCell<HashMap<In, Pin<Box<Out>>>>);
+pub struct Cache<In, Out>(RefCell<HashMap<In, Option<Pin<Box<Out>>>>>);
impl<In, Out> Cache<In, Out>
where
In: Hash + Eq + Clone,
{
+ /// Size of the cache
+ pub fn len(&self) -> usize {
+ self.0.borrow().len()
+ }
/// Returns the cached value for the given key, or runs `compute` if
/// the value is not in cache.
+ ///
+ /// # Panics
+ ///
+ /// Returns `None` if this is a recursive invocation of `get` for key `key`. | This should just say “This function panics if” as opposed to “Returns `None`.” Same in the CopyCache docs. |
Mooncake.jl | github_2023 | others | 525 | compintell | willtebbutt | @@ -47,6 +47,23 @@ function throw_val_and_grad_ret_type_error(y)
)
end
+function throw_forward_ret_type_error(y)
+ throw(
+ ValueAndGradientReturnTypeError(
+ "Primal of output cannot contain or be of type $(typeof(y)), the amount of memory referred to must be known.", | This is a good error + message, but I have a couple of thoughts.
Since this is a user-facing error, it would be good to avoid using the word "primal" here, because users who are just Mooncake through e.g. DifferentiationInterface and have never read any Mooncake docs are unlikely to know what we mean by it.
Also, it might be good to specifically highlight the fact that this really just applies to pointers as part of the error message, so that users learn that we're specifically trying to say that you can't use pointers.
Perhaps something like:
```suggestion
"Found a value of type $(typeof(y)) in output, but output is not permitted to be or contain a pointer. This is because the amount of memory to which it refers is unknown, and Mooncake.jl is therefore unable to allocate appropriate memory for its gradients.",
``` |
Mooncake.jl | github_2023 | others | 525 | compintell | willtebbutt | @@ -47,6 +47,23 @@ function throw_val_and_grad_ret_type_error(y)
)
end
+function throw_forward_ret_type_error(y)
+ throw(
+ ValueAndGradientReturnTypeError(
+ "Primal of output cannot contain or be of type $(typeof(y)), the amount of memory referred to must be known.",
+ ),
+ )
+end
+
+function throw_circular_reference_or_alias_error(y)
+ throw(
+ ValueAndGradientReturnTypeError(
+ "Error: Object with address $(objectid(y)) and type $(typeof(y)) has been seen before." *
+ " Primal of output cannot contain Circular references or aliases", | ```suggestion
"Object with address $(objectid(y)) and type $(typeof(y)) appears more than once." *
" Output cannot contain circular references or aliases.",
``` |
Mooncake.jl | github_2023 | others | 525 | compintell | willtebbutt | @@ -172,14 +189,62 @@ end
_copy!!(dst, src) = copy!(dst, src)
_copy!!(::Number, src::Number) = src
+"""
+ __exclude_unsupported_output(y)
+
+Required for the robust design of value_and_pullback(), prepare_pullback_cache().
+Handles aliasing, circular references and excludes the Ptr datatype.
+In the forward pass f(args...) output can only return a "Tree" like datastructure with leaf nodes as primitive types.
+Refer https://github.com/compintell/Mooncake.jl/issues/517#issuecomment-2715202789 and related issue for details.
+Internally calls __exclude_unsupported_output_internal!().
+The design is modelled after `zero_tangent`.
+"""
+
+function __exclude_unsupported_output(y)
+ return __exclude_unsupported_output_internal!(y, Set{UInt}())
+end
+
+function __exclude_unsupported_output_internal!(y::P, address_set::Set{UInt}) where {P}
+ if objectid(y) in address_set
+ throw_circular_reference_or_alias_error(y)
+ end
+
+ push!(address_set, objectid(y))
+
+ # recurse over a generic struct.
+ # iterate not defined for custom struct, so must use getfield() and fieldnames().
+ for y_sub in y
+ __exclude_unsupported_output_internal!(y_sub, address_set)
+ end
+ return nothing
+end
+
+# if __exclude_unsupported_output() is called over an immutable isbitstype.
+# custom structs objects are not bitstype, but thier fields can be.
+function __exclude_unsupported_output_internal!( | I would suggest removing this method, in favour of just checking whether `P` is a bitstype in the method above, since bits types (except for pointers), cannot contain pointers / aliasing / circular references. |
Mooncake.jl | github_2023 | others | 525 | compintell | willtebbutt | @@ -172,14 +189,62 @@ end
_copy!!(dst, src) = copy!(dst, src)
_copy!!(::Number, src::Number) = src
+"""
+ __exclude_unsupported_output(y)
+
+Required for the robust design of value_and_pullback(), prepare_pullback_cache().
+Handles aliasing, circular references and excludes the Ptr datatype.
+In the forward pass f(args...) output can only return a "Tree" like datastructure with leaf nodes as primitive types.
+Refer https://github.com/compintell/Mooncake.jl/issues/517#issuecomment-2715202789 and related issue for details.
+Internally calls __exclude_unsupported_output_internal!().
+The design is modelled after `zero_tangent`.
+"""
+
+function __exclude_unsupported_output(y)
+ return __exclude_unsupported_output_internal!(y, Set{UInt}())
+end
+
+function __exclude_unsupported_output_internal!(y::P, address_set::Set{UInt}) where {P}
+ if objectid(y) in address_set
+ throw_circular_reference_or_alias_error(y)
+ end
+
+ push!(address_set, objectid(y))
+
+ # recurse over a generic struct.
+ # iterate not defined for custom struct, so must use getfield() and fieldnames().
+ for y_sub in y
+ __exclude_unsupported_output_internal!(y_sub, address_set)
+ end
+ return nothing
+end
+
+# if __exclude_unsupported_output() is called over an immutable isbitstype.
+# custom structs objects are not bitstype, but thier fields can be.
+function __exclude_unsupported_output_internal!(
+ y::Union{Int8,Int16,Int32,Int64,Int128,UInt8,UInt16,UInt32,UInt64,UInt128,IEEEFloat},
+ ::Set{UInt},
+)
+ return nothing
+end
+
+# in case f(args...) directly outputs a Ptr{T} or it contains a nested Ptr{T}
+function __exclude_unsupported_output_internal!(y::Ptr, ::Set{UInt})
+ return throw_forward_ret_type_error(y)
+end
+
"""
prepare_pullback_cache(f, x...)
Returns a cache used with [`value_and_pullback!!`](@ref). See that function for more info.
"""
function prepare_pullback_cache(fx...; kwargs...)
-
# Take a copy before mutating.
+ fx_temp = deepcopy(fx)
+
+ # Handle forward pass primal exceptions
+ __exclude_unsupported_output(fx_temp[1](fx_temp[2:end]...))
+ # In case any of f_new... have gone through mutations | ```suggestion
fx_temp = deepcopy(fx)
# Handle forward pass primal exceptions
__exclude_unsupported_output(fx_temp[1](fx_temp[2:end]...))
# In case any of f_new... have gone through mutations
```
I would suggest removing this, and putting the call to `__exclude_unsupported_output` immediately after the call to `rule` on line 255 (below). This will save us having to take an extra `deepcopy`, and from running the function twice. |
Mooncake.jl | github_2023 | others | 525 | compintell | willtebbutt | @@ -98,4 +98,28 @@ end
end
end
end
+ @testset "prepare_pullback_cache" begin | ```suggestion
@testset "prepare_pullback_cache errors" begin
``` |
Mooncake.jl | github_2023 | others | 525 | compintell | willtebbutt | @@ -172,14 +189,62 @@ end
_copy!!(dst, src) = copy!(dst, src)
_copy!!(::Number, src::Number) = src
+"""
+ __exclude_unsupported_output(y)
+
+Required for the robust design of value_and_pullback(), prepare_pullback_cache().
+Handles aliasing, circular references and excludes the Ptr datatype.
+In the forward pass f(args...) output can only return a "Tree" like datastructure with leaf nodes as primitive types.
+Refer https://github.com/compintell/Mooncake.jl/issues/517#issuecomment-2715202789 and related issue for details.
+Internally calls __exclude_unsupported_output_internal!().
+The design is modelled after `zero_tangent`. | ```suggestion
Required for the robust design of [`value_and_pullback`](@ref), [`prepare_pullback_cache`](@ref).
Ensures that `y` contains no aliasing, circular references or `Ptr`s.
In the forward pass f(args...) output can only return a "Tree" like datastructure with leaf nodes as primitive types.
Refer https://github.com/compintell/Mooncake.jl/issues/517#issuecomment-2715202789 and related issue for details.
Internally calls [`__exclude_unsupported_output_internal!`](@ref).
The design is modelled after `zero_tangent`.
```
Just some style notes |
Mooncake.jl | github_2023 | others | 525 | compintell | willtebbutt | @@ -169,8 +205,86 @@ struct Cache{Trule,Ty_cache,Ttangents<:Tuple}
tangents::Ttangents
end
-_copy!!(dst, src) = copy!(dst, src)
-_copy!!(::Number, src::Number) = src
+const supportedcollections = Union{Tuple,NamedTuple}
+
+"""
+ is_user_defined_struct(T)
+
+Required for checking if datatype `T` is a immutable Composite, Mutable Composite type (returns true) or a built in collection type (returns false).
+Helps in identifying user defined struct for recursing over fields correctly.
+"""
+function is_user_defined_struct(T)
+ return isconcretetype(T) &&
+ !isprimitivetype(T) &&
+ !(T <: supportedcollections) &&
+ !(T <: Array) &&
+ (@static VERSION >= v"1.11" ? !(T <: Memory) : true)
+end
+
+"""
+ __exclude_unsupported_output(y)
+
+Required for the robust design of [`value_and_pullback`](@ref), [`prepare_pullback_cache`](@ref).
+Ensures that `y` contains no aliasing, circular references, `Ptr`s or non differentiable datatypes.
+In the forward pass f(args...) output can only return a "Tree" like datastructure with leaf nodes as primitive types.
+Refer https://github.com/compintell/Mooncake.jl/issues/517#issuecomment-2715202789 and related issue for details.
+Internally calls [`__exclude_unsupported_output_internal!`](@ref).
+The design is modelled after `zero_tangent`.
+"""
+function __exclude_unsupported_output(y::T) where {T}
+ if (T <: Union{Dict,Set})
+ throw_datastructure_output_error(y)
+ end | Could you explain why we need to exclude `Dict`s and `Set`s? |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.