Dataset schema (three string columns):
  python_code: string, lengths 0 to 992k
  repo_name:   string, lengths 8 to 46
  file_path:   string, lengths 5 to 162
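A minimal sketch of reading these rows with the Hugging Face datasets library, assuming the dump is published on the hub (the dataset ID below is a placeholder, not the real one):

import datasets

# Placeholder ID: substitute the actual hub path of this dump.
ds = datasets.load_dataset("user/exa-python-code", split="train", streaming=True)

# Each streamed row carries the three columns listed above.
for row in ds.take(3):
    print(row["repo_name"], row["file_path"], len(row["python_code"]))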
# -------------------------------------------------------- # The YiTrans End-to-End Speech Translation System for IWSLT 2022 Offline Shared Task (https://arxiv.org/abs/2206.05777) # Github source: https://github.com/microsoft/SpeechT5/tree/main/YiTrans # Copyright (c) 2022 Microsoft # Licensed under The MIT License [se...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/modules/__init__.py
# -------------------------------------------------------- # Copyright (c) 2022 Microsoft # Licensed under The MIT License [see LICENSE for details] # Based on fairseq code bases # https://github.com/facebookresearch/fairseq # -------------------------------------------------------- """ Modified from https://github...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/modules/transformer_layer.py
# -------------------------------------------------------- # The YiTrans End-to-End Speech Translation System for IWSLT 2022 Offline Shared Task (https://arxiv.org/abs/2206.05777) # Github source: https://github.com/microsoft/SpeechT5/tree/main/YiTrans # Copyright (c) 2022 Microsoft # Licensed under The MIT License [se...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/modules/transformer_decoder.py
# -------------------------------------------------------- # Pre-Training Transformer Decoder for End-to-End ASR Model with Unpaired Speech Data (https://arxiv.org/abs/2203.17113) # Github source: https://github.com/microsoft/SpeechT5/tree/main/Speech2C # Copyright (c) 2022 Microsoft # Licensed under The MIT License [s...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/modules/relative_pos_enc.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data/concat_dataset.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data/language_trible_dataset.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data/multimodal_corpus_dataset.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data/load_langpair_dataset.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data/hubert_dataset.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data/text_to_unit_dataset.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/criterions/speechlm_criterion.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/criterions/fasttext2unit_loss.py
import importlib
import os

# Auto-import every criterion module in this package so that any
# fairseq registration decorators run as a side effect of the import.
for file in os.listdir(os.path.dirname(__file__)):
    if file.endswith(".py") and not file.startswith("_"):
        criterion_name = file[: file.find(".py")]
        importlib.import_module("speechlm.criterions." + criterion_name)
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/criterions/__init__.py
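The criterions/__init__.py row above uses the common fairseq auto-import idiom: import every sibling module so that registration decorators execute on package import. A standalone sketch of the same pattern, using the stdlib json package purely as a stand-in target:

import importlib
import pkgutil
import json  # stand-in package; speechlm.criterions is walked the same way

for info in pkgutil.iter_modules(json.__path__):
    if not info.name.startswith("_"):
        # In fairseq, this import would trigger @register_criterion decorators.
        module = importlib.import_module(f"json.{info.name}")
        print("imported", module.__name__)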
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data_process/get_t2u_manifest.py
import sys

def main():
    # Convert word transcripts (.wrd) to letter targets (.ltr):
    # drop <unk>, collapse whitespace, mark word boundaries with "|",
    # then emit the characters separated by spaces.
    for line in sys.stdin:
        line = line.replace("<unk>", "")
        line = " ".join(line.strip().split())
        line = line.replace(" ", "|").upper() + "|"
        print(" ".join(line))

if __name__ == "__main__":
    main()
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data_process/wrd2ltr.py
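A quick check of the wrd2ltr.py transformation above:

line = "hello <unk> world"
line = line.replace("<unk>", "")
line = " ".join(line.strip().split())
line = line.replace(" ", "|").upper() + "|"
print(" ".join(line))  # H E L L O | W O R L D |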
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data_process/get_t2u_manifest_textonly.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data_process/filter_paireddata_by_len.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data_process/phoneize_with_sil.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data_process/covost2/prepare_covost_data.py
import argparse from tqdm import tqdm from pydub import AudioSegment import torchaudio import os def mp3_convert_wav(mp3_file, wav_file): try: sound = AudioSegment.from_mp3(mp3_file) sound=sound.set_frame_rate(16000) sound=sound.set_channels(1) sound=sound.set_sample_width(2) ...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data_process/covost2/mp3_to_wav.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data_process/phoneme_tokenizer/ltr2kaldi_phn_sil025.py
# ---------------------------------------------------------------------------- # SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329) # Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM # Code based on fairseq: https://github.com/facebookresearch/fai...
EXA-1-master
exa/models/unilm-master/speechlm/speechlm/data_process/phoneme_tokenizer/repeat_withou_insert_sil_less_4375.py
import json import sys import argparse sys.path.insert(0, './src') from logger_config import logger from metrics import compute_mrr, trec_eval from utils import save_json_to_file from data_utils import load_qrels, load_msmarco_predictions parser = argparse.ArgumentParser(description='compute metrics for ms-marco pred...
EXA-1-master
exa/models/unilm-master/simlm/misc/compute_metrics_marco.py
import json import os import sys import tqdm import argparse sys.path.insert(0, './src') from typing import List, Dict from utils import save_json_to_file from logger_config import logger from data_utils import load_qrels, load_corpus, load_queries, load_msmarco_predictions, ScoredDoc from metrics import get_rel_thre...
EXA-1-master
exa/models/unilm-master/simlm/misc/marco_pred_to_cases.py
import os import io import gzip import json import random import argparse import ir_datasets import numpy as np import sys sys.path.insert(0, 'src/') from tqdm import tqdm from typing import Dict, List from datasets import Dataset from logger_config import logger from utils import save_json_to_file from data_utils im...
EXA-1-master
exa/models/unilm-master/simlm/misc/prepare_msmarco_data.py
import os import json import argparse import sys import numpy as np sys.path.insert(0, 'src/') from tqdm import tqdm from typing import Dict, Any from logger_config import logger from data_utils import load_query_answers, load_corpus, save_to_readable_format parser = argparse.ArgumentParser(description='data prepro...
EXA-1-master
exa/models/unilm-master/simlm/misc/dpr/mine_hard_negatives.py
import os import argparse import json import sys sys.path.insert(0, 'src/') from tqdm import tqdm from typing import Dict, Any from datasets import Dataset from evaluate_dpr_retrieval import has_answers, SimpleTokenizer, evaluate_retrieval from data_utils import load_query_answers, load_corpus from utils import save_...
EXA-1-master
exa/models/unilm-master/simlm/misc/dpr/format_and_evaluate.py
""" Most of the tokenization code here is copied from Facebook/DPR & DrQA codebase to avoid adding an extra dependency """ import argparse import copy import json import logging import re import unicodedata from tqdm import tqdm import numpy as np import regex logger = logging.getLogger(__name__) class Tokens(obje...
EXA-1-master
exa/models/unilm-master/simlm/misc/dpr/evaluate_dpr_retrieval.py
import torch import pytrec_eval from typing import List, Dict, Tuple from data_utils import ScoredDoc from logger_config import logger def trec_eval(qrels: Dict[str, Dict[str, int]], predictions: Dict[str, List[ScoredDoc]], k_values: Tuple[int] = (10, 50, 100, 200, 1000)) -> Dict[str, fl...
EXA-1-master
exa/models/unilm-master/simlm/src/metrics.py
import logging import torch from typing import Dict from transformers.utils.logging import enable_explicit_format from transformers.trainer_callback import PrinterCallback from transformers import ( AutoTokenizer, HfArgumentParser, EvalPrediction, Trainer, set_seed, PreTrainedTokenizerFast ) f...
EXA-1-master
exa/models/unilm-master/simlm/src/train_cross_encoder.py
import os import torch from dataclasses import dataclass, field from typing import Optional from transformers import TrainingArguments from logger_config import logger @dataclass class Arguments(TrainingArguments): model_name_or_path: str = field( default='bert-base-uncased', metadata={"help": "...
EXA-1-master
exa/models/unilm-master/simlm/src/config.py
import os import logging from transformers.trainer_callback import TrainerCallback def _setup_logger(): log_format = logging.Formatter("[%(asctime)s %(levelname)s] %(message)s") logger = logging.getLogger() logger.setLevel(logging.INFO) console_handler = logging.StreamHandler() console_handler.s...
EXA-1-master
exa/models/unilm-master/simlm/src/logger_config.py
EXA-1-master
exa/models/unilm-master/simlm/src/__init__.py
import logging import numpy as np from typing import Dict from transformers.utils.logging import enable_explicit_format from transformers.trainer_callback import PrinterCallback from transformers import ( AutoTokenizer, HfArgumentParser, set_seed, PreTrainedTokenizerFast, EvalPrediction, ) from lo...
EXA-1-master
exa/models/unilm-master/simlm/src/train_rlm.py
import os import random import tqdm import json from typing import Dict, List, Any from datasets import load_dataset, Dataset from dataclasses import dataclass, field from logger_config import logger from config import Arguments from utils import save_json_to_file @dataclass class ScoredDoc: qid: str pid: s...
EXA-1-master
exa/models/unilm-master/simlm/src/data_utils.py
import json import torch import torch.distributed as dist from typing import List, Union, Optional, Tuple, Mapping, Dict def save_json_to_file(objects: Union[List, dict], path: str, line_by_line: bool = False): if line_by_line: assert isinstance(objects, list), 'Only list can be saved in line by line for...
EXA-1-master
exa/models/unilm-master/simlm/src/utils.py
import logging import torch from typing import Dict from functools import partial from transformers.utils.logging import enable_explicit_format from transformers.trainer_callback import PrinterCallback from transformers import ( AutoTokenizer, HfArgumentParser, EvalPrediction, Trainer, set_seed, ...
EXA-1-master
exa/models/unilm-master/simlm/src/train_biencoder.py
import os import copy import torch import torch.nn as nn import torch.nn.functional as F from dataclasses import dataclass from typing import Optional, Dict, Tuple from torch import Tensor from transformers import ( AutoModel, PreTrainedModel, ) from transformers.modeling_outputs import ModelOutput from confi...
EXA-1-master
exa/models/unilm-master/simlm/src/models/biencoder_model.py
import copy import os import torch import torch.nn as nn from contextlib import nullcontext from torch import Tensor from torch.distributions import Categorical from typing import Dict, Optional, Tuple from dataclasses import dataclass from transformers import AutoModelForMaskedLM, ElectraModel from transformers.model...
EXA-1-master
exa/models/unilm-master/simlm/src/models/rlm.py
from .biencoder_model import BiencoderModel, BiencoderModelForInference, BiencoderOutput
from .cross_encoder_model import Reranker, RerankerForInference
from .rlm import ReplaceLM, ReplaceLMOutput
EXA-1-master
exa/models/unilm-master/simlm/src/models/__init__.py
import torch import torch.nn as nn from typing import Optional, Dict from transformers import ( PreTrainedModel, AutoModelForSequenceClassification ) from transformers.modeling_outputs import SequenceClassifierOutput from config import Arguments class Reranker(nn.Module): def __init__(self, hf_model: Pr...
EXA-1-master
exa/models/unilm-master/simlm/src/models/cross_encoder_model.py
from .biencoder_trainer import BiencoderTrainer
from .reranker_trainer import RerankerTrainer
from .rlm_trainer import ReplaceLMTrainer
EXA-1-master
exa/models/unilm-master/simlm/src/trainers/__init__.py
import os from typing import Optional from transformers.trainer import Trainer from logger_config import logger from models import ReplaceLM, ReplaceLMOutput from utils import AverageMeter class ReplaceLMTrainer(Trainer): def __init__(self, *pargs, **kwargs): super(ReplaceLMTrainer, self).__init__(*parg...
EXA-1-master
exa/models/unilm-master/simlm/src/trainers/rlm_trainer.py
import os from typing import Optional, Union from transformers.trainer import Trainer from transformers.modeling_outputs import SequenceClassifierOutput from logger_config import logger from metrics import accuracy from utils import AverageMeter class RerankerTrainer(Trainer): def __init__(self, *pargs, **kwar...
EXA-1-master
exa/models/unilm-master/simlm/src/trainers/reranker_trainer.py
import os import torch from typing import Optional, Dict, Tuple from transformers.trainer import Trainer from logger_config import logger from metrics import accuracy, batch_mrr from models import BiencoderOutput, BiencoderModel from utils import AverageMeter def _unpack_qp(inputs: Dict[str, torch.Tensor]) -> Tuple...
EXA-1-master
exa/models/unilm-master/simlm/src/trainers/biencoder_trainer.py
import json import os import glob import tqdm import torch from contextlib import nullcontext from torch.utils.data import DataLoader from functools import partial from collections import defaultdict from datasets import Dataset from typing import Dict, List, Tuple from transformers.file_utils import PaddingStrategy f...
EXA-1-master
exa/models/unilm-master/simlm/src/inference/search_main.py
import os import tqdm import torch from contextlib import nullcontext from torch.utils.data import DataLoader from functools import partial from datasets import Dataset from typing import Dict, List from transformers.file_utils import PaddingStrategy from transformers.modeling_outputs import SequenceClassifierOutput f...
EXA-1-master
exa/models/unilm-master/simlm/src/inference/rerank_main.py
import os import tqdm import torch from contextlib import nullcontext from torch.utils.data import DataLoader from functools import partial from datasets import Dataset, load_dataset from typing import Dict, List from transformers.file_utils import PaddingStrategy from transformers.modeling_outputs import SequenceClas...
EXA-1-master
exa/models/unilm-master/simlm/src/inference/gen_teacher_scores.py
EXA-1-master
exa/models/unilm-master/simlm/src/inference/__init__.py
import os import tqdm import torch from contextlib import nullcontext from torch.utils.data import DataLoader from functools import partial from datasets import load_dataset from typing import Dict, List from transformers.file_utils import PaddingStrategy from transformers import ( AutoTokenizer, PreTrainedTok...
EXA-1-master
exa/models/unilm-master/simlm/src/inference/encode_main.py
from typing import List, Dict def _slice_with_mod(elements: List, offset: int, cnt: int) -> List: return [elements[(offset + idx) % len(elements)] for idx in range(cnt)] def group_doc_ids(examples: Dict[str, List], negative_size: int, offset: int, use_first_...
EXA-1-master
exa/models/unilm-master/simlm/src/loaders/loader_utils.py
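The loader_utils.py row above includes the complete _slice_with_mod helper, which takes cnt items starting at offset and wraps around modulo the list length; a quick illustration:

from typing import List

def _slice_with_mod(elements: List, offset: int, cnt: int) -> List:
    return [elements[(offset + idx) % len(elements)] for idx in range(cnt)]

print(_slice_with_mod(["a", "b", "c"], offset=2, cnt=4))  # ['c', 'a', 'b', 'c']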
import os import random from typing import Tuple, Dict, List, Optional from datasets import load_dataset, DatasetDict, Dataset from transformers.file_utils import PaddingStrategy from transformers import PreTrainedTokenizerFast, Trainer from config import Arguments from logger_config import logger from .loader_utils ...
EXA-1-master
exa/models/unilm-master/simlm/src/loaders/biencoder_dataloader.py
import random from typing import Tuple from transformers import PreTrainedTokenizerFast from datasets import Dataset, load_dataset from config import Arguments from logger_config import logger def split_dataset(dataset: Dataset, num_eval_examples: int, max_train_samples: int = No...
EXA-1-master
exa/models/unilm-master/simlm/src/loaders/rlm_dataloader.py
from .biencoder_dataloader import RetrievalDataLoader
from .cross_encoder_dataloader import CrossEncoderDataLoader
from .rlm_dataloader import ReplaceLMDataloader
EXA-1-master
exa/models/unilm-master/simlm/src/loaders/__init__.py
import os.path import random from typing import Tuple, Dict, List, Optional from datasets import load_dataset, DatasetDict, Dataset from transformers.file_utils import PaddingStrategy from transformers import PreTrainedTokenizerFast, Trainer from config import Arguments from logger_config import logger from .loader_u...
EXA-1-master
exa/models/unilm-master/simlm/src/loaders/cross_encoder_dataloader.py
import copy from dataclasses import dataclass from typing import List, Dict, Optional, Any from transformers import BatchEncoding, BertTokenizerFast from transformers.data.data_collator import _torch_collate_batch from transformers.file_utils import PaddingStrategy from config import Arguments from .collator_utils im...
EXA-1-master
exa/models/unilm-master/simlm/src/collators/rlm_collator.py
from .biencoder_collator import BiencoderCollator
from .cross_encoder_collator import CrossEncoderCollator
from .rlm_collator import DataCollatorForReplaceLM
EXA-1-master
exa/models/unilm-master/simlm/src/collators/__init__.py
import torch from dataclasses import dataclass from typing import List, Dict, Any from transformers import BatchEncoding, DataCollatorWithPadding @dataclass class CrossEncoderCollator(DataCollatorWithPadding): def __call__(self, features: List[Dict[str, Any]]) -> BatchEncoding: unpack_features = [] ...
EXA-1-master
exa/models/unilm-master/simlm/src/collators/cross_encoder_collator.py
import torch from dataclasses import dataclass from typing import List, Dict, Any from transformers import DataCollatorWithPadding, BatchEncoding def _unpack_doc_values(features: List[Dict[str, Any]]) -> List[Dict[str, Any]]: doc_examples = [] for f in features: keys = list(f.keys()) lists_pe...
EXA-1-master
exa/models/unilm-master/simlm/src/collators/biencoder_collator.py
import torch import random import warnings from transformers import BertTokenizer, BertTokenizerFast, BatchEncoding from typing import List, Union, Tuple, Any, Dict def whole_word_mask(tokenizer: Union[BertTokenizer, BertTokenizerFast], input_tokens: List[str], mlm_prob: float...
EXA-1-master
exa/models/unilm-master/simlm/src/collators/collator_utils.py
#!/usr/bin/env python3 import torch from setuptools import find_packages, setup torch_ver = [int(x) for x in torch.__version__.split(".")[:2]] assert torch_ver >= [1, 4], "Requires PyTorch >= 1.4" setup( name="layoutlm", version="0.0", author="Yiheng Xu", url="https://github.com/microsoft/unilm/tree/m...
EXA-1-master
exa/models/unilm-master/layoutlm/deprecated/setup.py
# coding=utf-8 from __future__ import absolute_import, division, print_function import argparse import glob import logging import os import random import numpy as np import torch from torch.utils.data import DataLoader, RandomSampler, SequentialSampler from torch.utils.data.distributed import DistributedSampler from ...
EXA-1-master
exa/models/unilm-master/layoutlm/deprecated/examples/classification/run_classification.py
import argparse import json import os from PIL import Image from transformers import AutoTokenizer def bbox_string(box, width, length): return ( str(int(1000 * (box[0] / width))) + " " + str(int(1000 * (box[1] / length))) + " " + str(int(1000 * (box[2] / width))) +...
EXA-1-master
exa/models/unilm-master/layoutlm/deprecated/examples/seq_labeling/preprocess.py
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a cop...
EXA-1-master
exa/models/unilm-master/layoutlm/deprecated/examples/seq_labeling/run_seq_labeling.py
# flake8: noqa
from .data.funsd import FunsdDataset
from .modeling.layoutlm import (
    LayoutlmConfig,
    LayoutlmForSequenceClassification,
    LayoutlmForTokenClassification,
)
EXA-1-master
exa/models/unilm-master/layoutlm/deprecated/layoutlm/__init__.py
EXA-1-master
exa/models/unilm-master/layoutlm/deprecated/layoutlm/modeling/__init__.py
import logging import torch from torch import nn from torch.nn import CrossEntropyLoss, MSELoss from transformers import BertConfig, BertModel, BertPreTrainedModel from transformers.modeling_bert import BertLayerNorm logger = logging.getLogger(__name__) LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_MAP = {} LAYOUTLM_PRETRAINED...
EXA-1-master
exa/models/unilm-master/layoutlm/deprecated/layoutlm/modeling/layoutlm.py
# coding=utf-8 import copy import json import logging import os import re from multiprocessing import Pool import torch from lxml import html from torch.utils.data import TensorDataset from tqdm import tqdm from transformers import DataProcessor logger = logging.getLogger(__name__) def get_text(node): textnodes...
EXA-1-master
exa/models/unilm-master/layoutlm/deprecated/layoutlm/data/rvl_cdip.py
# flake8: noqa
from .funsd import FunsdDataset
EXA-1-master
exa/models/unilm-master/layoutlm/deprecated/layoutlm/data/__init__.py
import logging import os import torch from torch.utils.data import Dataset logger = logging.getLogger(__name__) class FunsdDataset(Dataset): def __init__(self, args, tokenizer, labels, pad_token_label_id, mode): if args.local_rank not in [-1, 0] and mode == "train": torch.distributed.barrier...
EXA-1-master
exa/models/unilm-master/layoutlm/deprecated/layoutlm/data/funsd.py
from setuptools import setup, find_packages setup( name = "adalm", version = "0.0", author = "Microsoft", author_email = "", description = "domain adaptation toolkit", keywords = "domain adaptation with extended vocab", license='Apache', url = "https://github.com/littlefive5/AdaLM", ...
EXA-1-master
exa/models/unilm-master/adalm/setup.py
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a cop...
EXA-1-master
exa/models/unilm-master/adalm/finetune/run_ner.py
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a cop...
EXA-1-master
exa/models/unilm-master/adalm/finetune/utils_ner.py
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a cop...
EXA-1-master
exa/models/unilm-master/adalm/finetune/run_pico.py
EXA-1-master
exa/models/unilm-master/adalm/finetune/__init__.py
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a cop...
EXA-1-master
exa/models/unilm-master/adalm/finetune/run_classifier.py
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a cop...
EXA-1-master
exa/models/unilm-master/adalm/finetune/utils_for_glue.py
from __future__ import absolute_import from __future__ import division from numpy.core.fromnumeric import argsort from text_encoder import SubwordTextEncoder import tokenizer import tempfile import argparse from transformers import BertTokenizer import random import math import numpy as np def merge_output_file_with_b...
EXA-1-master
exa/models/unilm-master/adalm/incr_bpe/vocab_extend.py
# coding=utf-8 # Copyright 2018 The Tensor2Tensor Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable...
EXA-1-master
exa/models/unilm-master/adalm/incr_bpe/text_encoder.py
# coding=utf-8 # Copyright 2018 The Tensor2Tensor Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable...
EXA-1-master
exa/models/unilm-master/adalm/incr_bpe/tokenizer.py
#-*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from text_encoder import SubwordTextEncoder import tokenizer import os import tempfile import tensorflow as tf tf.flags.DEFINE_string('output_filename', '/tmp/my.subword_text_encoder', 'where to store...
EXA-1-master
exa/models/unilm-master/adalm/incr_bpe/subword_builder.py
from setuptools import setup, find_packages setup( name = 'PaLM-rlhf-pytorch', packages = find_packages(exclude=[]), version = '0.2.1', license='MIT', description = 'PaLM + Reinforcement Learning with Human Feedback - Pytorch', author = 'Phil Wang', author_email = 'lucidrains@gmail.com', long_descripti...
EXA-1-master
exa/models/PaLM-rlhf-pytorch-main 2/setup.py
import gzip import random import tqdm import numpy as np import torch from lion_pytorch import Lion from torch.nn import functional as F from torch.utils.data import DataLoader, Dataset from palm_rlhf_pytorch import PaLM from accelerate import Accelerator # constants NUM_BATCHES = int(1e5) BATCH_SIZE = 4 GRADIENT_A...
EXA-1-master
exa/models/PaLM-rlhf-pytorch-main 2/train.py
import torch from torch import nn, einsum import torch.nn.functional as F from collections import namedtuple from functools import wraps from packaging import version from einops import rearrange # constants Config = namedtuple('EfficientAttentionConfig', ['enable_flash', 'enable_math', 'enable_mem_efficient']) # ...
EXA-1-master
exa/models/PaLM-rlhf-pytorch-main 2/palm_rlhf_pytorch/attention.py
import math import copy from pathlib import Path from collections import namedtuple from functools import wraps from itertools import zip_longest from tqdm import tqdm from beartype import beartype from beartype.typing import Tuple, Optional import torch from torch import einsum, nn import torch.nn.functional as F f...
EXA-1-master
exa/models/PaLM-rlhf-pytorch-main 2/palm_rlhf_pytorch/palm.py
from palm_rlhf_pytorch.palm import PaLM
from palm_rlhf_pytorch.reward import RewardModel
from palm_rlhf_pytorch.ppo import RLHFTrainer, ActorCritic
EXA-1-master
exa/models/PaLM-rlhf-pytorch-main 2/palm_rlhf_pytorch/__init__.py
import math import torch from torch import einsum, nn import torch.nn.functional as F from einops import rearrange def exists(val): return val is not None # decorators def eval_decorator(fn): def inner(self, *args, **kwargs): was_training = self.training self.eval() out = fn(self, *a...
EXA-1-master
exa/models/PaLM-rlhf-pytorch-main 2/palm_rlhf_pytorch/utils.py
from torch.optim import AdamW, Adam from lion_pytorch import Lion def separate_weight_decayable_params(params): wd_params, no_wd_params = [], [] for param in params: param_list = no_wd_params if param.ndim < 2 else wd_params param_list.append(param) return wd_params, no_wd_params def get_o...
EXA-1-master
exa/models/PaLM-rlhf-pytorch-main 2/palm_rlhf_pytorch/optimizer.py
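separate_weight_decayable_params in the optimizer.py row above routes any parameter with fewer than two dimensions (biases, norm scales) away from weight decay; a quick check of that rule on a toy module:

import torch.nn as nn

model = nn.Linear(4, 2)  # 2-D weight, 1-D bias

wd_params = [p for p in model.parameters() if p.ndim >= 2]
no_wd_params = [p for p in model.parameters() if p.ndim < 2]

print(len(wd_params), len(no_wd_params))  # 1 1: weight is decayed, bias is not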
import torch from torch import nn # helper functions def exists(val): return val is not None def default(val, d): return val if exists(val) else d # LoRA - https://arxiv.org/abs/2106.09685 class LoRA(nn.Module): def __init__( self, dim, dim_out, r = 8, alpha = No...
EXA-1-master
exa/models/PaLM-rlhf-pytorch-main 2/palm_rlhf_pytorch/lora.py
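The lora.py row above is truncated, but it cites the LoRA paper (https://arxiv.org/abs/2106.09685) and its constructor shows dim, dim_out, r, and alpha. A generic sketch of that formulation, not the repo's exact class: the adapter adds a rank-r update x @ A @ B scaled by alpha / r.

import torch
from torch import nn

class LoRADelta(nn.Module):
    # Low-rank residual: delta(x) = (x @ A @ B) * (alpha / r)
    def __init__(self, dim, dim_out, r=8, alpha=None):
        super().__init__()
        alpha = alpha if alpha is not None else r
        self.scale = alpha / r
        self.A = nn.Parameter(torch.randn(dim, r))
        self.B = nn.Parameter(torch.zeros(r, dim_out))  # zero init: delta starts at 0

    def forward(self, x):
        return x @ self.A @ self.B * self.scale

delta = LoRADelta(dim=16, dim_out=16)
print(delta(torch.randn(2, 16)).shape)  # torch.Size([2, 16])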
import math from pathlib import Path import copy from tqdm import tqdm from functools import partial from collections import deque, namedtuple from random import randrange from beartype import beartype from beartype.typing import List, Optional, Callable, Deque import torch from torch import nn import torch.nn.functi...
EXA-1-master
exa/models/PaLM-rlhf-pytorch-main 2/palm_rlhf_pytorch/ppo.py
import copy from pathlib import Path from tqdm import tqdm from beartype import beartype from beartype.typing import Tuple, Optional import torch from torch import nn import torch.nn.functional as F from einops import rearrange, repeat, reduce, pack, unpack from einops.layers.torch import Rearrange, Reduce from pal...
EXA-1-master
exa/models/PaLM-rlhf-pytorch-main 2/palm_rlhf_pytorch/reward.py
from setuptools import setup, find_packages setup( name = 'MaMMUT-pytorch', packages = find_packages(exclude=[]), version = '0.0.2', license='MIT', description = 'MaMMUT - Pytorch', author = 'Phil Wang', author_email = 'lucidrains@gmail.com', long_description_content_type = 'text/markdown', url = 'ht...
EXA-1-master
exa/models/MaMMUT-pytorch/setup.py
import torch from torch import einsum, nn import torch.nn.functional as F from einops import rearrange, repeat # helper functions def exists(val): return val is not None def default(val, d): return val if exists(val) else d def divisible_by(numer, denom): return (numer % denom) == 0 # normalization # t...
EXA-1-master
exa/models/MaMMUT-pytorch/mammut_pytorch/mammut_pytorch.py
from mammut_pytorch.mammut_pytorch import MaMMUT
EXA-1-master
exa/models/MaMMUT-pytorch/mammut_pytorch/__init__.py
# coding=utf-8 import os import sys from pathlib import Path from subprocess import DEVNULL, PIPE, run from setuptools import find_packages, setup project_root = Path(__file__).parent # modified from https://github.com/lhotse-speech/lhotse/blob/master/setup.py # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!...
EXA-1-master
exa/models/valle/vall-e-main/setup.py
from . import data, models, modules, utils
EXA-1-master
exa/models/valle/vall-e-main/valle/__init__.py
EXA-1-master
exa/models/valle/vall-e-main/valle/bin/__init__.py
#!/usr/bin/env python3 # Copyright 2023 (authors: Feiteng Li) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 ...
EXA-1-master
exa/models/valle/vall-e-main/valle/bin/tokenizer.py
#!/usr/bin/env python3 # Copyright 2023 (authors: Feiteng Li) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 ...
EXA-1-master
exa/models/valle/vall-e-main/valle/bin/infer.py
#!/usr/bin/env python3 # Copyright 2021 Xiaomi Corp. (authors: Fangjun Kuang) # Copyright 2023 (authors: Feiteng Li) # # See ../../../../LICENSE for clarification regarding multiple authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use...
EXA-1-master
exa/models/valle/vall-e-main/valle/bin/display_manifest_statistics.py