Dataset schema (string columns show length range; integer columns show value range):

| column                  | dtype  | min | max   |
|-------------------------|--------|-----|-------|
| code                    | string | 87  | 55.2k |
| code_codestyle          | int64  | 0   | 349   |
| style_context           | string | 135 | 49.1k |
| style_context_codestyle | int64  | 0   | 349   |
| label                   | int64  | 0   | 1     |
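The header above appears to be the flattened schema of a Hugging Face dataset-viewer page: each row pairs a code snippet with a style-context snippet, their codestyle ids, and a binary label. A minimal sketch of loading and inspecting a dataset with this schema via the `datasets` library follows; the repository id is a placeholder, not something this dump confirms.

```python
# Minimal sketch: loading a dataset with the schema shown above.
from datasets import load_dataset

# Placeholder repository id; substitute the actual dataset name.
ds = load_dataset("user/python-codestyles", split="train")

print(ds.column_names)
# Expected, per the schema above:
# ['code', 'code_codestyle', 'style_context', 'style_context_codestyle', 'label']

row = ds[0]
print(len(row["code"]), row["code_codestyle"], row["label"])
```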
code:

```python
import copy
import os
from typing import Union

from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

GIT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "microsoft/git-base": "https://huggingface.co/microsoft/git-base/resolve/main/config.json",
}


class GitVisionConfig(PretrainedConfig):
    model_type = "git_vision_model"

    def __init__(
        self,
        hidden_size=768,
        intermediate_size=3072,
        num_hidden_layers=12,
        num_attention_heads=12,
        num_channels=3,
        image_size=224,
        patch_size=16,
        hidden_act="quick_gelu",
        layer_norm_eps=1e-5,
        attention_dropout=0.0,
        initializer_range=0.02,
        **kwargs,
    ):
        super().__init__(**kwargs)

        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.num_channels = num_channels
        self.patch_size = patch_size
        self.image_size = image_size
        self.initializer_range = initializer_range
        self.attention_dropout = attention_dropout
        self.layer_norm_eps = layer_norm_eps
        self.hidden_act = hidden_act

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path, **kwargs) -> "PretrainedConfig":
        cls._set_token_in_kwargs(kwargs)

        config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)

        # get the vision config dict if we are loading from GITConfig
        if config_dict.get("model_type") == "git":
            config_dict = config_dict["vision_config"]

        if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
            logger.warning(
                f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
                f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
            )

        return cls.from_dict(config_dict, **kwargs)


class GitConfig(PretrainedConfig):
    model_type = "git"

    def __init__(
        self,
        vision_config=None,
        vocab_size=30522,
        hidden_size=768,
        num_hidden_layers=6,
        num_attention_heads=12,
        intermediate_size=3072,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        max_position_embeddings=1024,
        initializer_range=0.02,
        layer_norm_eps=1e-12,
        pad_token_id=0,
        position_embedding_type="absolute",
        use_cache=True,
        tie_word_embeddings=False,
        bos_token_id=101,
        eos_token_id=102,
        num_image_with_embedding=None,
        **kwargs,
    ):
        super().__init__(bos_token_id=bos_token_id, eos_token_id=eos_token_id, pad_token_id=pad_token_id, **kwargs)

        if vision_config is None:
            vision_config = {}
            logger.info("vision_config is None. initializing the GitVisionConfig with default values.")

        self.vision_config = GitVisionConfig(**vision_config)
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.hidden_act = hidden_act
        self.intermediate_size = intermediate_size
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.max_position_embeddings = max_position_embeddings
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
        self.position_embedding_type = position_embedding_type
        self.use_cache = use_cache
        self.tie_word_embeddings = tie_word_embeddings
        self.num_image_with_embedding = num_image_with_embedding
        self.bos_token_id = bos_token_id
        self.eos_token_id = eos_token_id

    def to_dict(self):
        output = copy.deepcopy(self.__dict__)
        output["vision_config"] = self.vision_config.to_dict()
        output["model_type"] = self.__class__.model_type
        return output
```
code_codestyle: 259
style_context:

```python
from json import JSONDecodeError  # Workaround for requests.exceptions.JSONDecodeError

import requests


def get_openlibrary_data(olid: str = "isbn/0140328726") -> dict:
    new_olid = olid.strip().strip("/")  # Remove leading/trailing whitespace & slashes
    if new_olid.count("/") != 1:
        msg = f"{olid} is not a valid Open Library olid"
        raise ValueError(msg)
    return requests.get(f"https://openlibrary.org/{new_olid}.json").json()


def summarize_book(ol_book_data: dict) -> dict:
    desired_keys = {
        "title": "Title",
        "publish_date": "Publish date",
        "authors": "Authors",
        "number_of_pages": "Number of pages:",
        "first_sentence": "First sentence",
        "isbn_10": "ISBN (10)",
        "isbn_13": "ISBN (13)",
    }
    data = {better_key: ol_book_data[key] for key, better_key in desired_keys.items()}
    data["Authors"] = [get_openlibrary_data(author["key"])["name"] for author in data["Authors"]]
    data["First sentence"] = data["First sentence"]["value"]
    for key, value in data.items():
        if isinstance(value, list):
            data[key] = ", ".join(value)
    return data


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    while True:
        isbn = input("\nEnter the ISBN code to search (or 'quit' to stop): ").strip()
        if isbn.lower() in ("", "q", "quit", "exit", "stop"):
            break
        if len(isbn) not in (10, 13) or not isbn.isdigit():
            print(f"Sorry, {isbn} is not a valid ISBN. Please, input a valid ISBN.")
            continue
        print(f"\nSearching Open Library for ISBN: {isbn}...\n")
        try:
            book_summary = summarize_book(get_openlibrary_data(f"isbn/{isbn}"))
            print("\n".join(f"{key}: {value}" for key, value in book_summary.items()))
        except JSONDecodeError:  # Workaround for requests.exceptions.RequestException:
            print(f"Sorry, there are no results for ISBN: {isbn}.")
```
style_context_codestyle: 259
label: 1
code:

```python
import argparse
import json
import os

import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed

from accelerate import Accelerator, DistributedType
from accelerate.utils.deepspeed import DummyOptim, DummyScheduler


MAX_GPU_BATCH_SIZE = 16
EVAL_BATCH_SIZE = 32


def get_dataloaders(accelerator: Accelerator, batch_size: int = 16, model_name: str = "bert-base-cased"):
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    datasets = load_dataset("glue", "mrpc")

    def tokenize_function(examples):
        # max_length=None => use the model max length (it's actually the default)
        outputs = tokenizer(examples["sentence1"], examples["sentence2"], truncation=True, max_length=None)
        return outputs

    # Apply the method we just defined to all the examples in all the splits of the dataset
    tokenized_datasets = datasets.map(
        tokenize_function, batched=True, remove_columns=["idx", "sentence1", "sentence2"], load_from_cache_file=False
    )

    # We also rename the 'label' column to 'labels' which is the expected name for labels by the models of the
    # transformers library
    tokenized_datasets = tokenized_datasets.rename_column("label", "labels")

    def collate_fn(examples):
        # On TPU it's best to pad everything to the same length or training will be very slow.
        if accelerator.distributed_type == DistributedType.TPU:
            return tokenizer.pad(examples, padding="max_length", max_length=128, return_tensors="pt")
        return tokenizer.pad(examples, padding="longest", return_tensors="pt")

    # Instantiate dataloaders.
    train_dataloader = DataLoader(
        tokenized_datasets["train"], shuffle=True, collate_fn=collate_fn, batch_size=batch_size
    )
    eval_dataloader = DataLoader(
        tokenized_datasets["validation"], shuffle=False, collate_fn=collate_fn, batch_size=EVAL_BATCH_SIZE
    )

    return train_dataloader, eval_dataloader


def evaluation_loop(accelerator, model, eval_dataloader, metric):
    model.eval()
    samples_seen = 0
    for step, batch in enumerate(eval_dataloader):
        # We could avoid this line since we set the accelerator with `device_placement=True`.
        batch.to(accelerator.device)
        with torch.no_grad():
            outputs = model(**batch)
        predictions = outputs.logits.argmax(dim=-1)
        # It is slightly faster to call this once, than multiple times
        predictions, references = accelerator.gather((predictions, batch["labels"]))
        # If we are in a multiprocess environment, the last batch has duplicates
        if accelerator.use_distributed:
            if step == len(eval_dataloader) - 1:
                predictions = predictions[: len(eval_dataloader.dataset) - samples_seen]
                references = references[: len(eval_dataloader.dataset) - samples_seen]
            else:
                samples_seen += references.shape[0]
        metric.add_batch(predictions=predictions, references=references)

    eval_metric = metric.compute()
    return eval_metric["accuracy"]


def training_function(config, args):
    # Initialize accelerator
    accelerator = Accelerator()
    # Sample hyper-parameters for learning rate, batch size, seed and a few other HPs
    lr = config["lr"]
    num_epochs = int(config["num_epochs"])
    seed = int(config["seed"])
    batch_size = int(config["batch_size"])
    model_name = args.model_name_or_path

    set_seed(seed)
    train_dataloader, eval_dataloader = get_dataloaders(accelerator, batch_size, model_name)

    # Instantiate the model (we build the model here so that the seed also control new weights initialization)
    model = AutoModelForSequenceClassification.from_pretrained(model_name, return_dict=True)

    # Instantiate optimizer
    optimizer_cls = (
        AdamW
        if accelerator.state.deepspeed_plugin is None
        or "optimizer" not in accelerator.state.deepspeed_plugin.deepspeed_config
        else DummyOptim
    )
    optimizer = optimizer_cls(params=model.parameters(), lr=lr)

    if accelerator.state.deepspeed_plugin is not None:
        gradient_accumulation_steps = accelerator.state.deepspeed_plugin.deepspeed_config[
            "gradient_accumulation_steps"
        ]
    else:
        gradient_accumulation_steps = 1
    max_training_steps = (len(train_dataloader) * num_epochs) // gradient_accumulation_steps

    # Instantiate scheduler
    if (
        accelerator.state.deepspeed_plugin is None
        or "scheduler" not in accelerator.state.deepspeed_plugin.deepspeed_config
    ):
        lr_scheduler = get_linear_schedule_with_warmup(
            optimizer=optimizer, num_warmup_steps=0, num_training_steps=max_training_steps
        )
    else:
        lr_scheduler = DummyScheduler(optimizer, total_num_steps=max_training_steps, warmup_num_steps=0)

    # Prepare everything
    # There is no specific order to remember, we just need to unpack the objects in the same order we gave them to the
    # prepare method.
    model, optimizer, train_dataloader, eval_dataloader, lr_scheduler = accelerator.prepare(
        model, optimizer, train_dataloader, eval_dataloader, lr_scheduler
    )

    # We need to keep track of how many total steps we have iterated over
    overall_step = 0
    # We also need to keep track of the starting epoch so files are named properly
    starting_epoch = 0
    metric = evaluate.load("glue", "mrpc")
    ending_epoch = num_epochs

    if args.partial_train_epoch is not None:
        ending_epoch = args.partial_train_epoch

    if args.resume_from_checkpoint:
        accelerator.load_state(args.resume_from_checkpoint)
        epoch_string = args.resume_from_checkpoint.split("epoch_")[1]
        state_epoch_num = ""
        for char in epoch_string:
            if char.isdigit():
                state_epoch_num += char
            else:
                break
        starting_epoch = int(state_epoch_num) + 1
        accuracy = evaluation_loop(accelerator, model, eval_dataloader, metric)
        accelerator.print("resumed checkpoint performance:", accuracy)
        accelerator.print("resumed checkpoint's scheduler's lr:", lr_scheduler.get_lr()[0])
        accelerator.print("resumed optimizers's lr:", optimizer.param_groups[0]["lr"])
        with open(os.path.join(args.output_dir, f"state_{starting_epoch - 1}.json"), "r") as f:
            resumed_state = json.load(f)
        assert resumed_state["accuracy"] == accuracy, "Accuracy mismatch, loading from checkpoint failed"
        assert (
            resumed_state["lr"] == lr_scheduler.get_lr()[0]
        ), "Scheduler learning rate mismatch, loading from checkpoint failed"
        assert (
            resumed_state["optimizer_lr"] == optimizer.param_groups[0]["lr"]
        ), "Optimizer learning rate mismatch, loading from checkpoint failed"
        assert resumed_state["epoch"] == starting_epoch - 1, "Epoch mismatch, loading from checkpoint failed"
        return

    # Now we train the model
    state = {}
    for epoch in range(starting_epoch, ending_epoch):
        model.train()
        for step, batch in enumerate(train_dataloader):
            outputs = model(**batch)
            loss = outputs.loss
            loss = loss / gradient_accumulation_steps
            accelerator.backward(loss)
            if step % gradient_accumulation_steps == 0:
                optimizer.step()
                lr_scheduler.step()
                optimizer.zero_grad()
            overall_step += 1

        output_dir = f"epoch_{epoch}"
        output_dir = os.path.join(args.output_dir, output_dir)
        accelerator.save_state(output_dir)
        accuracy = evaluation_loop(accelerator, model, eval_dataloader, metric)
        state["accuracy"] = accuracy
        state["lr"] = lr_scheduler.get_lr()[0]
        state["optimizer_lr"] = optimizer.param_groups[0]["lr"]
        state["epoch"] = epoch
        state["step"] = overall_step
        accelerator.print(f"epoch {epoch}:", state)

        accelerator.wait_for_everyone()
        if accelerator.is_main_process:
            with open(os.path.join(args.output_dir, f"state_{epoch}.json"), "w") as f:
                json.dump(state, f)


def main():
    parser = argparse.ArgumentParser(description="Simple example of training script tracking peak GPU memory usage.")
    parser.add_argument(
        "--model_name_or_path",
        type=str,
        default="bert-base-cased",
        help="Path to pretrained model or model identifier from huggingface.co/models.",
        required=False,
    )
    parser.add_argument(
        "--output_dir",
        type=str,
        default=".",
        help="Optional save directory where all checkpoint folders will be stored. Default is the current working directory.",
    )
    parser.add_argument(
        "--resume_from_checkpoint",
        type=str,
        default=None,
        help="If the training should continue from a checkpoint folder.",
    )
    parser.add_argument(
        "--partial_train_epoch",
        type=int,
        default=None,
        help="If passed, the training will stop after this number of epochs.",
    )
    parser.add_argument(
        "--num_epochs",
        type=int,
        default=2,
        help="Number of train epochs.",
    )
    args = parser.parse_args()
    config = {"lr": 2e-5, "num_epochs": args.num_epochs, "seed": 42, "batch_size": 16}
    training_function(config, args)


if __name__ == "__main__":
    main()
```
code_codestyle: 259
style_context:

```python
import inspect
import tempfile
import unittest

from huggingface_hub import hf_hub_download
from transformers import is_torch_available
from transformers.testing_utils import is_flaky, require_torch, slow, torch_device

from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor
from ...test_pipeline_mixin import PipelineTesterMixin


TOLERANCE = 1e-4

if is_torch_available():
    import torch

    from transformers import AutoformerConfig, AutoformerForPrediction, AutoformerModel
    from transformers.models.autoformer.modeling_autoformer import AutoformerDecoder, AutoformerEncoder


@require_torch
class AutoformerModelTester:
    def __init__(
        self,
        parent,
        d_model=16,
        batch_size=13,
        prediction_length=7,
        context_length=14,
        label_length=10,
        cardinality=19,
        embedding_dimension=5,
        num_time_features=4,
        is_training=True,
        hidden_size=16,
        num_hidden_layers=2,
        num_attention_heads=4,
        intermediate_size=4,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        lags_sequence=[1, 2, 3, 4, 5],
        moving_average=25,
        autocorrelation_factor=5,
    ):
        self.d_model = d_model
        self.parent = parent
        self.batch_size = batch_size
        self.prediction_length = prediction_length
        self.context_length = context_length
        self.cardinality = cardinality
        self.num_time_features = num_time_features
        self.lags_sequence = lags_sequence
        self.embedding_dimension = embedding_dimension
        self.is_training = is_training
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob

        self.encoder_seq_length = context_length
        self.decoder_seq_length = prediction_length + label_length
        self.label_length = label_length

        self.moving_average = moving_average
        self.autocorrelation_factor = autocorrelation_factor

    def get_config(self):
        return AutoformerConfig(
            d_model=self.d_model,
            encoder_layers=self.num_hidden_layers,
            decoder_layers=self.num_hidden_layers,
            encoder_attention_heads=self.num_attention_heads,
            decoder_attention_heads=self.num_attention_heads,
            encoder_ffn_dim=self.intermediate_size,
            decoder_ffn_dim=self.intermediate_size,
            dropout=self.hidden_dropout_prob,
            attention_dropout=self.attention_probs_dropout_prob,
            prediction_length=self.prediction_length,
            context_length=self.context_length,
            label_length=self.label_length,
            lags_sequence=self.lags_sequence,
            num_time_features=self.num_time_features,
            num_static_categorical_features=1,
            cardinality=[self.cardinality],
            embedding_dimension=[self.embedding_dimension],
            moving_average=self.moving_average,
        )

    def prepare_autoformer_inputs_dict(self, config):
        _past_length = config.context_length + max(config.lags_sequence)

        static_categorical_features = ids_tensor([self.batch_size, 1], config.cardinality[0])
        past_time_features = floats_tensor([self.batch_size, _past_length, config.num_time_features])
        past_values = floats_tensor([self.batch_size, _past_length])
        past_observed_mask = floats_tensor([self.batch_size, _past_length]) > 0.5

        # decoder inputs
        future_time_features = floats_tensor([self.batch_size, config.prediction_length, config.num_time_features])
        future_values = floats_tensor([self.batch_size, config.prediction_length])

        inputs_dict = {
            "past_values": past_values,
            "static_categorical_features": static_categorical_features,
            "past_time_features": past_time_features,
            "past_observed_mask": past_observed_mask,
            "future_time_features": future_time_features,
            "future_values": future_values,
        }
        return inputs_dict

    def prepare_config_and_inputs(self):
        config = self.get_config()
        inputs_dict = self.prepare_autoformer_inputs_dict(config)
        return config, inputs_dict

    def prepare_config_and_inputs_for_common(self):
        config, inputs_dict = self.prepare_config_and_inputs()
        return config, inputs_dict

    def check_encoder_decoder_model_standalone(self, config, inputs_dict):
        model = AutoformerModel(config=config).to(torch_device).eval()
        outputs = model(**inputs_dict)

        encoder_last_hidden_state = outputs.encoder_last_hidden_state
        last_hidden_state = outputs.last_hidden_state

        with tempfile.TemporaryDirectory() as tmpdirname:
            encoder = model.get_encoder()
            encoder.save_pretrained(tmpdirname)
            encoder = AutoformerEncoder.from_pretrained(tmpdirname).to(torch_device)

        transformer_inputs, feature, _, _, _ = model.create_network_inputs(**inputs_dict)
        seasonal_input, trend_input = model.decomposition_layer(transformer_inputs[:, : config.context_length, ...])

        enc_input = torch.cat(
            (transformer_inputs[:, : config.context_length, ...], feature[:, : config.context_length, ...]),
            dim=-1,
        )
        encoder_last_hidden_state_2 = encoder(inputs_embeds=enc_input)[0]
        self.parent.assertTrue((encoder_last_hidden_state_2 - encoder_last_hidden_state).abs().max().item() < 1e-3)

        mean = (
            torch.mean(transformer_inputs[:, : config.context_length, ...], dim=1)
            .unsqueeze(1)
            .repeat(1, config.prediction_length, 1)
        )
        zeros = torch.zeros(
            [transformer_inputs.shape[0], config.prediction_length, transformer_inputs.shape[2]],
            device=enc_input.device,
        )

        dec_input = torch.cat(
            (
                torch.cat((seasonal_input[:, -config.label_length :, ...], zeros), dim=1),
                feature[:, config.context_length - config.label_length :, ...],
            ),
            dim=-1,
        )
        trend_init = torch.cat(
            (
                torch.cat((trend_input[:, -config.label_length :, ...], mean), dim=1),
                feature[:, config.context_length - config.label_length :, ...],
            ),
            dim=-1,
        )

        with tempfile.TemporaryDirectory() as tmpdirname:
            decoder = model.get_decoder()
            decoder.save_pretrained(tmpdirname)
            decoder = AutoformerDecoder.from_pretrained(tmpdirname).to(torch_device)

        last_hidden_state_2 = decoder(
            trend=trend_init,
            inputs_embeds=dec_input,
            encoder_hidden_states=encoder_last_hidden_state,
        )[0]

        self.parent.assertTrue((last_hidden_state_2 - last_hidden_state).abs().max().item() < 1e-3)


@require_torch
class AutoformerModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
    all_model_classes = (AutoformerModel, AutoformerForPrediction) if is_torch_available() else ()
    all_generative_model_classes = (AutoformerForPrediction,) if is_torch_available() else ()
    pipeline_model_mapping = {"feature-extraction": AutoformerModel} if is_torch_available() else {}
    test_pruning = False
    test_head_masking = False
    test_missing_keys = False
    test_torchscript = False
    test_inputs_embeds = False
    test_model_common_attributes = False

    def setUp(self):
        self.model_tester = AutoformerModelTester(self)
        self.config_tester = ConfigTester(self, config_class=AutoformerConfig, has_text_modality=False)

    def test_config(self):
        self.config_tester.run_common_tests()

    def test_save_load_strict(self):
        config, inputs_dict = self.model_tester.prepare_config_and_inputs()
        for model_class in self.all_model_classes:
            model = model_class(config)

            with tempfile.TemporaryDirectory() as tmpdirname:
                model.save_pretrained(tmpdirname)
                model2, info = model_class.from_pretrained(tmpdirname, output_loading_info=True)
            self.assertEqual(info["missing_keys"], [])

    def test_encoder_decoder_model_standalone(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs_for_common()
        self.model_tester.check_encoder_decoder_model_standalone(*config_and_inputs)

    @unittest.skip(reason="Model has no tokens embeddings")
    def test_resize_tokens_embeddings(self):
        pass

    def test_model_main_input_name(self):
        model_signature = inspect.signature(getattr(AutoformerModel, "forward"))
        # The main input is the name of the argument after `self`
        observed_main_input_name = list(model_signature.parameters.keys())[1]
        self.assertEqual(AutoformerModel.main_input_name, observed_main_input_name)

    def test_forward_signature(self):
        config, _ = self.model_tester.prepare_config_and_inputs_for_common()

        for model_class in self.all_model_classes:
            model = model_class(config)
            signature = inspect.signature(model.forward)
            # signature.parameters is an OrderedDict => so arg_names order is deterministic
            arg_names = [*signature.parameters.keys()]

            expected_arg_names = [
                "past_values",
                "past_time_features",
                "past_observed_mask",
                "static_categorical_features",
                "static_real_features",
                "future_values",
                "future_time_features",
            ]

            if model.__class__.__name__ in ["AutoformerForPrediction"]:
                expected_arg_names.append("future_observed_mask")

            expected_arg_names.extend(
                [
                    "decoder_attention_mask",
                    "head_mask",
                    "decoder_head_mask",
                    "cross_attn_head_mask",
                    "encoder_outputs",
                    "past_key_values",
                    "output_hidden_states",
                    "output_attentions",
                    "use_cache",
                    "return_dict",
                ]
            )

            self.assertListEqual(arg_names[: len(expected_arg_names)], expected_arg_names)

    def test_attention_outputs(self):
        config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
        config.return_dict = True

        seq_len = getattr(self.model_tester, "seq_length", None)
        decoder_seq_length = getattr(self.model_tester, "decoder_seq_length", seq_len)
        encoder_seq_length = getattr(self.model_tester, "encoder_seq_length", seq_len)
        d_model = getattr(self.model_tester, "d_model", None)
        num_attention_heads = getattr(self.model_tester, "num_attention_heads", None)
        dim = d_model // num_attention_heads

        for model_class in self.all_model_classes:
            inputs_dict["output_attentions"] = True
            inputs_dict["output_hidden_states"] = False
            config.return_dict = True
            model = model_class(config)
            model.to(torch_device)
            model.eval()
            with torch.no_grad():
                outputs = model(**self._prepare_for_class(inputs_dict, model_class))
            attentions = outputs.encoder_attentions if config.is_encoder_decoder else outputs.attentions
            self.assertEqual(len(attentions), self.model_tester.num_hidden_layers)

            # check that output_attentions also work using config
            del inputs_dict["output_attentions"]
            config.output_attentions = True
            model = model_class(config)
            model.to(torch_device)
            model.eval()
            with torch.no_grad():
                outputs = model(**self._prepare_for_class(inputs_dict, model_class))
            attentions = outputs.encoder_attentions
            self.assertEqual(len(attentions), self.model_tester.num_hidden_layers)

            self.assertListEqual(
                list(attentions[0].shape[-3:]),
                [self.model_tester.num_attention_heads, encoder_seq_length, dim],
            )
            out_len = len(outputs)

            correct_outlen = 7

            if "last_hidden_state" in outputs:
                correct_outlen += 1
            if "trend" in outputs:
                correct_outlen += 1
            if "past_key_values" in outputs:
                correct_outlen += 1  # past_key_values have been returned
            if "loss" in outputs:
                correct_outlen += 1
            if "params" in outputs:
                correct_outlen += 1

            self.assertEqual(out_len, correct_outlen)

            # decoder attentions
            decoder_attentions = outputs.decoder_attentions
            self.assertIsInstance(decoder_attentions, (list, tuple))
            self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers)
            self.assertListEqual(
                list(decoder_attentions[0].shape[-3:]),
                [self.model_tester.num_attention_heads, decoder_seq_length, dim],
            )

            # cross attentions
            cross_attentions = outputs.cross_attentions
            self.assertIsInstance(cross_attentions, (list, tuple))
            self.assertEqual(len(cross_attentions), self.model_tester.num_hidden_layers)
            self.assertListEqual(
                list(cross_attentions[0].shape[-3:]),
                [self.model_tester.num_attention_heads, decoder_seq_length, dim],
            )

            # Check attention is always last and order is fine
            inputs_dict["output_attentions"] = True
            inputs_dict["output_hidden_states"] = True
            model = model_class(config)
            model.to(torch_device)
            model.eval()
            with torch.no_grad():
                outputs = model(**self._prepare_for_class(inputs_dict, model_class))

            self.assertEqual(out_len + 2, len(outputs))

            self_attentions = outputs.encoder_attentions if config.is_encoder_decoder else outputs.attentions

            self.assertEqual(len(self_attentions), self.model_tester.num_hidden_layers)
            self.assertListEqual(
                list(self_attentions[0].shape[-3:]),
                [self.model_tester.num_attention_heads, encoder_seq_length, dim],
            )

    @is_flaky()
    def test_retain_grad_hidden_states_attentions(self):
        super().test_retain_grad_hidden_states_attentions()


def prepare_batch(filename="train-batch.pt"):
    file = hf_hub_download(repo_id="hf-internal-testing/tourism-monthly-batch", filename=filename, repo_type="dataset")
    batch = torch.load(file, map_location=torch_device)
    return batch


@require_torch
@slow
class AutoformerModelIntegrationTests(unittest.TestCase):
    def test_inference_no_head(self):
        model = AutoformerModel.from_pretrained("huggingface/autoformer-tourism-monthly").to(torch_device)
        batch = prepare_batch()

        with torch.no_grad():
            output = model(
                past_values=batch["past_values"],
                past_time_features=batch["past_time_features"],
                past_observed_mask=batch["past_observed_mask"],
                static_categorical_features=batch["static_categorical_features"],
                future_values=batch["future_values"],
                future_time_features=batch["future_time_features"],
            )[0]

        expected_shape = torch.Size(
            (64, model.config.prediction_length + model.config.label_length, model.config.feature_size)
        )
        self.assertEqual(output.shape, expected_shape)

        expected_slice = torch.tensor(
            [[0.3593, -1.3398, 0.6330], [0.2279, 1.5396, -0.1792], [0.0450, 1.3225, -0.2335]], device=torch_device
        )
        self.assertTrue(torch.allclose(output[0, :3, :3], expected_slice, atol=TOLERANCE))

    def test_inference_head(self):
        model = AutoformerForPrediction.from_pretrained("huggingface/autoformer-tourism-monthly").to(torch_device)
        batch = prepare_batch("val-batch.pt")
        with torch.no_grad():
            output = model(
                past_values=batch["past_values"],
                past_time_features=batch["past_time_features"],
                past_observed_mask=batch["past_observed_mask"],
                static_categorical_features=batch["static_categorical_features"],
            ).encoder_last_hidden_state
        expected_shape = torch.Size((64, model.config.context_length, model.config.d_model))
        self.assertEqual(output.shape, expected_shape)

        expected_slice = torch.tensor(
            [[-0.0734, -0.9036, 0.8358], [4.7186, 2.4113, 1.9581], [1.7953, 2.3558, 1.2970]], device=torch_device
        )
        self.assertTrue(torch.allclose(output[0, :3, :3], expected_slice, atol=TOLERANCE))

    def test_seq_to_seq_generation(self):
        model = AutoformerForPrediction.from_pretrained("huggingface/autoformer-tourism-monthly").to(torch_device)
        batch = prepare_batch("val-batch.pt")
        with torch.no_grad():
            outputs = model.generate(
                static_categorical_features=batch["static_categorical_features"],
                past_time_features=batch["past_time_features"],
                past_values=batch["past_values"],
                future_time_features=batch["future_time_features"],
                past_observed_mask=batch["past_observed_mask"],
            )
        expected_shape = torch.Size((64, model.config.num_parallel_samples, model.config.prediction_length))
        self.assertEqual(outputs.sequences.shape, expected_shape)

        expected_slice = torch.tensor([3130.6763, 4056.5293, 7053.0786], device=torch_device)
        mean_prediction = outputs.sequences.mean(dim=1)
        self.assertTrue(torch.allclose(mean_prediction[0, -3:], expected_slice, rtol=1e-1))
```
style_context_codestyle: 259
label: 1
code:

```python
import pyarrow.parquet as pq
import pytest

from datasets import Audio, Dataset, DatasetDict, Features, NamedSplit, Sequence, Value, config
from datasets.features.image import Image
from datasets.io.parquet import ParquetDatasetReader, ParquetDatasetWriter, get_writer_batch_size

from ..utils import assert_arrow_memory_doesnt_increase, assert_arrow_memory_increases


def _check_parquet_dataset(dataset, expected_features):
    assert isinstance(dataset, Dataset)
    assert dataset.num_rows == 4
    assert dataset.num_columns == 3
    assert dataset.column_names == ["col_1", "col_2", "col_3"]
    for feature, expected_dtype in expected_features.items():
        assert dataset.features[feature].dtype == expected_dtype


@pytest.mark.parametrize("keep_in_memory", [False, True])
def test_dataset_from_parquet_keep_in_memory(keep_in_memory, parquet_path, tmp_path):
    cache_dir = tmp_path / "cache"
    expected_features = {"col_1": "string", "col_2": "int64", "col_3": "float64"}
    with assert_arrow_memory_increases() if keep_in_memory else assert_arrow_memory_doesnt_increase():
        dataset = ParquetDatasetReader(parquet_path, cache_dir=cache_dir, keep_in_memory=keep_in_memory).read()
    _check_parquet_dataset(dataset, expected_features)


@pytest.mark.parametrize(
    "features",
    [
        None,
        {"col_1": "string", "col_2": "int64", "col_3": "float64"},
        {"col_1": "string", "col_2": "string", "col_3": "string"},
        {"col_1": "int32", "col_2": "int32", "col_3": "int32"},
        {"col_1": "float32", "col_2": "float32", "col_3": "float32"},
    ],
)
def test_dataset_from_parquet_features(features, parquet_path, tmp_path):
    cache_dir = tmp_path / "cache"
    default_expected_features = {"col_1": "string", "col_2": "int64", "col_3": "float64"}
    expected_features = features.copy() if features else default_expected_features
    features = (
        Features({feature: Value(dtype) for feature, dtype in features.items()}) if features is not None else None
    )
    dataset = ParquetDatasetReader(parquet_path, features=features, cache_dir=cache_dir).read()
    _check_parquet_dataset(dataset, expected_features)


@pytest.mark.parametrize("split", [None, NamedSplit("train"), "train", "test"])
def test_dataset_from_parquet_split(split, parquet_path, tmp_path):
    cache_dir = tmp_path / "cache"
    expected_features = {"col_1": "string", "col_2": "int64", "col_3": "float64"}
    dataset = ParquetDatasetReader(parquet_path, cache_dir=cache_dir, split=split).read()
    _check_parquet_dataset(dataset, expected_features)
    assert dataset.split == split if split else "train"


@pytest.mark.parametrize("path_type", [str, list])
def test_dataset_from_parquet_path_type(path_type, parquet_path, tmp_path):
    if issubclass(path_type, str):
        path = parquet_path
    elif issubclass(path_type, list):
        path = [parquet_path]
    cache_dir = tmp_path / "cache"
    expected_features = {"col_1": "string", "col_2": "int64", "col_3": "float64"}
    dataset = ParquetDatasetReader(path, cache_dir=cache_dir).read()
    _check_parquet_dataset(dataset, expected_features)


def _check_parquet_datasetdict(dataset_dict, expected_features, splits=("train",)):
    assert isinstance(dataset_dict, DatasetDict)
    for split in splits:
        dataset = dataset_dict[split]
        assert dataset.num_rows == 4
        assert dataset.num_columns == 3
        assert dataset.column_names == ["col_1", "col_2", "col_3"]
        for feature, expected_dtype in expected_features.items():
            assert dataset.features[feature].dtype == expected_dtype


@pytest.mark.parametrize("keep_in_memory", [False, True])
def test_parquet_datasetdict_reader_keep_in_memory(keep_in_memory, parquet_path, tmp_path):
    cache_dir = tmp_path / "cache"
    expected_features = {"col_1": "string", "col_2": "int64", "col_3": "float64"}
    with assert_arrow_memory_increases() if keep_in_memory else assert_arrow_memory_doesnt_increase():
        dataset = ParquetDatasetReader(
            {"train": parquet_path}, cache_dir=cache_dir, keep_in_memory=keep_in_memory
        ).read()
    _check_parquet_datasetdict(dataset, expected_features)


@pytest.mark.parametrize(
    "features",
    [
        None,
        {"col_1": "string", "col_2": "int64", "col_3": "float64"},
        {"col_1": "string", "col_2": "string", "col_3": "string"},
        {"col_1": "int32", "col_2": "int32", "col_3": "int32"},
        {"col_1": "float32", "col_2": "float32", "col_3": "float32"},
    ],
)
def test_parquet_datasetdict_reader_features(features, parquet_path, tmp_path):
    cache_dir = tmp_path / "cache"
    default_expected_features = {"col_1": "string", "col_2": "int64", "col_3": "float64"}
    expected_features = features.copy() if features else default_expected_features
    features = (
        Features({feature: Value(dtype) for feature, dtype in features.items()}) if features is not None else None
    )
    dataset = ParquetDatasetReader({"train": parquet_path}, features=features, cache_dir=cache_dir).read()
    _check_parquet_datasetdict(dataset, expected_features)


@pytest.mark.parametrize("split", [None, NamedSplit("train"), "train", "test"])
def test_parquet_datasetdict_reader_split(split, parquet_path, tmp_path):
    if split:
        path = {split: parquet_path}
    else:
        split = "train"
        path = {"train": parquet_path, "test": parquet_path}
    cache_dir = tmp_path / "cache"
    expected_features = {"col_1": "string", "col_2": "int64", "col_3": "float64"}
    dataset = ParquetDatasetReader(path, cache_dir=cache_dir).read()
    _check_parquet_datasetdict(dataset, expected_features, splits=list(path.keys()))
    assert all(dataset[split].split == split for split in path.keys())


def test_parquet_write(dataset, tmp_path):
    writer = ParquetDatasetWriter(dataset, tmp_path / "foo.parquet")
    assert writer.write() > 0
    pf = pq.ParquetFile(tmp_path / "foo.parquet")
    output_table = pf.read()
    assert dataset.data.table == output_table


def test_dataset_to_parquet_keeps_features(shared_datadir, tmp_path):
    image_path = str(shared_datadir / "test_image_rgb.jpg")
    data = {"image": [image_path]}
    features = Features({"image": Image()})
    dataset = Dataset.from_dict(data, features=features)
    writer = ParquetDatasetWriter(dataset, tmp_path / "foo.parquet")
    assert writer.write() > 0

    reloaded_dataset = Dataset.from_parquet(str(tmp_path / "foo.parquet"))
    assert dataset.features == reloaded_dataset.features

    reloaded_iterable_dataset = ParquetDatasetReader(str(tmp_path / "foo.parquet"), streaming=True).read()
    assert dataset.features == reloaded_iterable_dataset.features


@pytest.mark.parametrize(
    "feature, expected",
    [
        (Features({"foo": Value("int32")}), None),
        (Features({"image": Image(), "foo": Value("int32")}), config.PARQUET_ROW_GROUP_SIZE_FOR_IMAGE_DATASETS),
        (Features({"nested": Sequence(Audio())}), config.PARQUET_ROW_GROUP_SIZE_FOR_AUDIO_DATASETS),
    ],
)
def test_get_writer_batch_size(feature, expected):
    assert get_writer_batch_size(feature) == expected
```
code_codestyle: 259
import inspect import logging import os import random import shutil import tempfile import unittest import pytest import torch from torch import nn from torch.utils.data import DataLoader, TensorDataset from accelerate import Accelerator from accelerate.test_utils import execute_subprocess_async, require_cuda from accelerate.utils import ProjectConfiguration, set_seed __snake_case = logging.getLogger(__name__) def _A ( SCREAMING_SNAKE_CASE__ : Dict=2 , SCREAMING_SNAKE_CASE__ : Dict=3 , SCREAMING_SNAKE_CASE__ : Any=16 , SCREAMING_SNAKE_CASE__ : int = 10 , SCREAMING_SNAKE_CASE__ : int = 2 ): def get_dataset(SCREAMING_SNAKE_CASE__ : List[Any] ): UpperCamelCase :Union[str, Any] = torch.randn(batch_size * n_batches , 1 ) return TensorDataset(SCREAMING_SNAKE_CASE__ , a * x + b + 0.1 * torch.randn(batch_size * n_batches , 1 ) ) UpperCamelCase :str = get_dataset(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = get_dataset(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = DataLoader(SCREAMING_SNAKE_CASE__ , shuffle=SCREAMING_SNAKE_CASE__ , batch_size=SCREAMING_SNAKE_CASE__ , num_workers=4 ) UpperCamelCase :Any = DataLoader(SCREAMING_SNAKE_CASE__ , shuffle=SCREAMING_SNAKE_CASE__ , batch_size=SCREAMING_SNAKE_CASE__ , num_workers=4 ) return (train_dataloader, valid_dataloader) def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Any=None ): UpperCamelCase :Dict = [] for epoch in range(SCREAMING_SNAKE_CASE__ ): # Train quickly model.train() for batch in dataloader: UpperCamelCase , UpperCamelCase :Optional[Any] = batch UpperCamelCase :int = model(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = torch.nn.functional.mse_loss(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) accelerator.backward(SCREAMING_SNAKE_CASE__ ) optimizer.step() optimizer.zero_grad() rands.append(random.random() ) # Introduce some randomness if scheduler is not None: scheduler.step() return rands class UpperCAmelCase_ ( nn.Module ): """simple docstring""" def __init__( self ) -> str: super().__init__() UpperCamelCase :Optional[int] = nn.Parameter(torch.randn(1 ) ) UpperCamelCase :int = nn.Parameter(torch.randn(1 ) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> int: return x * self.a + self.b class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> Dict: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :Tuple = dummy_dataloaders() UpperCamelCase :Tuple = ProjectConfiguration(total_limit=1 , project_dir=SCREAMING_SNAKE_CASE_ , automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :Dict = Accelerator(project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Union[str, Any] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() # Save second state accelerator.save_state() self.assertEqual(len(os.listdir(accelerator.project_dir ) ) , 1 ) def UpperCAmelCase ( self ) -> str: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[str] = DummyModel() UpperCamelCase :Union[str, Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase 
, UpperCamelCase :Dict = dummy_dataloaders() # Train baseline UpperCamelCase :Dict = Accelerator() UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :int = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial UpperCamelCase :int = os.path.join(SCREAMING_SNAKE_CASE_ , '''initial''' ) accelerator.save_state(SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[Any] = model.a.item(), model.b.item() UpperCamelCase :Optional[int] = optimizer.state_dict() UpperCamelCase :Optional[int] = train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Dict = model.a.item(), model.b.item() UpperCamelCase :Optional[Any] = optimizer.state_dict() # Train partially set_seed(42 ) UpperCamelCase :Any = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :List[Any] = dummy_dataloaders() UpperCamelCase :List[str] = Accelerator() UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Tuple = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) accelerator.load_state(SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Tuple = model.a.item(), model.b.item() UpperCamelCase :Tuple = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = train(2 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save everything UpperCamelCase :Optional[int] = os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoint''' ) accelerator.save_state(SCREAMING_SNAKE_CASE_ ) # Load everything back in and make sure all states work accelerator.load_state(SCREAMING_SNAKE_CASE_ ) test_rands += train(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Union[str, Any] = model.a.item(), model.b.item() UpperCamelCase :Optional[Any] = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[Any] = DummyModel() UpperCamelCase :Optional[int] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :int = dummy_dataloaders() UpperCamelCase :int = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :Union[str, Any] = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Optional[Any] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() ((UpperCamelCase) , (UpperCamelCase)) :List[str] = model.a.item(), model.b.item() UpperCamelCase :Dict = optimizer.state_dict() UpperCamelCase :Any = train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , 
SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[int] = model.a.item(), model.b.item() UpperCamelCase :Any = optimizer.state_dict() # Train partially set_seed(42 ) UpperCamelCase :Union[str, Any] = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :Tuple = dummy_dataloaders() UpperCamelCase :Optional[Any] = ProjectConfiguration(iteration=1 , automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[str] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) ((UpperCamelCase) , (UpperCamelCase)) :Dict = model.a.item(), model.b.item() UpperCamelCase :Dict = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = train(2 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save everything accelerator.save_state() # Load everything back in and make sure all states work accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_1''' ) ) test_rands += train(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[Any] = model.a.item(), model.b.item() UpperCamelCase :str = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :List[Any] = torch.tensor([1, 2, 3] ) UpperCamelCase :Any = torch.tensor([2, 3, 4] ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :Optional[Any] = torch.optim.Adam(net.parameters() ) UpperCamelCase :Optional[Any] = Accelerator() with self.assertRaises(SCREAMING_SNAKE_CASE_ ) as ve: accelerator.register_for_checkpointing(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = str(ve.exception ) self.assertTrue('''Item at index 0''' in message ) self.assertTrue('''Item at index 1''' in message ) self.assertFalse('''Item at index 2''' in message ) self.assertFalse('''Item at index 3''' in message ) def UpperCAmelCase ( self ) -> Any: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[Any] = DummyModel() UpperCamelCase :List[str] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase :Any = torch.optim.lr_scheduler.StepLR(SCREAMING_SNAKE_CASE_ , step_size=1 , gamma=0.99 ) UpperCamelCase , UpperCamelCase :Any = dummy_dataloaders() UpperCamelCase :Optional[int] = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :str = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase 
:Tuple = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() UpperCamelCase :int = scheduler.state_dict() train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertNotEqual(SCREAMING_SNAKE_CASE_ , scheduler.state_dict() ) # Load everything back in and make sure all states work accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) self.assertEqual(SCREAMING_SNAKE_CASE_ , scheduler.state_dict() ) def UpperCAmelCase ( self ) -> Union[str, Any]: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :int = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ , total_limit=2 ) # Train baseline UpperCamelCase :Tuple = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = accelerator.prepare(SCREAMING_SNAKE_CASE_ ) # Save 3 states: for _ in range(11 ): accelerator.save_state() self.assertTrue(not os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) ) self.assertTrue(os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_9''' ) ) ) self.assertTrue(os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_10''' ) ) ) @require_cuda def UpperCAmelCase ( self ) -> int: UpperCamelCase :int = ['''torchrun''', F'''--nproc_per_node={torch.cuda.device_count()}''', inspect.getfile(self.__class__ )] execute_subprocess_async(SCREAMING_SNAKE_CASE_ , env=os.environ.copy() ) if __name__ == "__main__": __snake_case = """/tmp/accelerate/state_checkpointing""" __snake_case = DummyModel() __snake_case = torch.optim.Adam(params=model.parameters(), lr=1E-3) __snake_case = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.9_9) __snake_case , __snake_case = dummy_dataloaders() __snake_case = ProjectConfiguration(automatic_checkpoint_naming=True) # Train baseline __snake_case = Accelerator(project_dir=savedir, project_config=project_config, mixed_precision="""no""") if accelerator.process_index == 0: if os.path.exists(savedir): shutil.rmtree(savedir) os.makedirs(savedir) __snake_case , __snake_case , __snake_case , __snake_case , __snake_case = accelerator.prepare( model, optimizer, train_dataloader, valid_dataloader, scheduler ) __snake_case , __snake_case = accelerator.prepare(model, optimizer) train(3, model, train_dataloader, optimizer, accelerator, scheduler) # Check that the intial optimizer is loaded on the GPU for group in optimizer.param_groups: __snake_case = group["""params"""][0].device break assert param_device.type == accelerator.device.type __snake_case = model.cpu() accelerator.wait_for_everyone() accelerator.save_state() accelerator.wait_for_everyone() # Check CPU state accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""cpu""") for group in optimizer.param_groups: __snake_case = group["""params"""][0].device break assert ( param_device.type == torch.device("""cpu""").type ), f"Loaded optimizer states did not match, expected to be loaded on the CPU but got {param_device}" # Check device state model.to(accelerator.device) accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""on_device""") for group in 
optimizer.param_groups: __snake_case = group["""params"""][0].device break assert ( param_device.type == accelerator.device.type ), f"Loaded optimizer states did not match, expected to be loaded on {accelerator.device} but got {param_device}" # Check error with pytest.raises(TypeError, match="""Unsupported optimizer map location passed"""): accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""invalid""") accelerator.wait_for_everyone() if accelerator.process_index == 0: shutil.rmtree(savedir) accelerator.wait_for_everyone()
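# A minimal sketch of the save/resume pattern the script above exercises, assuming a
# toy linear model; `Accelerator.save_state`/`load_state` round-trip model, optimizer
# and RNG state, and `automatic_checkpoint_naming=True` numbers the runs as
# checkpoints/checkpoint_<n>.
import os
import torch
from accelerate import Accelerator
from accelerate.utils import ProjectConfiguration

def checkpoint_roundtrip_sketch(project_dir: str) -> None:
    accelerator = Accelerator(
        project_dir=project_dir,
        project_config=ProjectConfiguration(automatic_checkpoint_naming=True),
    )
    model = torch.nn.Linear(4, 4)
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    model, optimizer = accelerator.prepare(model, optimizer)
    accelerator.save_state()  # writes <project_dir>/checkpoints/checkpoint_0
    # ... train for a while, then restore everything exactly as saved:
    accelerator.load_state(os.path.join(project_dir, "checkpoints", "checkpoint_0"))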
import argparse import json from dataclasses import dataclass, field from functools import partial from pathlib import Path from typing import List import timm import torch import torch.nn as nn from huggingface_hub import hf_hub_download from torch import Tensor from transformers import AutoImageProcessor, ResNetConfig, ResNetForImageClassification from transformers.utils import logging logging.set_verbosity_info() __snake_case = logging.get_logger() @dataclass class UpperCAmelCase_ : """simple docstring""" UpperCamelCase_ : nn.Module UpperCamelCase_ : List[nn.Module] =field(default_factory=lowercase ) UpperCamelCase_ : list =field(default_factory=lowercase ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> List[Any]: UpperCamelCase :Tuple = len(list(m.modules() ) ) == 1 or isinstance(SCREAMING_SNAKE_CASE_ , nn.Conv2d ) or isinstance(SCREAMING_SNAKE_CASE_ , nn.BatchNorm2d ) if has_not_submodules: self.traced.append(SCREAMING_SNAKE_CASE_ ) def __call__( self , SCREAMING_SNAKE_CASE_ ) -> List[Any]: for m in self.module.modules(): self.handles.append(m.register_forward_hook(self._forward_hook ) ) self.module(SCREAMING_SNAKE_CASE_ ) [x.remove() for x in self.handles] return self @property def UpperCAmelCase ( self ) -> Tuple: # check the len of the state_dict keys to see if we have learnable params return list(filter(lambda SCREAMING_SNAKE_CASE_ : len(list(x.state_dict().keys() ) ) > 0 , self.traced ) ) @dataclass class UpperCAmelCase_ : """simple docstring""" UpperCamelCase_ : nn.Module UpperCamelCase_ : nn.Module UpperCamelCase_ : int =0 UpperCamelCase_ : List =field(default_factory=lowercase ) UpperCamelCase_ : List =field(default_factory=lowercase ) def __call__( self , SCREAMING_SNAKE_CASE_ ) -> Any: UpperCamelCase :Tuple = Tracker(self.dest )(SCREAMING_SNAKE_CASE_ ).parametrized UpperCamelCase :List[Any] = Tracker(self.src )(SCREAMING_SNAKE_CASE_ ).parametrized UpperCamelCase :Optional[Any] = list(filter(lambda SCREAMING_SNAKE_CASE_ : type(SCREAMING_SNAKE_CASE_ ) not in self.src_skip , SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :Dict = list(filter(lambda SCREAMING_SNAKE_CASE_ : type(SCREAMING_SNAKE_CASE_ ) not in self.dest_skip , SCREAMING_SNAKE_CASE_ ) ) if len(SCREAMING_SNAKE_CASE_ ) != len(SCREAMING_SNAKE_CASE_ ): raise Exception( F'''Numbers of operations are different. Source module has {len(SCREAMING_SNAKE_CASE_ )} operations while''' F''' destination module has {len(SCREAMING_SNAKE_CASE_ )}.''' ) for dest_m, src_m in zip(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ): dest_m.load_state_dict(src_m.state_dict() ) if self.verbose == 1: print(F'''Transferred from={src_m} to={dest_m}''' ) def _A ( SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : ResNetConfig , SCREAMING_SNAKE_CASE__ : Path , SCREAMING_SNAKE_CASE__ : bool = True ): print(F'''Converting {name}...''' ) with torch.no_grad(): UpperCamelCase :int = timm.create_model(SCREAMING_SNAKE_CASE__ , pretrained=SCREAMING_SNAKE_CASE__ ).eval() UpperCamelCase :Tuple = ResNetForImageClassification(SCREAMING_SNAKE_CASE__ ).eval() UpperCamelCase :List[Any] = ModuleTransfer(src=SCREAMING_SNAKE_CASE__ , dest=SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Tuple = torch.randn((1, 3, 224, 224) ) module_transfer(SCREAMING_SNAKE_CASE__ ) assert torch.allclose(from_model(SCREAMING_SNAKE_CASE__ ) , our_model(SCREAMING_SNAKE_CASE__ ).logits ), "The model logits don't match the original one."
UpperCamelCase :int = F'''resnet{"-".join(name.split("resnet" ) )}''' print(SCREAMING_SNAKE_CASE__ ) if push_to_hub: our_model.push_to_hub( repo_path_or_name=save_directory / checkpoint_name , commit_message='''Add model''' , use_temp_dir=SCREAMING_SNAKE_CASE__ , ) # we can use the convnext one UpperCamelCase :Dict = AutoImageProcessor.from_pretrained('''facebook/convnext-base-224-22k-1k''' ) image_processor.push_to_hub( repo_path_or_name=save_directory / checkpoint_name , commit_message='''Add image processor''' , use_temp_dir=SCREAMING_SNAKE_CASE__ , ) print(F'''Pushed {checkpoint_name}''' ) def _A ( SCREAMING_SNAKE_CASE__ : Path , SCREAMING_SNAKE_CASE__ : str = None , SCREAMING_SNAKE_CASE__ : bool = True ): UpperCamelCase :Optional[int] = '''imagenet-1k-id2label.json''' UpperCamelCase :int = 1000 UpperCamelCase :str = (1, num_labels) UpperCamelCase :Any = '''huggingface/label-files''' UpperCamelCase :Union[str, Any] = num_labels UpperCamelCase :List[str] = json.load(open(hf_hub_download(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) , '''r''' ) ) UpperCamelCase :int = {int(SCREAMING_SNAKE_CASE__ ): v for k, v in idalabel.items()} UpperCamelCase :Optional[Any] = idalabel UpperCamelCase :List[str] = {v: k for k, v in idalabel.items()} UpperCamelCase :Union[str, Any] = partial(SCREAMING_SNAKE_CASE__ , num_labels=SCREAMING_SNAKE_CASE__ , idalabel=SCREAMING_SNAKE_CASE__ , labelaid=SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Dict = { '''resnet18''': ImageNetPreTrainedConfig( depths=[2, 2, 2, 2] , hidden_sizes=[64, 128, 256, 512] , layer_type='''basic''' ), '''resnet26''': ImageNetPreTrainedConfig( depths=[2, 2, 2, 2] , hidden_sizes=[256, 512, 1024, 2048] , layer_type='''bottleneck''' ), '''resnet34''': ImageNetPreTrainedConfig( depths=[3, 4, 6, 3] , hidden_sizes=[64, 128, 256, 512] , layer_type='''basic''' ), '''resnet50''': ImageNetPreTrainedConfig( depths=[3, 4, 6, 3] , hidden_sizes=[256, 512, 1024, 2048] , layer_type='''bottleneck''' ), '''resnet101''': ImageNetPreTrainedConfig( depths=[3, 4, 23, 3] , hidden_sizes=[256, 512, 1024, 2048] , layer_type='''bottleneck''' ), '''resnet152''': ImageNetPreTrainedConfig( depths=[3, 8, 36, 3] , hidden_sizes=[256, 512, 1024, 2048] , layer_type='''bottleneck''' ), } if model_name: convert_weight_and_push(SCREAMING_SNAKE_CASE__ , names_to_config[model_name] , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) else: for model_name, config in names_to_config.items(): convert_weight_and_push(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) return config, expected_shape if __name__ == "__main__": __snake_case = argparse.ArgumentParser() # Required parameters parser.add_argument( """--model_name""", default=None, type=str, help=( """The name of the model you wish to convert, it must be one of the supported resnet* architecture,""" """ currently: resnet18,26,34,50,101,152. If `None`, all of them will be converted.""" ), ) parser.add_argument( """--pytorch_dump_folder_path""", default=None, type=Path, required=True, help="""Path to the output PyTorch model directory.""", ) parser.add_argument( """--push_to_hub""", default=True, type=bool, required=False, help="""If True, push model and image processor to the hub.""", ) __snake_case = parser.parse_args() __snake_case = args.pytorch_dump_folder_path pytorch_dump_folder_path.mkdir(exist_ok=True, parents=True) convert_weights_and_push(pytorch_dump_folder_path, args.model_name, args.push_to_hub)
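# `Tracker` above records which leaf modules fire during a forward pass so that
# `ModuleTransfer` can pair them up across the timm and HuggingFace architectures.
# A stripped-down sketch of that forward-hook tracing (names here are illustrative,
# not part of the script):
import torch
import torch.nn as nn

def trace_leaf_modules(module: nn.Module, x: torch.Tensor) -> list[nn.Module]:
    traced: list[nn.Module] = []
    handles = [
        m.register_forward_hook(lambda mod, inp, out: traced.append(mod))
        for m in module.modules()
        if len(list(m.children())) == 0  # leaf modules only
    ]
    with torch.no_grad():
        module(x)
    for handle in handles:
        handle.remove()
    return traced  # leaf modules in execution order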
import numpy as np __snake_case = [ ["""a""", """b""", """c""", """d""", """e"""], ["""f""", """g""", """h""", """i""", """k"""], ["""l""", """m""", """n""", """o""", """p"""], ["""q""", """r""", """s""", """t""", """u"""], ["""v""", """w""", """x""", """y""", """z"""], ] class UpperCAmelCase_ : """simple docstring""" def __init__( self ) -> None: UpperCamelCase :Dict = np.array(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> np.ndarray: UpperCamelCase , UpperCamelCase :Tuple = np.where(letter == self.SQUARE ) UpperCamelCase :List[Any] = np.concatenate([indexa + 1, indexa + 1] ) return indexes def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> str: UpperCamelCase :int = self.SQUARE[indexa - 1, indexa - 1] return letter def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> str: UpperCamelCase :Any = message.lower() UpperCamelCase :int = message.replace(''' ''' , '''''' ) UpperCamelCase :Dict = message.replace('''j''' , '''i''' ) UpperCamelCase :str = np.empty((2, len(SCREAMING_SNAKE_CASE_ )) ) for letter_index in range(len(SCREAMING_SNAKE_CASE_ ) ): UpperCamelCase :Dict = self.letter_to_numbers(message[letter_index] ) UpperCamelCase :Union[str, Any] = numbers[0] UpperCamelCase :Dict = numbers[1] UpperCamelCase :Any = first_step.reshape(2 * len(SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :Union[str, Any] = '''''' for numbers_index in range(len(SCREAMING_SNAKE_CASE_ ) ): UpperCamelCase :Dict = int(second_step[numbers_index * 2] ) UpperCamelCase :List[str] = int(second_step[(numbers_index * 2) + 1] ) UpperCamelCase :Tuple = self.numbers_to_letter(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = encoded_message + letter return encoded_message def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> str: UpperCamelCase :Any = message.lower() UpperCamelCase :Any = message.replace(''' ''' , '''''' ) UpperCamelCase :Optional[int] = np.empty(2 * len(SCREAMING_SNAKE_CASE_ ) ) for letter_index in range(len(SCREAMING_SNAKE_CASE_ ) ): UpperCamelCase :List[str] = self.letter_to_numbers(message[letter_index] ) UpperCamelCase :Dict = numbers[0] UpperCamelCase :List[str] = numbers[1] UpperCamelCase :int = first_step.reshape((2, len(SCREAMING_SNAKE_CASE_ )) ) UpperCamelCase :Any = '''''' for numbers_index in range(len(SCREAMING_SNAKE_CASE_ ) ): UpperCamelCase :Any = int(second_step[0, numbers_index] ) UpperCamelCase :List[Any] = int(second_step[1, numbers_index] ) UpperCamelCase :Tuple = self.numbers_to_letter(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = decoded_message + letter return decoded_message
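# The class above is the Bifid cipher on a 5x5 Polybius square ("j" folded into "i"):
# each letter maps to a (row, column) pair, all rows are written out followed by all
# columns, and the combined digit stream is re-read two digits at a time. A
# self-contained sketch of the encode step, using a flat 25-letter table:
BIFID_ALPHABET = "abcdefghiklmnopqrstuvwxyz"  # no "j"

def bifid_encode_sketch(message: str) -> str:
    message = message.lower().replace(" ", "").replace("j", "i")
    rows = [BIFID_ALPHABET.index(ch) // 5 for ch in message]
    cols = [BIFID_ALPHABET.index(ch) % 5 for ch in message]
    stream = rows + cols  # all row coordinates, then all column coordinates
    return "".join(
        BIFID_ALPHABET[stream[2 * i] * 5 + stream[2 * i + 1]]
        for i in range(len(message))
    )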
import string import numpy def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : int ): return b if a == 0 else greatest_common_divisor(b % a , SCREAMING_SNAKE_CASE__ ) class UpperCAmelCase_ : """simple docstring""" UpperCamelCase_ : Tuple =string.ascii_uppercase + string.digits # This cipher takes alphanumerics into account # i.e. a total of 36 characters # take x and return x % len(key_string) UpperCamelCase_ : Union[str, Any] =numpy.vectorize(lambda lowercase : x % 36 ) UpperCamelCase_ : List[Any] =numpy.vectorize(lowercase ) def __init__( self , SCREAMING_SNAKE_CASE_ ) -> None: UpperCamelCase :List[Any] = self.modulus(SCREAMING_SNAKE_CASE_ ) # mod36 calc's on the encrypt key self.check_determinant() # validate the determinant of the encryption key UpperCamelCase :Dict = encrypt_key.shape[0] def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> int: return self.key_string.index(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> str: return self.key_string[round(SCREAMING_SNAKE_CASE_ )] def UpperCAmelCase ( self ) -> None: UpperCamelCase :str = round(numpy.linalg.det(self.encrypt_key ) ) if det < 0: UpperCamelCase :Optional[int] = det % len(self.key_string ) UpperCamelCase :int = len(self.key_string ) if greatest_common_divisor(SCREAMING_SNAKE_CASE_ , len(self.key_string ) ) != 1: UpperCamelCase :Optional[Any] = ( F'''determinant modulo {req_l} of encryption key ({det}) ''' F'''is not coprime with {req_l}.\nTry another key.''' ) raise ValueError(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> str: UpperCamelCase :List[Any] = [char for char in text.upper() if char in self.key_string] UpperCamelCase :List[Any] = chars[-1] while len(SCREAMING_SNAKE_CASE_ ) % self.break_key != 0: chars.append(SCREAMING_SNAKE_CASE_ ) return "".join(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> str: UpperCamelCase :Optional[Any] = self.process_text(text.upper() ) UpperCamelCase :str = '''''' for i in range(0 , len(SCREAMING_SNAKE_CASE_ ) - self.break_key + 1 , self.break_key ): UpperCamelCase :Tuple = text[i : i + self.break_key] UpperCamelCase :Tuple = [self.replace_letters(SCREAMING_SNAKE_CASE_ ) for char in batch] UpperCamelCase :str = numpy.array([vec] ).T UpperCamelCase :str = self.modulus(self.encrypt_key.dot(SCREAMING_SNAKE_CASE_ ) ).T.tolist()[ 0 ] UpperCamelCase :str = ''''''.join( self.replace_digits(SCREAMING_SNAKE_CASE_ ) for num in batch_encrypted ) encrypted += encrypted_batch return encrypted def UpperCAmelCase ( self ) -> numpy.ndarray: UpperCamelCase :Any = round(numpy.linalg.det(self.encrypt_key ) ) if det < 0: UpperCamelCase :str = det % len(self.key_string ) UpperCamelCase :List[Any] = None for i in range(len(self.key_string ) ): if (det * i) % len(self.key_string ) == 1: UpperCamelCase :int = i break UpperCamelCase :Dict = ( det_inv * numpy.linalg.det(self.encrypt_key ) * numpy.linalg.inv(self.encrypt_key ) ) return self.to_int(self.modulus(SCREAMING_SNAKE_CASE_ ) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> str: UpperCamelCase :Optional[int] = self.make_decrypt_key() UpperCamelCase :Union[str, Any] = self.process_text(text.upper() ) UpperCamelCase :Union[str, Any] = '''''' for i in range(0 , len(SCREAMING_SNAKE_CASE_ ) - self.break_key + 1 , self.break_key ): UpperCamelCase :Any = text[i : i + self.break_key] UpperCamelCase :Any = [self.replace_letters(SCREAMING_SNAKE_CASE_ ) for char in batch] UpperCamelCase :List[Any] = numpy.array([vec] ).T UpperCamelCase :Any =
self.modulus(decrypt_key.dot(SCREAMING_SNAKE_CASE_ ) ).T.tolist()[0] UpperCamelCase :Any = ''''''.join( self.replace_digits(SCREAMING_SNAKE_CASE_ ) for num in batch_decrypted ) decrypted += decrypted_batch return decrypted def _A ( ): UpperCamelCase :Optional[int] = int(input('''Enter the order of the encryption key: ''' ) ) UpperCamelCase :List[str] = [] print('''Enter each row of the encryption key with space separated integers''' ) for _ in range(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Tuple = [int(SCREAMING_SNAKE_CASE__ ) for x in input().split()] hill_matrix.append(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Union[str, Any] = HillCipher(numpy.array(SCREAMING_SNAKE_CASE__ ) ) print('''Would you like to encrypt or decrypt some text? (1 or 2)''' ) UpperCamelCase :Optional[int] = input('''\n1. Encrypt\n2. Decrypt\n''' ) if option == "1": UpperCamelCase :int = input('''What text would you like to encrypt?: ''' ) print('''Your encrypted text is:''' ) print(hc.encrypt(SCREAMING_SNAKE_CASE__ ) ) elif option == "2": UpperCamelCase :Optional[int] = input('''What text would you like to decrypt?: ''' ) print('''Your decrypted text is:''' ) print(hc.decrypt(SCREAMING_SNAKE_CASE__ ) ) if __name__ == "__main__": import doctest doctest.testmod() main()
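# The Hill cipher above reduces to one matrix product per block: map symbols to 0..35
# over "A-Z0-9", multiply the block vector by the key matrix, and take the result
# mod 36. A worked sketch with an assumed (not source-provided) 2x2 key whose
# determinant 7 is coprime with 36:
import numpy

HILL_ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
HILL_KEY = numpy.array([[2, 5], [1, 6]])  # det = 2*6 - 5*1 = 7, gcd(7, 36) = 1

def hill_encrypt_block(block: str) -> str:
    vec = numpy.array([HILL_ALPHABET.index(ch) for ch in block])
    out = HILL_KEY.dot(vec) % 36
    return "".join(HILL_ALPHABET[int(v)] for v in out)

# e.g. hill_encrypt_block("HI") maps [7, 8] to [54, 55] mod 36 = [18, 19] -> "ST".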
import argparse import collections import numpy as np import torch from flax import traverse_util from tax import checkpoints from transformers import MTaConfig, UMTaEncoderModel, UMTaForConditionalGeneration from transformers.utils import logging logging.set_verbosity_info() def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Tuple ): return params[F'''{prefix}/{prefix}/relpos_bias/rel_embedding'''][:, i, :] def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : Any="attention" ): UpperCamelCase :str = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/key/kernel'''][:, i, :, :] ) UpperCamelCase :Optional[Any] = k_tmp.reshape(k_tmp.shape[0] , k_tmp.shape[1] * k_tmp.shape[2] ) UpperCamelCase :Optional[int] = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/out/kernel'''][:, i, :, :] ) UpperCamelCase :List[Any] = o_tmp.reshape(o_tmp.shape[0] * o_tmp.shape[1] , o_tmp.shape[2] ) UpperCamelCase :Union[str, Any] = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/query/kernel'''][:, i, :, :] ) UpperCamelCase :Any = q_tmp.reshape(q_tmp.shape[0] , q_tmp.shape[1] * q_tmp.shape[2] ) UpperCamelCase :str = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/value/kernel'''][:, i, :, :] ) UpperCamelCase :str = v_tmp.reshape(v_tmp.shape[0] , v_tmp.shape[1] * v_tmp.shape[2] ) return k, o, q, v def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : List[str]=False ): if split_mlp_wi: UpperCamelCase :List[Any] = params[F'''{prefix}/{prefix}/mlp/wi_0/kernel'''][:, i, :] UpperCamelCase :int = params[F'''{prefix}/{prefix}/mlp/wi_1/kernel'''][:, i, :] UpperCamelCase :str = (wi_a, wi_a) else: UpperCamelCase :Optional[Any] = params[F'''{prefix}/{prefix}/mlp/wi/kernel'''][:, i, :] UpperCamelCase :Optional[int] = params[F'''{prefix}/{prefix}/mlp/wo/kernel'''][:, i, :] return wi, wo def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Optional[int] ): return params[F'''{prefix}/{prefix}/{layer_name}/scale'''][:, i] def _A ( SCREAMING_SNAKE_CASE__ : dict , *, SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : bool , SCREAMING_SNAKE_CASE__ : bool = False ): UpperCamelCase :Tuple = traverse_util.flatten_dict(variables['''target'''] ) UpperCamelCase :List[Any] = {'''/'''.join(SCREAMING_SNAKE_CASE__ ): v for k, v in old.items()} # v1.1 models have a gated GeLU with wi_0 and wi_1 instead of wi UpperCamelCase :int = '''encoder/encoder/mlp/wi_0/kernel''' in old print('''Split MLP:''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = collections.OrderedDict() # Shared embeddings. UpperCamelCase :int = old['''token_embedder/embedding'''] # Encoder. for i in range(SCREAMING_SNAKE_CASE__ ): # Block i, layer 0 (Self Attention). UpperCamelCase :str = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''pre_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[str] = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''attention''' ) UpperCamelCase :str = layer_norm UpperCamelCase :Dict = k.T UpperCamelCase :Optional[Any] = o.T UpperCamelCase :int = q.T UpperCamelCase :Any = v.T # Block i, layer 1 (MLP). 
UpperCamelCase :Tuple = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''pre_mlp_layer_norm''' ) UpperCamelCase , UpperCamelCase :Any = tax_mlp_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Tuple = layer_norm if split_mlp_wi: UpperCamelCase :List[Any] = wi[0].T UpperCamelCase :Tuple = wi[1].T else: UpperCamelCase :Optional[Any] = wi.T UpperCamelCase :Dict = wo.T if scalable_attention: # convert the rel_embedding of each layer UpperCamelCase :List[str] = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' ).T UpperCamelCase :Optional[Any] = old['''encoder/encoder_norm/scale'''] if not scalable_attention: UpperCamelCase :str = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , 0 , '''encoder''' ).T UpperCamelCase :Any = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , 0 , '''decoder''' ).T if not is_encoder_only: # Decoder. for i in range(SCREAMING_SNAKE_CASE__ ): # Block i, layer 0 (Self Attention). UpperCamelCase :Union[str, Any] = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_self_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Dict = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''self_attention''' ) UpperCamelCase :str = layer_norm UpperCamelCase :int = k.T UpperCamelCase :Optional[int] = o.T UpperCamelCase :Tuple = q.T UpperCamelCase :List[str] = v.T # Block i, layer 1 (Cross Attention). UpperCamelCase :str = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_cross_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[Any] = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''encoder_decoder_attention''' ) UpperCamelCase :Tuple = layer_norm UpperCamelCase :Optional[Any] = k.T UpperCamelCase :List[str] = o.T UpperCamelCase :List[str] = q.T UpperCamelCase :str = v.T # Block i, layer 2 (MLP). UpperCamelCase :List[str] = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_mlp_layer_norm''' ) UpperCamelCase , UpperCamelCase :Optional[int] = tax_mlp_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Tuple = layer_norm if split_mlp_wi: UpperCamelCase :List[str] = wi[0].T UpperCamelCase :str = wi[1].T else: UpperCamelCase :Dict = wi.T UpperCamelCase :Optional[Any] = wo.T if scalable_attention: # convert the rel_embedding of each layer UpperCamelCase :Tuple = tax_relpos_bias_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' ).T UpperCamelCase :Union[str, Any] = old['''decoder/decoder_norm/scale'''] # LM Head (only in v1.1 checkpoints, in v1.0 embeddings are used instead) if "decoder/logits_dense/kernel" in old: UpperCamelCase :Union[str, Any] = old['''decoder/logits_dense/kernel'''].T return new def _A ( SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : bool ): UpperCamelCase :Optional[int] = collections.OrderedDict([(k, torch.from_numpy(v.copy() )) for (k, v) in converted_params.items()] ) # Add what is missing. 
if "encoder.embed_tokens.weight" not in state_dict: UpperCamelCase :Dict = state_dict['''shared.weight'''] if not is_encoder_only: if "decoder.embed_tokens.weight" not in state_dict: UpperCamelCase :Dict = state_dict['''shared.weight'''] if "lm_head.weight" not in state_dict: # For old 1.0 models. print('''Using shared word embeddings as lm_head.''' ) UpperCamelCase :List[Any] = state_dict['''shared.weight'''] return state_dict def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Dict = checkpoints.load_tax_checkpoint(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :str = convert_tax_to_pytorch( SCREAMING_SNAKE_CASE__ , num_layers=config.num_layers , is_encoder_only=SCREAMING_SNAKE_CASE__ , scalable_attention=SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Dict = make_state_dict(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) model.load_state_dict(SCREAMING_SNAKE_CASE__ , strict=SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : bool = False , SCREAMING_SNAKE_CASE__ : bool = False , ): UpperCamelCase :Any = MTaConfig.from_json_file(SCREAMING_SNAKE_CASE__ ) print(F'''Building PyTorch model from configuration: {config}''' ) # Non-v1.1 checkpoints could also use T5Model, but this works for all. # The v1.0 checkpoints will simply have an LM head that is the word embeddings. if is_encoder_only: UpperCamelCase :List[str] = UMTaEncoderModel(SCREAMING_SNAKE_CASE__ ) else: UpperCamelCase :Any = UMTaForConditionalGeneration(SCREAMING_SNAKE_CASE__ ) # Load weights from tf checkpoint load_tax_weights_in_ta(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # Save pytorch-model print(F'''Save PyTorch model to {pytorch_dump_path}''' ) model.save_pretrained(SCREAMING_SNAKE_CASE__ ) # Verify that we can load the checkpoint. model.from_pretrained(SCREAMING_SNAKE_CASE__ ) print('''Done''' ) if __name__ == "__main__": __snake_case = argparse.ArgumentParser(description="""Converts a native T5X checkpoint into a PyTorch checkpoint.""") # Required parameters parser.add_argument( """--t5x_checkpoint_path""", default=None, type=str, required=True, help="""Path to the T5X checkpoint.""" ) parser.add_argument( """--config_file""", default=None, type=str, required=True, help="""The config json file corresponding to the pre-trained T5 model.\nThis specifies the model architecture.""", ) parser.add_argument( """--pytorch_dump_path""", default=None, type=str, required=True, help="""Path to the output PyTorch model.""" ) parser.add_argument( """--is_encoder_only""", action="""store_true""", help="""Check if the model is encoder-decoder model""", default=False ) parser.add_argument( """--scalable_attention""", action="""store_true""", help="""Whether the model uses scaled attention (umt5 model)""", default=False, ) __snake_case = parser.parse_args() convert_tax_checkpoint_to_pytorch( args.tax_checkpoint_path, args.config_file, args.pytorch_dump_path, args.is_encoder_only, args.scalable_attention, )
import argparse import json from collections import OrderedDict from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import PoolFormerConfig, PoolFormerForImageClassification, PoolFormerImageProcessor from transformers.utils import logging logging.set_verbosity_info() __snake_case = logging.get_logger(__name__) def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : List[Any] ): UpperCamelCase :Tuple = original_name.split('''.''' )[0] UpperCamelCase :Optional[int] = key.split('''.''' ) UpperCamelCase :Optional[int] = int(key_list[key_list.index(SCREAMING_SNAKE_CASE__ ) - 2] ) UpperCamelCase :Dict = int(key_list[key_list.index(SCREAMING_SNAKE_CASE__ ) - 1] ) UpperCamelCase :Optional[int] = orig_block_num - offset UpperCamelCase :Union[str, Any] = key.replace(F'''{orig_block_num}.{layer_num}.{original_name}''' , F'''block.{new_block_num}.{layer_num}.{new_name}''' ) return key def _A ( SCREAMING_SNAKE_CASE__ : Dict ): UpperCamelCase :Optional[Any] = OrderedDict() UpperCamelCase , UpperCamelCase :Optional[Any] = 0, 0 for key, value in state_dict.items(): if key.startswith('''network''' ): UpperCamelCase :Union[str, Any] = key.replace('''network''' , '''poolformer.encoder''' ) if "proj" in key: # Works for the first embedding as well as the internal embedding layers if key.endswith('''bias''' ) and "patch_embed" not in key: patch_emb_offset += 1 UpperCamelCase :str = key[: key.find('''proj''' )] UpperCamelCase :int = key.replace(SCREAMING_SNAKE_CASE__ , F'''patch_embeddings.{total_embed_found}.''' ) UpperCamelCase :List[Any] = key.replace('''proj''' , '''projection''' ) if key.endswith('''bias''' ): total_embed_found += 1 if "patch_embeddings" in key: UpperCamelCase :Union[str, Any] = '''poolformer.encoder.''' + key if "mlp.fc1" in key: UpperCamelCase :Union[str, Any] = replace_key_with_offset(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''mlp.fc1''' , '''output.conv1''' ) if "mlp.fc2" in key: UpperCamelCase :Tuple = replace_key_with_offset(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''mlp.fc2''' , '''output.conv2''' ) if "norm1" in key: UpperCamelCase :Any = replace_key_with_offset(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''norm1''' , '''before_norm''' ) if "norm2" in key: UpperCamelCase :List[str] = replace_key_with_offset(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''norm2''' , '''after_norm''' ) if "layer_scale_1" in key: UpperCamelCase :int = replace_key_with_offset(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''layer_scale_1''' , '''layer_scale_1''' ) if "layer_scale_2" in key: UpperCamelCase :Optional[Any] = replace_key_with_offset(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''layer_scale_2''' , '''layer_scale_2''' ) if "head" in key: UpperCamelCase :str = key.replace('''head''' , '''classifier''' ) UpperCamelCase :Optional[int] = value return new_state_dict def _A ( ): UpperCamelCase :List[str] = '''http://images.cocodataset.org/val2017/000000039769.jpg''' UpperCamelCase :int = Image.open(requests.get(SCREAMING_SNAKE_CASE__ , stream=SCREAMING_SNAKE_CASE__ ).raw ) return image @torch.no_grad() def _A ( SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Union[str, Any] = PoolFormerConfig() # set attributes based on model_name UpperCamelCase :List[str] = '''huggingface/label-files''' UpperCamelCase :Optional[Any] = 
model_name[-3:] UpperCamelCase :List[str] = 1000 UpperCamelCase :Optional[int] = '''imagenet-1k-id2label.json''' UpperCamelCase :Tuple = (1, 1000) # set config attributes UpperCamelCase :Dict = json.load(open(hf_hub_download(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) , '''r''' ) ) UpperCamelCase :Tuple = {int(SCREAMING_SNAKE_CASE__ ): v for k, v in idalabel.items()} UpperCamelCase :Tuple = idalabel UpperCamelCase :Union[str, Any] = {v: k for k, v in idalabel.items()} if size == "s12": UpperCamelCase :Tuple = [2, 2, 6, 2] UpperCamelCase :int = [64, 128, 320, 512] UpperCamelCase :Optional[Any] = 4.0 UpperCamelCase :List[Any] = 0.9 elif size == "s24": UpperCamelCase :Optional[int] = [4, 4, 12, 4] UpperCamelCase :Optional[Any] = [64, 128, 320, 512] UpperCamelCase :Dict = 4.0 UpperCamelCase :int = 0.9 elif size == "s36": UpperCamelCase :List[Any] = [6, 6, 18, 6] UpperCamelCase :Optional[int] = [64, 128, 320, 512] UpperCamelCase :Any = 4.0 UpperCamelCase :Optional[Any] = 1e-6 UpperCamelCase :Any = 0.9 elif size == "m36": UpperCamelCase :Any = [6, 6, 18, 6] UpperCamelCase :Any = [96, 192, 384, 768] UpperCamelCase :Any = 4.0 UpperCamelCase :Tuple = 1e-6 UpperCamelCase :List[Any] = 0.95 elif size == "m48": UpperCamelCase :List[str] = [8, 8, 24, 8] UpperCamelCase :int = [96, 192, 384, 768] UpperCamelCase :Optional[int] = 4.0 UpperCamelCase :str = 1e-6 UpperCamelCase :Optional[int] = 0.95 else: raise ValueError(F'''Size {size} not supported''' ) # load image processor UpperCamelCase :str = PoolFormerImageProcessor(crop_pct=SCREAMING_SNAKE_CASE__ ) # Prepare image UpperCamelCase :List[str] = prepare_img() UpperCamelCase :Optional[Any] = image_processor(images=SCREAMING_SNAKE_CASE__ , return_tensors='''pt''' ).pixel_values logger.info(F'''Converting model {model_name}...''' ) # load original state dict UpperCamelCase :str = torch.load(SCREAMING_SNAKE_CASE__ , map_location=torch.device('''cpu''' ) ) # rename keys UpperCamelCase :Union[str, Any] = rename_keys(SCREAMING_SNAKE_CASE__ ) # create HuggingFace model and load state dict UpperCamelCase :str = PoolFormerForImageClassification(SCREAMING_SNAKE_CASE__ ) model.load_state_dict(SCREAMING_SNAKE_CASE__ ) model.eval() # Define image processor UpperCamelCase :str = PoolFormerImageProcessor(crop_pct=SCREAMING_SNAKE_CASE__ ) UpperCamelCase :str = image_processor(images=prepare_img() , return_tensors='''pt''' ).pixel_values # forward pass UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = outputs.logits # define expected logit slices for different models if size == "s12": UpperCamelCase :Any = torch.tensor([-0.30_45, -0.67_58, -0.48_69] ) elif size == "s24": UpperCamelCase :List[Any] = torch.tensor([0.44_02, -0.13_74, -0.80_45] ) elif size == "s36": UpperCamelCase :str = torch.tensor([-0.60_80, -0.51_33, -0.58_98] ) elif size == "m36": UpperCamelCase :Tuple = torch.tensor([0.39_52, 0.22_63, -1.26_68] ) elif size == "m48": UpperCamelCase :List[str] = torch.tensor([0.11_67, -0.06_56, -0.34_23] ) else: raise ValueError(F'''Size {size} not supported''' ) # verify logits assert logits.shape == expected_shape assert torch.allclose(logits[0, :3] , SCREAMING_SNAKE_CASE__ , atol=1e-2 ) # finally, save model and image processor logger.info(F'''Saving PyTorch model and image processor to {pytorch_dump_folder_path}...''' ) Path(SCREAMING_SNAKE_CASE__ ).mkdir(exist_ok=SCREAMING_SNAKE_CASE__ ) model.save_pretrained(SCREAMING_SNAKE_CASE__ ) print(F'''Saving image processor to {pytorch_dump_folder_path}''' 
) image_processor.save_pretrained(SCREAMING_SNAKE_CASE__ ) if __name__ == "__main__": __snake_case = argparse.ArgumentParser() parser.add_argument( """--model_name""", default="""poolformer_s12""", type=str, help="""Name of the model you'd like to convert.""", ) parser.add_argument( """--checkpoint_path""", default=None, type=str, help="""Path to the original PyTorch checkpoint (.pth file).""" ) parser.add_argument( """--pytorch_dump_folder_path""", default=None, type=str, help="""Path to the folder to output PyTorch model.""" ) __snake_case = parser.parse_args() convert_poolformer_checkpoint(args.model_name, args.checkpoint_path, args.pytorch_dump_folder_path)
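# `replace_key_with_offset` above renumbers stage blocks because the original
# checkpoint counts patch embeddings and blocks in one flat "network.<n>" index.
# A hypothetical sketch of the same renaming logic on a plain string key:
def rename_with_offset_sketch(key: str, offset: int, old: str, new: str) -> str:
    parts = key.split(".")
    anchor = parts.index(old.split(".")[0])
    block_num, layer_num = int(parts[anchor - 2]), int(parts[anchor - 1])
    return key.replace(
        f"{block_num}.{layer_num}.{old}",
        f"block.{block_num - offset}.{layer_num}.{new}",
    )

# e.g. rename_with_offset_sketch("poolformer.encoder.2.3.mlp.fc1.weight", 1,
#                                "mlp.fc1", "output.conv1")
# -> "poolformer.encoder.block.1.3.output.conv1.weight"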
def _A ( SCREAMING_SNAKE_CASE__ : list[int] , SCREAMING_SNAKE_CASE__ : list[int] ): UpperCamelCase :Tuple = len(SCREAMING_SNAKE_CASE__ ) print('''The following activities are selected:''' ) # The first activity is always selected UpperCamelCase :Dict = 0 print(SCREAMING_SNAKE_CASE__ , end=''',''' ) # Consider rest of the activities for j in range(SCREAMING_SNAKE_CASE__ ): # If this activity has start time greater than # or equal to the finish time of previously # selected activity, then select it if start[j] >= finish[i]: print(SCREAMING_SNAKE_CASE__ , end=''',''' ) UpperCamelCase :List[str] = j if __name__ == "__main__": import doctest doctest.testmod() __snake_case = [1, 3, 0, 5, 8, 5] __snake_case = [2, 4, 6, 7, 9, 9] print_max_activities(start, finish)
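# The greedy loop above assumes the inputs are already sorted by finish time (as the
# sample data is). A sketch that sorts first and returns the chosen indices instead
# of printing them:
def select_activities_sketch(start: list[int], finish: list[int]) -> list[int]:
    order = sorted(range(len(finish)), key=lambda j: finish[j])
    selected: list[int] = []
    last_finish = float("-inf")
    for j in order:
        if start[j] >= last_finish:  # compatible with the last selected activity
            selected.append(j)
            last_finish = finish[j]
    return selected

# select_activities_sketch([1, 3, 0, 5, 8, 5], [2, 4, 6, 7, 9, 9]) -> [0, 1, 3, 4],
# matching the indices printed by the script above.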
import argparse import requests import torch from PIL import Image from transformers import SwinConfig, SwinForMaskedImageModeling, ViTImageProcessor def _A ( SCREAMING_SNAKE_CASE__ : List[str] ): UpperCamelCase :int = SwinConfig(image_size=192 ) if "base" in model_name: UpperCamelCase :Tuple = 6 UpperCamelCase :Tuple = 128 UpperCamelCase :int = (2, 2, 18, 2) UpperCamelCase :Dict = (4, 8, 16, 32) elif "large" in model_name: UpperCamelCase :Optional[Any] = 12 UpperCamelCase :Any = 192 UpperCamelCase :Tuple = (2, 2, 18, 2) UpperCamelCase :List[Any] = (6, 12, 24, 48) else: raise ValueError('''Model not supported, only supports base and large variants''' ) UpperCamelCase :Tuple = window_size UpperCamelCase :Any = embed_dim UpperCamelCase :str = depths UpperCamelCase :Optional[Any] = num_heads return config def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): if "encoder.mask_token" in name: UpperCamelCase :Optional[Any] = name.replace('''encoder.mask_token''' , '''embeddings.mask_token''' ) if "encoder.patch_embed.proj" in name: UpperCamelCase :Dict = name.replace('''encoder.patch_embed.proj''' , '''embeddings.patch_embeddings.projection''' ) if "encoder.patch_embed.norm" in name: UpperCamelCase :Any = name.replace('''encoder.patch_embed.norm''' , '''embeddings.norm''' ) if "attn.proj" in name: UpperCamelCase :Tuple = name.replace('''attn.proj''' , '''attention.output.dense''' ) if "attn" in name: UpperCamelCase :str = name.replace('''attn''' , '''attention.self''' ) if "norm1" in name: UpperCamelCase :Union[str, Any] = name.replace('''norm1''' , '''layernorm_before''' ) if "norm2" in name: UpperCamelCase :Tuple = name.replace('''norm2''' , '''layernorm_after''' ) if "mlp.fc1" in name: UpperCamelCase :Tuple = name.replace('''mlp.fc1''' , '''intermediate.dense''' ) if "mlp.fc2" in name: UpperCamelCase :Optional[Any] = name.replace('''mlp.fc2''' , '''output.dense''' ) if name == "encoder.norm.weight": UpperCamelCase :Dict = '''layernorm.weight''' if name == "encoder.norm.bias": UpperCamelCase :List[str] = '''layernorm.bias''' if "decoder" in name: pass else: UpperCamelCase :str = '''swin.''' + name return name def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : Any ): for key in orig_state_dict.copy().keys(): UpperCamelCase :Union[str, Any] = orig_state_dict.pop(SCREAMING_SNAKE_CASE__ ) if "attn_mask" in key: pass elif "qkv" in key: UpperCamelCase :Dict = key.split('''.''' ) UpperCamelCase :Dict = int(key_split[2] ) UpperCamelCase :int = int(key_split[4] ) UpperCamelCase :str = model.swin.encoder.layers[layer_num].blocks[block_num].attention.self.all_head_size if "weight" in key: UpperCamelCase :Dict = val[:dim, :] UpperCamelCase :str = val[ dim : dim * 2, : ] UpperCamelCase :Dict = val[-dim:, :] else: UpperCamelCase :Optional[Any] = val[ :dim ] UpperCamelCase :List[Any] = val[ dim : dim * 2 ] UpperCamelCase :Union[str, Any] = val[ -dim: ] else: UpperCamelCase :Optional[Any] = val return orig_state_dict def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[Any] ): UpperCamelCase :Optional[Any] = torch.load(SCREAMING_SNAKE_CASE__ , map_location='''cpu''' )['''model'''] UpperCamelCase :Any = get_swin_config(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[Any] = SwinForMaskedImageModeling(SCREAMING_SNAKE_CASE__ ) model.eval() UpperCamelCase :Optional[int] = convert_state_dict(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) model.load_state_dict(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Union[str, Any] = 
'''http://images.cocodataset.org/val2017/000000039769.jpg''' UpperCamelCase :Dict = ViTImageProcessor(size={'''height''': 192, '''width''': 192} ) UpperCamelCase :int = Image.open(requests.get(SCREAMING_SNAKE_CASE__ , stream=SCREAMING_SNAKE_CASE__ ).raw ) UpperCamelCase :List[str] = image_processor(images=SCREAMING_SNAKE_CASE__ , return_tensors='''pt''' ) with torch.no_grad(): UpperCamelCase :Optional[Any] = model(**SCREAMING_SNAKE_CASE__ ).logits print(outputs.keys() ) print('''Looks ok!''' ) if pytorch_dump_folder_path is not None: print(F'''Saving model {model_name} to {pytorch_dump_folder_path}''' ) model.save_pretrained(SCREAMING_SNAKE_CASE__ ) print(F'''Saving image processor to {pytorch_dump_folder_path}''' ) image_processor.save_pretrained(SCREAMING_SNAKE_CASE__ ) if push_to_hub: print(F'''Pushing model and image processor for {model_name} to hub''' ) model.push_to_hub(F'''microsoft/{model_name}''' ) image_processor.push_to_hub(F'''microsoft/{model_name}''' ) if __name__ == "__main__": __snake_case = argparse.ArgumentParser() # Required parameters parser.add_argument( """--model_name""", default="""swin-base-simmim-window6-192""", type=str, choices=["""swin-base-simmim-window6-192""", """swin-large-simmim-window12-192"""], help="""Name of the Swin SimMIM model you'd like to convert.""", ) parser.add_argument( """--checkpoint_path""", default="""/Users/nielsrogge/Documents/SwinSimMIM/simmim_pretrain__swin_base__img192_window6__100ep.pth""", type=str, help="""Path to the original PyTorch checkpoint (.pth file).""", ) parser.add_argument( """--pytorch_dump_folder_path""", default=None, type=str, help="""Path to the output PyTorch model directory.""" ) parser.add_argument( """--push_to_hub""", action="""store_true""", help="""Whether or not to push the converted model to the 🤗 hub.""" ) __snake_case = parser.parse_args() convert_swin_checkpoint(args.model_name, args.checkpoint_path, args.pytorch_dump_folder_path, args.push_to_hub)
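# `convert_state_dict` above splits each fused qkv projection by slicing thirds of
# the output dimension. A minimal sketch of that split for a weight of shape
# (3 * dim, dim):
import torch

def split_qkv_sketch(qkv_weight: torch.Tensor) -> tuple[torch.Tensor, ...]:
    dim = qkv_weight.shape[0] // 3
    query = qkv_weight[:dim, :]
    key = qkv_weight[dim : 2 * dim, :]
    value = qkv_weight[-dim:, :]
    return query, key, value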
import socket def _A ( ): UpperCamelCase :Dict = socket.socket(socket.AF_INET , socket.SOCK_STREAM ) UpperCamelCase :int = socket.gethostname() UpperCamelCase :Optional[Any] = 12312 sock.connect((host, port) ) sock.send(B'''Hello server!''' ) with open('''Received_file''' , '''wb''' ) as out_file: print('''File opened''' ) print('''Receiving data...''' ) while True: UpperCamelCase :List[Any] = sock.recv(1024 ) if not data: break out_file.write(SCREAMING_SNAKE_CASE__ ) print('''Successfully received the file''' ) sock.close() print('''Connection closed''' ) if __name__ == "__main__": main()
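# The client above needs a sender on the other side of the connection. A minimal
# sketch of the matching server, assuming the same host/port (12312) and some local
# file to stream back after the greeting:
import socket

def serve_file_sketch(filename: str) -> None:
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server:
        server.bind((socket.gethostname(), 12312))
        server.listen(1)
        conn, _addr = server.accept()
        with conn, open(filename, "rb") as in_file:
            conn.recv(1024)  # consume the client's greeting
            while chunk := in_file.read(1024):
                conn.send(chunk)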
import time from contextlib import contextmanager from pathlib import Path import pytest import requests from huggingface_hub.hf_api import HfApi, HfFolder __snake_case = """__DUMMY_TRANSFORMERS_USER__""" __snake_case = """Dummy User""" __snake_case = """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""" __snake_case = """https://hub-ci.huggingface.co""" __snake_case = CI_HUB_ENDPOINT + """/datasets/{repo_id}/resolve/{revision}/{path}""" __snake_case = CI_HUB_ENDPOINT + """/{repo_id}/resolve/{revision}/{filename}""" __snake_case = Path("""~/.huggingface/hub_ci_token""").expanduser() @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): monkeypatch.setattr( '''huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any ): monkeypatch.setattr('''datasets.config.HF_ENDPOINT''' , SCREAMING_SNAKE_CASE__ ) monkeypatch.setattr('''datasets.config.HUB_DATASETS_URL''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : List[str] ): monkeypatch.setattr('''huggingface_hub.hf_api.HfFolder.path_token''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : List[Any] ): HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield HfFolder.delete_token() @pytest.fixture(scope='''session''' ) def _A ( ): return HfApi(endpoint=SCREAMING_SNAKE_CASE__ ) @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi ): UpperCamelCase :Tuple = HfFolder.get_token() HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield CI_HUB_USER_TOKEN if previous_token is not None: HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Dict ): def _cleanup_repo(SCREAMING_SNAKE_CASE__ : Tuple ): hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) return _cleanup_repo @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): @contextmanager def _temporary_repo(SCREAMING_SNAKE_CASE__ : Any ): try: yield repo_id finally: cleanup_repo(SCREAMING_SNAKE_CASE__ ) return _temporary_repo @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Optional[Any] ): UpperCamelCase :Union[str, Any] = F'''repo_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :int = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data/text_data.txt''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Dict ): return hf_private_dataset_repo_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Optional[int] = F'''repo_zipped_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Any = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , 
private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str] ): return hf_private_dataset_repo_zipped_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] ): UpperCamelCase :Dict = F'''repo_zipped_img_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Dict = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Tuple ): return hf_private_dataset_repo_zipped_img_data_
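# Every repo fixture above follows the same shape: create a uniquely named repo,
# upload a payload, yield the repo id to the test, then delete the repo afterwards
# even if the request fails. A condensed sketch of that setup/teardown pattern
# (fixture names are illustrative):
import time
import pytest

@pytest.fixture(scope="session")
def temporary_dataset_repo_sketch(hf_api, hf_token):
    repo_id = f"{CI_HUB_USER}/tmp-repo-{int(time.time() * 10e3)}"
    hf_api.create_repo(repo_id, token=hf_token, repo_type="dataset", private=True)
    try:
        yield repo_id
    finally:
        hf_api.delete_repo(repo_id, token=hf_token, repo_type="dataset")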
import string def _A ( SCREAMING_SNAKE_CASE__ : str ): UpperCamelCase :Dict = '''''' for i in sequence: UpperCamelCase :str = ord(SCREAMING_SNAKE_CASE__ ) if 65 <= extract <= 90: output += chr(155 - extract ) elif 97 <= extract <= 122: output += chr(219 - extract ) else: output += i return output def _A ( SCREAMING_SNAKE_CASE__ : str ): UpperCamelCase :Union[str, Any] = string.ascii_letters UpperCamelCase :Any = string.ascii_lowercase[::-1] + string.ascii_uppercase[::-1] return "".join( letters_reversed[letters.index(SCREAMING_SNAKE_CASE__ )] if c in letters else c for c in sequence ) def _A ( ): from timeit import timeit print('''Running performance benchmarks...''' ) UpperCamelCase :List[str] = '''from string import printable ; from __main__ import atbash, atbash_slow''' print(F'''> atbash_slow(): {timeit("atbash_slow(printable)" , setup=SCREAMING_SNAKE_CASE__ )} seconds''' ) print(F'''> atbash(): {timeit("atbash(printable)" , setup=SCREAMING_SNAKE_CASE__ )} seconds''' ) if __name__ == "__main__": for example in ("ABCDEFGH", "123GGjj", "testStringtest", "with space"): print(f'''{example} encrypted in atbash: {atbash(example)}''') benchmark()
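# Both implementations above exploit the mirror identity ord("a") + ord("z") == 219
# (and ord("A") + ord("Z") == 155 for uppercase). An equivalent sketch using a
# precomputed translation table, usually the fastest option in CPython:
import string

ATBASH_TABLE = str.maketrans(
    string.ascii_letters,
    string.ascii_lowercase[::-1] + string.ascii_uppercase[::-1],
)

def atbash_table_sketch(sequence: str) -> str:
    return sequence.translate(ATBASH_TABLE)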
from __future__ import annotations import unittest from transformers import RoFormerConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask from ...test_pipeline_mixin import PipelineTesterMixin if is_tf_available(): import tensorflow as tf from transformers import ( TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForMultipleChoice, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerModel, ) from transformers.models.roformer.modeling_tf_roformer import ( TFRoFormerSelfAttention, TFRoFormerSinusoidalPositionalEmbedding, ) class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=13 , SCREAMING_SNAKE_CASE_=7 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=99 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=37 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=512 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=None , ) -> Dict: UpperCamelCase :Any = parent UpperCamelCase :Dict = 13 UpperCamelCase :List[Any] = 7 UpperCamelCase :List[Any] = True UpperCamelCase :Dict = True UpperCamelCase :Union[str, Any] = True UpperCamelCase :List[str] = True UpperCamelCase :Dict = 99 UpperCamelCase :Any = 32 UpperCamelCase :Tuple = 2 UpperCamelCase :Union[str, Any] = 4 UpperCamelCase :List[str] = 37 UpperCamelCase :Dict = '''gelu''' UpperCamelCase :Dict = 0.1 UpperCamelCase :Tuple = 0.1 UpperCamelCase :Dict = 512 UpperCamelCase :str = 16 UpperCamelCase :Optional[Any] = 2 UpperCamelCase :Dict = 0.02 UpperCamelCase :Optional[int] = 3 UpperCamelCase :int = 4 UpperCamelCase :Dict = None def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size, self.seq_length] , self.vocab_size ) UpperCamelCase :Optional[int] = None if self.use_input_mask: UpperCamelCase :Dict = random_attention_mask([self.batch_size, self.seq_length] ) UpperCamelCase :Dict = None if self.use_token_type_ids: UpperCamelCase :List[Any] = ids_tensor([self.batch_size, self.seq_length] , self.type_vocab_size ) UpperCamelCase :Union[str, Any] = None UpperCamelCase :Optional[int] = None UpperCamelCase :Any = None if self.use_labels: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size] , self.type_sequence_label_size ) UpperCamelCase :Optional[Any] = ids_tensor([self.batch_size, self.seq_length] , self.num_labels ) UpperCamelCase :int = ids_tensor([self.batch_size] , self.num_choices ) UpperCamelCase :Union[str, Any] = RoFormerConfig( vocab_size=self.vocab_size , hidden_size=self.hidden_size , num_hidden_layers=self.num_hidden_layers , num_attention_heads=self.num_attention_heads , intermediate_size=self.intermediate_size , hidden_act=self.hidden_act , hidden_dropout_prob=self.hidden_dropout_prob , attention_probs_dropout_prob=self.attention_probs_dropout_prob , max_position_embeddings=self.max_position_embeddings , type_vocab_size=self.type_vocab_size , initializer_range=self.initializer_range , return_dict=SCREAMING_SNAKE_CASE_ , ) return config, input_ids, token_type_ids, input_mask, sequence_labels, 
token_labels, choice_labels def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: UpperCamelCase :Optional[Any] = TFRoFormerModel(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = {'''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids} UpperCamelCase :int = [input_ids, input_mask] UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.last_hidden_state.shape , (self.batch_size, self.seq_length, self.hidden_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :List[Any] = True UpperCamelCase :Union[str, Any] = TFRoFormerForCausalLM(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Any = model(SCREAMING_SNAKE_CASE_ )['''logits'''] self.parent.assertListEqual( list(prediction_scores.numpy().shape ) , [self.batch_size, self.seq_length, self.vocab_size] ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :str = TFRoFormerForMaskedLM(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.seq_length, self.vocab_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> List[str]: UpperCamelCase :List[Any] = self.num_labels UpperCamelCase :int = TFRoFormerForSequenceClassification(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Optional[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_labels) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :List[Any] = self.num_choices UpperCamelCase :Any = TFRoFormerForMultipleChoice(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :int = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :Any = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :List[Any] = { '''input_ids''': multiple_choice_inputs_ids, '''attention_mask''': multiple_choice_input_mask, '''token_type_ids''': multiple_choice_token_type_ids, } UpperCamelCase :Dict = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_choices) ) def UpperCAmelCase ( self , 
SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :Union[str, Any] = self.num_labels UpperCamelCase :Dict = TFRoFormerForTokenClassification(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Tuple = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.seq_length, self.num_labels) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :Union[str, Any] = TFRoFormerForQuestionAnswering(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.start_logits.shape , (self.batch_size, self.seq_length) ) self.parent.assertEqual(result.end_logits.shape , (self.batch_size, self.seq_length) ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = self.prepare_config_and_inputs() ( ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ) :Union[str, Any] = config_and_inputs UpperCamelCase :Any = {'''input_ids''': input_ids, '''token_type_ids''': token_type_ids, '''attention_mask''': input_mask} return config, inputs_dict @require_tf class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : str =( ( TFRoFormerModel, TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerForMultipleChoice, ) if is_tf_available() else () ) UpperCamelCase_ : Tuple =( { 'feature-extraction': TFRoFormerModel, 'fill-mask': TFRoFormerForMaskedLM, 'question-answering': TFRoFormerForQuestionAnswering, 'text-classification': TFRoFormerForSequenceClassification, 'text-generation': TFRoFormerForCausalLM, 'token-classification': TFRoFormerForTokenClassification, 'zero-shot': TFRoFormerForSequenceClassification, } if is_tf_available() else {} ) UpperCamelCase_ : Tuple =False UpperCamelCase_ : Optional[Any] =False def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: if pipeline_test_casse_name == "TextGenerationPipelineTests": return True return False def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Any = TFRoFormerModelTester(self ) UpperCamelCase :Optional[int] = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , hidden_size=37 ) def UpperCAmelCase ( self ) -> List[str]: self.config_tester.run_common_tests() def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_masked_lm(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> str: UpperCamelCase :Union[str, Any] = 
self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_lm_head(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Optional[Any] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_multiple_choice(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_question_answering(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_sequence_classification(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_token_classification(*SCREAMING_SNAKE_CASE_ ) @slow def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = TFRoFormerModel.from_pretrained('''junnyu/roformer_chinese_base''' ) self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" @slow def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Tuple = TFRoFormerForMaskedLM.from_pretrained('''junnyu/roformer_chinese_base''' ) UpperCamelCase :Union[str, Any] = tf.constant([[0, 1, 2, 3, 4, 5]] ) UpperCamelCase :str = model(SCREAMING_SNAKE_CASE_ )[0] # TODO Replace vocab size UpperCamelCase :Tuple = 5_0000 UpperCamelCase :Optional[Any] = [1, 6, vocab_size] self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ ) print(output[:, :3, :3] ) # TODO Replace values below with what was printed above. UpperCamelCase :int = tf.constant( [ [ [-0.1205_3341, -1.026_4901, 0.2922_1946], [-1.513_3783, 0.19_7433, 0.1519_0607], [-5.013_5403, -3.90_0256, -0.8403_8764], ] ] ) tf.debugging.assert_near(output[:, :3, :3] , SCREAMING_SNAKE_CASE_ , atol=1e-4 ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Optional[int] =1E-4 def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :str = tf.constant([[4, 10]] ) UpperCamelCase :List[Any] = TFRoFormerSinusoidalPositionalEmbedding(num_positions=6 , embedding_dim=6 ) UpperCamelCase :str = emba(input_ids.shape ) UpperCamelCase :List[str] = tf.constant( [[0.0000, 0.0000, 0.0000, 1.0000, 1.0000, 1.0000], [0.8415, 0.0464, 0.0022, 0.5403, 0.9989, 1.0000]] ) tf.debugging.assert_near(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Dict = tf.constant( [ [0.0000, 0.0000, 0.0000, 0.0000, 0.0000], [0.8415, 0.8219, 0.8020, 0.7819, 0.7617], [0.9093, 0.9364, 0.9581, 0.9749, 0.9870], ] ) UpperCamelCase :Dict = TFRoFormerSinusoidalPositionalEmbedding(num_positions=512 , embedding_dim=512 ) emba([2, 16, 512] ) UpperCamelCase :Any = emba.weight[:3, :5] tf.debugging.assert_near(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : List[Any] =1E-4 def UpperCAmelCase ( self ) -> List[str]: # 2,12,16,64 UpperCamelCase :List[Any] = tf.reshape(tf.range(2 * 12 * 16 * 64 , dtype=tf.floataa ) , shape=(2, 12, 16, 64) ) / 100 UpperCamelCase :List[Any] = -tf.reshape(tf.range(2 * 12 * 16 * 64 , dtype=tf.floataa ) , shape=(2, 12, 16, 64) ) / 100 UpperCamelCase :List[Any] = TFRoFormerSinusoidalPositionalEmbedding(num_positions=32 , embedding_dim=64 ) UpperCamelCase :int = 
embed_positions([2, 16, 768] )[None, None, :, :] UpperCamelCase , UpperCamelCase :List[str] = TFRoFormerSelfAttention.apply_rotary_position_embeddings( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = tf.constant( [ [0.0000, 0.0100, 0.0200, 0.0300, 0.0400, 0.0500, 0.0600, 0.0700], [-0.2012, 0.8897, 0.0263, 0.9401, 0.2074, 0.9463, 0.3481, 0.9343], [-1.7057, 0.6271, -1.2145, 1.3897, -0.6303, 1.7647, -0.1173, 1.8985], [-2.1731, -1.6397, -2.7358, 0.2854, -2.1840, 1.7183, -1.3018, 2.4871], [0.2717, -3.6173, -2.9206, -2.1988, -3.6638, 0.3858, -2.9155, 2.2980], [3.9859, -2.1580, -0.7984, -4.4904, -4.1181, -2.0252, -4.4782, 1.1253], ] ) UpperCamelCase :Optional[int] = tf.constant( [ [0.0000, -0.0100, -0.0200, -0.0300, -0.0400, -0.0500, -0.0600, -0.0700], [0.2012, -0.8897, -0.0263, -0.9401, -0.2074, -0.9463, -0.3481, -0.9343], [1.7057, -0.6271, 1.2145, -1.3897, 0.6303, -1.7647, 0.1173, -1.8985], [2.1731, 1.6397, 2.7358, -0.2854, 2.1840, -1.7183, 1.3018, -2.4871], [-0.2717, 3.6173, 2.9206, 2.1988, 3.6638, -0.3858, 2.9155, -2.2980], [-3.9859, 2.1580, 0.7984, 4.4904, 4.1181, 2.0252, 4.4782, -1.1253], ] ) tf.debugging.assert_near(query_layer[0, 0, :6, :8] , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) tf.debugging.assert_near(key_layer[0, 0, :6, :8] , SCREAMING_SNAKE_CASE_ , atol=self.tolerance )
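# Minimal NumPy sketch of the rotary idea the last test exercises: each pair of
# feature dimensions is rotated by a position-dependent angle, so rotated
# query/key dot products depend only on relative position. Illustration only;
# layouts differ between implementations (the sinusoidal weights checked above,
# for instance, concatenate a sin half and a cos half rather than interleaving).
import numpy as np


def apply_rotary(x: np.ndarray, positions: np.ndarray) -> np.ndarray:
    dim = x.shape[-1]
    inv_freq = 1.0 / (10000 ** (np.arange(0, dim, 2) / dim))  # one frequency per pair
    angles = np.outer(positions, inv_freq)  # (seq_len, dim // 2)
    sin, cos = np.sin(angles), np.cos(angles)
    x1, x2 = x[..., 0::2], x[..., 1::2]
    out = np.empty_like(x)
    out[..., 0::2] = x1 * cos - x2 * sin
    out[..., 1::2] = x1 * sin + x2 * cos
    return out


q = np.random.randn(16, 64)  # (positions, head_dim)
print(apply_rotary(q, np.arange(16)).shape)  # (16, 64)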
import math from ...configuration_utils import PretrainedConfig from ...utils import logging __snake_case = logging.get_logger(__name__) __snake_case = { """facebook/data2vec-base-960h""": """https://huggingface.co/facebook/data2vec-audio-base-960h/resolve/main/config.json""", # See all Data2VecAudio models at https://huggingface.co/models?filter=data2vec-audio } class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : List[str] ='data2vec-audio' def __init__( self , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=768 , SCREAMING_SNAKE_CASE_=12 , SCREAMING_SNAKE_CASE_=12 , SCREAMING_SNAKE_CASE_=3072 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.0 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=1e-5 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=(512, 512, 512, 512, 512, 512, 512) , SCREAMING_SNAKE_CASE_=(5, 2, 2, 2, 2, 2, 2) , SCREAMING_SNAKE_CASE_=(10, 3, 3, 3, 3, 2, 2) , SCREAMING_SNAKE_CASE_=False , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=19 , SCREAMING_SNAKE_CASE_=5 , SCREAMING_SNAKE_CASE_=0.05 , SCREAMING_SNAKE_CASE_=10 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=0.0 , SCREAMING_SNAKE_CASE_=10 , SCREAMING_SNAKE_CASE_=0 , SCREAMING_SNAKE_CASE_="sum" , SCREAMING_SNAKE_CASE_=False , SCREAMING_SNAKE_CASE_=False , SCREAMING_SNAKE_CASE_=256 , SCREAMING_SNAKE_CASE_=(512, 512, 512, 512, 1500) , SCREAMING_SNAKE_CASE_=(5, 3, 3, 1, 1) , SCREAMING_SNAKE_CASE_=(1, 2, 3, 1, 1) , SCREAMING_SNAKE_CASE_=512 , SCREAMING_SNAKE_CASE_=0 , SCREAMING_SNAKE_CASE_=1 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=False , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=None , **SCREAMING_SNAKE_CASE_ , ) -> List[Any]: super().__init__(**SCREAMING_SNAKE_CASE_ , pad_token_id=SCREAMING_SNAKE_CASE_ , bos_token_id=SCREAMING_SNAKE_CASE_ , eos_token_id=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = hidden_size UpperCamelCase :List[Any] = feat_extract_activation UpperCamelCase :Tuple = list(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = list(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Tuple = list(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = conv_bias UpperCamelCase :Tuple = num_conv_pos_embeddings UpperCamelCase :List[str] = num_conv_pos_embedding_groups UpperCamelCase :int = conv_pos_kernel_size UpperCamelCase :List[str] = len(self.conv_dim ) UpperCamelCase :int = num_hidden_layers UpperCamelCase :str = intermediate_size UpperCamelCase :str = hidden_act UpperCamelCase :int = num_attention_heads UpperCamelCase :Any = hidden_dropout UpperCamelCase :Optional[int] = attention_dropout UpperCamelCase :Dict = activation_dropout UpperCamelCase :Union[str, Any] = feat_proj_dropout UpperCamelCase :str = final_dropout UpperCamelCase :Tuple = layerdrop UpperCamelCase :Any = layer_norm_eps UpperCamelCase :Tuple = initializer_range UpperCamelCase :Optional[int] = vocab_size UpperCamelCase :int = use_weighted_layer_sum if ( (len(self.conv_stride ) != self.num_feat_extract_layers) or (len(self.conv_kernel ) != self.num_feat_extract_layers) or (len(self.conv_dim ) != self.num_feat_extract_layers) ): raise ValueError( '''Configuration for convolutional layers is incorrect. 
It is required that `len(config.conv_dim)` ==''' ''' `len(config.conv_stride)` == `len(config.conv_kernel)`, but is `len(config.conv_dim) =''' F''' {len(self.conv_dim )}`, `len(config.conv_stride) = {len(self.conv_stride )}`,''' F''' `len(config.conv_kernel) = {len(self.conv_kernel )}`.''' ) # fine-tuning config parameters for SpecAugment: https://arxiv.org/abs/1904.08779 UpperCamelCase :str = mask_time_prob UpperCamelCase :Optional[Any] = mask_time_length UpperCamelCase :Any = mask_time_min_masks UpperCamelCase :List[Any] = mask_feature_prob UpperCamelCase :Optional[int] = mask_feature_length UpperCamelCase :List[Any] = mask_feature_min_masks # ctc loss UpperCamelCase :Any = ctc_loss_reduction UpperCamelCase :str = ctc_zero_infinity # adapter UpperCamelCase :int = add_adapter UpperCamelCase :Optional[int] = adapter_kernel_size UpperCamelCase :Union[str, Any] = adapter_stride UpperCamelCase :str = num_adapter_layers UpperCamelCase :Optional[int] = output_hidden_size or hidden_size # SequenceClassification-specific parameter. Feel free to ignore for other classes. UpperCamelCase :int = classifier_proj_size # XVector-specific parameters. Feel free to ignore for other classes. UpperCamelCase :Union[str, Any] = list(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = list(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = list(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = xvector_output_dim @property def UpperCAmelCase ( self ) -> Optional[Any]: return math.prod(self.conv_stride )
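# Sanity check of the stride product exposed by the property at the end of the
# config above, using the defaults from the signature (a sketch; the ~20 ms
# figure assumes the usual 16 kHz audio sampling rate for these checkpoints).
import math

conv_stride = (5, 2, 2, 2, 2, 2, 2)
print(math.prod(conv_stride))  # 320: one feature frame per 320 raw samples (~20 ms at 16 kHz)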
import inspect import unittest from transformers import DPTConfig from transformers.file_utils import is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, require_vision, slow, torch_device from ...test_configuration_common import ConfigTester from ...test_modeling_common import ModelTesterMixin, _config_zero_init, floats_tensor, ids_tensor from ...test_pipeline_mixin import PipelineTesterMixin if is_torch_available(): import torch from torch import nn from transformers import MODEL_MAPPING, DPTForDepthEstimation, DPTForSemanticSegmentation, DPTModel from transformers.models.dpt.modeling_dpt import DPT_PRETRAINED_MODEL_ARCHIVE_LIST if is_vision_available(): from PIL import Image from transformers import DPTImageProcessor class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=[0, 1, 2, 3] , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=37 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=[1, 384, 24, 24] , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=None , ) -> int: UpperCamelCase :List[Any] = parent UpperCamelCase :List[str] = batch_size UpperCamelCase :Optional[Any] = image_size UpperCamelCase :Optional[Any] = patch_size UpperCamelCase :Optional[Any] = num_channels UpperCamelCase :Union[str, Any] = is_training UpperCamelCase :Dict = use_labels UpperCamelCase :List[Any] = hidden_size UpperCamelCase :Optional[int] = num_hidden_layers UpperCamelCase :Any = backbone_out_indices UpperCamelCase :int = num_attention_heads UpperCamelCase :Union[str, Any] = intermediate_size UpperCamelCase :List[str] = hidden_act UpperCamelCase :Optional[int] = hidden_dropout_prob UpperCamelCase :int = attention_probs_dropout_prob UpperCamelCase :Optional[Any] = initializer_range UpperCamelCase :List[Any] = num_labels UpperCamelCase :Any = backbone_featmap_shape UpperCamelCase :Optional[int] = scope UpperCamelCase :Optional[int] = is_hybrid # sequence length of DPT = num_patches + 1 (we add 1 for the [CLS] token) UpperCamelCase :Tuple = (image_size // patch_size) ** 2 UpperCamelCase :int = num_patches + 1 def UpperCAmelCase ( self ) -> str: UpperCamelCase :Tuple = floats_tensor([self.batch_size, self.num_channels, self.image_size, self.image_size] ) UpperCamelCase :int = None if self.use_labels: UpperCamelCase :str = ids_tensor([self.batch_size, self.image_size, self.image_size] , self.num_labels ) UpperCamelCase :Any = self.get_config() return config, pixel_values, labels def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Tuple = { '''global_padding''': '''same''', '''layer_type''': '''bottleneck''', '''depths''': [3, 4, 9], '''out_features''': ['''stage1''', '''stage2''', '''stage3'''], '''embedding_dynamic_padding''': True, '''hidden_sizes''': [96, 192, 384, 768], '''num_groups''': 2, } return DPTConfig( image_size=self.image_size , patch_size=self.patch_size , num_channels=self.num_channels , hidden_size=self.hidden_size , num_hidden_layers=self.num_hidden_layers , backbone_out_indices=self.backbone_out_indices , num_attention_heads=self.num_attention_heads , intermediate_size=self.intermediate_size , 
hidden_act=self.hidden_act , hidden_dropout_prob=self.hidden_dropout_prob , attention_probs_dropout_prob=self.attention_probs_dropout_prob , is_decoder=SCREAMING_SNAKE_CASE_ , initializer_range=self.initializer_range , is_hybrid=self.is_hybrid , backbone_config=SCREAMING_SNAKE_CASE_ , backbone_featmap_shape=self.backbone_featmap_shape , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: UpperCamelCase :Optional[int] = DPTModel(config=SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :Optional[int] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.last_hidden_state.shape , (self.batch_size, self.seq_length, self.hidden_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :Tuple = self.num_labels UpperCamelCase :Any = DPTForDepthEstimation(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :Union[str, Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.predicted_depth.shape , (self.batch_size, self.image_size, self.image_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :int = self.num_labels UpperCamelCase :str = DPTForSemanticSegmentation(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :List[str] = model(SCREAMING_SNAKE_CASE_ , labels=SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual( result.logits.shape , (self.batch_size, self.num_labels, self.image_size, self.image_size) ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :List[Any] = self.prepare_config_and_inputs() UpperCamelCase , UpperCamelCase , UpperCamelCase :Optional[Any] = config_and_inputs UpperCamelCase :List[Any] = {'''pixel_values''': pixel_values} return config, inputs_dict @require_torch class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Tuple =(DPTModel, DPTForDepthEstimation, DPTForSemanticSegmentation) if is_torch_available() else () UpperCamelCase_ : Optional[Any] =( { 'depth-estimation': DPTForDepthEstimation, 'feature-extraction': DPTModel, 'image-segmentation': DPTForSemanticSegmentation, } if is_torch_available() else {} ) UpperCamelCase_ : List[Any] =False UpperCamelCase_ : Optional[int] =False UpperCamelCase_ : Union[str, Any] =False def UpperCAmelCase ( self ) -> int: UpperCamelCase :Optional[Any] = DPTModelTester(self ) UpperCamelCase :List[Any] = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , has_text_modality=SCREAMING_SNAKE_CASE_ , hidden_size=37 ) def UpperCAmelCase ( self ) -> Union[str, Any]: self.config_tester.run_common_tests() @unittest.skip(reason='''DPT does not use inputs_embeds''' ) def UpperCAmelCase ( self ) -> int: pass def UpperCAmelCase ( self ) -> Optional[int]: UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ ) self.assertIsInstance(model.get_input_embeddings() , (nn.Module) ) UpperCamelCase :Optional[int] = model.get_output_embeddings() self.assertTrue(x is None or isinstance(SCREAMING_SNAKE_CASE_ , nn.Linear ) ) def UpperCAmelCase ( self ) -> int: UpperCamelCase , UpperCamelCase :Union[str, Any] = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase 
:Optional[Any] = model_class(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = inspect.signature(model.forward ) # signature.parameters is an OrderedDict => so arg_names order is deterministic UpperCamelCase :Tuple = [*signature.parameters.keys()] UpperCamelCase :Any = ['''pixel_values'''] self.assertListEqual(arg_names[:1] , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Any = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_depth_estimation(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :int = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_semantic_segmentation(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Any: for model_class in self.all_model_classes: if model_class.__name__ == "DPTForDepthEstimation": continue UpperCamelCase , UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :int = True if model_class in get_values(SCREAMING_SNAKE_CASE_ ): continue UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.train() UpperCamelCase :Union[str, Any] = self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_labels=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = model(**SCREAMING_SNAKE_CASE_ ).loss loss.backward() def UpperCAmelCase ( self ) -> Optional[int]: for model_class in self.all_model_classes: if model_class.__name__ == "DPTForDepthEstimation": continue UpperCamelCase , UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Union[str, Any] = False UpperCamelCase :Dict = True if model_class in get_values(SCREAMING_SNAKE_CASE_ ) or not model_class.supports_gradient_checkpointing: continue UpperCamelCase :Tuple = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.gradient_checkpointing_enable() model.train() UpperCamelCase :List[Any] = self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_labels=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = model(**SCREAMING_SNAKE_CASE_ ).loss loss.backward() def UpperCAmelCase ( self ) -> Dict: UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Dict = _config_zero_init(SCREAMING_SNAKE_CASE_ ) for model_class in self.all_model_classes: UpperCamelCase :Tuple = model_class(config=SCREAMING_SNAKE_CASE_ ) # Skip the check for the backbone UpperCamelCase :List[str] = [] for name, module in model.named_modules(): if module.__class__.__name__ == "DPTViTHybridEmbeddings": UpperCamelCase :Tuple = [F'''{name}.{key}''' for key in module.state_dict().keys()] break for name, param in model.named_parameters(): if param.requires_grad: if name in backbone_params: continue self.assertIn( ((param.data.mean() * 1e9).round() / 1e9).item() , [0.0, 1.0] , msg=F'''Parameter {name} of model {model_class} seems not properly initialized''' , ) @unittest.skip('''Will be fixed soon by reducing the size of the model used for common tests.''' ) def UpperCAmelCase ( self ) -> Tuple: pass @slow def UpperCAmelCase ( self ) -> Any: for model_name in DPT_PRETRAINED_MODEL_ARCHIVE_LIST[1:]: UpperCamelCase :int = DPTModel.from_pretrained(SCREAMING_SNAKE_CASE_ ) 
self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: # We do this test only for DPTForDepthEstimation since it is the only model that uses readout_type UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Optional[Any] = '''add''' with self.assertRaises(SCREAMING_SNAKE_CASE_ ): UpperCamelCase :int = DPTForDepthEstimation(SCREAMING_SNAKE_CASE_ ) def _A ( ): UpperCamelCase :List[Any] = Image.open('''./tests/fixtures/tests_samples/COCO/000000039769.png''' ) return image @require_torch @require_vision @slow class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> str: UpperCamelCase :Any = DPTImageProcessor.from_pretrained('''Intel/dpt-hybrid-midas''' ) UpperCamelCase :int = DPTForDepthEstimation.from_pretrained('''Intel/dpt-hybrid-midas''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = prepare_img() UpperCamelCase :Union[str, Any] = image_processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''pt''' ).to(SCREAMING_SNAKE_CASE_ ) # forward pass with torch.no_grad(): UpperCamelCase :Union[str, Any] = model(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = outputs.predicted_depth # verify the predicted depth UpperCamelCase :List[str] = torch.Size((1, 384, 384) ) self.assertEqual(predicted_depth.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.tensor( [[[5.6437, 5.6146, 5.6511], [5.4371, 5.5649, 5.5958], [5.5215, 5.5184, 5.5293]]] ).to(SCREAMING_SNAKE_CASE_ ) self.assertTrue(torch.allclose(outputs.predicted_depth[:3, :3, :3] / 100 , SCREAMING_SNAKE_CASE_ , atol=1e-4 ) )
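# Sketch of the usual depth post-processing, assuming the `outputs` object and the
# PIL `image` from the integration test above (their local names are mangled there).
import numpy as np
import torch
from PIL import Image

predicted_depth = outputs.predicted_depth  # shape (1, 384, 384)
prediction = torch.nn.functional.interpolate(
    predicted_depth.unsqueeze(1), size=image.size[::-1], mode="bicubic", align_corners=False
).squeeze()  # PIL size is (width, height); interpolate expects (height, width)
depth = prediction.cpu().numpy()
depth = (depth - depth.min()) / (depth.max() - depth.min() + 1e-8)  # normalize to [0, 1]
Image.fromarray((depth * 255).astype(np.uint8)).save("depth.png")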
# Copyright 2023 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

# rely on isort to merge the imports
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available


_import_structure = {
    "configuration_vivit": ["VIVIT_PRETRAINED_CONFIG_ARCHIVE_MAP", "VivitConfig"],
}

try:
    if not is_vision_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["image_processing_vivit"] = ["VivitImageProcessor"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_vivit"] = [
        "VIVIT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "VivitModel",
        "VivitPreTrainedModel",
        "VivitForVideoClassification",
    ]


if TYPE_CHECKING:
    from .configuration_vivit import VIVIT_PRETRAINED_CONFIG_ARCHIVE_MAP, VivitConfig

    try:
        if not is_vision_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .image_processing_vivit import VivitImageProcessor

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_vivit import (
            VIVIT_PRETRAINED_MODEL_ARCHIVE_LIST,
            VivitForVideoClassification,
            VivitModel,
            VivitPreTrainedModel,
        )

else:
    import sys

    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
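# Minimal standalone sketch of the same lazy-import idea using PEP 562's
# module-level __getattr__ (hypothetical package layout; transformers'
# _LazyModule is more elaborate and also handles dir() and pickling).
import importlib

_lazy_symbols = {"configuration_vivit": ["VivitConfig"]}


def __getattr__(name):
    # resolve the owning submodule lazily, on first attribute access
    for module_name, symbols in _lazy_symbols.items():
        if name in symbols:
            module = importlib.import_module(f".{module_name}", __package__)
            return getattr(module, name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")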
def triangle_number_generator():
    for n in range(1, 1000000):
        yield n * (n + 1) // 2


def count_divisors(n: int) -> int:
    # d(n) = product of (multiplicity + 1) over the prime factorization of n
    divisors_count = 1
    i = 2
    while i * i <= n:
        multiplicity = 0
        while n % i == 0:
            n //= i
            multiplicity += 1
        divisors_count *= multiplicity + 1
        i += 1
    if n > 1:
        divisors_count *= 2
    return divisors_count


def solution() -> int:
    return next(i for i in triangle_number_generator() if count_divisors(i) > 500)


if __name__ == "__main__":
    print(solution())
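# count_divisors relies on the identity d(n) = prod(e_i + 1) over the prime
# factorization n = prod(p_i ** e_i); a brute-force cross-check (a sketch that
# reuses count_divisors defined above).
def count_divisors_naive(n: int) -> int:
    return sum(1 for d in range(1, n + 1) if n % d == 0)


for n in (28, 36, 360):
    assert count_divisors(n) == count_divisors_naive(n)  # 6, 9, 24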
from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

REALM_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "google/realm-cc-news-pretrained-embedder": (
        "https://huggingface.co/google/realm-cc-news-pretrained-embedder/resolve/main/config.json"
    ),
    "google/realm-cc-news-pretrained-encoder": (
        "https://huggingface.co/google/realm-cc-news-pretrained-encoder/resolve/main/config.json"
    ),
    "google/realm-cc-news-pretrained-scorer": (
        "https://huggingface.co/google/realm-cc-news-pretrained-scorer/resolve/main/config.json"
    ),
    "google/realm-cc-news-pretrained-openqa": (
        "https://huggingface.co/google/realm-cc-news-pretrained-openqa/resolve/main/config.json"
    ),
    "google/realm-orqa-nq-openqa": "https://huggingface.co/google/realm-orqa-nq-openqa/resolve/main/config.json",
    "google/realm-orqa-nq-reader": "https://huggingface.co/google/realm-orqa-nq-reader/resolve/main/config.json",
    "google/realm-orqa-wq-openqa": "https://huggingface.co/google/realm-orqa-wq-openqa/resolve/main/config.json",
    "google/realm-orqa-wq-reader": "https://huggingface.co/google/realm-orqa-wq-reader/resolve/main/config.json",
    # See all REALM models at https://huggingface.co/models?filter=realm
}


class RealmConfig(PretrainedConfig):
    model_type = "realm"

    def __init__(
        self,
        vocab_size=30522,
        hidden_size=768,
        retriever_proj_size=128,
        num_hidden_layers=12,
        num_attention_heads=12,
        num_candidates=8,
        intermediate_size=3072,
        hidden_act="gelu_new",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        max_position_embeddings=512,
        type_vocab_size=2,
        initializer_range=0.02,
        layer_norm_eps=1e-12,
        span_hidden_size=256,
        max_span_width=10,
        reader_layer_norm_eps=1e-3,
        reader_beam_size=5,
        reader_seq_len=320,  # 288 + 32
        num_block_records=13353718,
        searcher_beam_size=5000,
        pad_token_id=1,
        bos_token_id=0,
        eos_token_id=2,
        **kwargs,
    ):
        super().__init__(pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id, **kwargs)

        # Common config
        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.retriever_proj_size = retriever_proj_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.num_candidates = num_candidates
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.initializer_range = initializer_range
        self.type_vocab_size = type_vocab_size
        self.layer_norm_eps = layer_norm_eps

        # Reader config
        self.span_hidden_size = span_hidden_size
        self.max_span_width = max_span_width
        self.reader_layer_norm_eps = reader_layer_norm_eps
        self.reader_beam_size = reader_beam_size
        self.reader_seq_len = reader_seq_len

        # Retrieval config
        self.num_block_records = num_block_records
        self.searcher_beam_size = searcher_beam_size
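# A short sketch of how the three configuration groups above surface on an
# instance, using the defaults from the signature (the 288 + 32 split follows
# the comment in the source).
from transformers import RealmConfig

config = RealmConfig()
print(config.num_candidates)     # 8: candidate blocks scored per query
print(config.reader_seq_len)     # 320: 288 evidence-block tokens + 32 question tokens
print(config.num_block_records)  # 13353718 evidence blocks in the retrieval index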
def bfs(graph, source, sink, parent):
    # Return True if there is an augmenting path from source to sink.
    visited = [False] * len(graph)
    queue = []
    queue.append(source)
    visited[source] = True
    while queue:
        u = queue.pop(0)
        for ind in range(len(graph[u])):
            if visited[ind] is False and graph[u][ind] > 0:
                queue.append(ind)
                visited[ind] = True
                parent[ind] = u
    return visited[sink]


def ford_fulkerson(graph, source, sink):
    # This array is filled by BFS to store the augmenting path.
    parent = [-1] * len(graph)
    max_flow = 0
    while bfs(graph, source, sink, parent):
        path_flow = float("Inf")
        s = sink
        while s != source:
            # Find the minimum residual capacity along the selected path.
            path_flow = min(path_flow, graph[parent[s]][s])
            s = parent[s]
        max_flow += path_flow
        v = sink
        while v != source:
            u = parent[v]
            graph[u][v] -= path_flow
            graph[v][u] += path_flow
            v = parent[v]
    return max_flow


graph = [
    [0, 16, 13, 0, 0, 0],
    [0, 0, 10, 12, 0, 0],
    [0, 4, 0, 0, 14, 0],
    [0, 0, 9, 0, 0, 20],
    [0, 0, 0, 7, 0, 4],
    [0, 0, 0, 0, 0, 0],
]
source, sink = 0, 5
print(ford_fulkerson(graph, source, sink))  # 23, the max flow of this network
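# list.pop(0) is O(n); for larger graphs the queue is usually a collections.deque.
# A drop-in variant of bfs under that change (a sketch).
from collections import deque


def bfs_deque(graph, source, sink, parent):
    visited = [False] * len(graph)
    queue = deque([source])
    visited[source] = True
    while queue:
        u = queue.popleft()  # O(1) instead of list.pop(0)'s O(n)
        for ind, capacity in enumerate(graph[u]):
            if not visited[ind] and capacity > 0:
                queue.append(ind)
                visited[ind] = True
                parent[ind] = u
    return visited[sink]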
from __future__ import annotations

from fractions import Fraction
from math import gcd, sqrt


def is_sq(number: int) -> bool:
    sq = int(number**0.5)
    return number == sq * sq


def add_three(
    x_num: int, x_den: int, y_num: int, y_den: int, z_num: int, z_den: int
) -> tuple[int, int]:
    top = x_num * y_den * z_den + y_num * x_den * z_den + z_num * x_den * y_den
    bottom = x_den * y_den * z_den
    hcf = gcd(top, bottom)
    top //= hcf
    bottom //= hcf
    return top, bottom


def solution(order: int = 35) -> int:
    unique_s: set = set()
    hcf: int
    total: Fraction = Fraction(0)
    fraction_sum: tuple[int, int]

    for x_num in range(1, order + 1):
        for x_den in range(x_num + 1, order + 1):
            for y_num in range(1, order + 1):
                for y_den in range(y_num + 1, order + 1):
                    # n=1
                    z_num = x_num * y_den + x_den * y_num
                    z_den = x_den * y_den
                    hcf = gcd(z_num, z_den)
                    z_num //= hcf
                    z_den //= hcf
                    if 0 < z_num < z_den <= order:
                        fraction_sum = add_three(x_num, x_den, y_num, y_den, z_num, z_den)
                        unique_s.add(fraction_sum)

                    # n=2
                    z_num = x_num * x_num * y_den * y_den + x_den * x_den * y_num * y_num
                    z_den = x_den * x_den * y_den * y_den
                    if is_sq(z_num) and is_sq(z_den):
                        z_num = int(sqrt(z_num))
                        z_den = int(sqrt(z_den))
                        hcf = gcd(z_num, z_den)
                        z_num //= hcf
                        z_den //= hcf
                        if 0 < z_num < z_den <= order:
                            fraction_sum = add_three(x_num, x_den, y_num, y_den, z_num, z_den)
                            unique_s.add(fraction_sum)

                    # n=-1
                    z_num = x_num * y_num
                    z_den = x_den * y_num + x_num * y_den
                    hcf = gcd(z_num, z_den)
                    z_num //= hcf
                    z_den //= hcf
                    if 0 < z_num < z_den <= order:
                        fraction_sum = add_three(x_num, x_den, y_num, y_den, z_num, z_den)
                        unique_s.add(fraction_sum)

                    # n=-2
                    z_num = x_num * x_num * y_num * y_num
                    z_den = x_den * x_den * y_num * y_num + x_num * x_num * y_den * y_den
                    if is_sq(z_num) and is_sq(z_den):
                        z_num = int(sqrt(z_num))
                        z_den = int(sqrt(z_den))
                        hcf = gcd(z_num, z_den)
                        z_num //= hcf
                        z_den //= hcf
                        if 0 < z_num < z_den <= order:
                            fraction_sum = add_three(x_num, x_den, y_num, y_den, z_num, z_den)
                            unique_s.add(fraction_sum)

    for num, den in unique_s:
        total += Fraction(num, den)

    return total.denominator + total.numerator


if __name__ == "__main__":
    print(f"{solution() = }")
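# The manual gcd reduction above mirrors what fractions.Fraction does on
# construction; a small equivalence check (a sketch).
from fractions import Fraction
from math import gcd

num, den = 84, 126
hcf = gcd(num, den)
assert (num // hcf, den // hcf) == (Fraction(num, den).numerator, Fraction(num, den).denominator)  # both (2, 3)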
from __future__ import annotations

from typing import Any


def generate_all_subsequences(sequence: list[Any]) -> None:
    create_state_space_tree(sequence, [], 0)


def create_state_space_tree(sequence: list[Any], current_subsequence: list[Any], index: int) -> None:
    if index == len(sequence):
        print(current_subsequence)
        return

    # branch 1: skip the element at `index`
    create_state_space_tree(sequence, current_subsequence, index + 1)
    # branch 2: take the element at `index`
    current_subsequence.append(sequence[index])
    create_state_space_tree(sequence, current_subsequence, index + 1)
    current_subsequence.pop()


if __name__ == "__main__":
    seq: list[Any] = [3, 1, 2, 4]
    generate_all_subsequences(seq)

    seq.clear()
    seq.extend(["A", "B", "C"])
    generate_all_subsequences(seq)
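# The recursion enumerates all 2**n subsequences by deciding, at each index, to
# skip or take the element. A variant that collects them instead of printing,
# which is handier for testing (a sketch).
def all_subsequences(sequence: list) -> list:
    if not sequence:
        return [[]]
    rest = all_subsequences(sequence[1:])
    return rest + [[sequence[0], *sub] for sub in rest]


assert len(all_subsequences([3, 1, 2, 4])) == 2**4  # 16 subsequences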
import torch
from transformers import CamembertForMaskedLM, CamembertTokenizer


def fill_mask(masked_input: str, model, tokenizer, topk: int = 5):
    # Adapted from https://github.com/pytorch/fairseq/blob/master/fairseq/models/roberta/hub_interface.py
    assert masked_input.count("<mask>") == 1
    input_ids = torch.tensor(tokenizer.encode(masked_input, add_special_tokens=True)).unsqueeze(0)  # Batch size 1
    logits = model(input_ids)[0]  # The prediction scores are the first element of the output tuple
    masked_index = (input_ids.squeeze() == tokenizer.mask_token_id).nonzero().item()
    logits = logits[0, masked_index, :]
    prob = logits.softmax(dim=0)
    values, indices = prob.topk(k=topk, dim=0)
    topk_predicted_token_bpe = " ".join(
        [tokenizer.convert_ids_to_tokens(indices[i].item()) for i in range(len(indices))]
    )
    masked_token = tokenizer.mask_token
    topk_filled_outputs = []
    for index, predicted_token_bpe in enumerate(topk_predicted_token_bpe.split(" ")):
        predicted_token = predicted_token_bpe.replace("\u2581", " ")
        if " {0}".format(masked_token) in masked_input:
            topk_filled_outputs.append(
                (
                    masked_input.replace(" {0}".format(masked_token), predicted_token),
                    values[index].item(),
                    predicted_token,
                )
            )
        else:
            topk_filled_outputs.append(
                (
                    masked_input.replace(masked_token, predicted_token),
                    values[index].item(),
                    predicted_token,
                )
            )
    return topk_filled_outputs


tokenizer = CamembertTokenizer.from_pretrained("camembert-base")
model = CamembertForMaskedLM.from_pretrained("camembert-base")
model.eval()

masked_input = "Le camembert est <mask> :)"
print(fill_mask(masked_input, model, tokenizer, topk=3))
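# Each returned triple is (filled sentence, probability, predicted token); a small
# formatting sketch for the call above.
for filled, prob, token in fill_mask(masked_input, model, tokenizer, topk=3):
    print(f"{prob:.3f}  {token!r}  ->  {filled}")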
from typing import Dict, List, Optional, Union import numpy as np from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict from ...image_transforms import ( center_crop, get_resize_output_image_size, normalize, rescale, resize, to_channel_dimension_format, ) from ...image_utils import ( IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, ChannelDimension, ImageInput, PILImageResampling, is_batched, to_numpy_array, valid_images, ) from ...utils import TensorType, logging __snake_case = logging.get_logger(__name__) class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : List[Any] =['pixel_values'] def __init__( self , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = PILImageResampling.BICUBIC , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = 1 / 255 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> None: super().__init__(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = size if size is not None else {'''height''': 224, '''width''': 224} UpperCamelCase :Optional[Any] = get_size_dict(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = crop_size if crop_size is not None else {'''height''': 224, '''width''': 224} UpperCamelCase :Dict = get_size_dict(SCREAMING_SNAKE_CASE_ , default_to_square=SCREAMING_SNAKE_CASE_ , param_name='''crop_size''' ) UpperCamelCase :Optional[int] = do_resize UpperCamelCase :int = do_rescale UpperCamelCase :Tuple = do_normalize UpperCamelCase :str = do_center_crop UpperCamelCase :int = crop_size UpperCamelCase :Tuple = size UpperCamelCase :List[str] = resample UpperCamelCase :Tuple = rescale_factor UpperCamelCase :Optional[Any] = image_mean if image_mean is not None else IMAGENET_DEFAULT_MEAN UpperCamelCase :Optional[int] = image_std if image_std is not None else IMAGENET_DEFAULT_STD def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = PILImageResampling.BILINEAR , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> np.ndarray: UpperCamelCase :Dict = get_size_dict(SCREAMING_SNAKE_CASE_ ) if "shortest_edge" in size: UpperCamelCase :str = get_resize_output_image_size(SCREAMING_SNAKE_CASE_ , size=size['''shortest_edge'''] , default_to_square=SCREAMING_SNAKE_CASE_ ) # size = get_resize_output_image_size(image, size["shortest_edge"], size["longest_edge"]) elif "height" in size and "width" in size: UpperCamelCase :Optional[int] = (size['''height'''], size['''width''']) else: raise ValueError(F'''Size must contain \'height\' and \'width\' keys or \'shortest_edge\' key. Got {size.keys()}''' ) return resize(SCREAMING_SNAKE_CASE_ , size=SCREAMING_SNAKE_CASE_ , resample=SCREAMING_SNAKE_CASE_ , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> np.ndarray: UpperCamelCase :Union[str, Any] = get_size_dict(SCREAMING_SNAKE_CASE_ ) if "height" not in size or "width" not in size: raise ValueError(F'''The `size` parameter must contain the keys (height, width). 
Got {size.keys()}''' ) return center_crop(SCREAMING_SNAKE_CASE_ , size=(size['''height'''], size['''width''']) , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ ) -> np.ndarray: return rescale(SCREAMING_SNAKE_CASE_ , scale=SCREAMING_SNAKE_CASE_ , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> np.ndarray: return normalize(SCREAMING_SNAKE_CASE_ , mean=SCREAMING_SNAKE_CASE_ , std=SCREAMING_SNAKE_CASE_ , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = ChannelDimension.FIRST , **SCREAMING_SNAKE_CASE_ , ) -> BatchFeature: UpperCamelCase :Union[str, Any] = do_resize if do_resize is not None else self.do_resize UpperCamelCase :Optional[int] = do_rescale if do_rescale is not None else self.do_rescale UpperCamelCase :Optional[Any] = do_normalize if do_normalize is not None else self.do_normalize UpperCamelCase :Union[str, Any] = do_center_crop if do_center_crop is not None else self.do_center_crop UpperCamelCase :Optional[int] = crop_size if crop_size is not None else self.crop_size UpperCamelCase :Dict = get_size_dict(SCREAMING_SNAKE_CASE_ , param_name='''crop_size''' , default_to_square=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = resample if resample is not None else self.resample UpperCamelCase :List[str] = rescale_factor if rescale_factor is not None else self.rescale_factor UpperCamelCase :Optional[Any] = image_mean if image_mean is not None else self.image_mean UpperCamelCase :Dict = image_std if image_std is not None else self.image_std UpperCamelCase :Dict = size if size is not None else self.size UpperCamelCase :Optional[int] = get_size_dict(SCREAMING_SNAKE_CASE_ ) if not is_batched(SCREAMING_SNAKE_CASE_ ): UpperCamelCase :str = [images] if not valid_images(SCREAMING_SNAKE_CASE_ ): raise ValueError( '''Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, ''' '''torch.Tensor, tf.Tensor or jax.ndarray.''' ) if do_resize and size is None: raise ValueError('''Size must be specified if do_resize is True.''' ) if do_center_crop and crop_size is None: raise ValueError('''Crop size must be specified if do_center_crop is True.''' ) if do_rescale and rescale_factor is None: raise ValueError('''Rescale factor must be specified if do_rescale is True.''' ) # All transformations expect numpy arrays. 
UpperCamelCase :Tuple = [to_numpy_array(SCREAMING_SNAKE_CASE_ ) for image in images] if do_resize: UpperCamelCase :List[Any] = [self.resize(image=SCREAMING_SNAKE_CASE_ , size=SCREAMING_SNAKE_CASE_ , resample=SCREAMING_SNAKE_CASE_ ) for image in images] if do_center_crop: UpperCamelCase :Tuple = [self.center_crop(image=SCREAMING_SNAKE_CASE_ , size=SCREAMING_SNAKE_CASE_ ) for image in images] if do_rescale: UpperCamelCase :Union[str, Any] = [self.rescale(image=SCREAMING_SNAKE_CASE_ , scale=SCREAMING_SNAKE_CASE_ ) for image in images] if do_normalize: UpperCamelCase :Union[str, Any] = [self.normalize(image=SCREAMING_SNAKE_CASE_ , mean=SCREAMING_SNAKE_CASE_ , std=SCREAMING_SNAKE_CASE_ ) for image in images] UpperCamelCase :List[str] = [to_channel_dimension_format(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) for image in images] UpperCamelCase :int = {'''pixel_values''': images} return BatchFeature(data=SCREAMING_SNAKE_CASE_ , tensor_type=SCREAMING_SNAKE_CASE_ )
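# The preprocess path above applies, in order: resize -> center-crop -> rescale ->
# normalize. A minimal NumPy sketch of the last two steps with the ImageNet
# defaults (an illustration, not the library code).
import numpy as np

IMAGENET_DEFAULT_MEAN = np.array([0.485, 0.456, 0.406])
IMAGENET_DEFAULT_STD = np.array([0.229, 0.224, 0.225])

image = np.random.randint(0, 256, size=(224, 224, 3)).astype(np.float32)
image = image * (1 / 255)  # rescale to [0, 1]
image = (image - IMAGENET_DEFAULT_MEAN) / IMAGENET_DEFAULT_STD  # normalize per channel
image = image.transpose(2, 0, 1)  # to channels-first (ChannelDimension.FIRST)
print(image.shape)  # (3, 224, 224)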
from math import factorial, radians


def sin(angle_in_degrees: float, accuracy: int = 18, rounded_values_count: int = 10) -> float:
    # Simplify the angle to be between 360 and -360 degrees.
    angle_in_degrees = angle_in_degrees - ((angle_in_degrees // 360.0) * 360.0)

    # Converting from degrees to radians
    angle_in_radians = radians(angle_in_degrees)

    result = angle_in_radians
    a = 3
    b = -1

    for _ in range(accuracy):
        result += (b * (angle_in_radians**a)) / factorial(a)

        b = -b  # One positive term and the next will be negative and so on...
        a += 2  # Increased by 2 for every term.

    return round(result, rounded_values_count)


if __name__ == "__main__":
    __import__("doctest").testmod()
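# Quick comparison of the truncated Taylor series against math.sin (a sketch;
# with the default 18 terms the two agree to 10 decimal places for modest angles).
import math

for deg in (0.0, 30.0, 90.0, 180.0, 270.0):
    print(f"{deg:6.1f}  taylor={sin(deg):<14}  math={round(math.sin(math.radians(deg)), 10):<14}")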
import os import sys import tempfile import torch from .state import AcceleratorState from .utils import PrecisionType, PrepareForLaunch, is_mps_available, patch_environment def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str]=() , SCREAMING_SNAKE_CASE__ : List[Any]=None , SCREAMING_SNAKE_CASE__ : List[Any]="no" , SCREAMING_SNAKE_CASE__ : Dict="29500" ): UpperCamelCase :List[Any] = False UpperCamelCase :Tuple = False if any(key.startswith('''KAGGLE''' ) for key in os.environ.keys() ): UpperCamelCase :Dict = True elif "IPython" in sys.modules: UpperCamelCase :int = '''google.colab''' in str(sys.modules['''IPython'''].get_ipython() ) try: UpperCamelCase :Any = PrecisionType(mixed_precision.lower() ) except ValueError: raise ValueError( F'''Unknown mixed_precision mode: {args.mixed_precision.lower()}. Choose between {PrecisionType.list()}.''' ) if (in_colab or in_kaggle) and (os.environ.get('''TPU_NAME''' , SCREAMING_SNAKE_CASE__ ) is not None): # TPU launch import torch_xla.distributed.xla_multiprocessing as xmp if len(AcceleratorState._shared_state ) > 0: raise ValueError( '''To train on TPU in Colab or Kaggle Kernel, the `Accelerator` should only be initialized inside ''' '''your training function. Restart your notebook and make sure no cells initializes an ''' '''`Accelerator`.''' ) if num_processes is None: UpperCamelCase :Tuple = 8 UpperCamelCase :Optional[int] = PrepareForLaunch(SCREAMING_SNAKE_CASE__ , distributed_type='''TPU''' ) print(F'''Launching a training on {num_processes} TPU cores.''' ) xmp.spawn(SCREAMING_SNAKE_CASE__ , args=SCREAMING_SNAKE_CASE__ , nprocs=SCREAMING_SNAKE_CASE__ , start_method='''fork''' ) elif in_colab: # No need for a distributed launch otherwise as it's either CPU or one GPU. if torch.cuda.is_available(): print('''Launching training on one GPU.''' ) else: print('''Launching training on one CPU.''' ) function(*SCREAMING_SNAKE_CASE__ ) else: if num_processes is None: raise ValueError( '''You have to specify the number of GPUs you would like to use, add `num_processes=...` to your call.''' ) if num_processes > 1: # Multi-GPU launch from torch.multiprocessing import start_processes from torch.multiprocessing.spawn import ProcessRaisedException if len(AcceleratorState._shared_state ) > 0: raise ValueError( '''To launch a multi-GPU training from your notebook, the `Accelerator` should only be initialized ''' '''inside your training function. Restart your notebook and make sure no cells initializes an ''' '''`Accelerator`.''' ) if torch.cuda.is_initialized(): raise ValueError( '''To launch a multi-GPU training from your notebook, you need to avoid running any instruction ''' '''using `torch.cuda` in any cell. Restart your notebook and make sure no cells use any CUDA ''' '''function.''' ) # torch.distributed will expect a few environment variable to be here. We set the ones common to each # process here (the other ones will be set be the launcher). 
with patch_environment( world_size=SCREAMING_SNAKE_CASE__ , master_addr='''127.0.01''' , master_port=SCREAMING_SNAKE_CASE__ , mixed_precision=SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = PrepareForLaunch(SCREAMING_SNAKE_CASE__ , distributed_type='''MULTI_GPU''' ) print(F'''Launching training on {num_processes} GPUs.''' ) try: start_processes(SCREAMING_SNAKE_CASE__ , args=SCREAMING_SNAKE_CASE__ , nprocs=SCREAMING_SNAKE_CASE__ , start_method='''fork''' ) except ProcessRaisedException as e: if "Cannot re-initialize CUDA in forked subprocess" in e.args[0]: raise RuntimeError( '''CUDA has been initialized before the `notebook_launcher` could create a forked subprocess. ''' '''This likely stems from an outside import causing issues once the `notebook_launcher()` is called. ''' '''Please review your imports and test them when running the `notebook_launcher()` to identify ''' '''which one is problematic.''' ) from e else: # No need for a distributed launch otherwise as it's either CPU, GPU or MPS. if is_mps_available(): UpperCamelCase :Any = '''1''' print('''Launching training on MPS.''' ) elif torch.cuda.is_available(): print('''Launching training on one GPU.''' ) else: print('''Launching training on CPU.''' ) function(*SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Tuple=() , SCREAMING_SNAKE_CASE__ : int=2 ): from torch.multiprocessing import start_processes with tempfile.NamedTemporaryFile() as tmp_file: # torch.distributed will expect a few environment variable to be here. We set the ones common to each # process here (the other ones will be set be the launcher). with patch_environment( world_size=SCREAMING_SNAKE_CASE__ , master_addr='''127.0.01''' , master_port='''29500''' , accelerate_mixed_precision='''no''' , accelerate_debug_rdv_file=tmp_file.name , accelerate_use_cpu='''yes''' , ): UpperCamelCase :Optional[int] = PrepareForLaunch(SCREAMING_SNAKE_CASE__ , debug=SCREAMING_SNAKE_CASE__ ) start_processes(SCREAMING_SNAKE_CASE__ , args=SCREAMING_SNAKE_CASE__ , nprocs=SCREAMING_SNAKE_CASE__ , start_method='''fork''' )
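# Typical notebook usage of the launcher above (the first function corresponds to
# accelerate's `notebook_launcher`; `training_loop` is a hypothetical user function).
from accelerate import notebook_launcher


def training_loop(mixed_precision="fp16"):
    ...  # build the Accelerator, model, optimizer and dataloaders here


notebook_launcher(training_loop, args=("fp16",), num_processes=2)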
import sys


def _A ( SCREAMING_SNAKE_CASE__ : List[str] ):
    UpperCamelCase :Any = len(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :Any = [[0 for x in range(SCREAMING_SNAKE_CASE__ )] for x in range(SCREAMING_SNAKE_CASE__ )]
    UpperCamelCase :List[Any] = [[0 for x in range(SCREAMING_SNAKE_CASE__ )] for x in range(SCREAMING_SNAKE_CASE__ )]
    for chain_length in range(2 , SCREAMING_SNAKE_CASE__ ):
        for a in range(1 , n - chain_length + 1 ):
            UpperCamelCase :Optional[Any] = a + chain_length - 1
            UpperCamelCase :int = sys.maxsize
            for c in range(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
                UpperCamelCase :Any = (
                    matrix[a][c] + matrix[c + 1][b] + array[a - 1] * array[c] * array[b]
                )
                if cost < matrix[a][b]:
                    UpperCamelCase :int = cost
                    UpperCamelCase :List[str] = c
    return matrix, sol


def _A ( SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : Optional[int] ):
    if i == j:
        print('''A''' + str(SCREAMING_SNAKE_CASE__ ) , end=''' ''' )
    else:
        print('''(''' , end=''' ''' )
        print_optimal_solution(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , optimal_solution[i][j] )
        print_optimal_solution(SCREAMING_SNAKE_CASE__ , optimal_solution[i][j] + 1 , SCREAMING_SNAKE_CASE__ )
        print(''')''' , end=''' ''' )


def _A ( ):
    UpperCamelCase :Optional[int] = [30, 35, 15, 5, 10, 20, 25]
    UpperCamelCase :Optional[Any] = len(SCREAMING_SNAKE_CASE__ )
    # Size of matrix created from above array will be
    # 30*35 35*15 15*5 5*10 10*20 20*25
    UpperCamelCase , UpperCamelCase :Dict = matrix_chain_order(SCREAMING_SNAKE_CASE__ )
    print('''No. of Operations required: ''' + str(matrix[1][n - 1] ) )
    print_optimal_solution(SCREAMING_SNAKE_CASE__ , 1 , n - 1 )


if __name__ == "__main__":
    main()
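# Worked example (a sketch added for clarity, not part of the original file):
# for the dimension array [30, 35, 15, 5, 10, 20, 25] above, the classic CLRS
# result is a minimum of 15125 scalar multiplications with parenthesization
# ((A1(A2A3))((A4A5)A6)), so a de-obfuscated run would print roughly:
#
#     No. of Operations required: 15125
#     ( ( A1 ( A2 A3 ) ) ( ( A4 A5 ) A6 ) )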
import os from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging __snake_case = logging.get_logger(__name__) __snake_case = {"""vocab_file""": """sentencepiece.bpe.model"""} __snake_case = { """vocab_file""": { """camembert-base""": """https://huggingface.co/camembert-base/resolve/main/sentencepiece.bpe.model""", } } __snake_case = { """camembert-base""": 5_12, } __snake_case = """▁""" class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : str =VOCAB_FILES_NAMES UpperCamelCase_ : List[Any] =PRETRAINED_VOCAB_FILES_MAP UpperCamelCase_ : Optional[int] =PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES UpperCamelCase_ : Any =['input_ids', 'attention_mask'] def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_="<s>" , SCREAMING_SNAKE_CASE_="</s>" , SCREAMING_SNAKE_CASE_="</s>" , SCREAMING_SNAKE_CASE_="<s>" , SCREAMING_SNAKE_CASE_="<unk>" , SCREAMING_SNAKE_CASE_="<pad>" , SCREAMING_SNAKE_CASE_="<mask>" , SCREAMING_SNAKE_CASE_=["<s>NOTUSED", "</s>NOTUSED"] , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> None: # Mask token behave like a normal word, i.e. include the space before it UpperCamelCase :List[str] = AddedToken(SCREAMING_SNAKE_CASE_ , lstrip=SCREAMING_SNAKE_CASE_ , rstrip=SCREAMING_SNAKE_CASE_ ) if isinstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) else mask_token UpperCamelCase :Any = {} if sp_model_kwargs is None else sp_model_kwargs super().__init__( bos_token=SCREAMING_SNAKE_CASE_ , eos_token=SCREAMING_SNAKE_CASE_ , unk_token=SCREAMING_SNAKE_CASE_ , sep_token=SCREAMING_SNAKE_CASE_ , cls_token=SCREAMING_SNAKE_CASE_ , pad_token=SCREAMING_SNAKE_CASE_ , mask_token=SCREAMING_SNAKE_CASE_ , additional_special_tokens=SCREAMING_SNAKE_CASE_ , sp_model_kwargs=self.sp_model_kwargs , **SCREAMING_SNAKE_CASE_ , ) UpperCamelCase :List[Any] = spm.SentencePieceProcessor(**self.sp_model_kwargs ) self.sp_model.Load(str(SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :List[Any] = vocab_file # HACK: These tokens were added by fairseq but don't seem to be actually used when duplicated in the actual # sentencepiece vocabulary (this is the case for <s> and </s> UpperCamelCase :Dict = {'''<s>NOTUSED''': 0, '''<pad>''': 1, '''</s>NOTUSED''': 2, '''<unk>''': 3} UpperCamelCase :List[Any] = len(self.fairseq_tokens_to_ids ) UpperCamelCase :List[Any] = len(self.sp_model ) + len(self.fairseq_tokens_to_ids ) UpperCamelCase :List[str] = {v: k for k, v in self.fairseq_tokens_to_ids.items()} def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None ) -> List[int]: if token_ids_a is None: return [self.cls_token_id] + token_ids_a + [self.sep_token_id] UpperCamelCase :List[Any] = [self.cls_token_id] UpperCamelCase :Union[str, Any] = [self.sep_token_id] return cls + token_ids_a + sep + sep + token_ids_a + sep def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = False ) -> List[int]: if already_has_special_tokens: return super().get_special_tokens_mask( token_ids_a=SCREAMING_SNAKE_CASE_ , token_ids_a=SCREAMING_SNAKE_CASE_ , already_has_special_tokens=SCREAMING_SNAKE_CASE_ ) if token_ids_a is None: return [1] + ([0] * len(SCREAMING_SNAKE_CASE_ )) + [1] return [1] + ([0] * len(SCREAMING_SNAKE_CASE_ )) + [1, 1] + ([0] * len(SCREAMING_SNAKE_CASE_ )) + [1] def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None ) -> List[int]: UpperCamelCase 
:Any = [self.sep_token_id] UpperCamelCase :List[str] = [self.cls_token_id] if token_ids_a is None: return len(cls + token_ids_a + sep ) * [0] return len(cls + token_ids_a + sep + sep + token_ids_a + sep ) * [0] @property def UpperCAmelCase ( self ) -> Dict: return len(self.fairseq_tokens_to_ids ) + len(self.sp_model ) def UpperCAmelCase ( self ) -> List[str]: UpperCamelCase :List[Any] = {self.convert_ids_to_tokens(SCREAMING_SNAKE_CASE_ ): i for i in range(self.vocab_size )} vocab.update(self.added_tokens_encoder ) return vocab def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> List[str]: return self.sp_model.encode(SCREAMING_SNAKE_CASE_ , out_type=SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> Any: if token in self.fairseq_tokens_to_ids: return self.fairseq_tokens_to_ids[token] elif self.sp_model.PieceToId(SCREAMING_SNAKE_CASE_ ) == 0: # Convert sentence piece unk token to fairseq unk token index return self.unk_token_id return self.fairseq_offset + self.sp_model.PieceToId(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: if index in self.fairseq_ids_to_tokens: return self.fairseq_ids_to_tokens[index] return self.sp_model.IdToPiece(index - self.fairseq_offset ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> List[str]: UpperCamelCase :Any = [] UpperCamelCase :Any = '''''' UpperCamelCase :int = False for token in tokens: # make sure that special tokens are not decoded using sentencepiece model if token in self.all_special_tokens: if not prev_is_special: out_string += " " out_string += self.sp_model.decode(SCREAMING_SNAKE_CASE_ ) + token UpperCamelCase :int = True UpperCamelCase :Union[str, Any] = [] else: current_sub_tokens.append(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = False out_string += self.sp_model.decode(SCREAMING_SNAKE_CASE_ ) return out_string.strip() def __getstate__( self ) -> Tuple: UpperCamelCase :Any = self.__dict__.copy() UpperCamelCase :List[Any] = None return state def __setstate__( self , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :int = d # for backward compatibility if not hasattr(self , '''sp_model_kwargs''' ): UpperCamelCase :Union[str, Any] = {} UpperCamelCase :Any = spm.SentencePieceProcessor(**self.sp_model_kwargs ) self.sp_model.Load(self.vocab_file ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None ) -> Tuple[str]: if not os.path.isdir(SCREAMING_SNAKE_CASE_ ): logger.error(F'''Vocabulary path ({save_directory}) should be a directory''' ) return UpperCamelCase :Union[str, Any] = os.path.join( SCREAMING_SNAKE_CASE_ , (filename_prefix + '''-''' if filename_prefix else '''''') + VOCAB_FILES_NAMES['''vocab_file'''] ) if os.path.abspath(self.vocab_file ) != os.path.abspath(SCREAMING_SNAKE_CASE_ ) and os.path.isfile(self.vocab_file ): copyfile(self.vocab_file , SCREAMING_SNAKE_CASE_ ) elif not os.path.isfile(self.vocab_file ): with open(SCREAMING_SNAKE_CASE_ , '''wb''' ) as fi: UpperCamelCase :List[Any] = self.sp_model.serialized_model_proto() fi.write(SCREAMING_SNAKE_CASE_ ) return (out_vocab_file,)
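# Note on the id layout (added for clarity, not in the original file): the four
# fairseq control tokens ("<s>NOTUSED", "<pad>", "</s>NOTUSED", "<unk>") occupy
# ids 0-3, so `fairseq_offset` is 4 and every SentencePiece id is shifted up by
# 4 when converting tokens to ids, e.g. a piece with sp_model id 10 becomes
# vocabulary id 14, while sp_model id 0 (the SentencePiece unk) is remapped to
# `unk_token_id` instead of being offset.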
import argparse import json import os from pathlib import Path import requests import torch from transformers import JukeboxConfig, JukeboxModel from transformers.utils import logging logging.set_verbosity_info() __snake_case = logging.get_logger(__name__) __snake_case = """https://openaipublic.azureedge.net/jukebox/models/""" __snake_case = { """jukebox-1b-lyrics""": [ """5b/vqvae.pth.tar""", """5b/prior_level_0.pth.tar""", """5b/prior_level_1.pth.tar""", """1b_lyrics/prior_level_2.pth.tar""", ], """jukebox-5b-lyrics""": [ """5b/vqvae.pth.tar""", """5b/prior_level_0.pth.tar""", """5b/prior_level_1.pth.tar""", """5b_lyrics/prior_level_2.pth.tar""", ], } def _A ( SCREAMING_SNAKE_CASE__ : List[Any] ): if key.endswith('''.model.1.bias''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :int = key.replace('''.model.1.bias''' , '''.conv1d_1.bias''' ) elif key.endswith('''.model.1.weight''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Union[str, Any] = key.replace('''.model.1.weight''' , '''.conv1d_1.weight''' ) elif key.endswith('''.model.3.bias''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Optional[Any] = key.replace('''.model.3.bias''' , '''.conv1d_2.bias''' ) elif key.endswith('''.model.3.weight''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Optional[int] = key.replace('''.model.3.weight''' , '''.conv1d_2.weight''' ) if "conditioner_blocks.0." in key: UpperCamelCase :Any = key.replace('''conditioner_blocks.0''' , '''conditioner_blocks''' ) if "prime_prior" in key: UpperCamelCase :int = key.replace('''prime_prior''' , '''encoder''' ) if ".emb." in key and "total" not in key and "absolute" not in key and "relative" not in key: UpperCamelCase :Any = key.replace('''.emb.''' , '''.''' ) if key.endswith('''k''' ): # replace vqvae.X.k with vqvae.X.codebook return key.replace('''.k''' , '''.codebook''' ) if "y_emb." in key: return key.replace('''y_emb.''' , '''metadata_embedding.''' ) if "x_emb.emb." 
in key: UpperCamelCase :str = key.replace('''0.x_emb.emb''' , '''embed_tokens''' ) if "prime_state_ln" in key: return key.replace('''prime_state_ln''' , '''encoder.final_layer_norm''' ) if ".ln" in key: return key.replace('''.ln''' , '''.layer_norm''' ) if "_ln" in key: return key.replace('''_ln''' , '''_layer_norm''' ) if "prime_state_proj" in key: return key.replace('''prime_state_proj''' , '''encoder.proj_in''' ) if "prime_x_out" in key: return key.replace('''prime_x_out''' , '''encoder.lm_head''' ) if "prior.x_out" in key: return key.replace('''x_out''' , '''fc_proj_out''' ) if "x_emb" in key: return key.replace('''x_emb''' , '''embed_tokens''' ) return key def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : str ): UpperCamelCase :Optional[int] = {} import re UpperCamelCase :int = re.compile(R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :str = re.compile( R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :int = re.compile(R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Tuple = re.compile(R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :int = re.compile( R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Optional[int] = re.compile(R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Optional[Any] = re.compile(R'''conditioner_blocks.(\d*).cond.model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :int = re.compile( R'''conditioner_blocks.(\d*).cond.model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Tuple = re.compile(R'''conditioner_blocks.(\d*).cond.model.(\d*).(bias|weight)''' ) for original_key, value in state_dict.items(): # rename vqvae.encoder keys if re_encoder_block_conv_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = re_encoder_block_conv_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = regex_match.groups() UpperCamelCase :List[str] = int(groups[2] ) * 2 + int(groups[3] ) UpperCamelCase :List[Any] = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.downsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :int = re_encoder_block_conv_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_encoder_block_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_encoder_block_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[Any] = regex_match.groups() UpperCamelCase :Any = int(groups[2] ) * 2 + int(groups[3] ) UpperCamelCase :Any = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :str = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.downsample_block.{block_index}.''' UpperCamelCase :List[str] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Union[str, Any] = prefix + resnet_block UpperCamelCase :str = re_encoder_block_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_encoder_block_proj_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[int] = re_encoder_block_proj_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :int = regex_match.groups() UpperCamelCase :int = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.proj_out.{groups[-1]}''' UpperCamelCase :str = re_encoder_block_proj_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # rename vqvae.decoder 
keys elif re_decoder_block_conv_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_decoder_block_conv_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = regex_match.groups() UpperCamelCase :str = int(groups[2] ) * 2 + int(groups[3] ) - 2 UpperCamelCase :List[Any] = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.upsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :Union[str, Any] = re_decoder_block_conv_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_decoder_block_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_decoder_block_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = regex_match.groups() UpperCamelCase :List[str] = int(groups[2] ) * 2 + int(groups[3] ) - 2 UpperCamelCase :Optional[int] = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :Any = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.upsample_block.{block_index}.''' UpperCamelCase :Optional[int] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Any = prefix + resnet_block UpperCamelCase :Optional[int] = re_decoder_block_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_decoder_block_proj_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[int] = re_decoder_block_proj_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[Any] = regex_match.groups() UpperCamelCase :List[Any] = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.proj_in.{groups[-1]}''' UpperCamelCase :Any = re_decoder_block_proj_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # rename prior cond.model to upsampler.upsample_block and resnet elif re_prior_cond_conv_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_prior_cond_conv_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = regex_match.groups() UpperCamelCase :str = int(groups[1] ) * 2 + int(groups[2] ) - 2 UpperCamelCase :Tuple = F'''conditioner_blocks.upsampler.upsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :int = re_prior_cond_conv_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_prior_cond_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = re_prior_cond_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = regex_match.groups() UpperCamelCase :Optional[Any] = int(groups[1] ) * 2 + int(groups[2] ) - 2 UpperCamelCase :int = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :Tuple = F'''conditioner_blocks.upsampler.upsample_block.{block_index}.''' UpperCamelCase :List[Any] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Any = prefix + resnet_block UpperCamelCase :Dict = re_prior_cond_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_prior_cond_proj_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :List[str] = re_prior_cond_proj_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = regex_match.groups() UpperCamelCase :Dict = F'''conditioner_blocks.upsampler.proj_in.{groups[-1]}''' UpperCamelCase :Any = re_prior_cond_proj_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # keep original key else: UpperCamelCase :List[str] = original_key UpperCamelCase :Any = replace_key(SCREAMING_SNAKE_CASE__ ) if F'''{key_prefix}.{key}''' not in model_state_dict or key is None: print(F'''failed converting {original_key} to {key}, does not match''' ) # handle missmatched shape elif value.shape != model_state_dict[F'''{key_prefix}.{key}'''].shape: UpperCamelCase :Union[str, Any] = 
model_state_dict[F'''{key_prefix}.{key}'''] print(F'''{original_key}-> {key} : \nshape {val.shape} and { value.shape}, do not match''' ) UpperCamelCase :List[Any] = original_key UpperCamelCase :Any = original_key UpperCamelCase :Optional[int] = value return new_dict @torch.no_grad() def _A ( SCREAMING_SNAKE_CASE__ : List[str]=None , SCREAMING_SNAKE_CASE__ : Dict=None ): for file in MODEL_MAPPING[model_name]: if not os.path.isfile(F'''{pytorch_dump_folder_path}/{file.split("/" )[-1]}''' ): UpperCamelCase :Dict = requests.get(F'''{PREFIX}{file}''' , allow_redirects=SCREAMING_SNAKE_CASE__ ) os.makedirs(F'''{pytorch_dump_folder_path}/''' , exist_ok=SCREAMING_SNAKE_CASE__ ) open(F'''{pytorch_dump_folder_path}/{file.split("/" )[-1]}''' , '''wb''' ).write(r.content ) UpperCamelCase :Optional[int] = MODEL_MAPPING[model_name.split('''/''' )[-1]] UpperCamelCase :Any = JukeboxConfig.from_pretrained(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = JukeboxModel(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Dict = [] UpperCamelCase :List[Any] = {} for i, dict_name in enumerate(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = torch.load(F'''{pytorch_dump_folder_path}/{dict_name.split("/" )[-1]}''' )['''model'''] UpperCamelCase :Tuple = {} for k in old_dic.keys(): if k.endswith('''.b''' ): UpperCamelCase :Optional[int] = old_dic[k] elif k.endswith('''.w''' ): UpperCamelCase :Optional[Any] = old_dic[k] elif "level_2" not in dict_name and "cond.model." in k: UpperCamelCase :Optional[Any] = old_dic[k] else: UpperCamelCase :Any = old_dic[k] UpperCamelCase :Any = '''vqvae''' if i == 0 else F'''priors.{3 - i}''' UpperCamelCase :Dict = fix_jukebox_keys(SCREAMING_SNAKE_CASE__ , model.state_dict() , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) weight_dict.append(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = weight_dict.pop(0 ) model.vqvae.load_state_dict(SCREAMING_SNAKE_CASE__ ) for i in range(len(SCREAMING_SNAKE_CASE__ ) ): model.priors[i].load_state_dict(weight_dict[2 - i] ) Path(SCREAMING_SNAKE_CASE__ ).mkdir(exist_ok=SCREAMING_SNAKE_CASE__ ) with open(F'''{pytorch_dump_folder_path}/mapping.json''' , '''w''' ) as txtfile: json.dump(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) print(F'''Saving model {model_name} to {pytorch_dump_folder_path}''' ) model.save_pretrained(SCREAMING_SNAKE_CASE__ ) return weight_dict if __name__ == "__main__": __snake_case = argparse.ArgumentParser() # Required parameters parser.add_argument( """--model_name""", default="""jukebox-5b-lyrics""", type=str, help="""Name of the model you'd like to convert.""", ) parser.add_argument( """--pytorch_dump_folder_path""", default="""jukebox-5b-lyrics-converted""", type=str, help="""Path to the output PyTorch model directory.""", ) __snake_case = parser.parse_args() convert_openai_checkpoint(args.model_name, args.pytorch_dump_folder_path)
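# Example invocation (a sketch; the script filename is hypothetical, the flags
# come from the argparse definition above):
#
#     python convert_jukebox.py \
#         --model_name jukebox-1b-lyrics \
#         --pytorch_dump_folder_path jukebox-1b-lyrics-converted
#
# With these arguments the script downloads the checkpoints listed in
# MODEL_MAPPING, renames their keys via fix_jukebox_keys, loads the VQ-VAE and
# the three priors, and saves the converted model plus a mapping.json.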
import copy import os from typing import Union from ...configuration_utils import PretrainedConfig from ...utils import logging __snake_case = logging.get_logger(__name__) __snake_case = { """BAAI/AltCLIP""": """https://huggingface.co/BAAI/AltCLIP/resolve/main/config.json""", # See all AltCLIP models at https://huggingface.co/models?filter=altclip } class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : List[str] ='altclip_text_model' def __init__( self , SCREAMING_SNAKE_CASE_=25_0002 , SCREAMING_SNAKE_CASE_=1024 , SCREAMING_SNAKE_CASE_=24 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=4096 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=514 , SCREAMING_SNAKE_CASE_=1 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=1e-05 , SCREAMING_SNAKE_CASE_=1 , SCREAMING_SNAKE_CASE_=0 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_="absolute" , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=768 , **SCREAMING_SNAKE_CASE_ , ) -> int: super().__init__(pad_token_id=SCREAMING_SNAKE_CASE_ , bos_token_id=SCREAMING_SNAKE_CASE_ , eos_token_id=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = vocab_size UpperCamelCase :List[Any] = hidden_size UpperCamelCase :Optional[int] = num_hidden_layers UpperCamelCase :List[str] = num_attention_heads UpperCamelCase :List[Any] = hidden_act UpperCamelCase :List[Any] = intermediate_size UpperCamelCase :Any = hidden_dropout_prob UpperCamelCase :Any = attention_probs_dropout_prob UpperCamelCase :Any = max_position_embeddings UpperCamelCase :Optional[int] = type_vocab_size UpperCamelCase :Any = initializer_range UpperCamelCase :Dict = initializer_factor UpperCamelCase :Union[str, Any] = layer_norm_eps UpperCamelCase :List[str] = position_embedding_type UpperCamelCase :Any = use_cache UpperCamelCase :Any = project_dim class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : Tuple ='altclip_vision_model' def __init__( self , SCREAMING_SNAKE_CASE_=768 , SCREAMING_SNAKE_CASE_=3072 , SCREAMING_SNAKE_CASE_=512 , SCREAMING_SNAKE_CASE_=12 , SCREAMING_SNAKE_CASE_=12 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=224 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_="quick_gelu" , SCREAMING_SNAKE_CASE_=1e-5 , SCREAMING_SNAKE_CASE_=0.0 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=1.0 , **SCREAMING_SNAKE_CASE_ , ) -> Union[str, Any]: super().__init__(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = hidden_size UpperCamelCase :Tuple = intermediate_size UpperCamelCase :Tuple = projection_dim UpperCamelCase :Any = num_hidden_layers UpperCamelCase :List[str] = num_attention_heads UpperCamelCase :Optional[Any] = num_channels UpperCamelCase :str = patch_size UpperCamelCase :Union[str, Any] = image_size UpperCamelCase :str = initializer_range UpperCamelCase :Any = initializer_factor UpperCamelCase :int = attention_dropout UpperCamelCase :Union[str, Any] = layer_norm_eps UpperCamelCase :List[Any] = hidden_act @classmethod def UpperCAmelCase ( cls , SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) -> "PretrainedConfig": cls._set_token_in_kwargs(SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase :Optional[int] = cls.get_config_dict(SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) # get the vision config dict if we are loading from AltCLIPConfig if config_dict.get('''model_type''' ) == "altclip": UpperCamelCase :Union[str, Any] = config_dict['''vision_config'''] if "model_type" in config_dict and hasattr(cls , 
'''model_type''' ) and config_dict["model_type"] != cls.model_type: logger.warning( F'''You are using a model of type {config_dict["model_type"]} to instantiate a model of type ''' F'''{cls.model_type}. This is not supported for all configurations of models and can yield errors.''' ) return cls.from_dict(SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : List[Any] ='altclip' UpperCamelCase_ : List[str] =True def __init__( self , SCREAMING_SNAKE_CASE_=None , SCREAMING_SNAKE_CASE_=None , SCREAMING_SNAKE_CASE_=768 , SCREAMING_SNAKE_CASE_=2.6592 , **SCREAMING_SNAKE_CASE_ ) -> Any: # If `_config_dict` exist, we use them for the backward compatibility. # We pop out these 2 attributes before calling `super().__init__` to avoid them being saved (which causes a lot # of confusion!). UpperCamelCase :Any = kwargs.pop('''text_config_dict''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Tuple = kwargs.pop('''vision_config_dict''' , SCREAMING_SNAKE_CASE_ ) super().__init__(**SCREAMING_SNAKE_CASE_ ) # Instead of simply assigning `[text|vision]_config_dict` to `[text|vision]_config`, we use the values in # `[text|vision]_config_dict` to update the values in `[text|vision]_config`. The values should be same in most # cases, but we don't want to break anything regarding `_config_dict` that existed before commit `8827e1b2`. if text_config_dict is not None: if text_config is None: UpperCamelCase :Union[str, Any] = {} # This is the complete result when using `text_config_dict`. UpperCamelCase :List[Any] = AltCLIPTextConfig(**SCREAMING_SNAKE_CASE_ ).to_dict() # Give a warning if the values exist in both `_text_config_dict` and `text_config` but being different. for key, value in _text_config_dict.items(): if key in text_config and value != text_config[key] and key not in ["transformers_version"]: # If specified in `text_config_dict` if key in text_config_dict: UpperCamelCase :Union[str, Any] = ( F'''`{key}` is found in both `text_config_dict` and `text_config` but with different values. ''' F'''The value `text_config_dict["{key}"]` will be used instead.''' ) # If inferred from default argument values (just to be super careful) else: UpperCamelCase :str = ( F'''`text_config_dict` is provided which will be used to initialize `AltCLIPTextConfig`. The ''' F'''value `text_config["{key}"]` will be overriden.''' ) logger.warning(SCREAMING_SNAKE_CASE_ ) # Update all values in `text_config` with the ones in `_text_config_dict`. text_config.update(_text_config_dict ) if vision_config_dict is not None: if vision_config is None: UpperCamelCase :Optional[Any] = {} # This is the complete result when using `vision_config_dict`. UpperCamelCase :int = AltCLIPVisionConfig(**SCREAMING_SNAKE_CASE_ ).to_dict() # convert keys to string instead of integer if "id2label" in _vision_config_dict: UpperCamelCase :int = { str(SCREAMING_SNAKE_CASE_ ): value for key, value in _vision_config_dict['''id2label'''].items() } # Give a warning if the values exist in both `_vision_config_dict` and `vision_config` but being different. for key, value in _vision_config_dict.items(): if key in vision_config and value != vision_config[key] and key not in ["transformers_version"]: # If specified in `vision_config_dict` if key in vision_config_dict: UpperCamelCase :Union[str, Any] = ( F'''`{key}` is found in both `vision_config_dict` and `vision_config` but with different ''' F'''values. 
The value `vision_config_dict["{key}"]` will be used instead.''' ) # If inferred from default argument values (just to be super careful) else: UpperCamelCase :Optional[int] = ( F'''`vision_config_dict` is provided which will be used to initialize `AltCLIPVisionConfig`. ''' F'''The value `vision_config["{key}"]` will be overriden.''' ) logger.warning(SCREAMING_SNAKE_CASE_ ) # Update all values in `vision_config` with the ones in `_vision_config_dict`. vision_config.update(_vision_config_dict ) if text_config is None: UpperCamelCase :List[str] = {} logger.info('''`text_config` is `None`. Initializing the `AltCLIPTextConfig` with default values.''' ) if vision_config is None: UpperCamelCase :str = {} logger.info('''`vision_config` is `None`. initializing the `AltCLIPVisionConfig` with default values.''' ) UpperCamelCase :Optional[Any] = AltCLIPTextConfig(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = AltCLIPVisionConfig(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = projection_dim UpperCamelCase :Optional[int] = logit_scale_init_value UpperCamelCase :Any = 1.0 @classmethod def UpperCAmelCase ( cls , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) -> Tuple: return cls(text_config=text_config.to_dict() , vision_config=vision_config.to_dict() , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> int: UpperCamelCase :Union[str, Any] = copy.deepcopy(self.__dict__ ) UpperCamelCase :List[Any] = self.text_config.to_dict() UpperCamelCase :List[str] = self.vision_config.to_dict() UpperCamelCase :Tuple = self.__class__.model_type return output
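# Usage note (a hypothetical sketch added for clarity): the classmethod near
# the end builds a combined config from the two sub-configs, mirroring the
# upstream AltCLIPConfig.from_text_vision_configs helper, e.g.
#
#     config = AltCLIPConfig.from_text_vision_configs(text_config, vision_config)
#
# though in this obfuscated copy both the class and the method carry renamed
# identifiers, so the call above uses the upstream names.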
import json import os import shutil import tempfile import unittest import numpy as np import pytest from transformers import MgpstrTokenizer from transformers.models.mgp_str.tokenization_mgp_str import VOCAB_FILES_NAMES from transformers.testing_utils import require_torch, require_vision from transformers.utils import IMAGE_PROCESSOR_NAME, is_torch_available, is_vision_available if is_torch_available(): import torch if is_vision_available(): from PIL import Image from transformers import MgpstrProcessor, ViTImageProcessor @require_torch @require_vision class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Union[str, Any] =ViTImageProcessor if is_vision_available() else None @property def UpperCAmelCase ( self ) -> Dict: return self.image_processor_tester.prepare_image_processor_dict() def UpperCAmelCase ( self ) -> int: UpperCamelCase :Union[str, Any] = (3, 32, 128) UpperCamelCase :Any = tempfile.mkdtemp() # fmt: off UpperCamelCase :int = ['''[GO]''', '''[s]''', '''0''', '''1''', '''2''', '''3''', '''4''', '''5''', '''6''', '''7''', '''8''', '''9''', '''a''', '''b''', '''c''', '''d''', '''e''', '''f''', '''g''', '''h''', '''i''', '''j''', '''k''', '''l''', '''m''', '''n''', '''o''', '''p''', '''q''', '''r''', '''s''', '''t''', '''u''', '''v''', '''w''', '''x''', '''y''', '''z'''] # fmt: on UpperCamelCase :Optional[int] = dict(zip(SCREAMING_SNAKE_CASE_ , range(len(SCREAMING_SNAKE_CASE_ ) ) ) ) UpperCamelCase :Optional[int] = os.path.join(self.tmpdirname , VOCAB_FILES_NAMES['''vocab_file'''] ) with open(self.vocab_file , '''w''' , encoding='''utf-8''' ) as fp: fp.write(json.dumps(SCREAMING_SNAKE_CASE_ ) + '''\n''' ) UpperCamelCase :Tuple = { '''do_normalize''': False, '''do_resize''': True, '''image_processor_type''': '''ViTImageProcessor''', '''resample''': 3, '''size''': {'''height''': 32, '''width''': 128}, } UpperCamelCase :str = os.path.join(self.tmpdirname , SCREAMING_SNAKE_CASE_ ) with open(self.image_processor_file , '''w''' , encoding='''utf-8''' ) as fp: json.dump(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> int: return MgpstrTokenizer.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: return ViTImageProcessor.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> str: shutil.rmtree(self.tmpdirname ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = np.random.randint(255 , size=(3, 30, 400) , dtype=np.uinta ) UpperCamelCase :List[Any] = Image.fromarray(np.moveaxis(SCREAMING_SNAKE_CASE_ , 0 , -1 ) ) return image_input def UpperCAmelCase ( self ) -> str: UpperCamelCase :str = self.get_tokenizer() UpperCamelCase :Union[str, Any] = self.get_image_processor() UpperCamelCase :List[Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) processor.save_pretrained(self.tmpdirname ) UpperCamelCase :Dict = MgpstrProcessor.from_pretrained(self.tmpdirname , use_fast=SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.char_tokenizer.get_vocab() , tokenizer.get_vocab() ) self.assertIsInstance(processor.char_tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.image_processor.to_json_string() , image_processor.to_json_string() ) self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> int: UpperCamelCase :Optional[int] = self.get_tokenizer() UpperCamelCase :Dict = 
self.get_image_processor() UpperCamelCase :List[Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) processor.save_pretrained(self.tmpdirname ) UpperCamelCase :Optional[int] = self.get_tokenizer(bos_token='''(BOS)''' , eos_token='''(EOS)''' ) UpperCamelCase :Optional[Any] = self.get_image_processor(do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 ) UpperCamelCase :int = MgpstrProcessor.from_pretrained( self.tmpdirname , bos_token='''(BOS)''' , eos_token='''(EOS)''' , do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 ) self.assertEqual(processor.char_tokenizer.get_vocab() , tokenizer_add_kwargs.get_vocab() ) self.assertIsInstance(processor.char_tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.image_processor.to_json_string() , image_processor_add_kwargs.to_json_string() ) self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Tuple = self.get_image_processor() UpperCamelCase :List[str] = self.get_tokenizer() UpperCamelCase :str = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = self.prepare_image_inputs() UpperCamelCase :List[str] = image_processor(SCREAMING_SNAKE_CASE_ , return_tensors='''np''' ) UpperCamelCase :Optional[Any] = processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''np''' ) for key in input_image_proc.keys(): self.assertAlmostEqual(input_image_proc[key].sum() , input_processor[key].sum() , delta=1e-2 ) def UpperCAmelCase ( self ) -> Any: UpperCamelCase :Optional[Any] = self.get_image_processor() UpperCamelCase :Union[str, Any] = self.get_tokenizer() UpperCamelCase :int = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = '''test''' UpperCamelCase :Optional[int] = processor(text=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = tokenizer(SCREAMING_SNAKE_CASE_ ) for key in encoded_tok.keys(): self.assertListEqual(encoded_tok[key] , encoded_processor[key] ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :List[str] = self.get_image_processor() UpperCamelCase :Tuple = self.get_tokenizer() UpperCamelCase :Union[str, Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = '''test''' UpperCamelCase :str = self.prepare_image_inputs() UpperCamelCase :Dict = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(list(inputs.keys() ) , ['''pixel_values''', '''labels'''] ) # test if it raises when no input is passed with pytest.raises(SCREAMING_SNAKE_CASE_ ): processor() def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Optional[Any] = self.get_image_processor() UpperCamelCase :Any = self.get_tokenizer() UpperCamelCase :Union[str, Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = [[1, 4, 5, 8, 1, 0, 8], [3, 4, 3, 1, 1, 8, 9], [3, 4, 3, 1, 1, 8, 9]] UpperCamelCase :Union[str, Any] = processor.char_decode(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = tokenizer.batch_decode(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = [seq.replace(''' ''' , '''''' ) for seq in decoded_tok] self.assertListEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :List[Any] = self.get_image_processor() UpperCamelCase :Optional[Any] = self.get_tokenizer() UpperCamelCase 
:Any = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = None UpperCamelCase :List[Any] = self.prepare_image_inputs() UpperCamelCase :Union[str, Any] = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(list(inputs.keys() ) , processor.model_input_names ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :str = self.get_image_processor() UpperCamelCase :Tuple = self.get_tokenizer() UpperCamelCase :Optional[int] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.randn(1 , 27 , 38 ) UpperCamelCase :Union[str, Any] = torch.randn(1 , 27 , 5_0257 ) UpperCamelCase :Optional[Any] = torch.randn(1 , 27 , 3_0522 ) UpperCamelCase :Optional[Any] = processor.batch_decode([char_input, bpe_input, wp_input] ) self.assertListEqual(list(results.keys() ) , ['''generated_text''', '''scores''', '''char_preds''', '''bpe_preds''', '''wp_preds'''] )
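# Context for the magic numbers in the last test (added note, not in the
# original file): the three logit tensors of shape (1, 27, V) correspond to the
# three MGP-STR decoding heads, with V = 38 for the character vocabulary,
# V = 50257 for the GPT-2 BPE vocabulary and V = 30522 for the BERT WordPiece
# vocabulary, which is why batch_decode returns char_preds, bpe_preds and
# wp_preds alongside the generated text and scores.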
import argparse
import os
import re

__snake_case = """src/transformers/models/auto"""

# re pattern that matches mapping introductions:
# SUPER_MODEL_MAPPING_NAMES = OrderedDict or SUPER_MODEL_MAPPING = OrderedDict
__snake_case = re.compile(R"""[A-Z_]+_MAPPING(\s+|_[A-Z_]+\s+)=\s+OrderedDict""")

# re pattern that matches identifiers in mappings
__snake_case = re.compile(R"""\s*\(\s*\"(\S[^\"]+)\"""")


def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : bool = False ):
    with open(SCREAMING_SNAKE_CASE__ , '''r''' , encoding='''utf-8''' ) as f:
        UpperCamelCase :Dict = f.read()
    UpperCamelCase :List[Any] = content.split('''\n''' )
    UpperCamelCase :Tuple = []
    UpperCamelCase :List[Any] = 0
    while line_idx < len(SCREAMING_SNAKE_CASE__ ):
        if _re_intro_mapping.search(lines[line_idx] ) is not None:
            UpperCamelCase :Union[str, Any] = len(re.search(R'''^(\s*)\S''' , lines[line_idx] ).groups()[0] ) + 8
            # Start of a new mapping!
            while not lines[line_idx].startswith(''' ''' * indent + '''(''' ):
                new_lines.append(lines[line_idx] )
                line_idx += 1
            UpperCamelCase :List[Any] = []
            while lines[line_idx].strip() != "]":
                # Blocks either fit in one line or not
                if lines[line_idx].strip() == "(":
                    UpperCamelCase :int = line_idx
                    while not lines[line_idx].startswith(''' ''' * indent + ''')''' ):
                        line_idx += 1
                    blocks.append('''\n'''.join(lines[start_idx : line_idx + 1] ) )
                else:
                    blocks.append(lines[line_idx] )
                line_idx += 1
            # Sort blocks by their identifiers
            UpperCamelCase :List[str] = sorted(SCREAMING_SNAKE_CASE__ , key=lambda SCREAMING_SNAKE_CASE__ : _re_identifier.search(SCREAMING_SNAKE_CASE__ ).groups()[0] )
            new_lines += blocks
        else:
            new_lines.append(lines[line_idx] )
            line_idx += 1
    if overwrite:
        with open(SCREAMING_SNAKE_CASE__ , '''w''' , encoding='''utf-8''' ) as f:
            f.write('''\n'''.join(SCREAMING_SNAKE_CASE__ ) )
    elif "\n".join(SCREAMING_SNAKE_CASE__ ) != content:
        return True


def _A ( SCREAMING_SNAKE_CASE__ : bool = False ):
    UpperCamelCase :List[str] = [os.path.join(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) for f in os.listdir(SCREAMING_SNAKE_CASE__ ) if f.endswith('''.py''' )]
    UpperCamelCase :Any = [sort_auto_mapping(SCREAMING_SNAKE_CASE__ , overwrite=SCREAMING_SNAKE_CASE__ ) for fname in fnames]
    if not overwrite and any(SCREAMING_SNAKE_CASE__ ):
        UpperCamelCase :Any = [f for f, d in zip(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) if d]
        raise ValueError(
            F'''The following files have auto mappings that need sorting: {", ".join(SCREAMING_SNAKE_CASE__ )}. Run `make style` to fix'''
            ''' this.''' )


if __name__ == "__main__":
    __snake_case = argparse.ArgumentParser()
    parser.add_argument("""--check_only""", action="""store_true""", help="""Whether to only check or fix style.""")
    __snake_case = parser.parse_args()

    sort_all_auto_mappings(not args.check_only)
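# Illustration of what the sorter does (hypothetical snippet, not part of the
# script): given a mapping in one of the auto files such as
#
#     MODEL_MAPPING_NAMES = OrderedDict(
#         [
#             ("bert", "BertModel"),
#             ("albert", "AlbertModel"),
#         ]
#     )
#
# the entries are reordered alphabetically by their quoted identifier, i.e.
# "albert" before "bert", and the file is only rewritten in place when
# --check_only is not passed.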
import math


def _A ( SCREAMING_SNAKE_CASE__ : int = 100 ):
    UpperCamelCase :Dict = sum(i * i for i in range(1 , n + 1 ) )
    UpperCamelCase :List[str] = int(math.pow(sum(range(1 , n + 1 ) ) , 2 ) )
    return square_of_sum - sum_of_squares


if __name__ == "__main__":
    print(f'''{solution() = }''')
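# Worked example (not in the original file): for n = 10 the sum of squares is
# 385 and the square of the sum is 55**2 = 3025, so the function returns
# 3025 - 385 = 2640; for the default n = 100 the answer is 25164150.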
from collections.abc import Callable
from math import pi, sqrt
from random import uniform
from statistics import mean


def _A ( SCREAMING_SNAKE_CASE__ : int ):
    # A local function to see if a dot lands in the circle.
    def is_in_circle(SCREAMING_SNAKE_CASE__ : float , SCREAMING_SNAKE_CASE__ : float ) -> bool:
        UpperCamelCase :Any = sqrt((x**2) + (y**2) )
        # Our circle has a radius of 1, so a distance
        # greater than 1 would land outside the circle.
        return distance_from_centre <= 1

    # The proportion of guesses that landed in the circle
    UpperCamelCase :int = mean(
        int(is_in_circle(uniform(-1.0 , 1.0 ) , uniform(-1.0 , 1.0 ) ) )
        for _ in range(SCREAMING_SNAKE_CASE__ ) )
    # The ratio of the area for circle to square is pi/4.
    UpperCamelCase :List[str] = proportion * 4
    print(F'''The estimated value of pi is {pi_estimate}''' )
    print(F'''The known value of pi is {pi}''' )
    print(F'''The total error is {abs(pi - pi_estimate )}''' )


def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Callable[[float], float] , SCREAMING_SNAKE_CASE__ : float = 0.0 , SCREAMING_SNAKE_CASE__ : float = 1.0 , ):
    return mean(
        function_to_integrate(uniform(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) ) for _ in range(SCREAMING_SNAKE_CASE__ )
    ) * (max_value - min_value)


def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : float = 0.0 , SCREAMING_SNAKE_CASE__ : float = 1.0 ):
    def identity_function(SCREAMING_SNAKE_CASE__ : float ) -> float:
        return x

    UpperCamelCase :Dict = area_under_curve_estimator(
        SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :Any = (max_value * max_value - min_value * min_value) / 2
    print('''******************''' )
    print(F'''Estimating area under y=x where x varies from {min_value} to {max_value}''' )
    print(F'''Estimated value is {estimated_value}''' )
    print(F'''Expected value is {expected_value}''' )
    print(F'''Total error is {abs(estimated_value - expected_value )}''' )
    print('''******************''' )


def _A ( SCREAMING_SNAKE_CASE__ : int ):
    def function_to_integrate(SCREAMING_SNAKE_CASE__ : float ) -> float:
        return sqrt(4.0 - x * x )

    UpperCamelCase :Union[str, Any] = area_under_curve_estimator(
        SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , 0.0 , 2.0 )
    print('''******************''' )
    print('''Estimating pi using area_under_curve_estimator''' )
    print(F'''Estimated value is {estimated_value}''' )
    print(F'''Expected value is {pi}''' )
    print(F'''Total error is {abs(estimated_value - pi )}''' )
    print('''******************''' )


if __name__ == "__main__":
    import doctest

    doctest.testmod()
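# Accuracy note (added, not in the original file): both estimators are plain
# Monte Carlo averages, so the standard error shrinks like 1/sqrt(N); with
# 10**5 samples the pi estimate is typically within a few hundredths of the
# true value, and each additional decimal digit of accuracy costs roughly a
# 100x larger sample.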
def _A ( SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : str ):
    UpperCamelCase :Any = len(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :str = len(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :int = [[False for _ in range(m + 1 )] for _ in range(n + 1 )]
    UpperCamelCase :List[str] = True
    for i in range(SCREAMING_SNAKE_CASE__ ):
        for j in range(m + 1 ):
            if dp[i][j]:
                if j < m and a[i].upper() == b[j]:
                    UpperCamelCase :List[Any] = True
                if a[i].islower():
                    UpperCamelCase :List[Any] = True
    return dp[n][m]


if __name__ == "__main__":
    import doctest

    doctest.testmod()
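# Usage sketch (hypothetical example, not in the original file): the DP checks
# whether string `a` can be turned into abbreviation `b` by uppercasing some of
# its lowercase letters and deleting the remaining lowercase ones, e.g.
# ("daBcd", "ABC") -> True (drop d, uppercase a and c, keep B, drop d), while
# ("dBcd", "ABC") -> False since no letter can supply the leading A.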
import os


def _A ( ):
    UpperCamelCase :Dict = os.path.dirname(os.path.realpath(SCREAMING_SNAKE_CASE__ ) )
    UpperCamelCase :Optional[Any] = os.path.join(SCREAMING_SNAKE_CASE__ , '''triangle.txt''' )
    with open(SCREAMING_SNAKE_CASE__ ) as f:
        UpperCamelCase :List[Any] = f.readlines()
    UpperCamelCase :Any = []
    for line in triangle:
        UpperCamelCase :int = []
        for number in line.strip().split(''' ''' ):
            numbers_from_line.append(int(SCREAMING_SNAKE_CASE__ ) )
        a.append(SCREAMING_SNAKE_CASE__ )
    for i in range(1 , len(SCREAMING_SNAKE_CASE__ ) ):
        for j in range(len(a[i] ) ):
            UpperCamelCase :Union[str, Any] = a[i - 1][j] if j != len(a[i - 1] ) else 0
            UpperCamelCase :Any = a[i - 1][j - 1] if j > 0 else 0
            a[i][j] += max(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ )
    return max(a[-1] )


if __name__ == "__main__":
    print(solution())
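# Worked example (added, not in the original file): the same top-down
# accumulation on the small triangle
#
#     3
#     7 4
#     2 4 6
#     8 5 9 3
#
# yields a maximum path total of 23 (3 + 7 + 4 + 9), the Project Euler
# problem 18 toy case; the script applies the identical O(n^2) pass to the
# larger triangle.txt input.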
from math import factorial

__snake_case = {str(digit): factorial(digit) for digit in range(10)}


def _A ( SCREAMING_SNAKE_CASE__ : int ):
    if not isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
        raise TypeError('''Parameter number must be int''' )
    if number < 0:
        raise ValueError('''Parameter number must be greater than or equal to 0''' )
    # Converts number in string to iterate on its digits and adds its factorial.
    return sum(DIGIT_FACTORIAL[digit] for digit in str(SCREAMING_SNAKE_CASE__ ) )


def _A ( SCREAMING_SNAKE_CASE__ : int = 60 , SCREAMING_SNAKE_CASE__ : int = 1000000 ):
    if not isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) or not isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
        raise TypeError('''Parameters chain_length and number_limit must be int''' )
    if chain_length <= 0 or number_limit <= 0:
        raise ValueError(
            '''Parameters chain_length and number_limit must be greater than 0''' )
    # the counter for the chains with the exact desired length
    UpperCamelCase :Any = 0
    # the cached sizes of the previous chains
    UpperCamelCase :dict[int, int] = {}
    for start_chain_element in range(1 , SCREAMING_SNAKE_CASE__ ):
        # The temporary set will contain the elements of the chain
        UpperCamelCase :List[Any] = set()
        UpperCamelCase :Any = 0
        # Stop computing the chain when you find a cached size, a repeating item or the
        # length is greater than the desired one.
        UpperCamelCase :Optional[Any] = start_chain_element
        while (
            chain_element not in chain_sets_lengths
            and chain_element not in chain_set
            and chain_set_length <= chain_length
        ):
            chain_set.add(SCREAMING_SNAKE_CASE__ )
            chain_set_length += 1
            UpperCamelCase :List[Any] = digit_factorial_sum(SCREAMING_SNAKE_CASE__ )
        if chain_element in chain_sets_lengths:
            chain_set_length += chain_sets_lengths[chain_element]
        UpperCamelCase :Any = chain_set_length
        # If chain contains the exact amount of elements increase the counter
        if chain_set_length == chain_length:
            chains_counter += 1
    return chains_counter


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    print(f'''{solution()}''')
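# Worked example (added, not in the original file): 169 -> 363601 -> 1454 -> 169
# is the classic loop of length 3, since 1! + 6! + 9! = 1 + 720 + 362880 =
# 363601 and the chain returns to its start; solution() counts starting values
# below one million whose chain contains exactly 60 non-repeating terms, and
# the published Project Euler 74 answer is 402.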
import unittest

from transformers import (
    MODEL_FOR_OBJECT_DETECTION_MAPPING,
    AutoFeatureExtractor,
    AutoModelForObjectDetection,
    ObjectDetectionPipeline,
    is_vision_available,
    pipeline,
)
from transformers.testing_utils import (
    is_pipeline_test,
    nested_simplify,
    require_pytesseract,
    require_tf,
    require_timm,
    require_torch,
    require_vision,
    slow,
)

from .test_pipelines_common import ANY


if is_vision_available():
    from PIL import Image
else:

    class Image:
        """Stub so this module still imports when vision extras are missing."""

        @staticmethod
        def open(*args, **kwargs):
            pass


@is_pipeline_test
@require_vision
@require_timm
@require_torch
class ObjectDetectionPipelineTests(unittest.TestCase):
    model_mapping = MODEL_FOR_OBJECT_DETECTION_MAPPING

    def get_test_pipeline(self, model, tokenizer, processor):
        object_detector = ObjectDetectionPipeline(model=model, image_processor=processor)
        return object_detector, ["./tests/fixtures/tests_samples/COCO/000000039769.png"]

    def run_pipeline_test(self, object_detector, examples):
        outputs = object_detector("./tests/fixtures/tests_samples/COCO/000000039769.png", threshold=0.0)

        self.assertGreater(len(outputs), 0)
        for detected_object in outputs:
            self.assertEqual(
                detected_object,
                {
                    "score": ANY(float),
                    "label": ANY(str),
                    "box": {"xmin": ANY(int), "ymin": ANY(int), "xmax": ANY(int), "ymax": ANY(int)},
                },
            )

        import datasets

        dataset = datasets.load_dataset("hf-internal-testing/fixtures_image_utils", "image", split="test")

        batch = [
            Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png"),
            "http://images.cocodataset.org/val2017/000000039769.jpg",
            # RGBA
            dataset[0]["file"],
            # LA
            dataset[1]["file"],
            # L
            dataset[2]["file"],
        ]
        batch_outputs = object_detector(batch, threshold=0.0)

        self.assertEqual(len(batch), len(batch_outputs))
        for outputs in batch_outputs:
            self.assertGreater(len(outputs), 0)
            for detected_object in outputs:
                self.assertEqual(
                    detected_object,
                    {
                        "score": ANY(float),
                        "label": ANY(str),
                        "box": {"xmin": ANY(int), "ymin": ANY(int), "xmax": ANY(int), "ymax": ANY(int)},
                    },
                )

    @require_tf
    @unittest.skip("Object detection not implemented in TF")
    def test_small_model_tf(self):
        pass

    @require_torch
    def test_small_model_pt(self):
        model_id = "hf-internal-testing/tiny-detr-mobilenetsv3"

        model = AutoModelForObjectDetection.from_pretrained(model_id)
        feature_extractor = AutoFeatureExtractor.from_pretrained(model_id)
        object_detector = ObjectDetectionPipeline(model=model, feature_extractor=feature_extractor)

        outputs = object_detector("http://images.cocodataset.org/val2017/000000039769.jpg", threshold=0.0)

        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                {"score": 0.3376, "label": "LABEL_0", "box": {"xmin": 159, "ymin": 120, "xmax": 480, "ymax": 359}},
                {"score": 0.3376, "label": "LABEL_0", "box": {"xmin": 159, "ymin": 120, "xmax": 480, "ymax": 359}},
            ],
        )

        outputs = object_detector(
            [
                "http://images.cocodataset.org/val2017/000000039769.jpg",
                "http://images.cocodataset.org/val2017/000000039769.jpg",
            ],
            threshold=0.0,
        )

        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                [
                    {"score": 0.3376, "label": "LABEL_0", "box": {"xmin": 159, "ymin": 120, "xmax": 480, "ymax": 359}},
                    {"score": 0.3376, "label": "LABEL_0", "box": {"xmin": 159, "ymin": 120, "xmax": 480, "ymax": 359}},
                ],
                [
                    {"score": 0.3376, "label": "LABEL_0", "box": {"xmin": 159, "ymin": 120, "xmax": 480, "ymax": 359}},
                    {"score": 0.3376, "label": "LABEL_0", "box": {"xmin": 159, "ymin": 120, "xmax": 480, "ymax": 359}},
                ],
            ],
        )

    @require_torch
    @slow
    def test_large_model_pt(self):
        model_id = "facebook/detr-resnet-50"

        model = AutoModelForObjectDetection.from_pretrained(model_id)
        feature_extractor = AutoFeatureExtractor.from_pretrained(model_id)
        object_detector = ObjectDetectionPipeline(model=model, feature_extractor=feature_extractor)

        outputs = object_detector("http://images.cocodataset.org/val2017/000000039769.jpg")
        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                {"score": 0.9982, "label": "remote", "box": {"xmin": 40, "ymin": 70, "xmax": 175, "ymax": 117}},
                {"score": 0.9960, "label": "remote", "box": {"xmin": 333, "ymin": 72, "xmax": 368, "ymax": 187}},
                {"score": 0.9955, "label": "couch", "box": {"xmin": 0, "ymin": 1, "xmax": 639, "ymax": 473}},
                {"score": 0.9988, "label": "cat", "box": {"xmin": 13, "ymin": 52, "xmax": 314, "ymax": 470}},
                {"score": 0.9987, "label": "cat", "box": {"xmin": 345, "ymin": 23, "xmax": 640, "ymax": 368}},
            ],
        )

        outputs = object_detector(
            [
                "http://images.cocodataset.org/val2017/000000039769.jpg",
                "http://images.cocodataset.org/val2017/000000039769.jpg",
            ]
        )
        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                [
                    {"score": 0.9982, "label": "remote", "box": {"xmin": 40, "ymin": 70, "xmax": 175, "ymax": 117}},
                    {"score": 0.9960, "label": "remote", "box": {"xmin": 333, "ymin": 72, "xmax": 368, "ymax": 187}},
                    {"score": 0.9955, "label": "couch", "box": {"xmin": 0, "ymin": 1, "xmax": 639, "ymax": 473}},
                    {"score": 0.9988, "label": "cat", "box": {"xmin": 13, "ymin": 52, "xmax": 314, "ymax": 470}},
                    {"score": 0.9987, "label": "cat", "box": {"xmin": 345, "ymin": 23, "xmax": 640, "ymax": 368}},
                ],
                [
                    {"score": 0.9982, "label": "remote", "box": {"xmin": 40, "ymin": 70, "xmax": 175, "ymax": 117}},
                    {"score": 0.9960, "label": "remote", "box": {"xmin": 333, "ymin": 72, "xmax": 368, "ymax": 187}},
                    {"score": 0.9955, "label": "couch", "box": {"xmin": 0, "ymin": 1, "xmax": 639, "ymax": 473}},
                    {"score": 0.9988, "label": "cat", "box": {"xmin": 13, "ymin": 52, "xmax": 314, "ymax": 470}},
                    {"score": 0.9987, "label": "cat", "box": {"xmin": 345, "ymin": 23, "xmax": 640, "ymax": 368}},
                ],
            ],
        )

    @require_torch
    @slow
    def test_integration_torch_object_detection(self):
        model_id = "facebook/detr-resnet-50"

        object_detector = pipeline("object-detection", model=model_id)

        outputs = object_detector("http://images.cocodataset.org/val2017/000000039769.jpg")
        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                {"score": 0.9982, "label": "remote", "box": {"xmin": 40, "ymin": 70, "xmax": 175, "ymax": 117}},
                {"score": 0.9960, "label": "remote", "box": {"xmin": 333, "ymin": 72, "xmax": 368, "ymax": 187}},
                {"score": 0.9955, "label": "couch", "box": {"xmin": 0, "ymin": 1, "xmax": 639, "ymax": 473}},
                {"score": 0.9988, "label": "cat", "box": {"xmin": 13, "ymin": 52, "xmax": 314, "ymax": 470}},
                {"score": 0.9987, "label": "cat", "box": {"xmin": 345, "ymin": 23, "xmax": 640, "ymax": 368}},
            ],
        )

        outputs = object_detector(
            [
                "http://images.cocodataset.org/val2017/000000039769.jpg",
                "http://images.cocodataset.org/val2017/000000039769.jpg",
            ]
        )
        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                [
                    {"score": 0.9982, "label": "remote", "box": {"xmin": 40, "ymin": 70, "xmax": 175, "ymax": 117}},
                    {"score": 0.9960, "label": "remote", "box": {"xmin": 333, "ymin": 72, "xmax": 368, "ymax": 187}},
                    {"score": 0.9955, "label": "couch", "box": {"xmin": 0, "ymin": 1, "xmax": 639, "ymax": 473}},
                    {"score": 0.9988, "label": "cat", "box": {"xmin": 13, "ymin": 52, "xmax": 314, "ymax": 470}},
                    {"score": 0.9987, "label": "cat", "box": {"xmin": 345, "ymin": 23, "xmax": 640, "ymax": 368}},
                ],
                [
                    {"score": 0.9982, "label": "remote", "box": {"xmin": 40, "ymin": 70, "xmax": 175, "ymax": 117}},
                    {"score": 0.9960, "label": "remote", "box": {"xmin": 333, "ymin": 72, "xmax": 368, "ymax": 187}},
                    {"score": 0.9955, "label": "couch", "box": {"xmin": 0, "ymin": 1, "xmax": 639, "ymax": 473}},
                    {"score": 0.9988, "label": "cat", "box": {"xmin": 13, "ymin": 52, "xmax": 314, "ymax": 470}},
                    {"score": 0.9987, "label": "cat", "box": {"xmin": 345, "ymin": 23, "xmax": 640, "ymax": 368}},
                ],
            ],
        )

    @require_torch
    @slow
    def test_threshold(self):
        threshold = 0.9985
        model_id = "facebook/detr-resnet-50"

        object_detector = pipeline("object-detection", model=model_id)

        outputs = object_detector("http://images.cocodataset.org/val2017/000000039769.jpg", threshold=threshold)
        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                {"score": 0.9988, "label": "cat", "box": {"xmin": 13, "ymin": 52, "xmax": 314, "ymax": 470}},
                {"score": 0.9987, "label": "cat", "box": {"xmin": 345, "ymin": 23, "xmax": 640, "ymax": 368}},
            ],
        )

    @require_torch
    @require_pytesseract
    @slow
    def test_layoutlm(self):
        model_id = "Narsil/layoutlmv3-finetuned-funsd"
        threshold = 0.9993

        object_detector = pipeline("object-detection", model=model_id, threshold=threshold)

        outputs = object_detector(
            "https://huggingface.co/spaces/impira/docquery/resolve/2359223c1837a7587402bda0f2643382a6eefeab/invoice.png"
        )

        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                {"score": 0.9993, "label": "I-ANSWER", "box": {"xmin": 294, "ymin": 254, "xmax": 343, "ymax": 264}},
                {"score": 0.9993, "label": "I-ANSWER", "box": {"xmin": 294, "ymin": 254, "xmax": 343, "ymax": 264}},
            ],
        )
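# A minimal usage sketch of the pipeline exercised by the tests above: running
# the `facebook/detr-resnet-50` checkpoint on a COCO image and filtering the
# detections by score. The 0.9 confidence cutoff is an illustrative assumption,
# not something the test suite mandates.
from transformers import pipeline

detector = pipeline("object-detection", model="facebook/detr-resnet-50")
detections = detector("http://images.cocodataset.org/val2017/000000039769.jpg")
for det in detections:
    if det["score"] >= 0.9:  # assumed display threshold
        box = det["box"]
        print(f"{det['label']}: {det['score']:.4f} at ({box['xmin']}, {box['ymin']})-({box['xmax']}, {box['ymax']})")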
import unittest

import numpy as np
import torch

from diffusers import DDIMPipeline, DDIMScheduler, UNet2DModel
from diffusers.utils.testing_utils import enable_full_determinism, require_torch_gpu, slow, torch_device

from ..pipeline_params import UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS, UNCONDITIONAL_IMAGE_GENERATION_PARAMS
from ..test_pipelines_common import PipelineTesterMixin


enable_full_determinism()


class DDIMPipelineFastTests(PipelineTesterMixin, unittest.TestCase):
    pipeline_class = DDIMPipeline
    params = UNCONDITIONAL_IMAGE_GENERATION_PARAMS
    required_optional_params = PipelineTesterMixin.required_optional_params - {
        "num_images_per_prompt",
        "latents",
        "callback",
        "callback_steps",
    }
    batch_params = UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS
    test_cpu_offload = False

    def get_dummy_components(self):
        torch.manual_seed(0)
        unet = UNet2DModel(
            block_out_channels=(32, 64),
            layers_per_block=2,
            sample_size=32,
            in_channels=3,
            out_channels=3,
            down_block_types=("DownBlock2D", "AttnDownBlock2D"),
            up_block_types=("AttnUpBlock2D", "UpBlock2D"),
        )
        scheduler = DDIMScheduler()
        components = {"unet": unet, "scheduler": scheduler}
        return components

    def get_dummy_inputs(self, device, seed=0):
        if str(device).startswith("mps"):
            generator = torch.manual_seed(seed)
        else:
            generator = torch.Generator(device=device).manual_seed(seed)
        inputs = {
            "batch_size": 1,
            "generator": generator,
            "num_inference_steps": 2,
            "output_type": "numpy",
        }
        return inputs

    def test_inference(self):
        device = "cpu"

        components = self.get_dummy_components()
        pipe = self.pipeline_class(**components)
        pipe.to(device)
        pipe.set_progress_bar_config(disable=None)

        inputs = self.get_dummy_inputs(device)
        image = pipe(**inputs).images
        image_slice = image[0, -3:, -3:, -1]

        self.assertEqual(image.shape, (1, 32, 32, 3))
        expected_slice = np.array(
            [1.000e00, 5.717e-01, 4.717e-01, 1.000e00, 0.000e00, 1.000e00, 3.000e-04, 0.000e00, 9.000e-04]
        )
        max_diff = np.abs(image_slice.flatten() - expected_slice).max()
        self.assertLessEqual(max_diff, 1e-3)

    def test_dict_tuple_outputs_equivalent(self):
        super().test_dict_tuple_outputs_equivalent(expected_max_difference=3e-3)

    def test_save_load_local(self):
        super().test_save_load_local(expected_max_difference=3e-3)

    def test_save_load_optional_components(self):
        super().test_save_load_optional_components(expected_max_difference=3e-3)

    def test_inference_batch_single_identical(self):
        super().test_inference_batch_single_identical(expected_max_diff=3e-3)


@slow
@require_torch_gpu
class DDIMPipelineIntegrationTests(unittest.TestCase):
    def test_inference_cifar10(self):
        model_id = "google/ddpm-cifar10-32"

        unet = UNet2DModel.from_pretrained(model_id)
        scheduler = DDIMScheduler()

        ddim = DDIMPipeline(unet=unet, scheduler=scheduler)
        ddim.to(torch_device)
        ddim.set_progress_bar_config(disable=None)

        generator = torch.manual_seed(0)
        image = ddim(generator=generator, eta=0.0, output_type="numpy").images

        image_slice = image[0, -3:, -3:, -1]

        assert image.shape == (1, 32, 32, 3)
        expected_slice = np.array([0.1723, 0.1617, 0.1600, 0.1626, 0.1497, 0.1513, 0.1505, 0.1442, 0.1453])

        assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2

    def test_inference_ema_bedroom(self):
        model_id = "google/ddpm-ema-bedroom-256"

        unet = UNet2DModel.from_pretrained(model_id)
        scheduler = DDIMScheduler.from_pretrained(model_id)

        ddpm = DDIMPipeline(unet=unet, scheduler=scheduler)
        ddpm.to(torch_device)
        ddpm.set_progress_bar_config(disable=None)

        generator = torch.manual_seed(0)
        image = ddpm(generator=generator, output_type="numpy").images

        image_slice = image[0, -3:, -3:, -1]

        assert image.shape == (1, 256, 256, 3)
        expected_slice = np.array([0.0060, 0.0201, 0.0344, 0.0024, 0.0018, 0.0002, 0.0022, 0.0000, 0.0069])

        assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2
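# A short sketch of what the integration tests above exercise end to end:
# assembling a DDIMPipeline from a pretrained UNet and a fresh DDIMScheduler,
# then sampling a single image. The step count and output filename are
# assumptions chosen for illustration, not values from the tests.
import torch
from diffusers import DDIMPipeline, DDIMScheduler, UNet2DModel

unet = UNet2DModel.from_pretrained("google/ddpm-cifar10-32")
pipe = DDIMPipeline(unet=unet, scheduler=DDIMScheduler())
pipe.to("cuda" if torch.cuda.is_available() else "cpu")

generator = torch.manual_seed(0)  # fixed seed, mirroring the tests' determinism
image = pipe(generator=generator, eta=0.0, num_inference_steps=50, output_type="pil").images[0]
image.save("ddim_sample.png")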
import argparse

import torch

from transformers import GPT2LMHeadModel, RobertaForMaskedLM


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description=(
            "Extract some layers of the full RobertaForMaskedLM or GPT2LMHeadModel for Transfer Learned"
            " Distillation"
        )
    )
    parser.add_argument("--model_type", default="roberta", choices=["roberta", "gpt2"])
    parser.add_argument("--model_name", default="roberta-large", type=str)
    parser.add_argument("--dump_checkpoint", default="serialization_dir/tf_roberta_048131723.pth", type=str)
    parser.add_argument("--vocab_transform", action="store_true")
    args = parser.parse_args()

    if args.model_type == "roberta":
        model = RobertaForMaskedLM.from_pretrained(args.model_name)
        prefix = "roberta"
    elif args.model_type == "gpt2":
        model = GPT2LMHeadModel.from_pretrained(args.model_name)
        prefix = "transformer"

    state_dict = model.state_dict()
    compressed_sd = {}

    # Embeddings #
    if args.model_type == "gpt2":
        for param_name in ["wte.weight", "wpe.weight"]:
            compressed_sd[f"{prefix}.{param_name}"] = state_dict[f"{prefix}.{param_name}"]
    else:
        for w in ["word_embeddings", "position_embeddings", "token_type_embeddings"]:
            param_name = f"{prefix}.embeddings.{w}.weight"
            compressed_sd[param_name] = state_dict[param_name]
        for w in ["weight", "bias"]:
            param_name = f"{prefix}.embeddings.LayerNorm.{w}"
            compressed_sd[param_name] = state_dict[param_name]

    # Transformer Blocks #
    std_idx = 0
    for teacher_idx in [0, 2, 4, 7, 9, 11]:
        if args.model_type == "gpt2":
            for layer in ["ln_1", "attn.c_attn", "attn.c_proj", "ln_2", "mlp.c_fc", "mlp.c_proj"]:
                for w in ["weight", "bias"]:
                    compressed_sd[f"{prefix}.h.{std_idx}.{layer}.{w}"] = state_dict[
                        f"{prefix}.h.{teacher_idx}.{layer}.{w}"
                    ]
            compressed_sd[f"{prefix}.h.{std_idx}.attn.bias"] = state_dict[f"{prefix}.h.{teacher_idx}.attn.bias"]
        else:
            for layer in [
                "attention.self.query",
                "attention.self.key",
                "attention.self.value",
                "attention.output.dense",
                "attention.output.LayerNorm",
                "intermediate.dense",
                "output.dense",
                "output.LayerNorm",
            ]:
                for w in ["weight", "bias"]:
                    compressed_sd[f"{prefix}.encoder.layer.{std_idx}.{layer}.{w}"] = state_dict[
                        f"{prefix}.encoder.layer.{teacher_idx}.{layer}.{w}"
                    ]
        std_idx += 1

    # Language Modeling Head #
    if args.model_type == "roberta":
        for layer in ["lm_head.decoder.weight", "lm_head.bias"]:
            compressed_sd[f"{layer}"] = state_dict[f"{layer}"]
        if args.vocab_transform:
            for w in ["weight", "bias"]:
                compressed_sd[f"lm_head.dense.{w}"] = state_dict[f"lm_head.dense.{w}"]
                compressed_sd[f"lm_head.layer_norm.{w}"] = state_dict[f"lm_head.layer_norm.{w}"]
    elif args.model_type == "gpt2":
        for w in ["weight", "bias"]:
            compressed_sd[f"{prefix}.ln_f.{w}"] = state_dict[f"{prefix}.ln_f.{w}"]
        compressed_sd["lm_head.weight"] = state_dict["lm_head.weight"]

    print(f"N layers selected for distillation: {std_idx}")
    print(f"Number of params transferred for distillation: {len(compressed_sd.keys())}")

    print(f"Save transferred checkpoint to {args.dump_checkpoint}.")
    torch.save(compressed_sd, args.dump_checkpoint)
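# A quick sanity check one might run after the extraction above (a sketch: the
# checkpoint path is the script's default, and the layer-index arithmetic
# assumes the roberta model type with the six teacher layers selected in the
# loop).
import torch

compressed = torch.load("serialization_dir/tf_roberta_048131723.pth", map_location="cpu")
# keys look like "roberta.encoder.layer.<idx>....", so field 3 is the student layer index
student_layers = {k.split(".")[3] for k in compressed if ".encoder.layer." in k}
assert student_layers == {str(i) for i in range(6)}, student_layers
print(f"{len(compressed)} tensors covering student layers {sorted(student_layers)}")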
from json import JSONDecodeError  # Workaround for requests.exceptions.JSONDecodeError

import requests


def get_openlibrary_data(olid: str = "isbn/0140328726") -> dict:
    """Return the Open Library JSON record for the given olid (e.g. "isbn/0140328726")."""
    new_olid = olid.strip().strip("/")  # Remove leading/trailing whitespace & slashes
    if new_olid.count("/") != 1:
        msg = f"{olid} is not a valid Open Library olid"
        raise ValueError(msg)
    return requests.get(f"https://openlibrary.org/{new_olid}.json").json()


def summarize_book(ol_book_data: dict) -> dict:
    """Given an Open Library book record, return a human-readable summary dict."""
    desired_keys = {
        "title": "Title",
        "publish_date": "Publish date",
        "authors": "Authors",
        "number_of_pages": "Number of pages:",
        "first_sentence": "First sentence",
        "isbn_10": "ISBN (10)",
        "isbn_13": "ISBN (13)",
    }
    data = {better_key: ol_book_data[key] for key, better_key in desired_keys.items()}
    data["Authors"] = [get_openlibrary_data(author["key"])["name"] for author in data["Authors"]]
    data["First sentence"] = data["First sentence"]["value"]
    for key, value in data.items():
        if isinstance(value, list):
            data[key] = ", ".join(value)
    return data


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    while True:
        isbn = input("\nEnter the ISBN code to search (or 'quit' to stop): ").strip()
        if isbn.lower() in ("", "q", "quit", "exit", "stop"):
            break
        if len(isbn) not in (10, 13) or not isbn.isdigit():
            print(f"Sorry, {isbn} is not a valid ISBN. Please input a valid ISBN.")
            continue
        print(f"\nSearching Open Library for ISBN: {isbn}...\n")
        try:
            book_summary = summarize_book(get_openlibrary_data(f"isbn/{isbn}"))
            print("\n".join(f"{key}: {value}" for key, value in book_summary.items()))
        except JSONDecodeError:  # Workaround for requests.exceptions.RequestException
            print(f"Sorry, there are no results for ISBN: {isbn}.")
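# Non-interactive usage of the helpers above, e.g. from another module (a
# sketch: `open_library` as the module name is an assumption, and the ISBN is
# the script's own default, "Matilda" by Roald Dahl).
from open_library import get_openlibrary_data, summarize_book

summary = summarize_book(get_openlibrary_data("isbn/0140328726"))
print("\n".join(f"{key}: {value}" for key, value in summary.items()))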
import inspect
from typing import List, Optional, Tuple, Union

import numpy as np
import PIL
import torch
import torch.utils.checkpoint

from ...models import UNet2DModel, VQModel
from ...schedulers import (
    DDIMScheduler,
    DPMSolverMultistepScheduler,
    EulerAncestralDiscreteScheduler,
    EulerDiscreteScheduler,
    LMSDiscreteScheduler,
    PNDMScheduler,
)
from ...utils import PIL_INTERPOLATION, randn_tensor
from ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput


def preprocess(image):
    w, h = image.size
    w, h = (x - x % 32 for x in (w, h))  # resize to integer multiple of 32
    image = image.resize((w, h), resample=PIL_INTERPOLATION["lanczos"])
    image = np.array(image).astype(np.float32) / 255.0
    image = image[None].transpose(0, 3, 1, 2)
    image = torch.from_numpy(image)
    return 2.0 * image - 1.0


class LDMSuperResolutionPipeline(DiffusionPipeline):
    def __init__(
        self,
        vqvae: VQModel,
        unet: UNet2DModel,
        scheduler: Union[
            DDIMScheduler,
            PNDMScheduler,
            LMSDiscreteScheduler,
            EulerDiscreteScheduler,
            EulerAncestralDiscreteScheduler,
            DPMSolverMultistepScheduler,
        ],
    ):
        super().__init__()
        self.register_modules(vqvae=vqvae, unet=unet, scheduler=scheduler)

    @torch.no_grad()
    def __call__(
        self,
        image: Union[torch.Tensor, PIL.Image.Image] = None,
        batch_size: Optional[int] = 1,
        num_inference_steps: Optional[int] = 100,
        eta: Optional[float] = 0.0,
        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
        output_type: Optional[str] = "pil",
        return_dict: bool = True,
    ) -> Union[Tuple, ImagePipelineOutput]:
        if isinstance(image, PIL.Image.Image):
            batch_size = 1
        elif isinstance(image, torch.Tensor):
            batch_size = image.shape[0]
        else:
            raise ValueError(f"`image` has to be of type `PIL.Image.Image` or `torch.Tensor` but is {type(image)}")

        if isinstance(image, PIL.Image.Image):
            image = preprocess(image)

        height, width = image.shape[-2:]

        # in_channels should be 6: 3 for latents, 3 for low resolution image
        latents_shape = (batch_size, self.unet.config.in_channels // 2, height, width)
        latents_dtype = next(self.unet.parameters()).dtype

        latents = randn_tensor(latents_shape, generator=generator, device=self.device, dtype=latents_dtype)

        image = image.to(device=self.device, dtype=latents_dtype)

        # set timesteps and move to the correct device
        self.scheduler.set_timesteps(num_inference_steps, device=self.device)
        timesteps_tensor = self.scheduler.timesteps

        # scale the initial noise by the standard deviation required by the scheduler
        latents = latents * self.scheduler.init_noise_sigma

        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature.
        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.
        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502
        # and should be between [0, 1]
        accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys())
        extra_kwargs = {}
        if accepts_eta:
            extra_kwargs["eta"] = eta

        for t in self.progress_bar(timesteps_tensor):
            # concat latents and low resolution image in the channel dimension.
            latents_input = torch.cat([latents, image], dim=1)
            latents_input = self.scheduler.scale_model_input(latents_input, t)
            # predict the noise residual
            noise_pred = self.unet(latents_input, t).sample
            # compute the previous noisy sample x_t -> x_t-1
            latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample

        # decode the image latents with the VQVAE
        image = self.vqvae.decode(latents).sample
        image = torch.clamp(image, -1.0, 1.0)
        image = image / 2 + 0.5
        image = image.cpu().permute(0, 2, 3, 1).numpy()

        if output_type == "pil":
            image = self.numpy_to_pil(image)

        if not return_dict:
            return (image,)

        return ImagePipelineOutput(images=image)
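# A usage sketch for the pipeline defined above: loading the pretrained LDM
# super-resolution checkpoint and upscaling a 128x128 crop. The checkpoint id
# is the one commonly used with this pipeline in diffusers; the input and
# output image paths are assumptions.
import torch
from diffusers import LDMSuperResolutionPipeline
from PIL import Image

device = "cuda" if torch.cuda.is_available() else "cpu"
pipe = LDMSuperResolutionPipeline.from_pretrained("CompVis/ldm-super-resolution-4x-openimages")
pipe = pipe.to(device)

low_res = Image.open("low_res.png").convert("RGB").resize((128, 128))
upscaled = pipe(low_res, num_inference_steps=100, eta=1.0).images[0]
upscaled.save("upscaled.png")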
import inspect import tempfile import unittest from huggingface_hub import hf_hub_download from transformers import is_torch_available from transformers.testing_utils import is_flaky, require_torch, slow, torch_device from ...test_configuration_common import ConfigTester from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor from ...test_pipeline_mixin import PipelineTesterMixin __snake_case = 1E-4 if is_torch_available(): import torch from transformers import AutoformerConfig, AutoformerForPrediction, AutoformerModel from transformers.models.autoformer.modeling_autoformer import AutoformerDecoder, AutoformerEncoder @require_torch class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=13 , SCREAMING_SNAKE_CASE_=7 , SCREAMING_SNAKE_CASE_=14 , SCREAMING_SNAKE_CASE_=10 , SCREAMING_SNAKE_CASE_=19 , SCREAMING_SNAKE_CASE_=5 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=[1, 2, 3, 4, 5] , SCREAMING_SNAKE_CASE_=25 , SCREAMING_SNAKE_CASE_=5 , ) -> str: UpperCamelCase :Any = d_model UpperCamelCase :List[str] = parent UpperCamelCase :List[Any] = batch_size UpperCamelCase :str = prediction_length UpperCamelCase :str = context_length UpperCamelCase :int = cardinality UpperCamelCase :Optional[Any] = num_time_features UpperCamelCase :Optional[Any] = lags_sequence UpperCamelCase :str = embedding_dimension UpperCamelCase :str = is_training UpperCamelCase :Optional[int] = hidden_size UpperCamelCase :List[Any] = num_hidden_layers UpperCamelCase :int = num_attention_heads UpperCamelCase :Tuple = intermediate_size UpperCamelCase :List[str] = hidden_act UpperCamelCase :List[str] = hidden_dropout_prob UpperCamelCase :List[Any] = attention_probs_dropout_prob UpperCamelCase :Optional[int] = context_length UpperCamelCase :Tuple = prediction_length + label_length UpperCamelCase :Optional[Any] = label_length UpperCamelCase :Optional[int] = moving_average UpperCamelCase :Union[str, Any] = autocorrelation_factor def UpperCAmelCase ( self ) -> Optional[int]: return AutoformerConfig( d_model=self.d_model , encoder_layers=self.num_hidden_layers , decoder_layers=self.num_hidden_layers , encoder_attention_heads=self.num_attention_heads , decoder_attention_heads=self.num_attention_heads , encoder_ffn_dim=self.intermediate_size , decoder_ffn_dim=self.intermediate_size , dropout=self.hidden_dropout_prob , attention_dropout=self.attention_probs_dropout_prob , prediction_length=self.prediction_length , context_length=self.context_length , label_length=self.label_length , lags_sequence=self.lags_sequence , num_time_features=self.num_time_features , num_static_categorical_features=1 , cardinality=[self.cardinality] , embedding_dimension=[self.embedding_dimension] , moving_average=self.moving_average , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> List[str]: UpperCamelCase :Optional[Any] = config.context_length + max(config.lags_sequence ) UpperCamelCase :Union[str, Any] = ids_tensor([self.batch_size, 1] , config.cardinality[0] ) UpperCamelCase :List[str] = floats_tensor([self.batch_size, _past_length, config.num_time_features] ) UpperCamelCase :Union[str, Any] = floats_tensor([self.batch_size, _past_length] ) UpperCamelCase :Any = floats_tensor([self.batch_size, _past_length] ) > 0.5 # 
decoder inputs UpperCamelCase :Tuple = floats_tensor([self.batch_size, config.prediction_length, config.num_time_features] ) UpperCamelCase :int = floats_tensor([self.batch_size, config.prediction_length] ) UpperCamelCase :Union[str, Any] = { '''past_values''': past_values, '''static_categorical_features''': static_categorical_features, '''past_time_features''': past_time_features, '''past_observed_mask''': past_observed_mask, '''future_time_features''': future_time_features, '''future_values''': future_values, } return inputs_dict def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :int = self.get_config() UpperCamelCase :Union[str, Any] = self.prepare_autoformer_inputs_dict(SCREAMING_SNAKE_CASE_ ) return config, inputs_dict def UpperCAmelCase ( self ) -> Any: UpperCamelCase , UpperCamelCase :Optional[int] = self.prepare_config_and_inputs() return config, inputs_dict def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[Any]: UpperCamelCase :int = AutoformerModel(config=SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ ).eval() UpperCamelCase :Any = model(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = outputs.encoder_last_hidden_state UpperCamelCase :str = outputs.last_hidden_state with tempfile.TemporaryDirectory() as tmpdirname: UpperCamelCase :Any = model.get_encoder() encoder.save_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = AutoformerEncoder.from_pretrained(SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :int = model.create_network_inputs(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase :Tuple = model.decomposition_layer(transformer_inputs[:, : config.context_length, ...] ) UpperCamelCase :Tuple = torch.cat( (transformer_inputs[:, : config.context_length, ...], feature[:, : config.context_length, ...]) , dim=-1 , ) UpperCamelCase :Optional[Any] = encoder(inputs_embeds=SCREAMING_SNAKE_CASE_ )[0] self.parent.assertTrue((encoder_last_hidden_state_a - encoder_last_hidden_state).abs().max().item() < 1e-3 ) UpperCamelCase :Optional[Any] = ( torch.mean(transformer_inputs[:, : config.context_length, ...] 
, dim=1 ) .unsqueeze(1 ) .repeat(1 , config.prediction_length , 1 ) ) UpperCamelCase :Union[str, Any] = torch.zeros( [transformer_inputs.shape[0], config.prediction_length, transformer_inputs.shape[2]] , device=enc_input.device , ) UpperCamelCase :Tuple = torch.cat( ( torch.cat((seasonal_input[:, -config.label_length :, ...], zeros) , dim=1 ), feature[:, config.context_length - config.label_length :, ...], ) , dim=-1 , ) UpperCamelCase :Optional[Any] = torch.cat( ( torch.cat((trend_input[:, -config.label_length :, ...], mean) , dim=1 ), feature[:, config.context_length - config.label_length :, ...], ) , dim=-1 , ) with tempfile.TemporaryDirectory() as tmpdirname: UpperCamelCase :Union[str, Any] = model.get_decoder() decoder.save_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = AutoformerDecoder.from_pretrained(SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = decoder( trend=SCREAMING_SNAKE_CASE_ , inputs_embeds=SCREAMING_SNAKE_CASE_ , encoder_hidden_states=SCREAMING_SNAKE_CASE_ , )[0] self.parent.assertTrue((last_hidden_state_a - last_hidden_state).abs().max().item() < 1e-3 ) @require_torch class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : List[str] =(AutoformerModel, AutoformerForPrediction) if is_torch_available() else () UpperCamelCase_ : List[str] =(AutoformerForPrediction,) if is_torch_available() else () UpperCamelCase_ : Optional[Any] ={'feature-extraction': AutoformerModel} if is_torch_available() else {} UpperCamelCase_ : Any =False UpperCamelCase_ : List[str] =False UpperCamelCase_ : Dict =False UpperCamelCase_ : Dict =False UpperCamelCase_ : int =False UpperCamelCase_ : Optional[int] =False def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :str = AutoformerModelTester(self ) UpperCamelCase :int = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , has_text_modality=SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: self.config_tester.run_common_tests() def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase , UpperCamelCase :str = self.model_tester.prepare_config_and_inputs() for model_class in self.all_model_classes: UpperCamelCase :Optional[int] = model_class(SCREAMING_SNAKE_CASE_ ) with tempfile.TemporaryDirectory() as tmpdirname: model.save_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase :List[str] = model_class.from_pretrained(SCREAMING_SNAKE_CASE_ , output_loading_info=SCREAMING_SNAKE_CASE_ ) self.assertEqual(info['''missing_keys'''] , [] ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_encoder_decoder_model_standalone(*SCREAMING_SNAKE_CASE_ ) @unittest.skip(reason='''Model has no tokens embeddings''' ) def UpperCAmelCase ( self ) -> int: pass def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :str = inspect.signature(getattr(SCREAMING_SNAKE_CASE_ , '''forward''' ) ) # The main input is the name of the argument after `self` UpperCamelCase :List[str] = list(model_signature.parameters.keys() )[1] self.assertEqual(AutoformerModel.main_input_name , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase , UpperCamelCase :Optional[Any] = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase :List[Any] = model_class(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = inspect.signature(model.forward ) # signature.parameters is an OrderedDict => so 
arg_names order is deterministic UpperCamelCase :Tuple = [*signature.parameters.keys()] UpperCamelCase :Optional[Any] = [ '''past_values''', '''past_time_features''', '''past_observed_mask''', '''static_categorical_features''', '''static_real_features''', '''future_values''', '''future_time_features''', ] if model.__class__.__name__ in ["AutoformerForPrediction"]: expected_arg_names.append('''future_observed_mask''' ) expected_arg_names.extend( [ '''decoder_attention_mask''', '''head_mask''', '''decoder_head_mask''', '''cross_attn_head_mask''', '''encoder_outputs''', '''past_key_values''', '''output_hidden_states''', '''output_attentions''', '''use_cache''', '''return_dict''', ] ) self.assertListEqual(arg_names[: len(SCREAMING_SNAKE_CASE_ )] , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase , UpperCamelCase :List[Any] = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Dict = True UpperCamelCase :Dict = getattr(self.model_tester , '''seq_length''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = getattr(self.model_tester , '''decoder_seq_length''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = getattr(self.model_tester , '''encoder_seq_length''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = getattr(self.model_tester , '''d_model''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Tuple = getattr(self.model_tester , '''num_attention_heads''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = d_model // num_attention_heads for model_class in self.all_model_classes: UpperCamelCase :Tuple = True UpperCamelCase :Tuple = False UpperCamelCase :Any = True UpperCamelCase :List[Any] = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() with torch.no_grad(): UpperCamelCase :int = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :Union[str, Any] = outputs.encoder_attentions if config.is_encoder_decoder else outputs.attentions self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers ) # check that output_attentions also work using config del inputs_dict["output_attentions"] UpperCamelCase :Dict = True UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() with torch.no_grad(): UpperCamelCase :Optional[Any] = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :List[str] = outputs.encoder_attentions self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers ) self.assertListEqual( list(attentions[0].shape[-3:] ) , [self.model_tester.num_attention_heads, encoder_seq_length, dim] , ) UpperCamelCase :List[str] = len(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = 7 if "last_hidden_state" in outputs: correct_outlen += 1 if "trend" in outputs: correct_outlen += 1 if "past_key_values" in outputs: correct_outlen += 1 # past_key_values have been returned if "loss" in outputs: correct_outlen += 1 if "params" in outputs: correct_outlen += 1 self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # decoder attentions UpperCamelCase :Union[str, Any] = outputs.decoder_attentions self.assertIsInstance(SCREAMING_SNAKE_CASE_ , (list, tuple) ) self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers ) self.assertListEqual( list(decoder_attentions[0].shape[-3:] ) , [self.model_tester.num_attention_heads, decoder_seq_length, dim] , ) # cross attentions UpperCamelCase :Union[str, Any] = 
outputs.cross_attentions self.assertIsInstance(SCREAMING_SNAKE_CASE_ , (list, tuple) ) self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers ) self.assertListEqual( list(cross_attentions[0].shape[-3:] ) , [self.model_tester.num_attention_heads, decoder_seq_length, dim] , ) # Check attention is always last and order is fine UpperCamelCase :Any = True UpperCamelCase :int = True UpperCamelCase :Any = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() with torch.no_grad(): UpperCamelCase :Optional[Any] = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ) self.assertEqual(out_len + 2 , len(SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :List[str] = outputs.encoder_attentions if config.is_encoder_decoder else outputs.attentions self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers ) self.assertListEqual( list(self_attentions[0].shape[-3:] ) , [self.model_tester.num_attention_heads, encoder_seq_length, dim] , ) @is_flaky() def UpperCAmelCase ( self ) -> List[Any]: super().test_retain_grad_hidden_states_attentions() def _A ( SCREAMING_SNAKE_CASE__ : int="train-batch.pt" ): UpperCamelCase :Union[str, Any] = hf_hub_download(repo_id='''hf-internal-testing/tourism-monthly-batch''' , filename=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) UpperCamelCase :Tuple = torch.load(SCREAMING_SNAKE_CASE__ , map_location=SCREAMING_SNAKE_CASE__ ) return batch @require_torch @slow class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :int = AutoformerModel.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = prepare_batch() with torch.no_grad(): UpperCamelCase :Optional[Any] = model( past_values=batch['''past_values'''] , past_time_features=batch['''past_time_features'''] , past_observed_mask=batch['''past_observed_mask'''] , static_categorical_features=batch['''static_categorical_features'''] , future_values=batch['''future_values'''] , future_time_features=batch['''future_time_features'''] , )[0] UpperCamelCase :Union[str, Any] = torch.Size( (64, model.config.prediction_length + model.config.label_length, model.config.feature_size) ) self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Tuple = torch.tensor( [[0.3593, -1.3398, 0.6330], [0.2279, 1.5396, -0.1792], [0.0450, 1.3225, -0.2335]] , device=SCREAMING_SNAKE_CASE_ ) self.assertTrue(torch.allclose(output[0, :3, :3] , SCREAMING_SNAKE_CASE_ , atol=SCREAMING_SNAKE_CASE_ ) ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Any = AutoformerForPrediction.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = prepare_batch('''val-batch.pt''' ) with torch.no_grad(): UpperCamelCase :Dict = model( past_values=batch['''past_values'''] , past_time_features=batch['''past_time_features'''] , past_observed_mask=batch['''past_observed_mask'''] , static_categorical_features=batch['''static_categorical_features'''] , ).encoder_last_hidden_state UpperCamelCase :Union[str, Any] = torch.Size((64, model.config.context_length, model.config.d_model) ) self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = torch.tensor( [[-0.0734, -0.9036, 0.8358], [4.7186, 2.4113, 1.9581], [1.7953, 2.3558, 1.2970]] , device=SCREAMING_SNAKE_CASE_ ) self.assertTrue(torch.allclose(output[0, :3, :3] , SCREAMING_SNAKE_CASE_ , 
atol=SCREAMING_SNAKE_CASE_ ) ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = AutoformerForPrediction.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = prepare_batch('''val-batch.pt''' ) with torch.no_grad(): UpperCamelCase :Tuple = model.generate( static_categorical_features=batch['''static_categorical_features'''] , past_time_features=batch['''past_time_features'''] , past_values=batch['''past_values'''] , future_time_features=batch['''future_time_features'''] , past_observed_mask=batch['''past_observed_mask'''] , ) UpperCamelCase :Optional[int] = torch.Size((64, model.config.num_parallel_samples, model.config.prediction_length) ) self.assertEqual(outputs.sequences.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.tensor([3130.6763, 4056.5293, 7053.0786] , device=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = outputs.sequences.mean(dim=1 ) self.assertTrue(torch.allclose(mean_prediction[0, -3:] , SCREAMING_SNAKE_CASE_ , rtol=1e-1 ) )
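# A condensed sketch of the prediction path the integration tests above check:
# load the tourism-monthly checkpoint, draw sample trajectories with
# `generate`, and average them into a point forecast. It reuses the
# `prepare_batch` helper defined in the tests above; running it outside that
# module would require an equivalent batch dict with the same keys.
import torch
from transformers import AutoformerForPrediction

model = AutoformerForPrediction.from_pretrained("huggingface/autoformer-tourism-monthly")
model.eval()

batch = prepare_batch("val-batch.pt")
with torch.no_grad():
    outputs = model.generate(
        past_values=batch["past_values"],
        past_time_features=batch["past_time_features"],
        past_observed_mask=batch["past_observed_mask"],
        static_categorical_features=batch["static_categorical_features"],
        future_time_features=batch["future_time_features"],
    )

# outputs.sequences has shape (batch, num_parallel_samples, prediction_length)
point_forecast = outputs.sequences.mean(dim=1)
print(point_forecast.shape)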
import logging import os import sys from dataclasses import dataclass, field from typing import Optional import evaluate import numpy as np import torch from datasets import load_dataset from PIL import Image from torchvision.transforms import ( CenterCrop, Compose, Normalize, RandomHorizontalFlip, RandomResizedCrop, Resize, ToTensor, ) import transformers from transformers import ( MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING, AutoConfig, AutoImageProcessor, AutoModelForImageClassification, HfArgumentParser, Trainer, TrainingArguments, set_seed, ) from transformers.trainer_utils import get_last_checkpoint from transformers.utils import check_min_version, send_example_telemetry from transformers.utils.versions import require_version __snake_case = logging.getLogger(__name__) # Will error if the minimal version of Transformers is not installed. Remove at your own risks. check_min_version("""4.31.0""") require_version("""datasets>=1.8.0""", """To fix: pip install -r examples/pytorch/image-classification/requirements.txt""") __snake_case = list(MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING.keys()) __snake_case = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES) def _A ( SCREAMING_SNAKE_CASE__ : str ): with open(SCREAMING_SNAKE_CASE__ , '''rb''' ) as f: UpperCamelCase :List[str] = Image.open(SCREAMING_SNAKE_CASE__ ) return im.convert('''RGB''' ) @dataclass class UpperCAmelCase_ : """simple docstring""" UpperCamelCase_ : Optional[str] =field( default=lowercase, metadata={ 'help': 'Name of a dataset from the hub (could be your own, possibly private dataset hosted on the hub).' }, ) UpperCamelCase_ : Optional[str] =field( default=lowercase, metadata={'help': 'The configuration name of the dataset to use (via the datasets library).'} ) UpperCamelCase_ : Optional[str] =field(default=lowercase, metadata={'help': 'A folder containing the training data.'} ) UpperCamelCase_ : Optional[str] =field(default=lowercase, metadata={'help': 'A folder containing the validation data.'} ) UpperCamelCase_ : Optional[float] =field( default=0.15, metadata={'help': 'Percent to split off of train for validation.'} ) UpperCamelCase_ : Optional[int] =field( default=lowercase, metadata={ 'help': ( 'For debugging purposes or quicker training, truncate the number of training examples to this ' 'value if set.' ) }, ) UpperCamelCase_ : Optional[int] =field( default=lowercase, metadata={ 'help': ( 'For debugging purposes or quicker training, truncate the number of evaluation examples to this ' 'value if set.' 
) }, ) def UpperCAmelCase ( self ) -> int: if self.dataset_name is None and (self.train_dir is None and self.validation_dir is None): raise ValueError( '''You must specify either a dataset name from the hub or a train and/or validation directory.''' ) @dataclass class UpperCAmelCase_ : """simple docstring""" UpperCamelCase_ : str =field( default='google/vit-base-patch16-224-in21k', metadata={'help': 'Path to pretrained model or model identifier from huggingface.co/models'}, ) UpperCamelCase_ : Optional[str] =field( default=lowercase, metadata={'help': 'If training from scratch, pass a model type from the list: ' + ', '.join(lowercase )}, ) UpperCamelCase_ : Optional[str] =field( default=lowercase, metadata={'help': 'Pretrained config name or path if not the same as model_name'} ) UpperCamelCase_ : Optional[str] =field( default=lowercase, metadata={'help': 'Where do you want to store the pretrained models downloaded from s3'} ) UpperCamelCase_ : str =field( default='main', metadata={'help': 'The specific model version to use (can be a branch name, tag name or commit id).'}, ) UpperCamelCase_ : str =field(default=lowercase, metadata={'help': 'Name or path of preprocessor config.'} ) UpperCamelCase_ : bool =field( default=lowercase, metadata={ 'help': ( 'Will use the token generated when running `huggingface-cli login` (necessary to use this script ' 'with private models).' ) }, ) UpperCamelCase_ : bool =field( default=lowercase, metadata={'help': 'Will enable to load a pretrained model whose head dimensions are different.'}, ) def _A ( SCREAMING_SNAKE_CASE__ : int ): UpperCamelCase :Dict = torch.stack([example['''pixel_values'''] for example in examples] ) UpperCamelCase :Optional[int] = torch.tensor([example['''labels'''] for example in examples] ) return {"pixel_values": pixel_values, "labels": labels} def _A ( ): # See all possible arguments in src/transformers/training_args.py # or by passing the --help flag to this script. # We now keep distinct sets of args, for a cleaner separation of concerns. UpperCamelCase :int = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments) ) if len(sys.argv ) == 2 and sys.argv[1].endswith('''.json''' ): # If we pass only one argument to the script and it's the path to a json file, # let's parse it to get our arguments. UpperCamelCase , UpperCamelCase , UpperCamelCase :Tuple = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1] ) ) else: UpperCamelCase , UpperCamelCase , UpperCamelCase :str = parser.parse_args_into_dataclasses() # Sending telemetry. Tracking the example usage helps us better allocate resources to maintain them. The # information sent is the one passed as arguments along with your Python/PyTorch versions. send_example_telemetry('''run_image_classification''' , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # Setup logging logging.basicConfig( format='''%(asctime)s - %(levelname)s - %(name)s - %(message)s''' , datefmt='''%m/%d/%Y %H:%M:%S''' , handlers=[logging.StreamHandler(sys.stdout )] , ) if training_args.should_log: # The default of training_args.log_level is passive, so we set log level at info here to have that default. 
transformers.utils.logging.set_verbosity_info() UpperCamelCase :Any = training_args.get_process_log_level() logger.setLevel(SCREAMING_SNAKE_CASE__ ) transformers.utils.logging.set_verbosity(SCREAMING_SNAKE_CASE__ ) transformers.utils.logging.enable_default_handler() transformers.utils.logging.enable_explicit_format() # Log on each process the small summary: logger.warning( F'''Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}''' + F'''distributed training: {bool(training_args.local_rank != -1 )}, 16-bits training: {training_args.fpaa}''' ) logger.info(F'''Training/evaluation parameters {training_args}''' ) # Detecting last checkpoint. UpperCamelCase :Dict = None if os.path.isdir(training_args.output_dir ) and training_args.do_train and not training_args.overwrite_output_dir: UpperCamelCase :Any = get_last_checkpoint(training_args.output_dir ) if last_checkpoint is None and len(os.listdir(training_args.output_dir ) ) > 0: raise ValueError( F'''Output directory ({training_args.output_dir}) already exists and is not empty. ''' '''Use --overwrite_output_dir to overcome.''' ) elif last_checkpoint is not None and training_args.resume_from_checkpoint is None: logger.info( F'''Checkpoint detected, resuming training at {last_checkpoint}. To avoid this behavior, change ''' '''the `--output_dir` or add `--overwrite_output_dir` to train from scratch.''' ) # Set seed before initializing model. set_seed(training_args.seed ) # Initialize our dataset and prepare it for the 'image-classification' task. if data_args.dataset_name is not None: UpperCamelCase :int = load_dataset( data_args.dataset_name , data_args.dataset_config_name , cache_dir=model_args.cache_dir , task='''image-classification''' , use_auth_token=True if model_args.use_auth_token else None , ) else: UpperCamelCase :List[Any] = {} if data_args.train_dir is not None: UpperCamelCase :Union[str, Any] = os.path.join(data_args.train_dir , '''**''' ) if data_args.validation_dir is not None: UpperCamelCase :Union[str, Any] = os.path.join(data_args.validation_dir , '''**''' ) UpperCamelCase :Tuple = load_dataset( '''imagefolder''' , data_files=SCREAMING_SNAKE_CASE__ , cache_dir=model_args.cache_dir , task='''image-classification''' , ) # If we don't have a validation split, split off a percentage of train as validation. UpperCamelCase :Dict = None if '''validation''' in dataset.keys() else data_args.train_val_split if isinstance(data_args.train_val_split , SCREAMING_SNAKE_CASE__ ) and data_args.train_val_split > 0.0: UpperCamelCase :Union[str, Any] = dataset['''train'''].train_test_split(data_args.train_val_split ) UpperCamelCase :Dict = split['''train'''] UpperCamelCase :List[Any] = split['''test'''] # Prepare label mappings. # We'll include these in the model's config to get human readable labels in the Inference API. UpperCamelCase :Any = dataset['''train'''].features['''labels'''].names UpperCamelCase , UpperCamelCase :List[str] = {}, {} for i, label in enumerate(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :List[Any] = str(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = label # Load the accuracy metric from the datasets package UpperCamelCase :Optional[Any] = evaluate.load('''accuracy''' ) # Define our compute_metrics function. It takes an `EvalPrediction` object (a namedtuple with a # predictions and label_ids field) and has to return a dictionary string to float. 
def compute_metrics(SCREAMING_SNAKE_CASE__ : Dict ): return metric.compute(predictions=np.argmax(p.predictions , axis=1 ) , references=p.label_ids ) UpperCamelCase :Dict = AutoConfig.from_pretrained( model_args.config_name or model_args.model_name_or_path , num_labels=len(SCREAMING_SNAKE_CASE__ ) , labelaid=SCREAMING_SNAKE_CASE__ , idalabel=SCREAMING_SNAKE_CASE__ , finetuning_task='''image-classification''' , cache_dir=model_args.cache_dir , revision=model_args.model_revision , use_auth_token=True if model_args.use_auth_token else None , ) UpperCamelCase :Optional[Any] = AutoModelForImageClassification.from_pretrained( model_args.model_name_or_path , from_tf=bool('''.ckpt''' in model_args.model_name_or_path ) , config=SCREAMING_SNAKE_CASE__ , cache_dir=model_args.cache_dir , revision=model_args.model_revision , use_auth_token=True if model_args.use_auth_token else None , ignore_mismatched_sizes=model_args.ignore_mismatched_sizes , ) UpperCamelCase :int = AutoImageProcessor.from_pretrained( model_args.image_processor_name or model_args.model_name_or_path , cache_dir=model_args.cache_dir , revision=model_args.model_revision , use_auth_token=True if model_args.use_auth_token else None , ) # Define torchvision transforms to be applied to each image. if "shortest_edge" in image_processor.size: UpperCamelCase :Tuple = image_processor.size['''shortest_edge'''] else: UpperCamelCase :str = (image_processor.size['''height'''], image_processor.size['''width''']) UpperCamelCase :Tuple = Normalize(mean=image_processor.image_mean , std=image_processor.image_std ) UpperCamelCase :Optional[int] = Compose( [ RandomResizedCrop(SCREAMING_SNAKE_CASE__ ), RandomHorizontalFlip(), ToTensor(), normalize, ] ) UpperCamelCase :str = Compose( [ Resize(SCREAMING_SNAKE_CASE__ ), CenterCrop(SCREAMING_SNAKE_CASE__ ), ToTensor(), normalize, ] ) def train_transforms(SCREAMING_SNAKE_CASE__ : Optional[Any] ): UpperCamelCase :str = [ _train_transforms(pil_img.convert('''RGB''' ) ) for pil_img in example_batch['''image'''] ] return example_batch def val_transforms(SCREAMING_SNAKE_CASE__ : str ): UpperCamelCase :Any = [_val_transforms(pil_img.convert('''RGB''' ) ) for pil_img in example_batch['''image''']] return example_batch if training_args.do_train: if "train" not in dataset: raise ValueError('''--do_train requires a train dataset''' ) if data_args.max_train_samples is not None: UpperCamelCase :List[Any] = ( dataset['''train'''].shuffle(seed=training_args.seed ).select(range(data_args.max_train_samples ) ) ) # Set the training transforms dataset["train"].set_transform(SCREAMING_SNAKE_CASE__ ) if training_args.do_eval: if "validation" not in dataset: raise ValueError('''--do_eval requires a validation dataset''' ) if data_args.max_eval_samples is not None: UpperCamelCase :Tuple = ( dataset['''validation'''].shuffle(seed=training_args.seed ).select(range(data_args.max_eval_samples ) ) ) # Set the validation transforms dataset["validation"].set_transform(SCREAMING_SNAKE_CASE__ ) # Initalize our trainer UpperCamelCase :int = Trainer( model=SCREAMING_SNAKE_CASE__ , args=SCREAMING_SNAKE_CASE__ , train_dataset=dataset['''train'''] if training_args.do_train else None , eval_dataset=dataset['''validation'''] if training_args.do_eval else None , compute_metrics=SCREAMING_SNAKE_CASE__ , tokenizer=SCREAMING_SNAKE_CASE__ , data_collator=SCREAMING_SNAKE_CASE__ , ) # Training if training_args.do_train: UpperCamelCase :Any = None if training_args.resume_from_checkpoint is not None: UpperCamelCase :Optional[Any] = 
training_args.resume_from_checkpoint elif last_checkpoint is not None: UpperCamelCase :Union[str, Any] = last_checkpoint UpperCamelCase :int = trainer.train(resume_from_checkpoint=SCREAMING_SNAKE_CASE__ ) trainer.save_model() trainer.log_metrics('''train''' , train_result.metrics ) trainer.save_metrics('''train''' , train_result.metrics ) trainer.save_state() # Evaluation if training_args.do_eval: UpperCamelCase :List[Any] = trainer.evaluate() trainer.log_metrics('''eval''' , SCREAMING_SNAKE_CASE__ ) trainer.save_metrics('''eval''' , SCREAMING_SNAKE_CASE__ ) # Write model card and (optionally) push to hub UpperCamelCase :int = { '''finetuned_from''': model_args.model_name_or_path, '''tasks''': '''image-classification''', '''dataset''': data_args.dataset_name, '''tags''': ['''image-classification''', '''vision'''], } if training_args.push_to_hub: trainer.push_to_hub(**SCREAMING_SNAKE_CASE__ ) else: trainer.create_model_card(**SCREAMING_SNAKE_CASE__ ) if __name__ == "__main__": main()
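# After training with the script above, the saved checkpoint can be reused for
# inference via the image-classification pipeline (a sketch: the output
# directory and test image path are assumptions standing in for whatever
# `--output_dir` and input you actually use).
from transformers import pipeline

classifier = pipeline("image-classification", model="./output_dir")
predictions = classifier("test_image.jpg", top_k=3)
for pred in predictions:
    print(f"{pred['label']}: {pred['score']:.4f}")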
import inspect import logging import os import random import shutil import tempfile import unittest import pytest import torch from torch import nn from torch.utils.data import DataLoader, TensorDataset from accelerate import Accelerator from accelerate.test_utils import execute_subprocess_async, require_cuda from accelerate.utils import ProjectConfiguration, set_seed __snake_case = logging.getLogger(__name__) def _A ( SCREAMING_SNAKE_CASE__ : Dict=2 , SCREAMING_SNAKE_CASE__ : Dict=3 , SCREAMING_SNAKE_CASE__ : Any=16 , SCREAMING_SNAKE_CASE__ : int = 10 , SCREAMING_SNAKE_CASE__ : int = 2 ): def get_dataset(SCREAMING_SNAKE_CASE__ : List[Any] ): UpperCamelCase :Union[str, Any] = torch.randn(batch_size * n_batches , 1 ) return TensorDataset(SCREAMING_SNAKE_CASE__ , a * x + b + 0.1 * torch.randn(batch_size * n_batches , 1 ) ) UpperCamelCase :str = get_dataset(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = get_dataset(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = DataLoader(SCREAMING_SNAKE_CASE__ , shuffle=SCREAMING_SNAKE_CASE__ , batch_size=SCREAMING_SNAKE_CASE__ , num_workers=4 ) UpperCamelCase :Any = DataLoader(SCREAMING_SNAKE_CASE__ , shuffle=SCREAMING_SNAKE_CASE__ , batch_size=SCREAMING_SNAKE_CASE__ , num_workers=4 ) return (train_dataloader, valid_dataloader) def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Any=None ): UpperCamelCase :Dict = [] for epoch in range(SCREAMING_SNAKE_CASE__ ): # Train quickly model.train() for batch in dataloader: UpperCamelCase , UpperCamelCase :Optional[Any] = batch UpperCamelCase :int = model(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = torch.nn.functional.mse_loss(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) accelerator.backward(SCREAMING_SNAKE_CASE__ ) optimizer.step() optimizer.zero_grad() rands.append(random.random() ) # Introduce some randomness if scheduler is not None: scheduler.step() return rands class UpperCAmelCase_ ( nn.Module ): """simple docstring""" def __init__( self ) -> str: super().__init__() UpperCamelCase :Optional[int] = nn.Parameter(torch.randn(1 ) ) UpperCamelCase :int = nn.Parameter(torch.randn(1 ) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> int: return x * self.a + self.b class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> Dict: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :Tuple = dummy_dataloaders() UpperCamelCase :Tuple = ProjectConfiguration(total_limit=1 , project_dir=SCREAMING_SNAKE_CASE_ , automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :Dict = Accelerator(project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Union[str, Any] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() # Save second state accelerator.save_state() self.assertEqual(len(os.listdir(accelerator.project_dir ) ) , 1 ) def UpperCAmelCase ( self ) -> str: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[str] = DummyModel() UpperCamelCase :Union[str, Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase 
, UpperCamelCase :Dict = dummy_dataloaders() # Train baseline UpperCamelCase :Dict = Accelerator() UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :int = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial UpperCamelCase :int = os.path.join(SCREAMING_SNAKE_CASE_ , '''initial''' ) accelerator.save_state(SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[Any] = model.a.item(), model.b.item() UpperCamelCase :Optional[int] = optimizer.state_dict() UpperCamelCase :Optional[int] = train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Dict = model.a.item(), model.b.item() UpperCamelCase :Optional[Any] = optimizer.state_dict() # Train partially set_seed(42 ) UpperCamelCase :Any = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :List[Any] = dummy_dataloaders() UpperCamelCase :List[str] = Accelerator() UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Tuple = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) accelerator.load_state(SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Tuple = model.a.item(), model.b.item() UpperCamelCase :Tuple = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = train(2 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save everything UpperCamelCase :Optional[int] = os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoint''' ) accelerator.save_state(SCREAMING_SNAKE_CASE_ ) # Load everything back in and make sure all states work accelerator.load_state(SCREAMING_SNAKE_CASE_ ) test_rands += train(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Union[str, Any] = model.a.item(), model.b.item() UpperCamelCase :Optional[Any] = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[Any] = DummyModel() UpperCamelCase :Optional[int] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :int = dummy_dataloaders() UpperCamelCase :int = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :Union[str, Any] = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Optional[Any] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() ((UpperCamelCase) , (UpperCamelCase)) :List[str] = model.a.item(), model.b.item() UpperCamelCase :Dict = optimizer.state_dict() UpperCamelCase :Any = train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , 
SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[int] = model.a.item(), model.b.item() UpperCamelCase :Any = optimizer.state_dict() # Train partially set_seed(42 ) UpperCamelCase :Union[str, Any] = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :Tuple = dummy_dataloaders() UpperCamelCase :Optional[Any] = ProjectConfiguration(iteration=1 , automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[str] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) ((UpperCamelCase) , (UpperCamelCase)) :Dict = model.a.item(), model.b.item() UpperCamelCase :Dict = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = train(2 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save everything accelerator.save_state() # Load everything back in and make sure all states work accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_1''' ) ) test_rands += train(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[Any] = model.a.item(), model.b.item() UpperCamelCase :str = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :List[Any] = torch.tensor([1, 2, 3] ) UpperCamelCase :Any = torch.tensor([2, 3, 4] ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :Optional[Any] = torch.optim.Adam(net.parameters() ) UpperCamelCase :Optional[Any] = Accelerator() with self.assertRaises(SCREAMING_SNAKE_CASE_ ) as ve: accelerator.register_for_checkpointing(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = str(ve.exception ) self.assertTrue('''Item at index 0''' in message ) self.assertTrue('''Item at index 1''' in message ) self.assertFalse('''Item at index 2''' in message ) self.assertFalse('''Item at index 3''' in message ) def UpperCAmelCase ( self ) -> Any: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[Any] = DummyModel() UpperCamelCase :List[str] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase :Any = torch.optim.lr_scheduler.StepLR(SCREAMING_SNAKE_CASE_ , step_size=1 , gamma=0.99 ) UpperCamelCase , UpperCamelCase :Any = dummy_dataloaders() UpperCamelCase :Optional[int] = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :str = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase 
:Tuple = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() UpperCamelCase :int = scheduler.state_dict() train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertNotEqual(SCREAMING_SNAKE_CASE_ , scheduler.state_dict() ) # Load everything back in and make sure all states work accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) self.assertEqual(SCREAMING_SNAKE_CASE_ , scheduler.state_dict() ) def UpperCAmelCase ( self ) -> Union[str, Any]: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :int = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ , total_limit=2 ) # Train baseline UpperCamelCase :Tuple = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = accelerator.prepare(SCREAMING_SNAKE_CASE_ ) # Save 3 states: for _ in range(11 ): accelerator.save_state() self.assertTrue(not os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) ) self.assertTrue(os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_9''' ) ) ) self.assertTrue(os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_10''' ) ) ) @require_cuda def UpperCAmelCase ( self ) -> int: UpperCamelCase :int = ['''torchrun''', F'''--nproc_per_node={torch.cuda.device_count()}''', inspect.getfile(self.__class__ )] execute_subprocess_async(SCREAMING_SNAKE_CASE_ , env=os.environ.copy() ) if __name__ == "__main__": __snake_case = """/tmp/accelerate/state_checkpointing""" __snake_case = DummyModel() __snake_case = torch.optim.Adam(params=model.parameters(), lr=1E-3) __snake_case = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.9_9) __snake_case , __snake_case = dummy_dataloaders() __snake_case = ProjectConfiguration(automatic_checkpoint_naming=True) # Train baseline __snake_case = Accelerator(project_dir=savedir, project_config=project_config, mixed_precision="""no""") if accelerator.process_index == 0: if os.path.exists(savedir): shutil.rmtree(savedir) os.makedirs(savedir) __snake_case , __snake_case , __snake_case , __snake_case , __snake_case = accelerator.prepare( model, optimizer, train_dataloader, valid_dataloader, scheduler ) __snake_case , __snake_case = accelerator.prepare(model, optimizer) train(3, model, train_dataloader, optimizer, accelerator, scheduler) # Check that the intial optimizer is loaded on the GPU for group in optimizer.param_groups: __snake_case = group["""params"""][0].device break assert param_device.type == accelerator.device.type __snake_case = model.cpu() accelerator.wait_for_everyone() accelerator.save_state() accelerator.wait_for_everyone() # Check CPU state accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""cpu""") for group in optimizer.param_groups: __snake_case = group["""params"""][0].device break assert ( param_device.type == torch.device("""cpu""").type ), f"Loaded optimizer states did not match, expected to be loaded on the CPU but got {param_device}" # Check device state model.to(accelerator.device) accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""on_device""") for group in 
optimizer.param_groups: __snake_case = group["""params"""][0].device break assert ( param_device.type == accelerator.device.type ), f"Loaded optimizer states did not match, expected to be loaded on {accelerator.device} but got {param_device}" # Check error with pytest.raises(TypeError, match="""Unsupported optimizer map location passed"""): accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""invalid""") accelerator.wait_for_everyone() if accelerator.process_index == 0: shutil.rmtree(savedir) accelerator.wait_for_everyone()
import math

import numpy as np
import qiskit
from qiskit import Aer, ClassicalRegister, QuantumCircuit, QuantumRegister, execute


def quantum_fourier_transform(number_of_qubits: int = 3):
    """Build an n-qubit QFT circuit, measure it, and return the simulated counts."""
    if isinstance(number_of_qubits, str):
        raise TypeError("number of qubits must be an integer.")
    if number_of_qubits <= 0:
        raise ValueError("number of qubits must be > 0.")
    if math.floor(number_of_qubits) != number_of_qubits:
        raise ValueError("number of qubits must be exact integer.")
    if number_of_qubits > 10:
        raise ValueError("number of qubits too large to simulate(>10).")

    qr = QuantumRegister(number_of_qubits, "qr")
    cr = ClassicalRegister(number_of_qubits, "cr")
    quantum_circuit = QuantumCircuit(qr, cr)

    counter = number_of_qubits
    for i in range(counter):
        # Hadamard on the highest remaining qubit, then controlled phase
        # rotations of pi/2, pi/4, ... against each lower qubit.
        quantum_circuit.h(number_of_qubits - i - 1)
        counter -= 1
        for j in range(counter):
            quantum_circuit.cp(np.pi / 2 ** (counter - j), j, counter)

    # Reverse the qubit order, since the QFT leaves the bits reversed.
    for k in range(number_of_qubits // 2):
        quantum_circuit.swap(k, number_of_qubits - k - 1)

    # measure all the qubits
    quantum_circuit.measure(qr, cr)
    # simulate with 10000 shots
    backend = Aer.get_backend("qasm_simulator")
    job = execute(quantum_circuit, backend, shots=10000)

    return job.result().get_counts(quantum_circuit)


if __name__ == "__main__":
    print(f"Total count for quantum fourier transform state is: {quantum_fourier_transform(3)}")
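# Added sanity check (not part of the original file): the QFT of the all-zeros
# input state is a uniform superposition, so the measured counts should spread
# over every bitstring. Note that `Aer` and `execute` are the legacy qiskit
# (<1.0) API; newer releases expose the simulator via the separate
# `qiskit_aer` package instead.
if __name__ == "__main__":
    counts = quantum_fourier_transform(2)
    assert set(counts) <= {"00", "01", "10", "11"}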
import numpy as np

SQUARE = [
    ["a", "b", "c", "d", "e"],
    ["f", "g", "h", "i", "k"],
    ["l", "m", "n", "o", "p"],
    ["q", "r", "s", "t", "u"],
    ["v", "w", "x", "y", "z"],
]


class BifidCipher:
    """Bifid cipher: fractionate letters through a 5x5 Polybius square."""

    def __init__(self) -> None:
        self.SQUARE = np.array(SQUARE)

    def letter_to_numbers(self, letter: str) -> np.ndarray:
        # Row and column of `letter` in the square, 1-indexed.
        index1, index2 = np.where(letter == self.SQUARE)
        indexes = np.concatenate([index1 + 1, index2 + 1])
        return indexes

    def numbers_to_letter(self, index1: int, index2: int) -> str:
        letter = self.SQUARE[index1 - 1, index2 - 1]
        return letter

    def encode(self, message: str) -> str:
        message = message.lower()
        message = message.replace(" ", "")
        message = message.replace("j", "i")  # the 5x5 square has no "j"

        # Write row indices above column indices, then read the digits off
        # row-wise and regroup them into pairs.
        first_step = np.empty((2, len(message)))
        for letter_index in range(len(message)):
            numbers = self.letter_to_numbers(message[letter_index])
            first_step[0, letter_index] = numbers[0]
            first_step[1, letter_index] = numbers[1]

        second_step = first_step.reshape(2 * len(message))
        encoded_message = ""
        for numbers_index in range(len(message)):
            index1 = int(second_step[numbers_index * 2])
            index2 = int(second_step[(numbers_index * 2) + 1])
            letter = self.numbers_to_letter(index1, index2)
            encoded_message = encoded_message + letter

        return encoded_message

    def decode(self, message: str) -> str:
        message = message.lower()
        # str.replace returns a new string; the original discarded the result.
        message = message.replace(" ", "")

        # Invert the fractionation: rebuild the digit stream, then split it
        # back into the row half and the column half.
        first_step = np.empty(2 * len(message))
        for letter_index in range(len(message)):
            numbers = self.letter_to_numbers(message[letter_index])
            first_step[letter_index * 2] = numbers[0]
            first_step[letter_index * 2 + 1] = numbers[1]

        second_step = first_step.reshape((2, len(message)))
        decoded_message = ""
        for numbers_index in range(len(message)):
            index1 = int(second_step[0, numbers_index])
            index2 = int(second_step[1, numbers_index])
            letter = self.numbers_to_letter(index1, index2)
            decoded_message = decoded_message + letter

        return decoded_message
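# Round-trip demo, added for illustration (not part of the original module).
# The square folds "j" into "i" and spaces are stripped, so a decoded message
# matches the original only up to those normalisations.
if __name__ == "__main__":
    cipher = BifidCipher()
    secret = cipher.encode("test message")
    assert cipher.decode(secret) == "testmessage"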
from collections import OrderedDict
from typing import Mapping

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging


logger = logging.get_logger(__name__)

BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "google/bigbird-roberta-base": "https://huggingface.co/google/bigbird-roberta-base/resolve/main/config.json",
    "google/bigbird-roberta-large": "https://huggingface.co/google/bigbird-roberta-large/resolve/main/config.json",
    "google/bigbird-base-trivia-itc": "https://huggingface.co/google/bigbird-base-trivia-itc/resolve/main/config.json",
    # See all BigBird models at https://huggingface.co/models?filter=big_bird
}


class BigBirdConfig(PretrainedConfig):
    """Configuration class for BigBird models."""

    model_type = "big_bird"

    def __init__(
        self,
        vocab_size=50358,
        hidden_size=768,
        num_hidden_layers=12,
        num_attention_heads=12,
        intermediate_size=3072,
        hidden_act="gelu_new",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        max_position_embeddings=4096,
        type_vocab_size=2,
        initializer_range=0.02,
        layer_norm_eps=1e-12,
        use_cache=True,
        pad_token_id=0,
        bos_token_id=1,
        eos_token_id=2,
        sep_token_id=66,
        attention_type="block_sparse",
        use_bias=True,
        rescale_embeddings=False,
        block_size=64,
        num_random_blocks=3,
        classifier_dropout=None,
        **kwargs,
    ):
        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            sep_token_id=sep_token_id,
            **kwargs,
        )

        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.initializer_range = initializer_range
        self.type_vocab_size = type_vocab_size
        self.layer_norm_eps = layer_norm_eps
        self.use_cache = use_cache

        self.rescale_embeddings = rescale_embeddings
        self.attention_type = attention_type
        self.use_bias = use_bias
        self.block_size = block_size
        self.num_random_blocks = num_random_blocks
        self.classifier_dropout = classifier_dropout


class BigBirdOnnxConfig(OnnxConfig):
    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        if self.task == "multiple-choice":
            dynamic_axis = {0: "batch", 1: "choice", 2: "sequence"}
        else:
            dynamic_axis = {0: "batch", 1: "sequence"}
        return OrderedDict(
            [
                ("input_ids", dynamic_axis),
                ("attention_mask", dynamic_axis),
            ]
        )
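# Usage sketch, added for illustration: the config is a plain hyperparameter
# container, so the block-sparse settings can be overridden at construction
# time ("original_full" selects dense attention instead of "block_sparse").
if __name__ == "__main__":
    config = BigBirdConfig(attention_type="original_full", block_size=32)
    print(config.attention_type, config.block_size)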
import argparse import collections import numpy as np import torch from flax import traverse_util from tax import checkpoints from transformers import MTaConfig, UMTaEncoderModel, UMTaForConditionalGeneration from transformers.utils import logging logging.set_verbosity_info() def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Tuple ): return params[F'''{prefix}/{prefix}/relpos_bias/rel_embedding'''][:, i, :] def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : Any="attention" ): UpperCamelCase :str = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/key/kernel'''][:, i, :, :] ) UpperCamelCase :Optional[Any] = k_tmp.reshape(k_tmp.shape[0] , k_tmp.shape[1] * k_tmp.shape[2] ) UpperCamelCase :Optional[int] = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/out/kernel'''][:, i, :, :] ) UpperCamelCase :List[Any] = o_tmp.reshape(o_tmp.shape[0] * o_tmp.shape[1] , o_tmp.shape[2] ) UpperCamelCase :Union[str, Any] = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/query/kernel'''][:, i, :, :] ) UpperCamelCase :Any = q_tmp.reshape(q_tmp.shape[0] , q_tmp.shape[1] * q_tmp.shape[2] ) UpperCamelCase :str = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/value/kernel'''][:, i, :, :] ) UpperCamelCase :str = v_tmp.reshape(v_tmp.shape[0] , v_tmp.shape[1] * v_tmp.shape[2] ) return k, o, q, v def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : List[str]=False ): if split_mlp_wi: UpperCamelCase :List[Any] = params[F'''{prefix}/{prefix}/mlp/wi_0/kernel'''][:, i, :] UpperCamelCase :int = params[F'''{prefix}/{prefix}/mlp/wi_1/kernel'''][:, i, :] UpperCamelCase :str = (wi_a, wi_a) else: UpperCamelCase :Optional[Any] = params[F'''{prefix}/{prefix}/mlp/wi/kernel'''][:, i, :] UpperCamelCase :Optional[int] = params[F'''{prefix}/{prefix}/mlp/wo/kernel'''][:, i, :] return wi, wo def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Optional[int] ): return params[F'''{prefix}/{prefix}/{layer_name}/scale'''][:, i] def _A ( SCREAMING_SNAKE_CASE__ : dict , *, SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : bool , SCREAMING_SNAKE_CASE__ : bool = False ): UpperCamelCase :Tuple = traverse_util.flatten_dict(variables['''target'''] ) UpperCamelCase :List[Any] = {'''/'''.join(SCREAMING_SNAKE_CASE__ ): v for k, v in old.items()} # v1.1 models have a gated GeLU with wi_0 and wi_1 instead of wi UpperCamelCase :int = '''encoder/encoder/mlp/wi_0/kernel''' in old print('''Split MLP:''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = collections.OrderedDict() # Shared embeddings. UpperCamelCase :int = old['''token_embedder/embedding'''] # Encoder. for i in range(SCREAMING_SNAKE_CASE__ ): # Block i, layer 0 (Self Attention). UpperCamelCase :str = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''pre_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[str] = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''attention''' ) UpperCamelCase :str = layer_norm UpperCamelCase :Dict = k.T UpperCamelCase :Optional[Any] = o.T UpperCamelCase :int = q.T UpperCamelCase :Any = v.T # Block i, layer 1 (MLP). 
UpperCamelCase :Tuple = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''pre_mlp_layer_norm''' ) UpperCamelCase , UpperCamelCase :Any = tax_mlp_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Tuple = layer_norm if split_mlp_wi: UpperCamelCase :List[Any] = wi[0].T UpperCamelCase :Tuple = wi[1].T else: UpperCamelCase :Optional[Any] = wi.T UpperCamelCase :Dict = wo.T if scalable_attention: # convert the rel_embedding of each layer UpperCamelCase :List[str] = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' ).T UpperCamelCase :Optional[Any] = old['''encoder/encoder_norm/scale'''] if not scalable_attention: UpperCamelCase :str = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , 0 , '''encoder''' ).T UpperCamelCase :Any = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , 0 , '''decoder''' ).T if not is_encoder_only: # Decoder. for i in range(SCREAMING_SNAKE_CASE__ ): # Block i, layer 0 (Self Attention). UpperCamelCase :Union[str, Any] = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_self_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Dict = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''self_attention''' ) UpperCamelCase :str = layer_norm UpperCamelCase :int = k.T UpperCamelCase :Optional[int] = o.T UpperCamelCase :Tuple = q.T UpperCamelCase :List[str] = v.T # Block i, layer 1 (Cross Attention). UpperCamelCase :str = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_cross_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[Any] = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''encoder_decoder_attention''' ) UpperCamelCase :Tuple = layer_norm UpperCamelCase :Optional[Any] = k.T UpperCamelCase :List[str] = o.T UpperCamelCase :List[str] = q.T UpperCamelCase :str = v.T # Block i, layer 2 (MLP). UpperCamelCase :List[str] = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_mlp_layer_norm''' ) UpperCamelCase , UpperCamelCase :Optional[int] = tax_mlp_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Tuple = layer_norm if split_mlp_wi: UpperCamelCase :List[str] = wi[0].T UpperCamelCase :str = wi[1].T else: UpperCamelCase :Dict = wi.T UpperCamelCase :Optional[Any] = wo.T if scalable_attention: # convert the rel_embedding of each layer UpperCamelCase :Tuple = tax_relpos_bias_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' ).T UpperCamelCase :Union[str, Any] = old['''decoder/decoder_norm/scale'''] # LM Head (only in v1.1 checkpoints, in v1.0 embeddings are used instead) if "decoder/logits_dense/kernel" in old: UpperCamelCase :Union[str, Any] = old['''decoder/logits_dense/kernel'''].T return new def _A ( SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : bool ): UpperCamelCase :Optional[int] = collections.OrderedDict([(k, torch.from_numpy(v.copy() )) for (k, v) in converted_params.items()] ) # Add what is missing. 
if "encoder.embed_tokens.weight" not in state_dict: UpperCamelCase :Dict = state_dict['''shared.weight'''] if not is_encoder_only: if "decoder.embed_tokens.weight" not in state_dict: UpperCamelCase :Dict = state_dict['''shared.weight'''] if "lm_head.weight" not in state_dict: # For old 1.0 models. print('''Using shared word embeddings as lm_head.''' ) UpperCamelCase :List[Any] = state_dict['''shared.weight'''] return state_dict def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Dict = checkpoints.load_tax_checkpoint(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :str = convert_tax_to_pytorch( SCREAMING_SNAKE_CASE__ , num_layers=config.num_layers , is_encoder_only=SCREAMING_SNAKE_CASE__ , scalable_attention=SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Dict = make_state_dict(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) model.load_state_dict(SCREAMING_SNAKE_CASE__ , strict=SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : bool = False , SCREAMING_SNAKE_CASE__ : bool = False , ): UpperCamelCase :Any = MTaConfig.from_json_file(SCREAMING_SNAKE_CASE__ ) print(F'''Building PyTorch model from configuration: {config}''' ) # Non-v1.1 checkpoints could also use T5Model, but this works for all. # The v1.0 checkpoints will simply have an LM head that is the word embeddings. if is_encoder_only: UpperCamelCase :List[str] = UMTaEncoderModel(SCREAMING_SNAKE_CASE__ ) else: UpperCamelCase :Any = UMTaForConditionalGeneration(SCREAMING_SNAKE_CASE__ ) # Load weights from tf checkpoint load_tax_weights_in_ta(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # Save pytorch-model print(F'''Save PyTorch model to {pytorch_dump_path}''' ) model.save_pretrained(SCREAMING_SNAKE_CASE__ ) # Verify that we can load the checkpoint. model.from_pretrained(SCREAMING_SNAKE_CASE__ ) print('''Done''' ) if __name__ == "__main__": __snake_case = argparse.ArgumentParser(description="""Converts a native T5X checkpoint into a PyTorch checkpoint.""") # Required parameters parser.add_argument( """--t5x_checkpoint_path""", default=None, type=str, required=True, help="""Path to the T5X checkpoint.""" ) parser.add_argument( """--config_file""", default=None, type=str, required=True, help="""The config json file corresponding to the pre-trained T5 model.\nThis specifies the model architecture.""", ) parser.add_argument( """--pytorch_dump_path""", default=None, type=str, required=True, help="""Path to the output PyTorch model.""" ) parser.add_argument( """--is_encoder_only""", action="""store_true""", help="""Check if the model is encoder-decoder model""", default=False ) parser.add_argument( """--scalable_attention""", action="""store_true""", help="""Whether the model uses scaled attention (umt5 model)""", default=False, ) __snake_case = parser.parse_args() convert_tax_checkpoint_to_pytorch( args.tax_checkpoint_path, args.config_file, args.pytorch_dump_path, args.is_encoder_only, args.scalable_attention, )
from collections import OrderedDict
from typing import Mapping

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging


logger = logging.get_logger(__name__)

DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "distilbert-base-uncased": "https://huggingface.co/distilbert-base-uncased/resolve/main/config.json",
    "distilbert-base-uncased-distilled-squad": (
        "https://huggingface.co/distilbert-base-uncased-distilled-squad/resolve/main/config.json"
    ),
    "distilbert-base-cased": "https://huggingface.co/distilbert-base-cased/resolve/main/config.json",
    "distilbert-base-cased-distilled-squad": (
        "https://huggingface.co/distilbert-base-cased-distilled-squad/resolve/main/config.json"
    ),
    "distilbert-base-german-cased": "https://huggingface.co/distilbert-base-german-cased/resolve/main/config.json",
    "distilbert-base-multilingual-cased": (
        "https://huggingface.co/distilbert-base-multilingual-cased/resolve/main/config.json"
    ),
    "distilbert-base-uncased-finetuned-sst-2-english": (
        "https://huggingface.co/distilbert-base-uncased-finetuned-sst-2-english/resolve/main/config.json"
    ),
}


class DistilBertConfig(PretrainedConfig):
    """Configuration class for DistilBERT models."""

    model_type = "distilbert"
    attribute_map = {
        "hidden_size": "dim",
        "num_attention_heads": "n_heads",
        "num_hidden_layers": "n_layers",
    }

    def __init__(
        self,
        vocab_size=30522,
        max_position_embeddings=512,
        sinusoidal_pos_embds=False,
        n_layers=6,
        n_heads=12,
        dim=768,
        hidden_dim=4 * 768,
        dropout=0.1,
        attention_dropout=0.1,
        activation="gelu",
        initializer_range=0.02,
        qa_dropout=0.1,
        seq_classif_dropout=0.2,
        pad_token_id=0,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.sinusoidal_pos_embds = sinusoidal_pos_embds
        self.n_layers = n_layers
        self.n_heads = n_heads
        self.dim = dim
        self.hidden_dim = hidden_dim
        self.dropout = dropout
        self.attention_dropout = attention_dropout
        self.activation = activation
        self.initializer_range = initializer_range
        self.qa_dropout = qa_dropout
        self.seq_classif_dropout = seq_classif_dropout
        super().__init__(**kwargs, pad_token_id=pad_token_id)


class DistilBertOnnxConfig(OnnxConfig):
    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        if self.task == "multiple-choice":
            dynamic_axis = {0: "batch", 1: "choice", 2: "sequence"}
        else:
            dynamic_axis = {0: "batch", 1: "sequence"}
        return OrderedDict(
            [
                ("input_ids", dynamic_axis),
                ("attention_mask", dynamic_axis),
            ]
        )
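# Illustrative check, not in the original file: `attribute_map` aliases the
# generic `hidden_size`/`num_attention_heads`/`num_hidden_layers` names onto
# DistilBERT's `dim`/`n_heads`/`n_layers`, so either spelling reads the same
# underlying value.
if __name__ == "__main__":
    config = DistilBertConfig()
    assert config.hidden_size == config.dim == 768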
def print_max_activities(start: list[int], finish: list[int]) -> None:
    """Greedy activity selection: print a maximum-size set of mutually
    non-overlapping activities, assuming they are sorted by finish time."""
    n = len(finish)
    print("The following activities are selected:")

    # The first activity is always selected
    i = 0
    print(i, end=",")

    # Consider rest of the activities
    for j in range(n):
        # If this activity has start time greater than
        # or equal to the finish time of previously
        # selected activity, then select it
        if start[j] >= finish[i]:
            print(j, end=",")
            i = j


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    start = [1, 3, 0, 5, 8, 5]
    finish = [2, 4, 6, 7, 9, 9]
    print_max_activities(start, finish)
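# Variant added for illustration: the same greedy scan as above, but returning
# the selected indices instead of printing them. Like the original, it assumes
# the activities are already sorted by finish time.
# e.g. max_activities([1, 3, 0, 5, 8, 5], [2, 4, 6, 7, 9, 9]) == [0, 1, 3, 4]
def max_activities(start: list[int], finish: list[int]) -> list[int]:
    selected = [0]
    for j in range(1, len(finish)):
        if start[j] >= finish[selected[-1]]:
            selected.append(j)
    return selected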
from typing import List, Optional, Union import numpy as np import tensorflow as tf from .utils import logging __snake_case = logging.get_logger(__name__) def _A ( SCREAMING_SNAKE_CASE__ : Union[tf.Tensor, np.ndarray] ): if isinstance(SCREAMING_SNAKE_CASE__ , np.ndarray ): return list(tensor.shape ) UpperCamelCase :Optional[Any] = tf.shape(SCREAMING_SNAKE_CASE__ ) if tensor.shape == tf.TensorShape(SCREAMING_SNAKE_CASE__ ): return dynamic UpperCamelCase :str = tensor.shape.as_list() return [dynamic[i] if s is None else s for i, s in enumerate(SCREAMING_SNAKE_CASE__ )] def _A ( SCREAMING_SNAKE_CASE__ : tf.Tensor , SCREAMING_SNAKE_CASE__ : Optional[int] = None , SCREAMING_SNAKE_CASE__ : Optional[str] = None ): return tf.nn.softmax(logits=logits + 1e-9 , axis=SCREAMING_SNAKE_CASE__ , name=SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Any=1e-5 , SCREAMING_SNAKE_CASE__ : List[Any]=-1 ): # This is a very simplified functional layernorm, designed to duplicate # the functionality of PyTorch nn.functional.layer_norm when this is needed to port # models in Transformers. if weight.shape.rank != 1 or bias.shape.rank != 1 or not isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ): raise NotImplementedError('''Only 1D weight and bias tensors are supported for now, with only a single axis.''' ) # Get mean and variance on the axis to be normalized UpperCamelCase , UpperCamelCase :Optional[Any] = tf.nn.moments(SCREAMING_SNAKE_CASE__ , axes=[axis] , keepdims=SCREAMING_SNAKE_CASE__ ) if axis != -1: # Reshape scale and weight to have the same rank as inputs, but with 1 dimensions # on every dimension except axis UpperCamelCase :Dict = [1] * inputs.shape.rank UpperCamelCase :Dict = shape_list(SCREAMING_SNAKE_CASE__ )[axis] UpperCamelCase :Optional[Any] = tf.reshape(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Dict = tf.reshape(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # Compute layer normalization using the batch_normalization # function. 
UpperCamelCase :Dict = tf.nn.batch_normalization( SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , offset=SCREAMING_SNAKE_CASE__ , scale=SCREAMING_SNAKE_CASE__ , variance_epsilon=SCREAMING_SNAKE_CASE__ , ) return outputs def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Tuple=0 , SCREAMING_SNAKE_CASE__ : Optional[Any]=-1 ): # Replicates the behavior of torch.flatten in TF # If end_dim or start_dim is negative, count them from the end if end_dim < 0: end_dim += input.shape.rank if start_dim < 0: start_dim += input.shape.rank if start_dim == end_dim: return input UpperCamelCase :Tuple = tf.shape(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[Any] = tf.math.reduce_prod(in_shape[start_dim : end_dim + 1] ) UpperCamelCase :int = tf.concat([in_shape[:start_dim], [flattened_dim], in_shape[end_dim + 1 :]] , axis=0 ) return tf.reshape(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : tf.Tensor ): if not isinstance(SCREAMING_SNAKE_CASE__ , tf.Tensor ): UpperCamelCase :Tuple = tf.convert_to_tensor(SCREAMING_SNAKE_CASE__ ) # Catches stray NumPy inputs if encoder_attention_mask.shape.rank == 3: UpperCamelCase :Optional[int] = encoder_attention_mask[:, None, :, :] if encoder_attention_mask.shape.rank == 2: UpperCamelCase :Optional[Any] = encoder_attention_mask[:, None, None, :] # T5 has a mask that can compare sequence ids, we can simulate this here with this transposition # Cf. https://github.com/tensorflow/mesh/blob/8d2465e9bc93129b913b5ccc6a59aa97abd96ec6/mesh_tensorflow # /transformer/transformer_layers.py#L270 # encoder_extended_attention_mask = (encoder_extended_attention_mask == # encoder_extended_attention_mask.transpose(-1, -2)) UpperCamelCase :int = ( tf.cast(1 , encoder_attention_mask.dtype ) - encoder_extended_attention_mask ) * encoder_extended_attention_mask.dtype.min return encoder_extended_attention_mask def _A ( SCREAMING_SNAKE_CASE__ : tf.Tensor , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : str = "input_ids" ): tf.debugging.assert_less( SCREAMING_SNAKE_CASE__ , tf.cast(SCREAMING_SNAKE_CASE__ , dtype=tensor.dtype ) , message=( F'''The maximum value of {tensor_name} ({tf.math.reduce_max(SCREAMING_SNAKE_CASE__ )}) must be smaller than the embedding ''' F'''layer\'s input dimension ({embed_dim}). The likely cause is some problem at tokenization time.''' ) , ) def _A ( SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Optional[Any] ): UpperCamelCase :Optional[int] = 64512 # Check that no item in `data` is larger than `HDF5_OBJECT_HEADER_LIMIT` # because in that case even chunking the array would not make the saving # possible. UpperCamelCase :List[Any] = [x for x in data if len(SCREAMING_SNAKE_CASE__ ) > HDF5_OBJECT_HEADER_LIMIT] # Expecting this to never be true. if bad_attributes: raise RuntimeError( '''The following attributes cannot be saved to HDF5 file because ''' F'''they are larger than {HDF5_OBJECT_HEADER_LIMIT} ''' F'''bytes: {bad_attributes}''' ) UpperCamelCase :List[Any] = np.asarray(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[Any] = 1 UpperCamelCase :Dict = np.array_split(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # This will never loop forever thanks to the test above. 
while any(x.nbytes > HDF5_OBJECT_HEADER_LIMIT for x in chunked_data ): num_chunks += 1 UpperCamelCase :List[str] = np.array_split(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) if num_chunks > 1: for chunk_id, chunk_data in enumerate(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :List[Any] = chunk_data else: UpperCamelCase :List[str] = data def _A ( SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Union[str, Any] ): if name in group.attrs: UpperCamelCase :Dict = [n.decode('''utf8''' ) if hasattr(SCREAMING_SNAKE_CASE__ , '''decode''' ) else n for n in group.attrs[name]] else: UpperCamelCase :str = [] UpperCamelCase :List[Any] = 0 while "%s%d" % (name, chunk_id) in group.attrs: data.extend( [n.decode('''utf8''' ) if hasattr(SCREAMING_SNAKE_CASE__ , '''decode''' ) else n for n in group.attrs['''%s%d''' % (name, chunk_id)]] ) chunk_id += 1 return data def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] ): def _expand_single_ad_tensor(SCREAMING_SNAKE_CASE__ : Optional[Any] ): if isinstance(SCREAMING_SNAKE_CASE__ , tf.Tensor ) and t.shape.rank == 1: return tf.expand_dims(SCREAMING_SNAKE_CASE__ , axis=-1 ) return t return tf.nest.map_structure(_expand_single_ad_tensor , SCREAMING_SNAKE_CASE__ )
import json from typing import List, Optional, Tuple from tokenizers import normalizers from ...tokenization_utils_fast import PreTrainedTokenizerFast from .tokenization_electra import ElectraTokenizer __snake_case = {"""vocab_file""": """vocab.txt""", """tokenizer_file""": """tokenizer.json"""} __snake_case = { """vocab_file""": { """google/electra-small-generator""": ( """https://huggingface.co/google/electra-small-generator/resolve/main/vocab.txt""" ), """google/electra-base-generator""": """https://huggingface.co/google/electra-base-generator/resolve/main/vocab.txt""", """google/electra-large-generator""": ( """https://huggingface.co/google/electra-large-generator/resolve/main/vocab.txt""" ), """google/electra-small-discriminator""": ( """https://huggingface.co/google/electra-small-discriminator/resolve/main/vocab.txt""" ), """google/electra-base-discriminator""": ( """https://huggingface.co/google/electra-base-discriminator/resolve/main/vocab.txt""" ), """google/electra-large-discriminator""": ( """https://huggingface.co/google/electra-large-discriminator/resolve/main/vocab.txt""" ), }, """tokenizer_file""": { """google/electra-small-generator""": ( """https://huggingface.co/google/electra-small-generator/resolve/main/tokenizer.json""" ), """google/electra-base-generator""": ( """https://huggingface.co/google/electra-base-generator/resolve/main/tokenizer.json""" ), """google/electra-large-generator""": ( """https://huggingface.co/google/electra-large-generator/resolve/main/tokenizer.json""" ), """google/electra-small-discriminator""": ( """https://huggingface.co/google/electra-small-discriminator/resolve/main/tokenizer.json""" ), """google/electra-base-discriminator""": ( """https://huggingface.co/google/electra-base-discriminator/resolve/main/tokenizer.json""" ), """google/electra-large-discriminator""": ( """https://huggingface.co/google/electra-large-discriminator/resolve/main/tokenizer.json""" ), }, } __snake_case = { """google/electra-small-generator""": 5_12, """google/electra-base-generator""": 5_12, """google/electra-large-generator""": 5_12, """google/electra-small-discriminator""": 5_12, """google/electra-base-discriminator""": 5_12, """google/electra-large-discriminator""": 5_12, } __snake_case = { """google/electra-small-generator""": {"""do_lower_case""": True}, """google/electra-base-generator""": {"""do_lower_case""": True}, """google/electra-large-generator""": {"""do_lower_case""": True}, """google/electra-small-discriminator""": {"""do_lower_case""": True}, """google/electra-base-discriminator""": {"""do_lower_case""": True}, """google/electra-large-discriminator""": {"""do_lower_case""": True}, } class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : Dict =VOCAB_FILES_NAMES UpperCamelCase_ : int =PRETRAINED_VOCAB_FILES_MAP UpperCamelCase_ : Union[str, Any] =PRETRAINED_INIT_CONFIGURATION UpperCamelCase_ : Dict =PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES UpperCamelCase_ : List[Any] =ElectraTokenizer def __init__( self , SCREAMING_SNAKE_CASE_=None , SCREAMING_SNAKE_CASE_=None , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_="[UNK]" , SCREAMING_SNAKE_CASE_="[SEP]" , SCREAMING_SNAKE_CASE_="[PAD]" , SCREAMING_SNAKE_CASE_="[CLS]" , SCREAMING_SNAKE_CASE_="[MASK]" , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=None , **SCREAMING_SNAKE_CASE_ , ) -> Union[str, Any]: super().__init__( SCREAMING_SNAKE_CASE_ , tokenizer_file=SCREAMING_SNAKE_CASE_ , do_lower_case=SCREAMING_SNAKE_CASE_ , unk_token=SCREAMING_SNAKE_CASE_ , sep_token=SCREAMING_SNAKE_CASE_ 
, pad_token=SCREAMING_SNAKE_CASE_ , cls_token=SCREAMING_SNAKE_CASE_ , mask_token=SCREAMING_SNAKE_CASE_ , tokenize_chinese_chars=SCREAMING_SNAKE_CASE_ , strip_accents=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ , ) UpperCamelCase :int = json.loads(self.backend_tokenizer.normalizer.__getstate__() ) if ( normalizer_state.get('''lowercase''' , SCREAMING_SNAKE_CASE_ ) != do_lower_case or normalizer_state.get('''strip_accents''' , SCREAMING_SNAKE_CASE_ ) != strip_accents or normalizer_state.get('''handle_chinese_chars''' , SCREAMING_SNAKE_CASE_ ) != tokenize_chinese_chars ): UpperCamelCase :Dict = getattr(SCREAMING_SNAKE_CASE_ , normalizer_state.pop('''type''' ) ) UpperCamelCase :Optional[int] = do_lower_case UpperCamelCase :str = strip_accents UpperCamelCase :Dict = tokenize_chinese_chars UpperCamelCase :Optional[Any] = normalizer_class(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = do_lower_case def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=None ) -> Dict: UpperCamelCase :Any = [self.cls_token_id] + token_ids_a + [self.sep_token_id] if token_ids_a: output += token_ids_a + [self.sep_token_id] return output def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None ) -> List[int]: UpperCamelCase :Any = [self.sep_token_id] UpperCamelCase :Union[str, Any] = [self.cls_token_id] if token_ids_a is None: return len(cls + token_ids_a + sep ) * [0] return len(cls + token_ids_a + sep ) * [0] + len(token_ids_a + sep ) * [1] def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None ) -> Tuple[str]: UpperCamelCase :Optional[Any] = self._tokenizer.model.save(SCREAMING_SNAKE_CASE_ , name=SCREAMING_SNAKE_CASE_ ) return tuple(SCREAMING_SNAKE_CASE_ )
import time from contextlib import contextmanager from pathlib import Path import pytest import requests from huggingface_hub.hf_api import HfApi, HfFolder __snake_case = """__DUMMY_TRANSFORMERS_USER__""" __snake_case = """Dummy User""" __snake_case = """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""" __snake_case = """https://hub-ci.huggingface.co""" __snake_case = CI_HUB_ENDPOINT + """/datasets/{repo_id}/resolve/{revision}/{path}""" __snake_case = CI_HUB_ENDPOINT + """/{repo_id}/resolve/{revision}/{filename}""" __snake_case = Path("""~/.huggingface/hub_ci_token""").expanduser() @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): monkeypatch.setattr( '''huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any ): monkeypatch.setattr('''datasets.config.HF_ENDPOINT''' , SCREAMING_SNAKE_CASE__ ) monkeypatch.setattr('''datasets.config.HUB_DATASETS_URL''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : List[str] ): monkeypatch.setattr('''huggingface_hub.hf_api.HfFolder.path_token''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : List[Any] ): HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield HfFolder.delete_token() @pytest.fixture(scope='''session''' ) def _A ( ): return HfApi(endpoint=SCREAMING_SNAKE_CASE__ ) @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi ): UpperCamelCase :Tuple = HfFolder.get_token() HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield CI_HUB_USER_TOKEN if previous_token is not None: HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Dict ): def _cleanup_repo(SCREAMING_SNAKE_CASE__ : Tuple ): hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) return _cleanup_repo @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): @contextmanager def _temporary_repo(SCREAMING_SNAKE_CASE__ : Any ): try: yield repo_id finally: cleanup_repo(SCREAMING_SNAKE_CASE__ ) return _temporary_repo @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Optional[Any] ): UpperCamelCase :Union[str, Any] = F'''repo_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :int = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data/text_data.txt''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Dict ): return hf_private_dataset_repo_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Optional[int] = F'''repo_zipped_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Any = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , 
private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str] ): return hf_private_dataset_repo_zipped_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] ): UpperCamelCase :Dict = F'''repo_zipped_img_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Dict = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Tuple ): return hf_private_dataset_repo_zipped_img_data_
def bfs(graph: list[list[int]], source: int, sink: int, parent: list[int]) -> bool:
    # Return True if the sink is reachable from the source in the residual
    # graph, recording the discovered path in `parent`.
    visited = [False] * len(graph)
    queue = [source]
    visited[source] = True

    while queue:
        u = queue.pop(0)
        for ind in range(len(graph[u])):
            if not visited[ind] and graph[u][ind] > 0:
                queue.append(ind)
                visited[ind] = True
                parent[ind] = u

    return visited[sink]


def ford_fulkerson(graph: list[list[int]], source: int, sink: int) -> int:
    # This array is filled by BFS to store the augmenting path
    parent = [-1] * len(graph)
    max_flow = 0

    while bfs(graph, source, sink, parent):
        # Find the minimum residual capacity along the selected path
        path_flow = float("Inf")
        s = sink
        while s != source:
            path_flow = min(path_flow, graph[parent[s]][s])
            s = parent[s]

        max_flow += path_flow

        # Update residual capacities of the edges and reverse edges
        v = sink
        while v != source:
            u = parent[v]
            graph[u][v] -= path_flow
            graph[v][u] += path_flow
            v = parent[v]

    return max_flow


graph = [
    [0, 16, 13, 0, 0, 0],
    [0, 0, 10, 12, 0, 0],
    [0, 4, 0, 0, 14, 0],
    [0, 0, 9, 0, 0, 20],
    [0, 0, 0, 7, 0, 4],
    [0, 0, 0, 0, 0, 0],
]

source, sink = 0, 5
print(ford_fulkerson(graph, source, sink))
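# Added note: selecting augmenting paths with BFS makes this the Edmonds-Karp
# variant of Ford-Fulkerson, which runs in O(V * E^2). The capacities above
# are the classic CLRS example network, whose maximum flow is 23. Since
# ford_fulkerson consumes its graph argument in place, the check below
# rebuilds the capacity matrix first.
_fresh_graph = [
    [0, 16, 13, 0, 0, 0],
    [0, 0, 10, 12, 0, 0],
    [0, 4, 0, 0, 14, 0],
    [0, 0, 9, 0, 0, 20],
    [0, 0, 0, 7, 0, 4],
    [0, 0, 0, 0, 0, 0],
]
assert ford_fulkerson(_fresh_graph, 0, 5) == 23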
from __future__ import annotations import unittest from transformers import RoFormerConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask from ...test_pipeline_mixin import PipelineTesterMixin if is_tf_available(): import tensorflow as tf from transformers import ( TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForMultipleChoice, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerModel, ) from transformers.models.roformer.modeling_tf_roformer import ( TFRoFormerSelfAttention, TFRoFormerSinusoidalPositionalEmbedding, ) class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=13 , SCREAMING_SNAKE_CASE_=7 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=99 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=37 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=512 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=None , ) -> Dict: UpperCamelCase :Any = parent UpperCamelCase :Dict = 13 UpperCamelCase :List[Any] = 7 UpperCamelCase :List[Any] = True UpperCamelCase :Dict = True UpperCamelCase :Union[str, Any] = True UpperCamelCase :List[str] = True UpperCamelCase :Dict = 99 UpperCamelCase :Any = 32 UpperCamelCase :Tuple = 2 UpperCamelCase :Union[str, Any] = 4 UpperCamelCase :List[str] = 37 UpperCamelCase :Dict = '''gelu''' UpperCamelCase :Dict = 0.1 UpperCamelCase :Tuple = 0.1 UpperCamelCase :Dict = 512 UpperCamelCase :str = 16 UpperCamelCase :Optional[Any] = 2 UpperCamelCase :Dict = 0.02 UpperCamelCase :Optional[int] = 3 UpperCamelCase :int = 4 UpperCamelCase :Dict = None def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size, self.seq_length] , self.vocab_size ) UpperCamelCase :Optional[int] = None if self.use_input_mask: UpperCamelCase :Dict = random_attention_mask([self.batch_size, self.seq_length] ) UpperCamelCase :Dict = None if self.use_token_type_ids: UpperCamelCase :List[Any] = ids_tensor([self.batch_size, self.seq_length] , self.type_vocab_size ) UpperCamelCase :Union[str, Any] = None UpperCamelCase :Optional[int] = None UpperCamelCase :Any = None if self.use_labels: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size] , self.type_sequence_label_size ) UpperCamelCase :Optional[Any] = ids_tensor([self.batch_size, self.seq_length] , self.num_labels ) UpperCamelCase :int = ids_tensor([self.batch_size] , self.num_choices ) UpperCamelCase :Union[str, Any] = RoFormerConfig( vocab_size=self.vocab_size , hidden_size=self.hidden_size , num_hidden_layers=self.num_hidden_layers , num_attention_heads=self.num_attention_heads , intermediate_size=self.intermediate_size , hidden_act=self.hidden_act , hidden_dropout_prob=self.hidden_dropout_prob , attention_probs_dropout_prob=self.attention_probs_dropout_prob , max_position_embeddings=self.max_position_embeddings , type_vocab_size=self.type_vocab_size , initializer_range=self.initializer_range , return_dict=SCREAMING_SNAKE_CASE_ , ) return config, input_ids, token_type_ids, input_mask, sequence_labels, 
token_labels, choice_labels def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: UpperCamelCase :Optional[Any] = TFRoFormerModel(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = {'''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids} UpperCamelCase :int = [input_ids, input_mask] UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.last_hidden_state.shape , (self.batch_size, self.seq_length, self.hidden_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :List[Any] = True UpperCamelCase :Union[str, Any] = TFRoFormerForCausalLM(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Any = model(SCREAMING_SNAKE_CASE_ )['''logits'''] self.parent.assertListEqual( list(prediction_scores.numpy().shape ) , [self.batch_size, self.seq_length, self.vocab_size] ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :str = TFRoFormerForMaskedLM(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.seq_length, self.vocab_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> List[str]: UpperCamelCase :List[Any] = self.num_labels UpperCamelCase :int = TFRoFormerForSequenceClassification(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Optional[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_labels) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :List[Any] = self.num_choices UpperCamelCase :Any = TFRoFormerForMultipleChoice(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :int = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :Any = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :List[Any] = { '''input_ids''': multiple_choice_inputs_ids, '''attention_mask''': multiple_choice_input_mask, '''token_type_ids''': multiple_choice_token_type_ids, } UpperCamelCase :Dict = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_choices) ) def UpperCAmelCase ( self , 
SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :Union[str, Any] = self.num_labels UpperCamelCase :Dict = TFRoFormerForTokenClassification(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Tuple = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.seq_length, self.num_labels) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :Union[str, Any] = TFRoFormerForQuestionAnswering(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.start_logits.shape , (self.batch_size, self.seq_length) ) self.parent.assertEqual(result.end_logits.shape , (self.batch_size, self.seq_length) ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = self.prepare_config_and_inputs() ( ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ) :Union[str, Any] = config_and_inputs UpperCamelCase :Any = {'''input_ids''': input_ids, '''token_type_ids''': token_type_ids, '''attention_mask''': input_mask} return config, inputs_dict @require_tf class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : str =( ( TFRoFormerModel, TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerForMultipleChoice, ) if is_tf_available() else () ) UpperCamelCase_ : Tuple =( { 'feature-extraction': TFRoFormerModel, 'fill-mask': TFRoFormerForMaskedLM, 'question-answering': TFRoFormerForQuestionAnswering, 'text-classification': TFRoFormerForSequenceClassification, 'text-generation': TFRoFormerForCausalLM, 'token-classification': TFRoFormerForTokenClassification, 'zero-shot': TFRoFormerForSequenceClassification, } if is_tf_available() else {} ) UpperCamelCase_ : Tuple =False UpperCamelCase_ : Optional[Any] =False def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: if pipeline_test_casse_name == "TextGenerationPipelineTests": return True return False def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Any = TFRoFormerModelTester(self ) UpperCamelCase :Optional[int] = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , hidden_size=37 ) def UpperCAmelCase ( self ) -> List[str]: self.config_tester.run_common_tests() def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_masked_lm(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> str: UpperCamelCase :Union[str, Any] = 
self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_lm_head(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Optional[Any] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_multiple_choice(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_question_answering(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_sequence_classification(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_token_classification(*SCREAMING_SNAKE_CASE_ ) @slow def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = TFRoFormerModel.from_pretrained('''junnyu/roformer_chinese_base''' ) self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" @slow def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Tuple = TFRoFormerForMaskedLM.from_pretrained('''junnyu/roformer_chinese_base''' ) UpperCamelCase :Union[str, Any] = tf.constant([[0, 1, 2, 3, 4, 5]] ) UpperCamelCase :str = model(SCREAMING_SNAKE_CASE_ )[0] # TODO Replace vocab size UpperCamelCase :Tuple = 5_0000 UpperCamelCase :Optional[Any] = [1, 6, vocab_size] self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ ) print(output[:, :3, :3] ) # TODO Replace values below with what was printed above. UpperCamelCase :int = tf.constant( [ [ [-0.1205_3341, -1.026_4901, 0.2922_1946], [-1.513_3783, 0.19_7433, 0.1519_0607], [-5.013_5403, -3.90_0256, -0.8403_8764], ] ] ) tf.debugging.assert_near(output[:, :3, :3] , SCREAMING_SNAKE_CASE_ , atol=1e-4 ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Optional[int] =1E-4 def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :str = tf.constant([[4, 10]] ) UpperCamelCase :List[Any] = TFRoFormerSinusoidalPositionalEmbedding(num_positions=6 , embedding_dim=6 ) UpperCamelCase :str = emba(input_ids.shape ) UpperCamelCase :List[str] = tf.constant( [[0.0000, 0.0000, 0.0000, 1.0000, 1.0000, 1.0000], [0.8415, 0.0464, 0.0022, 0.5403, 0.9989, 1.0000]] ) tf.debugging.assert_near(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Dict = tf.constant( [ [0.0000, 0.0000, 0.0000, 0.0000, 0.0000], [0.8415, 0.8219, 0.8020, 0.7819, 0.7617], [0.9093, 0.9364, 0.9581, 0.9749, 0.9870], ] ) UpperCamelCase :Dict = TFRoFormerSinusoidalPositionalEmbedding(num_positions=512 , embedding_dim=512 ) emba([2, 16, 512] ) UpperCamelCase :Any = emba.weight[:3, :5] tf.debugging.assert_near(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : List[Any] =1E-4 def UpperCAmelCase ( self ) -> List[str]: # 2,12,16,64 UpperCamelCase :List[Any] = tf.reshape(tf.range(2 * 12 * 16 * 64 , dtype=tf.floataa ) , shape=(2, 12, 16, 64) ) / 100 UpperCamelCase :List[Any] = -tf.reshape(tf.range(2 * 12 * 16 * 64 , dtype=tf.floataa ) , shape=(2, 12, 16, 64) ) / 100 UpperCamelCase :List[Any] = TFRoFormerSinusoidalPositionalEmbedding(num_positions=32 , embedding_dim=64 ) UpperCamelCase :int = 
embed_positions([2, 16, 768] )[None, None, :, :] UpperCamelCase , UpperCamelCase :List[str] = TFRoFormerSelfAttention.apply_rotary_position_embeddings( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = tf.constant( [ [0.0000, 0.0100, 0.0200, 0.0300, 0.0400, 0.0500, 0.0600, 0.0700], [-0.2012, 0.8897, 0.0263, 0.9401, 0.2074, 0.9463, 0.3481, 0.9343], [-1.7057, 0.6271, -1.2145, 1.3897, -0.6303, 1.7647, -0.1173, 1.8985], [-2.1731, -1.6397, -2.7358, 0.2854, -2.1840, 1.7183, -1.3018, 2.4871], [0.2717, -3.6173, -2.9206, -2.1988, -3.6638, 0.3858, -2.9155, 2.2980], [3.9859, -2.1580, -0.7984, -4.4904, -4.1181, -2.0252, -4.4782, 1.1253], ] ) UpperCamelCase :Optional[int] = tf.constant( [ [0.0000, -0.0100, -0.0200, -0.0300, -0.0400, -0.0500, -0.0600, -0.0700], [0.2012, -0.8897, -0.0263, -0.9401, -0.2074, -0.9463, -0.3481, -0.9343], [1.7057, -0.6271, 1.2145, -1.3897, 0.6303, -1.7647, 0.1173, -1.8985], [2.1731, 1.6397, 2.7358, -0.2854, 2.1840, -1.7183, 1.3018, -2.4871], [-0.2717, 3.6173, 2.9206, 2.1988, 3.6638, -0.3858, 2.9155, -2.2980], [-3.9859, 2.1580, 0.7984, 4.4904, 4.1181, 2.0252, 4.4782, -1.1253], ] ) tf.debugging.assert_near(query_layer[0, 0, :6, :8] , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) tf.debugging.assert_near(key_layer[0, 0, :6, :8] , SCREAMING_SNAKE_CASE_ , atol=self.tolerance )
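# --- illustrative sketch (added; a minimal NumPy re-derivation of the rotary
# idea the test above exercises, not the exact transformers implementation) ---
import numpy as np


def rotate_pairs(x: np.ndarray, positions: np.ndarray) -> np.ndarray:
    # Rotate consecutive feature pairs (x0, x1) by position-dependent angles,
    # the core trick behind RoFormer's rotary position embeddings.
    dim = x.shape[-1]
    inv_freq = 1.0 / (10000 ** (np.arange(0, dim, 2) / dim))  # one frequency per pair
    angles = positions[:, None] * inv_freq[None, :]  # (seq_len, dim // 2)
    sin, cos = np.sin(angles), np.cos(angles)
    x_even, x_odd = x[..., 0::2], x[..., 1::2]
    rotated = np.empty_like(x)
    rotated[..., 0::2] = x_even * cos - x_odd * sin
    rotated[..., 1::2] = x_even * sin + x_odd * cos
    return rotated


# A rotation preserves norms, so query/key magnitudes are unchanged and
# dot products come to depend only on relative position offsets.
x = np.random.randn(6, 8)
rotated = rotate_pairs(x, np.arange(6, dtype=np.float64))
assert np.allclose(np.linalg.norm(rotated, axis=-1), np.linalg.norm(x, axis=-1))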
import json import os import shutil import tempfile import unittest import numpy as np import pytest from transformers import MgpstrTokenizer from transformers.models.mgp_str.tokenization_mgp_str import VOCAB_FILES_NAMES from transformers.testing_utils import require_torch, require_vision from transformers.utils import IMAGE_PROCESSOR_NAME, is_torch_available, is_vision_available if is_torch_available(): import torch if is_vision_available(): from PIL import Image from transformers import MgpstrProcessor, ViTImageProcessor @require_torch @require_vision class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Union[str, Any] =ViTImageProcessor if is_vision_available() else None @property def UpperCAmelCase ( self ) -> Dict: return self.image_processor_tester.prepare_image_processor_dict() def UpperCAmelCase ( self ) -> int: UpperCamelCase :Union[str, Any] = (3, 32, 128) UpperCamelCase :Any = tempfile.mkdtemp() # fmt: off UpperCamelCase :int = ['''[GO]''', '''[s]''', '''0''', '''1''', '''2''', '''3''', '''4''', '''5''', '''6''', '''7''', '''8''', '''9''', '''a''', '''b''', '''c''', '''d''', '''e''', '''f''', '''g''', '''h''', '''i''', '''j''', '''k''', '''l''', '''m''', '''n''', '''o''', '''p''', '''q''', '''r''', '''s''', '''t''', '''u''', '''v''', '''w''', '''x''', '''y''', '''z'''] # fmt: on UpperCamelCase :Optional[int] = dict(zip(SCREAMING_SNAKE_CASE_ , range(len(SCREAMING_SNAKE_CASE_ ) ) ) ) UpperCamelCase :Optional[int] = os.path.join(self.tmpdirname , VOCAB_FILES_NAMES['''vocab_file'''] ) with open(self.vocab_file , '''w''' , encoding='''utf-8''' ) as fp: fp.write(json.dumps(SCREAMING_SNAKE_CASE_ ) + '''\n''' ) UpperCamelCase :Tuple = { '''do_normalize''': False, '''do_resize''': True, '''image_processor_type''': '''ViTImageProcessor''', '''resample''': 3, '''size''': {'''height''': 32, '''width''': 128}, } UpperCamelCase :str = os.path.join(self.tmpdirname , SCREAMING_SNAKE_CASE_ ) with open(self.image_processor_file , '''w''' , encoding='''utf-8''' ) as fp: json.dump(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> int: return MgpstrTokenizer.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: return ViTImageProcessor.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> str: shutil.rmtree(self.tmpdirname ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = np.random.randint(255 , size=(3, 30, 400) , dtype=np.uinta ) UpperCamelCase :List[Any] = Image.fromarray(np.moveaxis(SCREAMING_SNAKE_CASE_ , 0 , -1 ) ) return image_input def UpperCAmelCase ( self ) -> str: UpperCamelCase :str = self.get_tokenizer() UpperCamelCase :Union[str, Any] = self.get_image_processor() UpperCamelCase :List[Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) processor.save_pretrained(self.tmpdirname ) UpperCamelCase :Dict = MgpstrProcessor.from_pretrained(self.tmpdirname , use_fast=SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.char_tokenizer.get_vocab() , tokenizer.get_vocab() ) self.assertIsInstance(processor.char_tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.image_processor.to_json_string() , image_processor.to_json_string() ) self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> int: UpperCamelCase :Optional[int] = self.get_tokenizer() UpperCamelCase :Dict = 
self.get_image_processor() UpperCamelCase :List[Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) processor.save_pretrained(self.tmpdirname ) UpperCamelCase :Optional[int] = self.get_tokenizer(bos_token='''(BOS)''' , eos_token='''(EOS)''' ) UpperCamelCase :Optional[Any] = self.get_image_processor(do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 ) UpperCamelCase :int = MgpstrProcessor.from_pretrained( self.tmpdirname , bos_token='''(BOS)''' , eos_token='''(EOS)''' , do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 ) self.assertEqual(processor.char_tokenizer.get_vocab() , tokenizer_add_kwargs.get_vocab() ) self.assertIsInstance(processor.char_tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.image_processor.to_json_string() , image_processor_add_kwargs.to_json_string() ) self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Tuple = self.get_image_processor() UpperCamelCase :List[str] = self.get_tokenizer() UpperCamelCase :str = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = self.prepare_image_inputs() UpperCamelCase :List[str] = image_processor(SCREAMING_SNAKE_CASE_ , return_tensors='''np''' ) UpperCamelCase :Optional[Any] = processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''np''' ) for key in input_image_proc.keys(): self.assertAlmostEqual(input_image_proc[key].sum() , input_processor[key].sum() , delta=1e-2 ) def UpperCAmelCase ( self ) -> Any: UpperCamelCase :Optional[Any] = self.get_image_processor() UpperCamelCase :Union[str, Any] = self.get_tokenizer() UpperCamelCase :int = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = '''test''' UpperCamelCase :Optional[int] = processor(text=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = tokenizer(SCREAMING_SNAKE_CASE_ ) for key in encoded_tok.keys(): self.assertListEqual(encoded_tok[key] , encoded_processor[key] ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :List[str] = self.get_image_processor() UpperCamelCase :Tuple = self.get_tokenizer() UpperCamelCase :Union[str, Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = '''test''' UpperCamelCase :str = self.prepare_image_inputs() UpperCamelCase :Dict = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(list(inputs.keys() ) , ['''pixel_values''', '''labels'''] ) # test if it raises when no input is passed with pytest.raises(SCREAMING_SNAKE_CASE_ ): processor() def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Optional[Any] = self.get_image_processor() UpperCamelCase :Any = self.get_tokenizer() UpperCamelCase :Union[str, Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = [[1, 4, 5, 8, 1, 0, 8], [3, 4, 3, 1, 1, 8, 9], [3, 4, 3, 1, 1, 8, 9]] UpperCamelCase :Union[str, Any] = processor.char_decode(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = tokenizer.batch_decode(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = [seq.replace(''' ''' , '''''' ) for seq in decoded_tok] self.assertListEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :List[Any] = self.get_image_processor() UpperCamelCase :Optional[Any] = self.get_tokenizer() UpperCamelCase 
:Any = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = None UpperCamelCase :List[Any] = self.prepare_image_inputs() UpperCamelCase :Union[str, Any] = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(list(inputs.keys() ) , processor.model_input_names ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :str = self.get_image_processor() UpperCamelCase :Tuple = self.get_tokenizer() UpperCamelCase :Optional[int] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.randn(1 , 27 , 38 ) UpperCamelCase :Union[str, Any] = torch.randn(1 , 27 , 5_0257 ) UpperCamelCase :Optional[Any] = torch.randn(1 , 27 , 3_0522 ) UpperCamelCase :Optional[Any] = processor.batch_decode([char_input, bpe_input, wp_input] ) self.assertListEqual(list(results.keys() ) , ['''generated_text''', '''scores''', '''char_preds''', '''bpe_preds''', '''wp_preds'''] )
import inspect import unittest from transformers import DPTConfig from transformers.file_utils import is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, require_vision, slow, torch_device from ...test_configuration_common import ConfigTester from ...test_modeling_common import ModelTesterMixin, _config_zero_init, floats_tensor, ids_tensor from ...test_pipeline_mixin import PipelineTesterMixin if is_torch_available(): import torch from torch import nn from transformers import MODEL_MAPPING, DPTForDepthEstimation, DPTForSemanticSegmentation, DPTModel from transformers.models.dpt.modeling_dpt import DPT_PRETRAINED_MODEL_ARCHIVE_LIST if is_vision_available(): from PIL import Image from transformers import DPTImageProcessor class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=[0, 1, 2, 3] , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=37 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=[1, 384, 24, 24] , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=None , ) -> int: UpperCamelCase :List[Any] = parent UpperCamelCase :List[str] = batch_size UpperCamelCase :Optional[Any] = image_size UpperCamelCase :Optional[Any] = patch_size UpperCamelCase :Optional[Any] = num_channels UpperCamelCase :Union[str, Any] = is_training UpperCamelCase :Dict = use_labels UpperCamelCase :List[Any] = hidden_size UpperCamelCase :Optional[int] = num_hidden_layers UpperCamelCase :Any = backbone_out_indices UpperCamelCase :int = num_attention_heads UpperCamelCase :Union[str, Any] = intermediate_size UpperCamelCase :List[str] = hidden_act UpperCamelCase :Optional[int] = hidden_dropout_prob UpperCamelCase :int = attention_probs_dropout_prob UpperCamelCase :Optional[Any] = initializer_range UpperCamelCase :List[Any] = num_labels UpperCamelCase :Any = backbone_featmap_shape UpperCamelCase :Optional[int] = scope UpperCamelCase :Optional[int] = is_hybrid # sequence length of DPT = num_patches + 1 (we add 1 for the [CLS] token) UpperCamelCase :Tuple = (image_size // patch_size) ** 2 UpperCamelCase :int = num_patches + 1 def UpperCAmelCase ( self ) -> str: UpperCamelCase :Tuple = floats_tensor([self.batch_size, self.num_channels, self.image_size, self.image_size] ) UpperCamelCase :int = None if self.use_labels: UpperCamelCase :str = ids_tensor([self.batch_size, self.image_size, self.image_size] , self.num_labels ) UpperCamelCase :Any = self.get_config() return config, pixel_values, labels def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Tuple = { '''global_padding''': '''same''', '''layer_type''': '''bottleneck''', '''depths''': [3, 4, 9], '''out_features''': ['''stage1''', '''stage2''', '''stage3'''], '''embedding_dynamic_padding''': True, '''hidden_sizes''': [96, 192, 384, 768], '''num_groups''': 2, } return DPTConfig( image_size=self.image_size , patch_size=self.patch_size , num_channels=self.num_channels , hidden_size=self.hidden_size , num_hidden_layers=self.num_hidden_layers , backbone_out_indices=self.backbone_out_indices , num_attention_heads=self.num_attention_heads , intermediate_size=self.intermediate_size , 
hidden_act=self.hidden_act , hidden_dropout_prob=self.hidden_dropout_prob , attention_probs_dropout_prob=self.attention_probs_dropout_prob , is_decoder=SCREAMING_SNAKE_CASE_ , initializer_range=self.initializer_range , is_hybrid=self.is_hybrid , backbone_config=SCREAMING_SNAKE_CASE_ , backbone_featmap_shape=self.backbone_featmap_shape , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: UpperCamelCase :Optional[int] = DPTModel(config=SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :Optional[int] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.last_hidden_state.shape , (self.batch_size, self.seq_length, self.hidden_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :Tuple = self.num_labels UpperCamelCase :Any = DPTForDepthEstimation(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :Union[str, Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.predicted_depth.shape , (self.batch_size, self.image_size, self.image_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :int = self.num_labels UpperCamelCase :str = DPTForSemanticSegmentation(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :List[str] = model(SCREAMING_SNAKE_CASE_ , labels=SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual( result.logits.shape , (self.batch_size, self.num_labels, self.image_size, self.image_size) ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :List[Any] = self.prepare_config_and_inputs() UpperCamelCase , UpperCamelCase , UpperCamelCase :Optional[Any] = config_and_inputs UpperCamelCase :List[Any] = {'''pixel_values''': pixel_values} return config, inputs_dict @require_torch class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Tuple =(DPTModel, DPTForDepthEstimation, DPTForSemanticSegmentation) if is_torch_available() else () UpperCamelCase_ : Optional[Any] =( { 'depth-estimation': DPTForDepthEstimation, 'feature-extraction': DPTModel, 'image-segmentation': DPTForSemanticSegmentation, } if is_torch_available() else {} ) UpperCamelCase_ : List[Any] =False UpperCamelCase_ : Optional[int] =False UpperCamelCase_ : Union[str, Any] =False def UpperCAmelCase ( self ) -> int: UpperCamelCase :Optional[Any] = DPTModelTester(self ) UpperCamelCase :List[Any] = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , has_text_modality=SCREAMING_SNAKE_CASE_ , hidden_size=37 ) def UpperCAmelCase ( self ) -> Union[str, Any]: self.config_tester.run_common_tests() @unittest.skip(reason='''DPT does not use inputs_embeds''' ) def UpperCAmelCase ( self ) -> int: pass def UpperCAmelCase ( self ) -> Optional[int]: UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ ) self.assertIsInstance(model.get_input_embeddings() , (nn.Module) ) UpperCamelCase :Optional[int] = model.get_output_embeddings() self.assertTrue(x is None or isinstance(SCREAMING_SNAKE_CASE_ , nn.Linear ) ) def UpperCAmelCase ( self ) -> int: UpperCamelCase , UpperCamelCase :Union[str, Any] = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase 
:Optional[Any] = model_class(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = inspect.signature(model.forward ) # signature.parameters is an OrderedDict => so arg_names order is deterministic UpperCamelCase :Tuple = [*signature.parameters.keys()] UpperCamelCase :Any = ['''pixel_values'''] self.assertListEqual(arg_names[:1] , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Any = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_depth_estimation(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :int = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_semantic_segmentation(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Any: for model_class in self.all_model_classes: if model_class.__name__ == "DPTForDepthEstimation": continue UpperCamelCase , UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :int = True if model_class in get_values(SCREAMING_SNAKE_CASE_ ): continue UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.train() UpperCamelCase :Union[str, Any] = self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_labels=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = model(**SCREAMING_SNAKE_CASE_ ).loss loss.backward() def UpperCAmelCase ( self ) -> Optional[int]: for model_class in self.all_model_classes: if model_class.__name__ == "DPTForDepthEstimation": continue UpperCamelCase , UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Union[str, Any] = False UpperCamelCase :Dict = True if model_class in get_values(SCREAMING_SNAKE_CASE_ ) or not model_class.supports_gradient_checkpointing: continue UpperCamelCase :Tuple = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.gradient_checkpointing_enable() model.train() UpperCamelCase :List[Any] = self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_labels=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = model(**SCREAMING_SNAKE_CASE_ ).loss loss.backward() def UpperCAmelCase ( self ) -> Dict: UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Dict = _config_zero_init(SCREAMING_SNAKE_CASE_ ) for model_class in self.all_model_classes: UpperCamelCase :Tuple = model_class(config=SCREAMING_SNAKE_CASE_ ) # Skip the check for the backbone UpperCamelCase :List[str] = [] for name, module in model.named_modules(): if module.__class__.__name__ == "DPTViTHybridEmbeddings": UpperCamelCase :Tuple = [F'''{name}.{key}''' for key in module.state_dict().keys()] break for name, param in model.named_parameters(): if param.requires_grad: if name in backbone_params: continue self.assertIn( ((param.data.mean() * 1e9).round() / 1e9).item() , [0.0, 1.0] , msg=F'''Parameter {name} of model {model_class} seems not properly initialized''' , ) @unittest.skip('''Will be fixed soon by reducing the size of the model used for common tests.''' ) def UpperCAmelCase ( self ) -> Tuple: pass @slow def UpperCAmelCase ( self ) -> Any: for model_name in DPT_PRETRAINED_MODEL_ARCHIVE_LIST[1:]: UpperCamelCase :int = DPTModel.from_pretrained(SCREAMING_SNAKE_CASE_ ) 
self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: # We do this test only for DPTForDepthEstimation since it is the only model that uses readout_type UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Optional[Any] = '''add''' with self.assertRaises(SCREAMING_SNAKE_CASE_ ): UpperCamelCase :int = DPTForDepthEstimation(SCREAMING_SNAKE_CASE_ ) def _A ( ): UpperCamelCase :List[Any] = Image.open('''./tests/fixtures/tests_samples/COCO/000000039769.png''' ) return image @require_torch @require_vision @slow class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> str: UpperCamelCase :Any = DPTImageProcessor.from_pretrained('''Intel/dpt-hybrid-midas''' ) UpperCamelCase :int = DPTForDepthEstimation.from_pretrained('''Intel/dpt-hybrid-midas''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = prepare_img() UpperCamelCase :Union[str, Any] = image_processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''pt''' ).to(SCREAMING_SNAKE_CASE_ ) # forward pass with torch.no_grad(): UpperCamelCase :Union[str, Any] = model(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = outputs.predicted_depth # verify the predicted depth UpperCamelCase :List[str] = torch.Size((1, 384, 384) ) self.assertEqual(predicted_depth.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.tensor( [[[5.6437, 5.6146, 5.6511], [5.4371, 5.5649, 5.5958], [5.5215, 5.5184, 5.5293]]] ).to(SCREAMING_SNAKE_CASE_ ) self.assertTrue(torch.allclose(outputs.predicted_depth[:3, :3, :3] / 100 , SCREAMING_SNAKE_CASE_ , atol=1e-4 ) )
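# --- illustrative usage sketch (added; mirrors the slow integration test
# above, with "my_image.png" as a hypothetical local file and bicubic
# upsampling as a common post-processing step) ---
import torch
from PIL import Image
from transformers import DPTForDepthEstimation, DPTImageProcessor

image_processor = DPTImageProcessor.from_pretrained("Intel/dpt-hybrid-midas")
model = DPTForDepthEstimation.from_pretrained("Intel/dpt-hybrid-midas")

image = Image.open("my_image.png")
inputs = image_processor(images=image, return_tensors="pt")
with torch.no_grad():
    predicted_depth = model(**inputs).predicted_depth  # shape (1, 384, 384)

# Resize the raw depth map back to the original image resolution.
depth = torch.nn.functional.interpolate(
    predicted_depth.unsqueeze(1), size=image.size[::-1], mode="bicubic", align_corners=False
).squeeze()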
import unittest

from diffusers import FlaxAutoencoderKL
from diffusers.utils import is_flax_available
from diffusers.utils.testing_utils import require_flax

from .test_modeling_common_flax import FlaxModelTesterMixin


if is_flax_available():
    import jax


@require_flax
class FlaxAutoencoderKLTests(FlaxModelTesterMixin, unittest.TestCase):
    model_class = FlaxAutoencoderKL

    @property
    def dummy_input(self):
        batch_size = 4
        num_channels = 3
        sizes = (32, 32)

        prng_key = jax.random.PRNGKey(0)
        image = jax.random.uniform(prng_key, ((batch_size, num_channels) + sizes))

        return {"sample": image, "prng_key": prng_key}

    def prepare_init_args_and_inputs_for_common(self):
        init_dict = {
            "block_out_channels": [32, 64],
            "in_channels": 3,
            "out_channels": 3,
            "down_block_types": ["DownEncoderBlock2D", "DownEncoderBlock2D"],
            "up_block_types": ["UpDecoderBlock2D", "UpDecoderBlock2D"],
            "latent_channels": 4,
        }
        inputs_dict = self.dummy_input
        return init_dict, inputs_dict
def triangle_number_generator():
    for n in range(1, 1000000):
        yield n * (n + 1) // 2


def count_divisors(n: int) -> int:
    divisors_count = 1
    i = 2
    while i * i <= n:
        multiplicity = 0
        while n % i == 0:
            n //= i
            multiplicity += 1
        divisors_count *= multiplicity + 1
        i += 1
    if n > 1:
        divisors_count *= 2
    return divisors_count


def solution() -> int:
    return next(i for i in triangle_number_generator() if count_divisors(i) > 500)


if __name__ == "__main__":
    print(solution())
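# --- illustrative sketch (added; a sanity check of the divisor count) ---
# 28 = 2**2 * 7, so count_divisors returns (2 + 1) * (1 + 1) = 6 divisors
# (1, 2, 4, 7, 14, 28); 28 is also the first triangle number with more than
# five divisors.
if __name__ == "__main__":
    assert count_divisors(28) == 6
    assert next(t for t in triangle_number_generator() if count_divisors(t) > 5) == 28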
import argparse

import torch

from transformers import BertConfig, BertForPreTraining, load_tf_weights_in_bert
from transformers.utils import logging


logging.set_verbosity_info()


def convert_tf_checkpoint_to_pytorch(tf_checkpoint_path, bert_config_file, pytorch_dump_path):
    # Initialise PyTorch model
    config = BertConfig.from_json_file(bert_config_file)
    print(f"Building PyTorch model from configuration: {config}")
    model = BertForPreTraining(config)

    # Load weights from tf checkpoint
    load_tf_weights_in_bert(model, config, tf_checkpoint_path)

    # Save pytorch-model
    print(f"Save PyTorch model to {pytorch_dump_path}")
    torch.save(model.state_dict(), pytorch_dump_path)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument(
        "--tf_checkpoint_path", default=None, type=str, required=True, help="Path to the TensorFlow checkpoint path."
    )
    parser.add_argument(
        "--bert_config_file",
        default=None,
        type=str,
        required=True,
        help=(
            "The config json file corresponding to the pre-trained BERT model. \n"
            "This specifies the model architecture."
        ),
    )
    parser.add_argument(
        "--pytorch_dump_path", default=None, type=str, required=True, help="Path to the output PyTorch model."
    )
    args = parser.parse_args()
    convert_tf_checkpoint_to_pytorch(args.tf_checkpoint_path, args.bert_config_file, args.pytorch_dump_path)
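# --- illustrative usage sketch (added; the checkpoint paths are hypothetical
# placeholders, not files shipped with this script) ---
# The same conversion can be driven from Python instead of the CLI:
#
#   convert_tf_checkpoint_to_pytorch(
#       tf_checkpoint_path="models/uncased_L-12_H-768_A-12/bert_model.ckpt",
#       bert_config_file="models/uncased_L-12_H-768_A-12/bert_config.json",
#       pytorch_dump_path="models/pytorch_model.bin",
#   )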
def bfs(graph, s, t, parent):
    # Return True if the sink `t` is reachable from the source `s` through
    # edges that still have residual capacity.
    visited = [False] * len(graph)
    queue = []
    queue.append(s)
    visited[s] = True
    while queue:
        u = queue.pop(0)
        for ind in range(len(graph[u])):
            if visited[ind] is False and graph[u][ind] > 0:
                queue.append(ind)
                visited[ind] = True
                parent[ind] = u
    return visited[t]


def ford_fulkerson(graph, source, sink):
    # `parent` is filled by BFS and used to reconstruct each augmenting path.
    parent = [-1] * len(graph)
    max_flow = 0
    while bfs(graph, source, sink, parent):
        path_flow = float("Inf")
        s = sink
        while s != source:
            # Find the bottleneck capacity along the selected path
            path_flow = min(path_flow, graph[parent[s]][s])
            s = parent[s]
        max_flow += path_flow
        v = sink
        while v != source:
            u = parent[v]
            graph[u][v] -= path_flow
            graph[v][u] += path_flow
            v = parent[v]
    return max_flow


graph = [
    [0, 16, 13, 0, 0, 0],
    [0, 0, 10, 12, 0, 0],
    [0, 4, 0, 0, 14, 0],
    [0, 0, 9, 0, 0, 20],
    [0, 0, 0, 7, 0, 4],
    [0, 0, 0, 0, 0, 0],
]
source, sink = 0, 5
print(ford_fulkerson(graph, source, sink))
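# --- illustrative sketch (added; a second, smaller network to show the
# algorithm end to end) ---
# Note that ford_fulkerson mutates `graph` in place, turning it into the
# residual network, so pass a copy if the original capacities are still needed.
small_graph = [
    [0, 3, 2, 0],  # 0 -> 1 (cap 3), 0 -> 2 (cap 2)
    [0, 0, 1, 2],  # 1 -> 2 (cap 1), 1 -> 3 (cap 2)
    [0, 0, 0, 3],  # 2 -> 3 (cap 3)
    [0, 0, 0, 0],  # 3 is the sink
]
assert ford_fulkerson(small_graph, 0, 3) == 5  # 2 via 0-1-3, 2 via 0-2-3, 1 via 0-1-2-3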
from __future__ import annotations import inspect import unittest import numpy as np from transformers import ResNetConfig from transformers.testing_utils import require_tf, require_vision, slow from transformers.utils import cached_property, is_tf_available, is_vision_available from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, floats_tensor, ids_tensor from ...test_pipeline_mixin import PipelineTesterMixin if is_tf_available(): import tensorflow as tf from transformers import TFResNetForImageClassification, TFResNetModel from transformers.models.resnet.modeling_tf_resnet import TF_RESNET_PRETRAINED_MODEL_ARCHIVE_LIST if is_vision_available(): from PIL import Image from transformers import AutoImageProcessor class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=10 , SCREAMING_SNAKE_CASE_=[10, 20, 30, 40] , SCREAMING_SNAKE_CASE_=[1, 1, 2, 1] , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_="relu" , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=None , ) -> str: UpperCamelCase :List[str] = parent UpperCamelCase :Optional[int] = batch_size UpperCamelCase :int = image_size UpperCamelCase :Tuple = num_channels UpperCamelCase :str = embeddings_size UpperCamelCase :int = hidden_sizes UpperCamelCase :Optional[int] = depths UpperCamelCase :Tuple = is_training UpperCamelCase :Union[str, Any] = use_labels UpperCamelCase :Union[str, Any] = hidden_act UpperCamelCase :Any = num_labels UpperCamelCase :Dict = scope UpperCamelCase :Union[str, Any] = len(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> str: UpperCamelCase :List[str] = floats_tensor([self.batch_size, self.num_channels, self.image_size, self.image_size] ) UpperCamelCase :Optional[int] = None if self.use_labels: UpperCamelCase :int = ids_tensor([self.batch_size] , self.num_labels ) UpperCamelCase :str = self.get_config() return config, pixel_values, labels def UpperCAmelCase ( self ) -> Optional[int]: return ResNetConfig( num_channels=self.num_channels , embeddings_size=self.embeddings_size , hidden_sizes=self.hidden_sizes , depths=self.depths , hidden_act=self.hidden_act , num_labels=self.num_labels , image_size=self.image_size , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Any: UpperCamelCase :Optional[int] = TFResNetModel(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = model(SCREAMING_SNAKE_CASE_ ) # expected last hidden states: B, C, H // 32, W // 32 self.parent.assertEqual( result.last_hidden_state.shape , (self.batch_size, self.hidden_sizes[-1], self.image_size // 32, self.image_size // 32) , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :Optional[Any] = self.num_labels UpperCamelCase :List[str] = TFResNetForImageClassification(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = model(SCREAMING_SNAKE_CASE_ , labels=SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_labels) ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Optional[int] = self.prepare_config_and_inputs() UpperCamelCase , UpperCamelCase , UpperCamelCase :Tuple = config_and_inputs UpperCamelCase :Dict = {'''pixel_values''': pixel_values} return config, inputs_dict @require_tf class UpperCAmelCase_ ( lowercase, lowercase, 
unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Optional[int] =(TFResNetModel, TFResNetForImageClassification) if is_tf_available() else () UpperCamelCase_ : Any =( {'feature-extraction': TFResNetModel, 'image-classification': TFResNetForImageClassification} if is_tf_available() else {} ) UpperCamelCase_ : int =False UpperCamelCase_ : str =False UpperCamelCase_ : Optional[Any] =False UpperCamelCase_ : List[Any] =False UpperCamelCase_ : int =False def UpperCAmelCase ( self ) -> Any: UpperCamelCase :Dict = TFResNetModelTester(self ) UpperCamelCase :str = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , has_text_modality=SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[str]: self.create_and_test_config_common_properties() self.config_tester.create_and_test_config_to_json_string() self.config_tester.create_and_test_config_to_json_file() self.config_tester.create_and_test_config_from_and_save_pretrained() self.config_tester.create_and_test_config_with_num_labels() self.config_tester.check_config_can_be_init_without_params() self.config_tester.check_config_arguments_init() def UpperCAmelCase ( self ) -> Union[str, Any]: return @unittest.skip(reason='''ResNet does not use inputs_embeds''' ) def UpperCAmelCase ( self ) -> Tuple: pass @unittest.skip(reason='''ResNet does not support input and output embeddings''' ) def UpperCAmelCase ( self ) -> List[str]: pass def UpperCAmelCase ( self ) -> List[str]: UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase :List[Any] = model_class(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = inspect.signature(model.call ) # signature.parameters is an OrderedDict => so arg_names order is deterministic UpperCamelCase :int = [*signature.parameters.keys()] UpperCamelCase :List[str] = ['''pixel_values'''] self.assertListEqual(arg_names[:1] , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: def check_hidden_states_output(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ): UpperCamelCase :str = model_class(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Tuple = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :Tuple = outputs.encoder_hidden_states if config.is_encoder_decoder else outputs.hidden_states UpperCamelCase :Optional[Any] = self.model_tester.num_stages self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , expected_num_stages + 1 ) # ResNet's feature maps are of shape (batch_size, num_channels, height, width) self.assertListEqual( list(hidden_states[0].shape[-2:] ) , [self.model_tester.image_size // 4, self.model_tester.image_size // 4] , ) UpperCamelCase , UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :List[str] = ['''basic''', '''bottleneck'''] for model_class in self.all_model_classes: for layer_type in layers_type: UpperCamelCase :int = layer_type UpperCamelCase :str = True check_hidden_states_output(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # check that output_hidden_states also work using config del inputs_dict["output_hidden_states"] UpperCamelCase :int = True check_hidden_states_output(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[str]: 
UpperCamelCase :Any = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_image_classification(*SCREAMING_SNAKE_CASE_ ) @slow def UpperCAmelCase ( self ) -> str: for model_name in TF_RESNET_PRETRAINED_MODEL_ARCHIVE_LIST[:1]: UpperCamelCase :Optional[Any] = TFResNetModel.from_pretrained(SCREAMING_SNAKE_CASE_ ) self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) def _A ( ): UpperCamelCase :int = Image.open('''./tests/fixtures/tests_samples/COCO/000000039769.png''' ) return image @require_tf @require_vision class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" @cached_property def UpperCAmelCase ( self ) -> Optional[int]: return ( AutoImageProcessor.from_pretrained(TF_RESNET_PRETRAINED_MODEL_ARCHIVE_LIST[0] ) if is_vision_available() else None ) @slow def UpperCAmelCase ( self ) -> Optional[int]: UpperCamelCase :Union[str, Any] = TFResNetForImageClassification.from_pretrained(TF_RESNET_PRETRAINED_MODEL_ARCHIVE_LIST[0] ) UpperCamelCase :int = self.default_image_processor UpperCamelCase :int = prepare_img() UpperCamelCase :Optional[int] = image_processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''tf''' ) # forward pass UpperCamelCase :Union[str, Any] = model(**SCREAMING_SNAKE_CASE_ ) # verify the logits UpperCamelCase :Optional[int] = tf.TensorShape((1, 1000) ) self.assertEqual(outputs.logits.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = tf.constant([-11.1069, -9.7877, -8.3777] ) self.assertTrue(np.allclose(outputs.logits[0, :3].numpy() , SCREAMING_SNAKE_CASE_ , atol=1e-4 ) )
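# --- illustrative usage sketch (added; same checkpoint family as the
# integration test above, decoded to a human-readable ImageNet label) ---
import tensorflow as tf
from PIL import Image
from transformers import AutoImageProcessor, TFResNetForImageClassification

image = Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png")
image_processor = AutoImageProcessor.from_pretrained("microsoft/resnet-50")
model = TFResNetForImageClassification.from_pretrained("microsoft/resnet-50")

inputs = image_processor(images=image, return_tensors="tf")
logits = model(**inputs).logits  # shape (1, 1000)
predicted_class = int(tf.argmax(logits, axis=-1)[0])
print(model.config.id2label[predicted_class])  # prints the top-1 ImageNet class name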
from __future__ import annotations

from typing import Any


def generate_all_subsequences(sequence: list[Any]) -> None:
    create_state_space_tree(sequence, [], 0)


def create_state_space_tree(sequence: list[Any], current_subsequence: list[Any], index: int) -> None:
    if index == len(sequence):
        print(current_subsequence)
        return

    # Branch 1: skip the element at `index` ...
    create_state_space_tree(sequence, current_subsequence, index + 1)
    # Branch 2: ... then include it, recurse, and backtrack.
    current_subsequence.append(sequence[index])
    create_state_space_tree(sequence, current_subsequence, index + 1)
    current_subsequence.pop()


if __name__ == "__main__":
    seq: list[Any] = [3, 1, 2, 4]
    generate_all_subsequences(seq)

    seq.clear()
    seq.extend(["A", "B", "C"])
    generate_all_subsequences(seq)
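# --- illustrative note (added) ---
# Each element is either skipped or included, so a sequence of length n prints
# 2 ** n subsequences. For [1, 2] the traversal order is:
#   []       (skip 1, skip 2)
#   [2]      (skip 1, take 2)
#   [1]      (take 1, skip 2)
#   [1, 2]   (take 1, take 2)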
from pickle import UnpicklingError import jax import jax.numpy as jnp import numpy as np from flax.serialization import from_bytes from flax.traverse_util import flatten_dict from ..utils import logging __snake_case = logging.get_logger(__name__) def _A ( SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : List[str] ): try: with open(SCREAMING_SNAKE_CASE__ , '''rb''' ) as flax_state_f: UpperCamelCase :str = from_bytes(SCREAMING_SNAKE_CASE__ , flax_state_f.read() ) except UnpicklingError as e: try: with open(SCREAMING_SNAKE_CASE__ ) as f: if f.read().startswith('''version''' ): raise OSError( '''You seem to have cloned a repository without having git-lfs installed. Please''' ''' install git-lfs and run `git lfs install` followed by `git lfs pull` in the''' ''' folder you cloned.''' ) else: raise ValueError from e except (UnicodeDecodeError, ValueError): raise EnvironmentError(F'''Unable to convert {model_file} to Flax deserializable object. ''' ) return load_flax_weights_in_pytorch_model(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : List[str] ): try: import torch # noqa: F401 except ImportError: logger.error( '''Loading Flax weights in PyTorch requires both PyTorch and Flax to be installed. Please see''' ''' https://pytorch.org/ and https://flax.readthedocs.io/en/latest/installation.html for installation''' ''' instructions.''' ) raise # check if we have bf16 weights UpperCamelCase :Union[str, Any] = flatten_dict(jax.tree_util.tree_map(lambda SCREAMING_SNAKE_CASE__ : x.dtype == jnp.bfloataa , SCREAMING_SNAKE_CASE__ ) ).values() if any(SCREAMING_SNAKE_CASE__ ): # convert all weights to fp32 if they are bf16 since torch.from_numpy can-not handle bf16 # and bf16 is not fully supported in PT yet. logger.warning( '''Found ``bfloat16`` weights in Flax model. 
Casting all ``bfloat16`` weights to ``float32`` ''' '''before loading those in PyTorch model.''' ) UpperCamelCase :Tuple = jax.tree_util.tree_map( lambda SCREAMING_SNAKE_CASE__ : params.astype(np.floataa ) if params.dtype == jnp.bfloataa else params , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :int = '''''' UpperCamelCase :List[str] = flatten_dict(SCREAMING_SNAKE_CASE__ , sep='''.''' ) UpperCamelCase :Union[str, Any] = pt_model.state_dict() # keep track of unexpected & missing keys UpperCamelCase :Any = [] UpperCamelCase :int = set(pt_model_dict.keys() ) for flax_key_tuple, flax_tensor in flax_state_dict.items(): UpperCamelCase :List[Any] = flax_key_tuple.split('''.''' ) if flax_key_tuple_array[-1] == "kernel" and flax_tensor.ndim == 4: UpperCamelCase :Optional[Any] = flax_key_tuple_array[:-1] + ['''weight'''] UpperCamelCase :Any = jnp.transpose(SCREAMING_SNAKE_CASE__ , (3, 2, 0, 1) ) elif flax_key_tuple_array[-1] == "kernel": UpperCamelCase :Dict = flax_key_tuple_array[:-1] + ['''weight'''] UpperCamelCase :str = flax_tensor.T elif flax_key_tuple_array[-1] == "scale": UpperCamelCase :Optional[Any] = flax_key_tuple_array[:-1] + ['''weight'''] if "time_embedding" not in flax_key_tuple_array: for i, flax_key_tuple_string in enumerate(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :List[Any] = ( flax_key_tuple_string.replace('''_0''' , '''.0''' ) .replace('''_1''' , '''.1''' ) .replace('''_2''' , '''.2''' ) .replace('''_3''' , '''.3''' ) .replace('''_4''' , '''.4''' ) .replace('''_5''' , '''.5''' ) .replace('''_6''' , '''.6''' ) .replace('''_7''' , '''.7''' ) .replace('''_8''' , '''.8''' ) .replace('''_9''' , '''.9''' ) ) UpperCamelCase :Any = '''.'''.join(SCREAMING_SNAKE_CASE__ ) if flax_key in pt_model_dict: if flax_tensor.shape != pt_model_dict[flax_key].shape: raise ValueError( F'''Flax checkpoint seems to be incorrect. Weight {flax_key_tuple} was expected ''' F'''to be of shape {pt_model_dict[flax_key].shape}, but is {flax_tensor.shape}.''' ) else: # add weight to pytorch dict UpperCamelCase :Dict = np.asarray(SCREAMING_SNAKE_CASE__ ) if not isinstance(SCREAMING_SNAKE_CASE__ , np.ndarray ) else flax_tensor UpperCamelCase :Dict = torch.from_numpy(SCREAMING_SNAKE_CASE__ ) # remove from missing keys missing_keys.remove(SCREAMING_SNAKE_CASE__ ) else: # weight is not expected by PyTorch model unexpected_keys.append(SCREAMING_SNAKE_CASE__ ) pt_model.load_state_dict(SCREAMING_SNAKE_CASE__ ) # re-transform missing_keys to list UpperCamelCase :List[Any] = list(SCREAMING_SNAKE_CASE__ ) if len(SCREAMING_SNAKE_CASE__ ) > 0: logger.warning( '''Some weights of the Flax model were not used when initializing the PyTorch model''' F''' {pt_model.__class__.__name__}: {unexpected_keys}\n- This IS expected if you are initializing''' F''' {pt_model.__class__.__name__} from a Flax model trained on another task or with another architecture''' ''' (e.g. initializing a BertForSequenceClassification model from a FlaxBertForPreTraining model).\n- This''' F''' IS NOT expected if you are initializing {pt_model.__class__.__name__} from a Flax model that you expect''' ''' to be exactly identical (e.g. 
initializing a BertForSequenceClassification model from a''' ''' FlaxBertForSequenceClassification model).''' ) if len(SCREAMING_SNAKE_CASE__ ) > 0: logger.warning( F'''Some weights of {pt_model.__class__.__name__} were not initialized from the Flax model and are newly''' F''' initialized: {missing_keys}\nYou should probably TRAIN this model on a down-stream task to be able to''' ''' use it for predictions and inference.''' ) return pt_model
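# --- illustrative sketch (added; a NumPy check of the layout rule the
# conversion loop above applies) ---
# Flax stores a Dense kernel as (in_features, out_features) while
# torch.nn.Linear stores weight as (out_features, in_features), hence the
# transpose for "kernel" entries; the (3, 2, 0, 1) permutation likewise maps
# a Flax conv kernel (H, W, in, out) onto PyTorch's (out, in, H, W).
import numpy as np

flax_dense_kernel = np.zeros((128, 64))  # (in_features, out_features)
pt_linear_weight = flax_dense_kernel.T
assert pt_linear_weight.shape == (64, 128)

flax_conv_kernel = np.zeros((3, 3, 16, 32))  # (H, W, in, out)
pt_conv_weight = np.transpose(flax_conv_kernel, (3, 2, 0, 1))
assert pt_conv_weight.shape == (32, 16, 3, 3)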
from typing import Dict, List, Optional, Union import numpy as np from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict from ...image_transforms import ( center_crop, get_resize_output_image_size, normalize, rescale, resize, to_channel_dimension_format, ) from ...image_utils import ( IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, ChannelDimension, ImageInput, PILImageResampling, is_batched, to_numpy_array, valid_images, ) from ...utils import TensorType, logging __snake_case = logging.get_logger(__name__) class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : List[Any] =['pixel_values'] def __init__( self , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = PILImageResampling.BICUBIC , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = 1 / 255 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> None: super().__init__(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = size if size is not None else {'''height''': 224, '''width''': 224} UpperCamelCase :Optional[Any] = get_size_dict(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = crop_size if crop_size is not None else {'''height''': 224, '''width''': 224} UpperCamelCase :Dict = get_size_dict(SCREAMING_SNAKE_CASE_ , default_to_square=SCREAMING_SNAKE_CASE_ , param_name='''crop_size''' ) UpperCamelCase :Optional[int] = do_resize UpperCamelCase :int = do_rescale UpperCamelCase :Tuple = do_normalize UpperCamelCase :str = do_center_crop UpperCamelCase :int = crop_size UpperCamelCase :Tuple = size UpperCamelCase :List[str] = resample UpperCamelCase :Tuple = rescale_factor UpperCamelCase :Optional[Any] = image_mean if image_mean is not None else IMAGENET_DEFAULT_MEAN UpperCamelCase :Optional[int] = image_std if image_std is not None else IMAGENET_DEFAULT_STD def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = PILImageResampling.BILINEAR , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> np.ndarray: UpperCamelCase :Dict = get_size_dict(SCREAMING_SNAKE_CASE_ ) if "shortest_edge" in size: UpperCamelCase :str = get_resize_output_image_size(SCREAMING_SNAKE_CASE_ , size=size['''shortest_edge'''] , default_to_square=SCREAMING_SNAKE_CASE_ ) # size = get_resize_output_image_size(image, size["shortest_edge"], size["longest_edge"]) elif "height" in size and "width" in size: UpperCamelCase :Optional[int] = (size['''height'''], size['''width''']) else: raise ValueError(F'''Size must contain \'height\' and \'width\' keys or \'shortest_edge\' key. Got {size.keys()}''' ) return resize(SCREAMING_SNAKE_CASE_ , size=SCREAMING_SNAKE_CASE_ , resample=SCREAMING_SNAKE_CASE_ , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> np.ndarray: UpperCamelCase :Union[str, Any] = get_size_dict(SCREAMING_SNAKE_CASE_ ) if "height" not in size or "width" not in size: raise ValueError(F'''The `size` parameter must contain the keys (height, width). 
Got {size.keys()}''' ) return center_crop(SCREAMING_SNAKE_CASE_ , size=(size['''height'''], size['''width''']) , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ ) -> np.ndarray: return rescale(SCREAMING_SNAKE_CASE_ , scale=SCREAMING_SNAKE_CASE_ , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> np.ndarray: return normalize(SCREAMING_SNAKE_CASE_ , mean=SCREAMING_SNAKE_CASE_ , std=SCREAMING_SNAKE_CASE_ , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = ChannelDimension.FIRST , **SCREAMING_SNAKE_CASE_ , ) -> BatchFeature: UpperCamelCase :Union[str, Any] = do_resize if do_resize is not None else self.do_resize UpperCamelCase :Optional[int] = do_rescale if do_rescale is not None else self.do_rescale UpperCamelCase :Optional[Any] = do_normalize if do_normalize is not None else self.do_normalize UpperCamelCase :Union[str, Any] = do_center_crop if do_center_crop is not None else self.do_center_crop UpperCamelCase :Optional[int] = crop_size if crop_size is not None else self.crop_size UpperCamelCase :Dict = get_size_dict(SCREAMING_SNAKE_CASE_ , param_name='''crop_size''' , default_to_square=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = resample if resample is not None else self.resample UpperCamelCase :List[str] = rescale_factor if rescale_factor is not None else self.rescale_factor UpperCamelCase :Optional[Any] = image_mean if image_mean is not None else self.image_mean UpperCamelCase :Dict = image_std if image_std is not None else self.image_std UpperCamelCase :Dict = size if size is not None else self.size UpperCamelCase :Optional[int] = get_size_dict(SCREAMING_SNAKE_CASE_ ) if not is_batched(SCREAMING_SNAKE_CASE_ ): UpperCamelCase :str = [images] if not valid_images(SCREAMING_SNAKE_CASE_ ): raise ValueError( '''Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, ''' '''torch.Tensor, tf.Tensor or jax.ndarray.''' ) if do_resize and size is None: raise ValueError('''Size must be specified if do_resize is True.''' ) if do_center_crop and crop_size is None: raise ValueError('''Crop size must be specified if do_center_crop is True.''' ) if do_rescale and rescale_factor is None: raise ValueError('''Rescale factor must be specified if do_rescale is True.''' ) # All transformations expect numpy arrays. 
UpperCamelCase :Tuple = [to_numpy_array(SCREAMING_SNAKE_CASE_ ) for image in images] if do_resize: UpperCamelCase :List[Any] = [self.resize(image=SCREAMING_SNAKE_CASE_ , size=SCREAMING_SNAKE_CASE_ , resample=SCREAMING_SNAKE_CASE_ ) for image in images] if do_center_crop: UpperCamelCase :Tuple = [self.center_crop(image=SCREAMING_SNAKE_CASE_ , size=SCREAMING_SNAKE_CASE_ ) for image in images] if do_rescale: UpperCamelCase :Union[str, Any] = [self.rescale(image=SCREAMING_SNAKE_CASE_ , scale=SCREAMING_SNAKE_CASE_ ) for image in images] if do_normalize: UpperCamelCase :Union[str, Any] = [self.normalize(image=SCREAMING_SNAKE_CASE_ , mean=SCREAMING_SNAKE_CASE_ , std=SCREAMING_SNAKE_CASE_ ) for image in images] UpperCamelCase :List[str] = [to_channel_dimension_format(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) for image in images] UpperCamelCase :int = {'''pixel_values''': images} return BatchFeature(data=SCREAMING_SNAKE_CASE_ , tensor_type=SCREAMING_SNAKE_CASE_ )
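# --- illustrative sketch (added; walks the default preprocess steps above --
# center-crop, rescale, normalize, channels-first -- in plain NumPy on a
# random image, with the resize step omitted for brevity) ---
import numpy as np

rng = np.random.default_rng(0)
image = rng.integers(0, 256, size=(256, 320, 3)).astype(np.float32)  # (H, W, C)

# center_crop to the default 224 x 224
h, w = image.shape[:2]
top, left = (h - 224) // 2, (w - 224) // 2
image = image[top : top + 224, left : left + 224]

# rescale with the default factor 1 / 255, mapping pixels into [0, 1]
image = image * (1 / 255)

# normalize per channel with the ImageNet defaults used above
mean = np.array([0.485, 0.456, 0.406])
std = np.array([0.229, 0.224, 0.225])
image = (image - mean) / std

# ChannelDimension.FIRST: (H, W, C) -> (C, H, W)
pixel_values = np.transpose(image, (2, 0, 1))
assert pixel_values.shape == (3, 224, 224)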
import time from contextlib import contextmanager from pathlib import Path import pytest import requests from huggingface_hub.hf_api import HfApi, HfFolder __snake_case = """__DUMMY_TRANSFORMERS_USER__""" __snake_case = """Dummy User""" __snake_case = """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""" __snake_case = """https://hub-ci.huggingface.co""" __snake_case = CI_HUB_ENDPOINT + """/datasets/{repo_id}/resolve/{revision}/{path}""" __snake_case = CI_HUB_ENDPOINT + """/{repo_id}/resolve/{revision}/{filename}""" __snake_case = Path("""~/.huggingface/hub_ci_token""").expanduser() @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): monkeypatch.setattr( '''huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any ): monkeypatch.setattr('''datasets.config.HF_ENDPOINT''' , SCREAMING_SNAKE_CASE__ ) monkeypatch.setattr('''datasets.config.HUB_DATASETS_URL''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : List[str] ): monkeypatch.setattr('''huggingface_hub.hf_api.HfFolder.path_token''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : List[Any] ): HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield HfFolder.delete_token() @pytest.fixture(scope='''session''' ) def _A ( ): return HfApi(endpoint=SCREAMING_SNAKE_CASE__ ) @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi ): UpperCamelCase :Tuple = HfFolder.get_token() HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield CI_HUB_USER_TOKEN if previous_token is not None: HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Dict ): def _cleanup_repo(SCREAMING_SNAKE_CASE__ : Tuple ): hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) return _cleanup_repo @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): @contextmanager def _temporary_repo(SCREAMING_SNAKE_CASE__ : Any ): try: yield repo_id finally: cleanup_repo(SCREAMING_SNAKE_CASE__ ) return _temporary_repo @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Optional[Any] ): UpperCamelCase :Union[str, Any] = F'''repo_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :int = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data/text_data.txt''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Dict ): return hf_private_dataset_repo_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Optional[int] = F'''repo_zipped_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Any = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , 
private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str] ): return hf_private_dataset_repo_zipped_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] ): UpperCamelCase :Dict = F'''repo_zipped_img_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Dict = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Tuple ): return hf_private_dataset_repo_zipped_img_data_
import os import sys import tempfile import torch from .state import AcceleratorState from .utils import PrecisionType, PrepareForLaunch, is_mps_available, patch_environment def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str]=() , SCREAMING_SNAKE_CASE__ : List[Any]=None , SCREAMING_SNAKE_CASE__ : List[Any]="no" , SCREAMING_SNAKE_CASE__ : Dict="29500" ): UpperCamelCase :List[Any] = False UpperCamelCase :Tuple = False if any(key.startswith('''KAGGLE''' ) for key in os.environ.keys() ): UpperCamelCase :Dict = True elif "IPython" in sys.modules: UpperCamelCase :int = '''google.colab''' in str(sys.modules['''IPython'''].get_ipython() ) try: UpperCamelCase :Any = PrecisionType(mixed_precision.lower() ) except ValueError: raise ValueError( F'''Unknown mixed_precision mode: {args.mixed_precision.lower()}. Choose between {PrecisionType.list()}.''' ) if (in_colab or in_kaggle) and (os.environ.get('''TPU_NAME''' , SCREAMING_SNAKE_CASE__ ) is not None): # TPU launch import torch_xla.distributed.xla_multiprocessing as xmp if len(AcceleratorState._shared_state ) > 0: raise ValueError( '''To train on TPU in Colab or Kaggle Kernel, the `Accelerator` should only be initialized inside ''' '''your training function. Restart your notebook and make sure no cells initializes an ''' '''`Accelerator`.''' ) if num_processes is None: UpperCamelCase :Tuple = 8 UpperCamelCase :Optional[int] = PrepareForLaunch(SCREAMING_SNAKE_CASE__ , distributed_type='''TPU''' ) print(F'''Launching a training on {num_processes} TPU cores.''' ) xmp.spawn(SCREAMING_SNAKE_CASE__ , args=SCREAMING_SNAKE_CASE__ , nprocs=SCREAMING_SNAKE_CASE__ , start_method='''fork''' ) elif in_colab: # No need for a distributed launch otherwise as it's either CPU or one GPU. if torch.cuda.is_available(): print('''Launching training on one GPU.''' ) else: print('''Launching training on one CPU.''' ) function(*SCREAMING_SNAKE_CASE__ ) else: if num_processes is None: raise ValueError( '''You have to specify the number of GPUs you would like to use, add `num_processes=...` to your call.''' ) if num_processes > 1: # Multi-GPU launch from torch.multiprocessing import start_processes from torch.multiprocessing.spawn import ProcessRaisedException if len(AcceleratorState._shared_state ) > 0: raise ValueError( '''To launch a multi-GPU training from your notebook, the `Accelerator` should only be initialized ''' '''inside your training function. Restart your notebook and make sure no cells initializes an ''' '''`Accelerator`.''' ) if torch.cuda.is_initialized(): raise ValueError( '''To launch a multi-GPU training from your notebook, you need to avoid running any instruction ''' '''using `torch.cuda` in any cell. Restart your notebook and make sure no cells use any CUDA ''' '''function.''' ) # torch.distributed will expect a few environment variable to be here. We set the ones common to each # process here (the other ones will be set be the launcher). 
with patch_environment( world_size=SCREAMING_SNAKE_CASE__ , master_addr='''127.0.0.1''' , master_port=SCREAMING_SNAKE_CASE__ , mixed_precision=SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = PrepareForLaunch(SCREAMING_SNAKE_CASE__ , distributed_type='''MULTI_GPU''' ) print(F'''Launching training on {num_processes} GPUs.''' ) try: start_processes(SCREAMING_SNAKE_CASE__ , args=SCREAMING_SNAKE_CASE__ , nprocs=SCREAMING_SNAKE_CASE__ , start_method='''fork''' ) except ProcessRaisedException as e: if "Cannot re-initialize CUDA in forked subprocess" in e.args[0]: raise RuntimeError( '''CUDA has been initialized before the `notebook_launcher` could create a forked subprocess. ''' '''This likely stems from an outside import causing issues once the `notebook_launcher()` is called. ''' '''Please review your imports and test them when running the `notebook_launcher()` to identify ''' '''which one is problematic.''' ) from e else: # No need for a distributed launch otherwise as it's either CPU, GPU or MPS. if is_mps_available(): UpperCamelCase :Any = '''1''' print('''Launching training on MPS.''' ) elif torch.cuda.is_available(): print('''Launching training on one GPU.''' ) else: print('''Launching training on CPU.''' ) function(*SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Tuple=() , SCREAMING_SNAKE_CASE__ : int=2 ): from torch.multiprocessing import start_processes with tempfile.NamedTemporaryFile() as tmp_file: # torch.distributed will expect a few environment variables to be here. We set the ones common to each # process here (the other ones will be set by the launcher). with patch_environment( world_size=SCREAMING_SNAKE_CASE__ , master_addr='''127.0.0.1''' , master_port='''29500''' , accelerate_mixed_precision='''no''' , accelerate_debug_rdv_file=tmp_file.name , accelerate_use_cpu='''yes''' , ): UpperCamelCase :Optional[int] = PrepareForLaunch(SCREAMING_SNAKE_CASE__ , debug=SCREAMING_SNAKE_CASE__ ) start_processes(SCREAMING_SNAKE_CASE__ , args=SCREAMING_SNAKE_CASE__ , nprocs=SCREAMING_SNAKE_CASE__ , start_method='''fork''' )
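# A minimal usage sketch (my addition), assuming this module is accelerate's
# launchers.py so the first function above is the public notebook_launcher;
# the signature (function, args=(), num_processes=None, mixed_precision="no",
# use_port="29500") is read off its definition.
import torch

from accelerate import notebook_launcher


def training_function():
    device = "cuda" if torch.cuda.is_available() else "cpu"
    print(f"training on {device}")


notebook_launcher(training_function, args=(), num_processes=2, mixed_precision="no")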
import json import os import shutil import tempfile import unittest import numpy as np import pytest from transformers import CLIPTokenizer, CLIPTokenizerFast from transformers.models.clip.tokenization_clip import VOCAB_FILES_NAMES from transformers.testing_utils import require_vision from transformers.utils import IMAGE_PROCESSOR_NAME, is_vision_available if is_vision_available(): from PIL import Image from transformers import CLIPSegProcessor, ViTImageProcessor @require_vision class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Any = tempfile.mkdtemp() # fmt: off UpperCamelCase :Tuple = ['''l''', '''o''', '''w''', '''e''', '''r''', '''s''', '''t''', '''i''', '''d''', '''n''', '''lo''', '''l</w>''', '''w</w>''', '''r</w>''', '''t</w>''', '''low</w>''', '''er</w>''', '''lowest</w>''', '''newer</w>''', '''wider''', '''<unk>''', '''<|startoftext|>''', '''<|endoftext|>'''] # fmt: on UpperCamelCase :Optional[Any] = dict(zip(SCREAMING_SNAKE_CASE_ , range(len(SCREAMING_SNAKE_CASE_ ) ) ) ) UpperCamelCase :Dict = ['''#version: 0.2''', '''l o''', '''lo w</w>''', '''e r</w>''', ''''''] UpperCamelCase :Union[str, Any] = {'''unk_token''': '''<unk>'''} UpperCamelCase :int = os.path.join(self.tmpdirname , VOCAB_FILES_NAMES['''vocab_file'''] ) UpperCamelCase :int = os.path.join(self.tmpdirname , VOCAB_FILES_NAMES['''merges_file'''] ) with open(self.vocab_file , '''w''' , encoding='''utf-8''' ) as fp: fp.write(json.dumps(SCREAMING_SNAKE_CASE_ ) + '''\n''' ) with open(self.merges_file , '''w''' , encoding='''utf-8''' ) as fp: fp.write('''\n'''.join(SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :Union[str, Any] = { '''do_resize''': True, '''size''': 20, '''do_center_crop''': True, '''crop_size''': 18, '''do_normalize''': True, '''image_mean''': [0.4814_5466, 0.457_8275, 0.4082_1073], '''image_std''': [0.2686_2954, 0.2613_0258, 0.2757_7711], } UpperCamelCase :Dict = os.path.join(self.tmpdirname , SCREAMING_SNAKE_CASE_ ) with open(self.image_processor_file , '''w''' , encoding='''utf-8''' ) as fp: json.dump(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> Tuple: return CLIPTokenizer.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: return CLIPTokenizerFast.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> int: return ViTImageProcessor.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[int]: shutil.rmtree(self.tmpdirname ) def UpperCAmelCase ( self ) -> Optional[int]: UpperCamelCase :Tuple = [np.random.randint(255 , size=(3, 30, 400) , dtype=np.uinta )] UpperCamelCase :str = [Image.fromarray(np.moveaxis(SCREAMING_SNAKE_CASE_ , 0 , -1 ) ) for x in image_inputs] return image_inputs def UpperCAmelCase ( self ) -> List[str]: UpperCamelCase :List[str] = self.get_tokenizer() UpperCamelCase :List[Any] = self.get_rust_tokenizer() UpperCamelCase :Tuple = self.get_image_processor() UpperCamelCase :Tuple = CLIPSegProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) processor_slow.save_pretrained(self.tmpdirname ) UpperCamelCase :List[Any] = CLIPSegProcessor.from_pretrained(self.tmpdirname , use_fast=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = CLIPSegProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) 
processor_fast.save_pretrained(self.tmpdirname ) UpperCamelCase :str = CLIPSegProcessor.from_pretrained(self.tmpdirname ) self.assertEqual(processor_slow.tokenizer.get_vocab() , tokenizer_slow.get_vocab() ) self.assertEqual(processor_fast.tokenizer.get_vocab() , tokenizer_fast.get_vocab() ) self.assertEqual(tokenizer_slow.get_vocab() , tokenizer_fast.get_vocab() ) self.assertIsInstance(processor_slow.tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertIsInstance(processor_fast.tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor_slow.image_processor.to_json_string() , image_processor.to_json_string() ) self.assertEqual(processor_fast.image_processor.to_json_string() , image_processor.to_json_string() ) self.assertIsInstance(processor_slow.image_processor , SCREAMING_SNAKE_CASE_ ) self.assertIsInstance(processor_fast.image_processor , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[str]: UpperCamelCase :List[Any] = CLIPSegProcessor(tokenizer=self.get_tokenizer() , image_processor=self.get_image_processor() ) processor.save_pretrained(self.tmpdirname ) UpperCamelCase :List[Any] = self.get_tokenizer(bos_token='''(BOS)''' , eos_token='''(EOS)''' ) UpperCamelCase :Tuple = self.get_image_processor(do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 ) UpperCamelCase :Union[str, Any] = CLIPSegProcessor.from_pretrained( self.tmpdirname , bos_token='''(BOS)''' , eos_token='''(EOS)''' , do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 ) self.assertEqual(processor.tokenizer.get_vocab() , tokenizer_add_kwargs.get_vocab() ) self.assertIsInstance(processor.tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.image_processor.to_json_string() , image_processor_add_kwargs.to_json_string() ) self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[str]: UpperCamelCase :Any = self.get_image_processor() UpperCamelCase :List[str] = self.get_tokenizer() UpperCamelCase :Tuple = CLIPSegProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = self.prepare_image_inputs() UpperCamelCase :int = image_processor(SCREAMING_SNAKE_CASE_ , return_tensors='''np''' ) UpperCamelCase :Tuple = processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''np''' ) for key in input_feat_extract.keys(): self.assertAlmostEqual(input_feat_extract[key].sum() , input_processor[key].sum() , delta=1e-2 ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :List[Any] = self.get_image_processor() UpperCamelCase :List[Any] = self.get_tokenizer() UpperCamelCase :List[str] = CLIPSegProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Tuple = '''lower newer''' UpperCamelCase :Union[str, Any] = processor(text=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = tokenizer(SCREAMING_SNAKE_CASE_ ) for key in encoded_tok.keys(): self.assertListEqual(encoded_tok[key] , encoded_processor[key] ) def UpperCAmelCase ( self ) -> Any: UpperCamelCase :Optional[int] = self.get_image_processor() UpperCamelCase :Optional[int] = self.get_tokenizer() UpperCamelCase :Dict = CLIPSegProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = '''lower newer''' UpperCamelCase :int = self.prepare_image_inputs() UpperCamelCase :Optional[int] = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(list(inputs.keys() ) , ['''input_ids''', '''attention_mask''', '''pixel_values'''] ) # test if it raises 
when no input is passed with pytest.raises(SCREAMING_SNAKE_CASE_ ): processor() def UpperCAmelCase ( self ) -> str: UpperCamelCase :Tuple = self.get_image_processor() UpperCamelCase :Optional[Any] = self.get_tokenizer() UpperCamelCase :List[str] = CLIPSegProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = self.prepare_image_inputs() UpperCamelCase :Optional[Any] = self.prepare_image_inputs() UpperCamelCase :Optional[Any] = processor(images=SCREAMING_SNAKE_CASE_ , visual_prompt=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(list(inputs.keys() ) , ['''pixel_values''', '''conditional_pixel_values'''] ) # test if it raises when no input is passed with pytest.raises(SCREAMING_SNAKE_CASE_ ): processor() def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Tuple = self.get_image_processor() UpperCamelCase :Union[str, Any] = self.get_tokenizer() UpperCamelCase :int = CLIPSegProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = [[1, 4, 5, 8, 1, 0, 8], [3, 4, 3, 1, 1, 8, 9]] UpperCamelCase :Optional[Any] = processor.batch_decode(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = tokenizer.batch_decode(SCREAMING_SNAKE_CASE_ ) self.assertListEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
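# Outside the test harness, the processor is typically exercised as below
# (a sketch; the CIDAS/clipseg-rd64-refined checkpoint is a public CLIPSeg
# checkpoint I am assuming here, not one pinned by these tests).
import requests
from PIL import Image

from transformers import CLIPSegProcessor

processor = CLIPSegProcessor.from_pretrained("CIDAS/clipseg-rd64-refined")
url = "http://images.cocodataset.org/val2017/000000039769.jpg"
image = Image.open(requests.get(url, stream=True).raw)
inputs = processor(text=["a cat"], images=[image], return_tensors="pt")
print(sorted(inputs.keys()))  # ['attention_mask', 'input_ids', 'pixel_values']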
import sys


def matrix_chain_order(array):
    n = len(array)
    matrix = [[0 for x in range(n)] for x in range(n)]
    sol = [[0 for x in range(n)] for x in range(n)]
    for chain_length in range(2, n):
        for a in range(1, n - chain_length + 1):
            b = a + chain_length - 1
            matrix[a][b] = sys.maxsize
            for c in range(a, b):
                cost = (
                    matrix[a][c] + matrix[c + 1][b] + array[a - 1] * array[c] * array[b]
                )
                if cost < matrix[a][b]:
                    matrix[a][b] = cost
                    sol[a][b] = c
    return matrix, sol


def print_optimal_solution(optimal_solution, i, j):
    if i == j:
        print("A" + str(i), end=" ")
    else:
        print("(", end=" ")
        print_optimal_solution(optimal_solution, i, optimal_solution[i][j])
        print_optimal_solution(optimal_solution, optimal_solution[i][j] + 1, j)
        print(")", end=" ")


def main():
    array = [30, 35, 15, 5, 10, 20, 25]
    n = len(array)
    # Size of matrix created from above array will be
    # 30*35 35*15 15*5 5*10 10*20 20*25
    matrix, optimal_solution = matrix_chain_order(array)
    print("No. of Operations required: " + str(matrix[1][n - 1]))
    print_optimal_solution(optimal_solution, 1, n - 1)


if __name__ == "__main__":
    main()
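# Known-answer check (my addition): for the classic CLRS dimension vector
# <30, 35, 15, 5, 10, 20, 25>, the minimum number of scalar multiplications
# is 15125, achieved by the parenthesization ((A1(A2A3))((A4A5)A6)).
matrix, sol = matrix_chain_order([30, 35, 15, 5, 10, 20, 25])
assert matrix[1][6] == 15125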
from __future__ import annotations import random import unittest from transformers import TransfoXLConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor from ...test_pipeline_mixin import PipelineTesterMixin if is_tf_available(): import tensorflow as tf from transformers import ( TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST, TFTransfoXLForSequenceClassification, TFTransfoXLLMHeadModel, TFTransfoXLModel, ) class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , ) -> Optional[Any]: UpperCamelCase :Optional[Any] = parent UpperCamelCase :Tuple = 13 UpperCamelCase :List[Any] = 7 UpperCamelCase :Tuple = 30 UpperCamelCase :Union[str, Any] = self.seq_length + self.mem_len UpperCamelCase :Optional[int] = 15 UpperCamelCase :Union[str, Any] = True UpperCamelCase :List[Any] = True UpperCamelCase :List[Any] = 99 UpperCamelCase :List[Any] = [10, 50, 80] UpperCamelCase :Union[str, Any] = 32 UpperCamelCase :Optional[Any] = 32 UpperCamelCase :Optional[int] = 4 UpperCamelCase :int = 8 UpperCamelCase :str = 128 UpperCamelCase :Optional[int] = 2 UpperCamelCase :Union[str, Any] = 2 UpperCamelCase :Optional[int] = None UpperCamelCase :int = 1 UpperCamelCase :Optional[int] = 0 UpperCamelCase :Any = 3 UpperCamelCase :Dict = self.vocab_size - 1 UpperCamelCase :str = 0.01 def UpperCAmelCase ( self ) -> Optional[int]: UpperCamelCase :Optional[Any] = ids_tensor([self.batch_size, self.seq_length] , self.vocab_size ) UpperCamelCase :List[Any] = ids_tensor([self.batch_size, self.seq_length] , self.vocab_size ) UpperCamelCase :str = None if self.use_labels: UpperCamelCase :Optional[Any] = ids_tensor([self.batch_size, self.seq_length] , self.vocab_size ) UpperCamelCase :List[Any] = TransfoXLConfig( vocab_size=self.vocab_size , mem_len=self.mem_len , clamp_len=self.clamp_len , cutoffs=self.cutoffs , d_model=self.hidden_size , d_embed=self.d_embed , n_head=self.num_attention_heads , d_head=self.d_head , d_inner=self.d_inner , div_val=self.div_val , n_layer=self.num_hidden_layers , eos_token_id=self.eos_token_id , pad_token_id=self.vocab_size - 1 , init_range=self.init_range , num_labels=self.num_labels , ) return (config, input_ids_a, input_ids_a, lm_labels) def UpperCAmelCase ( self ) -> Union[str, Any]: random.seed(self.seed ) tf.random.set_seed(self.seed ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :Optional[int] = TFTransfoXLModel(SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase :Optional[int] = model(SCREAMING_SNAKE_CASE_ ).to_tuple() UpperCamelCase :Optional[Any] = {'''input_ids''': input_ids_a, '''mems''': mems_a} UpperCamelCase , UpperCamelCase :int = model(SCREAMING_SNAKE_CASE_ ).to_tuple() self.parent.assertEqual(hidden_states_a.shape , (self.batch_size, self.seq_length, self.hidden_size) ) self.parent.assertEqual(hidden_states_a.shape , (self.batch_size, self.seq_length, self.hidden_size) ) self.parent.assertListEqual( [mem.shape for mem in mems_a] , [(self.mem_len, self.batch_size, self.hidden_size)] * self.num_hidden_layers , ) self.parent.assertListEqual( [mem.shape for mem in mems_a] , [(self.mem_len, self.batch_size, self.hidden_size)] * self.num_hidden_layers , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Dict: UpperCamelCase 
:List[str] = TFTransfoXLLMHeadModel(SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase :List[str] = model(SCREAMING_SNAKE_CASE_ ).to_tuple() UpperCamelCase :Dict = {'''input_ids''': input_ids_a, '''labels''': lm_labels} UpperCamelCase , UpperCamelCase :List[str] = model(SCREAMING_SNAKE_CASE_ ).to_tuple() UpperCamelCase , UpperCamelCase :Tuple = model([input_ids_a, mems_a] ).to_tuple() UpperCamelCase :Dict = {'''input_ids''': input_ids_a, '''mems''': mems_a, '''labels''': lm_labels} UpperCamelCase , UpperCamelCase :Union[str, Any] = model(SCREAMING_SNAKE_CASE_ ).to_tuple() self.parent.assertEqual(lm_logits_a.shape , (self.batch_size, self.seq_length, self.vocab_size) ) self.parent.assertListEqual( [mem.shape for mem in mems_a] , [(self.mem_len, self.batch_size, self.hidden_size)] * self.num_hidden_layers , ) self.parent.assertEqual(lm_logits_a.shape , (self.batch_size, self.seq_length, self.vocab_size) ) self.parent.assertListEqual( [mem.shape for mem in mems_a] , [(self.mem_len, self.batch_size, self.hidden_size)] * self.num_hidden_layers , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :Dict = TFTransfoXLForSequenceClassification(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_labels) ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Union[str, Any] = self.prepare_config_and_inputs() ((UpperCamelCase) , (UpperCamelCase) , (UpperCamelCase) , (UpperCamelCase)) :int = config_and_inputs UpperCamelCase :Dict = {'''input_ids''': input_ids_a} return config, inputs_dict @require_tf class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : List[str] =( (TFTransfoXLModel, TFTransfoXLLMHeadModel, TFTransfoXLForSequenceClassification) if is_tf_available() else () ) UpperCamelCase_ : List[str] =() if is_tf_available() else () UpperCamelCase_ : Tuple =( { 'feature-extraction': TFTransfoXLModel, 'text-classification': TFTransfoXLForSequenceClassification, 'text-generation': TFTransfoXLLMHeadModel, 'zero-shot': TFTransfoXLForSequenceClassification, } if is_tf_available() else {} ) # TODO: add this test when TFTransfoXLLMHead has a linear output layer implemented UpperCamelCase_ : Any =False UpperCamelCase_ : Tuple =False UpperCamelCase_ : Tuple =False UpperCamelCase_ : Union[str, Any] =False def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: if pipeline_test_casse_name == "TextGenerationPipelineTests": # Get `ValueError: AttributeError: 'NoneType' object has no attribute 'new_ones'` or `AssertionError`. # `TransfoXLConfig` was never used in pipeline tests: cannot create a simple # tokenizer. 
return True return False def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Union[str, Any] = TFTransfoXLModelTester(self ) UpperCamelCase :Optional[Any] = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , d_embed=37 ) def UpperCAmelCase ( self ) -> Dict: self.config_tester.run_common_tests() def UpperCAmelCase ( self ) -> List[str]: self.model_tester.set_seed() UpperCamelCase :Any = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_transfo_xl_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[str]: self.model_tester.set_seed() UpperCamelCase :Optional[Any] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_transfo_xl_lm_head(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> int: UpperCamelCase :Union[str, Any] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_transfo_xl_for_sequence_classification(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase , UpperCamelCase :List[Any] = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :int = [TFTransfoXLForSequenceClassification] for model_class in self.all_model_classes: UpperCamelCase :List[str] = model_class(SCREAMING_SNAKE_CASE_ ) assert isinstance(model.get_input_embeddings() , tf.keras.layers.Layer ) if model_class in list_other_models_with_output_ebd: UpperCamelCase :Union[str, Any] = model.get_output_embeddings() assert isinstance(SCREAMING_SNAKE_CASE_ , tf.keras.layers.Layer ) UpperCamelCase :Union[str, Any] = model.get_bias() assert name is None else: UpperCamelCase :Tuple = model.get_output_embeddings() assert x is None UpperCamelCase :str = model.get_bias() assert name is None def UpperCAmelCase ( self ) -> Optional[int]: # TODO JP: Make TransfoXL XLA compliant pass @slow def UpperCAmelCase ( self ) -> List[Any]: for model_name in TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST[:1]: UpperCamelCase :int = TFTransfoXLModel.from_pretrained(SCREAMING_SNAKE_CASE_ ) self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) @unittest.skip(reason='''This model doesn\'t play well with fit() due to not returning a single loss.''' ) def UpperCAmelCase ( self ) -> List[Any]: pass @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" @unittest.skip('''Skip test until #12651 is resolved.''' ) @slow def UpperCAmelCase ( self ) -> Optional[int]: UpperCamelCase :Any = TFTransfoXLLMHeadModel.from_pretrained('''transfo-xl-wt103''' ) # fmt: off UpperCamelCase :int = tf.convert_to_tensor([[33,1297,2,1,1009,4,1109,1_1739,4762,358,5,25,245,22,1706,17,2_0098,5,3215,21,37,1110,3,13,1041,4,24,603,490,2,7_1477,2_0098,10_4447,2,2_0961,1,2604,4,1,329,3,6224,831,1_6002,2,8,603,7_8967,2_9546,23,803,20,25,416,5,8,232,4,277,6,1855,4601,3,2_9546,54,8,3609,5,5_7211,49,4,1,277,18,8,1755,1_5691,3,341,25,416,693,4_2573,71,17,401,94,31,1_7919,2,2_9546,7873,18,1,435,23,1_1011,755,5,5167,3,7983,98,84,2,2_9546,3267,8,3609,4,1,4865,1075,2,6087,71,6,346,8,5854,3,2_9546,824,1400,1868,2,19,160,2,311,8,5496,2,2_0920,17,25,1_5097,3,24,24,0]] , dtype=tf.intaa ) # noqa: E231 # fmt: on # In 1991 , the remains of Russian Tsar Nicholas II and his family # ( except for Alexei and Maria ) are discovered . # The voice of Nicholas's young son , Tsarevich Alexei Nikolaevich , narrates the # remainder of the story . 1883 Western Siberia , # a young Grigori Rasputin is asked by his father and a group of men to perform magic . # Rasputin has a vision and denounces one of the men as a horse thief . 
Although his # father initially slaps him for making such an accusation , Rasputin watches as the # man is chased outside and beaten . Twenty years later , Rasputin sees a vision of # the Virgin Mary , prompting him to become a priest . Rasputin quickly becomes famous , # with people , even a bishop , begging for his blessing . <eod> </s> <eos> # fmt: off UpperCamelCase :str = [33,1297,2,1,1009,4,1109,1_1739,4762,358,5,25,245,22,1706,17,2_0098,5,3215,21,37,1110,3,13,1041,4,24,603,490,2,7_1477,2_0098,10_4447,2,2_0961,1,2604,4,1,329,3,6224,831,1_6002,2,8,603,7_8967,2_9546,23,803,20,25,416,5,8,232,4,277,6,1855,4601,3,2_9546,54,8,3609,5,5_7211,49,4,1,277,18,8,1755,1_5691,3,341,25,416,693,4_2573,71,17,401,94,31,1_7919,2,2_9546,7873,18,1,435,23,1_1011,755,5,5167,3,7983,98,84,2,2_9546,3267,8,3609,4,1,4865,1075,2,6087,71,6,346,8,5854,3,2_9546,824,1400,1868,2,19,160,2,311,8,5496,2,2_0920,17,25,1_5097,3,24,24,0,33,1,1857,2,1,1009,4,1109,1_1739,4762,358,5,25,245,28,1110,3,13,1041,4,24,603,490,2,7_1477,2_0098,10_4447,2,2_0961,1,2604,4,1,329,3,0] # noqa: E231 # fmt: on # In 1991, the remains of Russian Tsar Nicholas II and his family ( # except for Alexei and Maria ) are discovered. The voice of young son, # Tsarevich Alexei Nikolaevich, narrates the remainder of the story. # 1883 Western Siberia, a young Grigori Rasputin is asked by his father # and a group of men to perform magic. Rasputin has a vision and # denounces one of the men as a horse thief. Although his father initially # slaps him for making such an accusation, Rasputin watches as the man # is chased outside and beaten. Twenty years later, Rasputin sees a vision # of the Virgin Mary, prompting him to become a priest. # Rasputin quickly becomes famous, with people, even a bishop, begging for # his blessing. <unk> <unk> <eos> In the 1990s, the remains of Russian Tsar # Nicholas II and his family were discovered. The voice of <unk> young son, # Tsarevich Alexei Nikolaevich, narrates the remainder of the story.<eos> UpperCamelCase :int = model.generate(SCREAMING_SNAKE_CASE_ , max_length=200 , do_sample=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(output_ids[0].numpy().tolist() , SCREAMING_SNAKE_CASE_ )
import argparse import json import os from pathlib import Path import requests import torch from transformers import JukeboxConfig, JukeboxModel from transformers.utils import logging logging.set_verbosity_info() __snake_case = logging.get_logger(__name__) __snake_case = """https://openaipublic.azureedge.net/jukebox/models/""" __snake_case = { """jukebox-1b-lyrics""": [ """5b/vqvae.pth.tar""", """5b/prior_level_0.pth.tar""", """5b/prior_level_1.pth.tar""", """1b_lyrics/prior_level_2.pth.tar""", ], """jukebox-5b-lyrics""": [ """5b/vqvae.pth.tar""", """5b/prior_level_0.pth.tar""", """5b/prior_level_1.pth.tar""", """5b_lyrics/prior_level_2.pth.tar""", ], } def _A ( SCREAMING_SNAKE_CASE__ : List[Any] ): if key.endswith('''.model.1.bias''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :int = key.replace('''.model.1.bias''' , '''.conv1d_1.bias''' ) elif key.endswith('''.model.1.weight''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Union[str, Any] = key.replace('''.model.1.weight''' , '''.conv1d_1.weight''' ) elif key.endswith('''.model.3.bias''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Optional[Any] = key.replace('''.model.3.bias''' , '''.conv1d_2.bias''' ) elif key.endswith('''.model.3.weight''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Optional[int] = key.replace('''.model.3.weight''' , '''.conv1d_2.weight''' ) if "conditioner_blocks.0." in key: UpperCamelCase :Any = key.replace('''conditioner_blocks.0''' , '''conditioner_blocks''' ) if "prime_prior" in key: UpperCamelCase :int = key.replace('''prime_prior''' , '''encoder''' ) if ".emb." in key and "total" not in key and "absolute" not in key and "relative" not in key: UpperCamelCase :Any = key.replace('''.emb.''' , '''.''' ) if key.endswith('''k''' ): # replace vqvae.X.k with vqvae.X.codebook return key.replace('''.k''' , '''.codebook''' ) if "y_emb." in key: return key.replace('''y_emb.''' , '''metadata_embedding.''' ) if "x_emb.emb." 
in key: UpperCamelCase :str = key.replace('''0.x_emb.emb''' , '''embed_tokens''' ) if "prime_state_ln" in key: return key.replace('''prime_state_ln''' , '''encoder.final_layer_norm''' ) if ".ln" in key: return key.replace('''.ln''' , '''.layer_norm''' ) if "_ln" in key: return key.replace('''_ln''' , '''_layer_norm''' ) if "prime_state_proj" in key: return key.replace('''prime_state_proj''' , '''encoder.proj_in''' ) if "prime_x_out" in key: return key.replace('''prime_x_out''' , '''encoder.lm_head''' ) if "prior.x_out" in key: return key.replace('''x_out''' , '''fc_proj_out''' ) if "x_emb" in key: return key.replace('''x_emb''' , '''embed_tokens''' ) return key def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : str ): UpperCamelCase :Optional[int] = {} import re UpperCamelCase :int = re.compile(R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :str = re.compile( R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :int = re.compile(R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Tuple = re.compile(R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :int = re.compile( R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Optional[int] = re.compile(R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Optional[Any] = re.compile(R'''conditioner_blocks.(\d*).cond.model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :int = re.compile( R'''conditioner_blocks.(\d*).cond.model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Tuple = re.compile(R'''conditioner_blocks.(\d*).cond.model.(\d*).(bias|weight)''' ) for original_key, value in state_dict.items(): # rename vqvae.encoder keys if re_encoder_block_conv_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = re_encoder_block_conv_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = regex_match.groups() UpperCamelCase :List[str] = int(groups[2] ) * 2 + int(groups[3] ) UpperCamelCase :List[Any] = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.downsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :int = re_encoder_block_conv_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_encoder_block_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_encoder_block_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[Any] = regex_match.groups() UpperCamelCase :Any = int(groups[2] ) * 2 + int(groups[3] ) UpperCamelCase :Any = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :str = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.downsample_block.{block_index}.''' UpperCamelCase :List[str] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Union[str, Any] = prefix + resnet_block UpperCamelCase :str = re_encoder_block_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_encoder_block_proj_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[int] = re_encoder_block_proj_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :int = regex_match.groups() UpperCamelCase :int = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.proj_out.{groups[-1]}''' UpperCamelCase :str = re_encoder_block_proj_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # rename vqvae.decoder 
keys elif re_decoder_block_conv_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_decoder_block_conv_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = regex_match.groups() UpperCamelCase :str = int(groups[2] ) * 2 + int(groups[3] ) - 2 UpperCamelCase :List[Any] = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.upsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :Union[str, Any] = re_decoder_block_conv_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_decoder_block_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_decoder_block_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = regex_match.groups() UpperCamelCase :List[str] = int(groups[2] ) * 2 + int(groups[3] ) - 2 UpperCamelCase :Optional[int] = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :Any = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.upsample_block.{block_index}.''' UpperCamelCase :Optional[int] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Any = prefix + resnet_block UpperCamelCase :Optional[int] = re_decoder_block_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_decoder_block_proj_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[int] = re_decoder_block_proj_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[Any] = regex_match.groups() UpperCamelCase :List[Any] = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.proj_in.{groups[-1]}''' UpperCamelCase :Any = re_decoder_block_proj_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # rename prior cond.model to upsampler.upsample_block and resnet elif re_prior_cond_conv_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_prior_cond_conv_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = regex_match.groups() UpperCamelCase :str = int(groups[1] ) * 2 + int(groups[2] ) - 2 UpperCamelCase :Tuple = F'''conditioner_blocks.upsampler.upsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :int = re_prior_cond_conv_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_prior_cond_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = re_prior_cond_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = regex_match.groups() UpperCamelCase :Optional[Any] = int(groups[1] ) * 2 + int(groups[2] ) - 2 UpperCamelCase :int = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :Tuple = F'''conditioner_blocks.upsampler.upsample_block.{block_index}.''' UpperCamelCase :List[Any] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Any = prefix + resnet_block UpperCamelCase :Dict = re_prior_cond_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_prior_cond_proj_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :List[str] = re_prior_cond_proj_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = regex_match.groups() UpperCamelCase :Dict = F'''conditioner_blocks.upsampler.proj_in.{groups[-1]}''' UpperCamelCase :Any = re_prior_cond_proj_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # keep original key else: UpperCamelCase :List[str] = original_key UpperCamelCase :Any = replace_key(SCREAMING_SNAKE_CASE__ ) if F'''{key_prefix}.{key}''' not in model_state_dict or key is None: print(F'''failed converting {original_key} to {key}, does not match''' ) # handle missmatched shape elif value.shape != model_state_dict[F'''{key_prefix}.{key}'''].shape: UpperCamelCase :Union[str, Any] = 
model_state_dict[F'''{key_prefix}.{key}'''] print(F'''{original_key}-> {key} : \nshape {val.shape} and { value.shape}, do not match''' ) UpperCamelCase :List[Any] = original_key UpperCamelCase :Any = original_key UpperCamelCase :Optional[int] = value return new_dict @torch.no_grad() def _A ( SCREAMING_SNAKE_CASE__ : List[str]=None , SCREAMING_SNAKE_CASE__ : Dict=None ): for file in MODEL_MAPPING[model_name]: if not os.path.isfile(F'''{pytorch_dump_folder_path}/{file.split("/" )[-1]}''' ): UpperCamelCase :Dict = requests.get(F'''{PREFIX}{file}''' , allow_redirects=SCREAMING_SNAKE_CASE__ ) os.makedirs(F'''{pytorch_dump_folder_path}/''' , exist_ok=SCREAMING_SNAKE_CASE__ ) open(F'''{pytorch_dump_folder_path}/{file.split("/" )[-1]}''' , '''wb''' ).write(r.content ) UpperCamelCase :Optional[int] = MODEL_MAPPING[model_name.split('''/''' )[-1]] UpperCamelCase :Any = JukeboxConfig.from_pretrained(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = JukeboxModel(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Dict = [] UpperCamelCase :List[Any] = {} for i, dict_name in enumerate(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = torch.load(F'''{pytorch_dump_folder_path}/{dict_name.split("/" )[-1]}''' )['''model'''] UpperCamelCase :Tuple = {} for k in old_dic.keys(): if k.endswith('''.b''' ): UpperCamelCase :Optional[int] = old_dic[k] elif k.endswith('''.w''' ): UpperCamelCase :Optional[Any] = old_dic[k] elif "level_2" not in dict_name and "cond.model." in k: UpperCamelCase :Optional[Any] = old_dic[k] else: UpperCamelCase :Any = old_dic[k] UpperCamelCase :Any = '''vqvae''' if i == 0 else F'''priors.{3 - i}''' UpperCamelCase :Dict = fix_jukebox_keys(SCREAMING_SNAKE_CASE__ , model.state_dict() , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) weight_dict.append(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = weight_dict.pop(0 ) model.vqvae.load_state_dict(SCREAMING_SNAKE_CASE__ ) for i in range(len(SCREAMING_SNAKE_CASE__ ) ): model.priors[i].load_state_dict(weight_dict[2 - i] ) Path(SCREAMING_SNAKE_CASE__ ).mkdir(exist_ok=SCREAMING_SNAKE_CASE__ ) with open(F'''{pytorch_dump_folder_path}/mapping.json''' , '''w''' ) as txtfile: json.dump(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) print(F'''Saving model {model_name} to {pytorch_dump_folder_path}''' ) model.save_pretrained(SCREAMING_SNAKE_CASE__ ) return weight_dict if __name__ == "__main__": __snake_case = argparse.ArgumentParser() # Required parameters parser.add_argument( """--model_name""", default="""jukebox-5b-lyrics""", type=str, help="""Name of the model you'd like to convert.""", ) parser.add_argument( """--pytorch_dump_folder_path""", default="""jukebox-5b-lyrics-converted""", type=str, help="""Path to the output PyTorch model directory.""", ) __snake_case = parser.parse_args() convert_openai_checkpoint(args.model_name, args.pytorch_dump_folder_path)
from .imports import is_rich_available if is_rich_available(): from rich.traceback import install install(show_locals=False) else: raise ModuleNotFoundError("""To use the rich extension, install rich with `pip install rich`""")
import json import os import shutil import tempfile import unittest import numpy as np import pytest from transformers import MgpstrTokenizer from transformers.models.mgp_str.tokenization_mgp_str import VOCAB_FILES_NAMES from transformers.testing_utils import require_torch, require_vision from transformers.utils import IMAGE_PROCESSOR_NAME, is_torch_available, is_vision_available if is_torch_available(): import torch if is_vision_available(): from PIL import Image from transformers import MgpstrProcessor, ViTImageProcessor @require_torch @require_vision class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Union[str, Any] =ViTImageProcessor if is_vision_available() else None @property def UpperCAmelCase ( self ) -> Dict: return self.image_processor_tester.prepare_image_processor_dict() def UpperCAmelCase ( self ) -> int: UpperCamelCase :Union[str, Any] = (3, 32, 128) UpperCamelCase :Any = tempfile.mkdtemp() # fmt: off UpperCamelCase :int = ['''[GO]''', '''[s]''', '''0''', '''1''', '''2''', '''3''', '''4''', '''5''', '''6''', '''7''', '''8''', '''9''', '''a''', '''b''', '''c''', '''d''', '''e''', '''f''', '''g''', '''h''', '''i''', '''j''', '''k''', '''l''', '''m''', '''n''', '''o''', '''p''', '''q''', '''r''', '''s''', '''t''', '''u''', '''v''', '''w''', '''x''', '''y''', '''z'''] # fmt: on UpperCamelCase :Optional[int] = dict(zip(SCREAMING_SNAKE_CASE_ , range(len(SCREAMING_SNAKE_CASE_ ) ) ) ) UpperCamelCase :Optional[int] = os.path.join(self.tmpdirname , VOCAB_FILES_NAMES['''vocab_file'''] ) with open(self.vocab_file , '''w''' , encoding='''utf-8''' ) as fp: fp.write(json.dumps(SCREAMING_SNAKE_CASE_ ) + '''\n''' ) UpperCamelCase :Tuple = { '''do_normalize''': False, '''do_resize''': True, '''image_processor_type''': '''ViTImageProcessor''', '''resample''': 3, '''size''': {'''height''': 32, '''width''': 128}, } UpperCamelCase :str = os.path.join(self.tmpdirname , SCREAMING_SNAKE_CASE_ ) with open(self.image_processor_file , '''w''' , encoding='''utf-8''' ) as fp: json.dump(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> int: return MgpstrTokenizer.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: return ViTImageProcessor.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> str: shutil.rmtree(self.tmpdirname ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = np.random.randint(255 , size=(3, 30, 400) , dtype=np.uinta ) UpperCamelCase :List[Any] = Image.fromarray(np.moveaxis(SCREAMING_SNAKE_CASE_ , 0 , -1 ) ) return image_input def UpperCAmelCase ( self ) -> str: UpperCamelCase :str = self.get_tokenizer() UpperCamelCase :Union[str, Any] = self.get_image_processor() UpperCamelCase :List[Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) processor.save_pretrained(self.tmpdirname ) UpperCamelCase :Dict = MgpstrProcessor.from_pretrained(self.tmpdirname , use_fast=SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.char_tokenizer.get_vocab() , tokenizer.get_vocab() ) self.assertIsInstance(processor.char_tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.image_processor.to_json_string() , image_processor.to_json_string() ) self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> int: UpperCamelCase :Optional[int] = self.get_tokenizer() UpperCamelCase :Dict = 
self.get_image_processor() UpperCamelCase :List[Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) processor.save_pretrained(self.tmpdirname ) UpperCamelCase :Optional[int] = self.get_tokenizer(bos_token='''(BOS)''' , eos_token='''(EOS)''' ) UpperCamelCase :Optional[Any] = self.get_image_processor(do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 ) UpperCamelCase :int = MgpstrProcessor.from_pretrained( self.tmpdirname , bos_token='''(BOS)''' , eos_token='''(EOS)''' , do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 ) self.assertEqual(processor.char_tokenizer.get_vocab() , tokenizer_add_kwargs.get_vocab() ) self.assertIsInstance(processor.char_tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.image_processor.to_json_string() , image_processor_add_kwargs.to_json_string() ) self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Tuple = self.get_image_processor() UpperCamelCase :List[str] = self.get_tokenizer() UpperCamelCase :str = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = self.prepare_image_inputs() UpperCamelCase :List[str] = image_processor(SCREAMING_SNAKE_CASE_ , return_tensors='''np''' ) UpperCamelCase :Optional[Any] = processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''np''' ) for key in input_image_proc.keys(): self.assertAlmostEqual(input_image_proc[key].sum() , input_processor[key].sum() , delta=1e-2 ) def UpperCAmelCase ( self ) -> Any: UpperCamelCase :Optional[Any] = self.get_image_processor() UpperCamelCase :Union[str, Any] = self.get_tokenizer() UpperCamelCase :int = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = '''test''' UpperCamelCase :Optional[int] = processor(text=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = tokenizer(SCREAMING_SNAKE_CASE_ ) for key in encoded_tok.keys(): self.assertListEqual(encoded_tok[key] , encoded_processor[key] ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :List[str] = self.get_image_processor() UpperCamelCase :Tuple = self.get_tokenizer() UpperCamelCase :Union[str, Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = '''test''' UpperCamelCase :str = self.prepare_image_inputs() UpperCamelCase :Dict = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(list(inputs.keys() ) , ['''pixel_values''', '''labels'''] ) # test if it raises when no input is passed with pytest.raises(SCREAMING_SNAKE_CASE_ ): processor() def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Optional[Any] = self.get_image_processor() UpperCamelCase :Any = self.get_tokenizer() UpperCamelCase :Union[str, Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = [[1, 4, 5, 8, 1, 0, 8], [3, 4, 3, 1, 1, 8, 9], [3, 4, 3, 1, 1, 8, 9]] UpperCamelCase :Union[str, Any] = processor.char_decode(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = tokenizer.batch_decode(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = [seq.replace(''' ''' , '''''' ) for seq in decoded_tok] self.assertListEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :List[Any] = self.get_image_processor() UpperCamelCase :Optional[Any] = self.get_tokenizer() UpperCamelCase 
:Any = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = None UpperCamelCase :List[Any] = self.prepare_image_inputs() UpperCamelCase :Union[str, Any] = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(list(inputs.keys() ) , processor.model_input_names ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :str = self.get_image_processor() UpperCamelCase :Tuple = self.get_tokenizer() UpperCamelCase :Optional[int] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.randn(1 , 27 , 38 ) UpperCamelCase :Union[str, Any] = torch.randn(1 , 27 , 5_0257 ) UpperCamelCase :Optional[Any] = torch.randn(1 , 27 , 3_0522 ) UpperCamelCase :Optional[Any] = processor.batch_decode([char_input, bpe_input, wp_input] ) self.assertListEqual(list(results.keys() ) , ['''generated_text''', '''scores''', '''char_preds''', '''bpe_preds''', '''wp_preds'''] )
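# End-to-end, the processor pairs with MgpstrForSceneTextRecognition roughly
# as below (a sketch; the alibaba-damo/mgp-str-base checkpoint and image URL
# are assumptions taken from the model's public documentation, not from
# these tests).
import requests
from PIL import Image

from transformers import MgpstrForSceneTextRecognition, MgpstrProcessor

processor = MgpstrProcessor.from_pretrained("alibaba-damo/mgp-str-base")
model = MgpstrForSceneTextRecognition.from_pretrained("alibaba-damo/mgp-str-base")

url = "https://i.postimg.cc/ZKwLg2Gw/367-14.png"
image = Image.open(requests.get(url, stream=True).raw).convert("RGB")
pixel_values = processor(images=image, return_tensors="pt").pixel_values

outputs = model(pixel_values)
# batch_decode fuses the char/bpe/wordpiece heads into final text
text = processor.batch_decode(outputs.logits)["generated_text"]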
def odd_even_transposition(arr: list) -> list:
    arr_size = len(arr)
    for _ in range(arr_size):
        for i in range(_ % 2, arr_size - 1, 2):
            if arr[i + 1] < arr[i]:
                arr[i], arr[i + 1] = arr[i + 1], arr[i]
    return arr


if __name__ == "__main__":
    arr = list(range(10, 0, -1))
    print(f"Original: {arr}. Sorted: {odd_even_transposition(arr)}")
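# Quick property check (my addition): the brick sort above should agree with
# Python's built-in sorted() on arbitrary input.
import random

sample = [random.randint(0, 99) for _ in range(20)]
assert odd_even_transposition(sample.copy()) == sorted(sample)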
import math


def solution(n: int = 100) -> int:
    sum_of_squares = sum(i * i for i in range(1, n + 1))
    square_of_sum = int(math.pow(sum(range(1, n + 1)), 2))
    return square_of_sum - sum_of_squares


if __name__ == "__main__":
    print(f"{solution() = }")
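# The same answer falls out in O(1) (my addition) from the closed forms
# sum(i) = n(n+1)/2 and sum(i^2) = n(n+1)(2n+1)/6.
def solution_closed_form(n: int = 100) -> int:
    square_of_sum = (n * (n + 1) // 2) ** 2
    sum_of_squares = n * (n + 1) * (2 * n + 1) // 6
    return square_of_sum - sum_of_squares


assert solution_closed_form() == solution() == 25164150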
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available __snake_case = { """configuration_altclip""": [ """ALTCLIP_PRETRAINED_CONFIG_ARCHIVE_MAP""", """AltCLIPConfig""", """AltCLIPTextConfig""", """AltCLIPVisionConfig""", ], """processing_altclip""": ["""AltCLIPProcessor"""], } try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: __snake_case = [ """ALTCLIP_PRETRAINED_MODEL_ARCHIVE_LIST""", """AltCLIPPreTrainedModel""", """AltCLIPModel""", """AltCLIPTextModel""", """AltCLIPVisionModel""", ] if TYPE_CHECKING: from .configuration_altclip import ( ALTCLIP_PRETRAINED_CONFIG_ARCHIVE_MAP, AltCLIPConfig, AltCLIPTextConfig, AltCLIPVisionConfig, ) from .processing_altclip import AltCLIPProcessor try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_altclip import ( ALTCLIP_PRETRAINED_MODEL_ARCHIVE_LIST, AltCLIPModel, AltCLIPPreTrainedModel, AltCLIPTextModel, AltCLIPVisionModel, ) else: import sys __snake_case = _LazyModule(__name__, globals()["""__file__"""], _import_structure, module_spec=__spec__)
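# Hedged sketch (my addition), assuming this file is transformers'
# models/altclip/__init__.py: importing the subpackage is cheap, and the
# torch-backed classes resolve only on first attribute access through the
# _LazyModule proxy.
from transformers.models import altclip

config = altclip.AltCLIPConfig()      # resolves configuration_altclip on access
model = altclip.AltCLIPModel(config)  # resolves modeling_altclip (requires torch)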
def abbr(a: str, b: str) -> bool:
    n = len(a)
    m = len(b)
    dp = [[False for _ in range(m + 1)] for _ in range(n + 1)]
    dp[0][0] = True
    for i in range(n):
        for j in range(m + 1):
            if dp[i][j]:
                if j < m and a[i].upper() == b[j]:
                    dp[i + 1][j + 1] = True
                if a[i].islower():
                    dp[i + 1][j] = True
    return dp[n][m]


if __name__ == "__main__":
    import doctest

    doctest.testmod()
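# Worked cases (my addition, the classic HackerRank "Abbreviation" examples):
# "daBcd" becomes "ABC" by capitalizing a and c and deleting both d's,
# while "dBcd" cannot.
assert abbr("daBcd", "ABC") is True
assert abbr("dBcd", "ABC") is False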
from __future__ import annotations

from typing import TypedDict


class BWTTransformDict(TypedDict):
    bwt_string: str
    idx_original_string: int


def all_rotations(s: str) -> list[str]:
    if not isinstance(s, str):
        raise TypeError("The parameter s type must be str.")
    return [s[i:] + s[:i] for i in range(len(s))]


def bwt_transform(s: str) -> BWTTransformDict:
    if not isinstance(s, str):
        raise TypeError("The parameter s type must be str.")
    if not s:
        raise ValueError("The parameter s must not be empty.")
    rotations = all_rotations(s)
    rotations.sort()  # sort the list of rotations in alphabetical order
    # make a string composed of the last char of each rotation
    response: BWTTransformDict = {
        "bwt_string": "".join([word[-1] for word in rotations]),
        "idx_original_string": rotations.index(s),
    }
    return response


def reverse_bwt(bwt_string: str, idx_original_string: int) -> str:
    if not isinstance(bwt_string, str):
        raise TypeError("The parameter bwt_string type must be str.")
    if not bwt_string:
        raise ValueError("The parameter bwt_string must not be empty.")
    try:
        idx_original_string = int(idx_original_string)
    except ValueError:
        raise TypeError(
            "The parameter idx_original_string type must be int or castable to int."
        )
    if idx_original_string < 0:
        raise ValueError("The parameter idx_original_string must not be lower than 0.")
    if idx_original_string >= len(bwt_string):
        raise ValueError(
            "The parameter idx_original_string must be lower than len(bwt_string)."
        )
    ordered_rotations = [""] * len(bwt_string)
    for _ in range(len(bwt_string)):
        for i in range(len(bwt_string)):
            ordered_rotations[i] = bwt_string[i] + ordered_rotations[i]
        ordered_rotations.sort()
    return ordered_rotations[idx_original_string]


if __name__ == "__main__":
    entry_msg = "Provide a string and I will generate its BWT transform: "
    s = input(entry_msg).strip()
    result = bwt_transform(s)
    print(
        f"Burrows Wheeler transform for string '{s}' results "
        f"in '{result['bwt_string']}'"
    )
    original_string = reverse_bwt(result["bwt_string"], result["idx_original_string"])
    print(
        f"Reversing Burrows Wheeler transform for entry '{result['bwt_string']}' "
        f"we get original string '{original_string}'"
    )
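# Round-trip check on the textbook example (my addition): the BWT of
# "^BANANA" is "BNN^AAA", and reversing with the returned index recovers
# the input.
result = bwt_transform("^BANANA")
assert result["bwt_string"] == "BNN^AAA"
assert reverse_bwt(result["bwt_string"], result["idx_original_string"]) == "^BANANA"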
from math import factorial

DIGIT_FACTORIAL = {str(digit): factorial(digit) for digit in range(10)}


def digit_factorial_sum(number: int) -> int:
    if not isinstance(number, int):
        raise TypeError("Parameter number must be int")
    if number < 0:
        raise ValueError("Parameter number must be greater than or equal to 0")
    # Converts number in string to iterate on its digits and adds its factorial.
    return sum(DIGIT_FACTORIAL[digit] for digit in str(number))


def solution(chain_length: int = 60, number_limit: int = 1000000) -> int:
    if not isinstance(chain_length, int) or not isinstance(number_limit, int):
        raise TypeError("Parameters chain_length and number_limit must be int")
    if chain_length <= 0 or number_limit <= 0:
        raise ValueError(
            "Parameters chain_length and number_limit must be greater than 0"
        )
    # the counter for the chains with the exact desired length
    chains_counter = 0
    # the cached sizes of the previous chains
    chain_sets_lengths: dict[int, int] = {}
    for start_chain_element in range(1, number_limit):
        # The temporary set will contain the elements of the chain
        chain_set = set()
        chain_set_length = 0
        # Stop computing the chain when you find a cached size, a repeating item
        # or the length is greater than the desired one.
        chain_element = start_chain_element
        while (
            chain_element not in chain_sets_lengths
            and chain_element not in chain_set
            and chain_set_length <= chain_length
        ):
            chain_set.add(chain_element)
            chain_set_length += 1
            chain_element = digit_factorial_sum(chain_element)
        if chain_element in chain_sets_lengths:
            chain_set_length += chain_sets_lengths[chain_element]
        chain_sets_lengths[start_chain_element] = chain_set_length
        # If chain contains the exact amount of elements increase the counter
        if chain_set_length == chain_length:
            chains_counter += 1
    return chains_counter


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    print(f"{solution()}")
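# Worked values (my addition): 145 is a fixed point, since
# 1! + 4! + 5! = 1 + 24 + 120 = 145 (a chain of length 1), and 169 starts
# the well-known 3-cycle 169 -> 363601 -> 1454 -> 169.
assert digit_factorial_sum(145) == 145
assert digit_factorial_sum(169) == 363601
assert digit_factorial_sum(363601) == 1454
assert digit_factorial_sum(1454) == 169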
import argparse
from collections import OrderedDict
from pathlib import Path

import requests
import torch
from PIL import Image

from transformers import GLPNConfig, GLPNForDepthEstimation, GLPNImageProcessor
from transformers.utils import logging


logging.set_verbosity_info()
__snake_case = logging.get_logger(__name__)


def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] ):
    UpperCamelCase :Union[str, Any] = OrderedDict()
    for key, value in state_dict.items():
        if key.startswith('''module.encoder''' ):
            UpperCamelCase :Union[str, Any] = key.replace('''module.encoder''' , '''glpn.encoder''' )
        if key.startswith('''module.decoder''' ):
            UpperCamelCase :Union[str, Any] = key.replace('''module.decoder''' , '''decoder.stages''' )
        if "patch_embed" in key:
            # replace for example patch_embed1 by patch_embeddings.0
            UpperCamelCase :Dict = key[key.find('''patch_embed''' ) + len('''patch_embed''' )]
            UpperCamelCase :Any = key.replace(F'''patch_embed{idx}''' , F'''patch_embeddings.{int(SCREAMING_SNAKE_CASE__ )-1}''' )
        if "norm" in key:
            UpperCamelCase :List[str] = key.replace('''norm''' , '''layer_norm''' )
        if "glpn.encoder.layer_norm" in key:
            # replace for example layer_norm1 by layer_norm.0
            UpperCamelCase :Tuple = key[key.find('''glpn.encoder.layer_norm''' ) + len('''glpn.encoder.layer_norm''' )]
            UpperCamelCase :int = key.replace(F'''layer_norm{idx}''' , F'''layer_norm.{int(SCREAMING_SNAKE_CASE__ )-1}''' )
        if "layer_norm1" in key:
            UpperCamelCase :Optional[int] = key.replace('''layer_norm1''' , '''layer_norm_1''' )
        if "layer_norm2" in key:
            UpperCamelCase :Union[str, Any] = key.replace('''layer_norm2''' , '''layer_norm_2''' )
        if "block" in key:
            # replace for example block1 by block.0
            UpperCamelCase :List[str] = key[key.find('''block''' ) + len('''block''' )]
            UpperCamelCase :Optional[Any] = key.replace(F'''block{idx}''' , F'''block.{int(SCREAMING_SNAKE_CASE__ )-1}''' )
        if "attn.q" in key:
            UpperCamelCase :int = key.replace('''attn.q''' , '''attention.self.query''' )
        if "attn.proj" in key:
            UpperCamelCase :Tuple = key.replace('''attn.proj''' , '''attention.output.dense''' )
        if "attn" in key:
            UpperCamelCase :Tuple = key.replace('''attn''' , '''attention.self''' )
        if "fc1" in key:
            UpperCamelCase :Union[str, Any] = key.replace('''fc1''' , '''dense1''' )
        if "fc2" in key:
            UpperCamelCase :List[str] = key.replace('''fc2''' , '''dense2''' )
        if "linear_pred" in key:
            UpperCamelCase :Optional[Any] = key.replace('''linear_pred''' , '''classifier''' )
        if "linear_fuse" in key:
            UpperCamelCase :List[str] = key.replace('''linear_fuse.conv''' , '''linear_fuse''' )
            UpperCamelCase :List[Any] = key.replace('''linear_fuse.bn''' , '''batch_norm''' )
        if "linear_c" in key:
            # replace for example linear_c4 by linear_c.3
            UpperCamelCase :Union[str, Any] = key[key.find('''linear_c''' ) + len('''linear_c''' )]
            UpperCamelCase :Optional[Any] = key.replace(F'''linear_c{idx}''' , F'''linear_c.{int(SCREAMING_SNAKE_CASE__ )-1}''' )
        if "bot_conv" in key:
            UpperCamelCase :Optional[int] = key.replace('''bot_conv''' , '''0.convolution''' )
        if "skip_conv1" in key:
            UpperCamelCase :Any = key.replace('''skip_conv1''' , '''1.convolution''' )
        if "skip_conv2" in key:
            UpperCamelCase :str = key.replace('''skip_conv2''' , '''2.convolution''' )
        if "fusion1" in key:
            UpperCamelCase :Tuple = key.replace('''fusion1''' , '''1.fusion''' )
        if "fusion2" in key:
            UpperCamelCase :List[str] = key.replace('''fusion2''' , '''2.fusion''' )
        if "fusion3" in key:
            UpperCamelCase :Optional[Any] = key.replace('''fusion3''' , '''3.fusion''' )
        if "fusion" in key and "conv" in key:
            UpperCamelCase :Optional[int] = key.replace('''conv''' , '''convolutional_layer''' )
        if key.startswith('''module.last_layer_depth''' ):
            UpperCamelCase :Union[str, Any] = key.replace('''module.last_layer_depth''' , '''head.head''' )
        UpperCamelCase :List[Any] = value

    return new_state_dict


def _A ( SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : str ):
    # for each of the encoder blocks:
    for i in range(config.num_encoder_blocks ):
        for j in range(config.depths[i] ):
            # read in weights + bias of keys and values (which is a single matrix in the original implementation)
            UpperCamelCase :Union[str, Any] = state_dict.pop(F'''glpn.encoder.block.{i}.{j}.attention.self.kv.weight''' )
            UpperCamelCase :List[str] = state_dict.pop(F'''glpn.encoder.block.{i}.{j}.attention.self.kv.bias''' )
            # next, add keys and values (in that order) to the state dict
            UpperCamelCase :Optional[Any] = kv_weight[: config.hidden_sizes[i], :]
            UpperCamelCase :Optional[Any] = kv_bias[: config.hidden_sizes[i]]
            UpperCamelCase :int = kv_weight[config.hidden_sizes[i] :, :]
            UpperCamelCase :Dict = kv_bias[config.hidden_sizes[i] :]


def _A ( ):
    UpperCamelCase :Optional[int] = '''http://images.cocodataset.org/val2017/000000039769.jpg'''
    UpperCamelCase :Union[str, Any] = Image.open(requests.get(SCREAMING_SNAKE_CASE__ , stream=SCREAMING_SNAKE_CASE__ ).raw )
    return image


@torch.no_grad()
def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Dict=False , SCREAMING_SNAKE_CASE__ : List[Any]=None ):
    UpperCamelCase :Tuple = GLPNConfig(hidden_sizes=[64, 128, 320, 512] , decoder_hidden_size=64 , depths=[3, 8, 27, 3] )

    # load image processor (only resize + rescale)
    UpperCamelCase :Optional[int] = GLPNImageProcessor()

    # prepare image
    UpperCamelCase :Any = prepare_img()
    UpperCamelCase :int = image_processor(images=SCREAMING_SNAKE_CASE__ , return_tensors='''pt''' ).pixel_values

    logger.info('''Converting model...''' )

    # load original state dict
    UpperCamelCase :int = torch.load(SCREAMING_SNAKE_CASE__ , map_location=torch.device('''cpu''' ) )

    # rename keys
    UpperCamelCase :List[str] = rename_keys(SCREAMING_SNAKE_CASE__ )

    # key and value matrices need special treatment
    read_in_k_v(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ )

    # create HuggingFace model and load state dict
    UpperCamelCase :Optional[int] = GLPNForDepthEstimation(SCREAMING_SNAKE_CASE__ )
    model.load_state_dict(SCREAMING_SNAKE_CASE__ )
    model.eval()

    # forward pass
    UpperCamelCase :Any = model(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :List[str] = outputs.predicted_depth

    # verify output
    if model_name is not None:
        if "nyu" in model_name:
            UpperCamelCase :Union[str, Any] = torch.tensor(
                [[4.41_47, 4.08_73, 4.06_73], [3.78_90, 3.28_81, 3.15_25], [3.76_74, 3.54_23, 3.49_13]] )
        elif "kitti" in model_name:
            UpperCamelCase :Optional[Any] = torch.tensor(
                [[3.42_91, 2.78_65, 2.51_51], [3.28_41, 2.70_21, 2.35_02], [3.11_47, 2.46_25, 2.24_81]] )
        else:
            raise ValueError(F'''Unknown model name: {model_name}''' )

        UpperCamelCase :str = torch.Size([1, 480, 640] )

        assert predicted_depth.shape == expected_shape
        assert torch.allclose(predicted_depth[0, :3, :3] , SCREAMING_SNAKE_CASE__ , atol=1e-4 )
        print('''Looks ok!''' )

    # finally, push to hub if required
    if push_to_hub:
        logger.info('''Pushing model and image processor to the hub...''' )
        model.push_to_hub(
            repo_path_or_name=Path(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) ,
            organization='''nielsr''' ,
            commit_message='''Add model''' ,
            use_temp_dir=SCREAMING_SNAKE_CASE__ ,
        )
        image_processor.push_to_hub(
            repo_path_or_name=Path(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) ,
            organization='''nielsr''' ,
            commit_message='''Add image processor''' ,
            use_temp_dir=SCREAMING_SNAKE_CASE__ ,
        )


if __name__ == "__main__":
    __snake_case = argparse.ArgumentParser()

    parser.add_argument(
        """--checkpoint_path""",
        default=None,
        type=str,
        help="""Path to the original PyTorch checkpoint (.pth file).""",
    )
    parser.add_argument(
        """--pytorch_dump_folder_path""", default=None, type=str, help="""Path to the folder to output PyTorch model."""
    )
    parser.add_argument(
        """--push_to_hub""", action="""store_true""", help="""Whether to upload the model to the HuggingFace hub."""
    )
    parser.add_argument(
        """--model_name""",
        default="""glpn-kitti""",
        type=str,
        help="""Name of the model in case you're pushing to the hub.""",
    )

    __snake_case = parser.parse_args()
    convert_glpn_checkpoint(args.checkpoint_path, args.pytorch_dump_folder_path, args.push_to_hub, args.model_name)
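
# A minimal sketch of how the conversion script above might be invoked from the
# command line. The .pth file and output folder names are hypothetical
# placeholders, not artifacts shipped with this script:
#
#   python convert_glpn_to_pytorch.py \
#       --checkpoint_path ./glpn_kitti.pth \
#       --pytorch_dump_folder_path ./glpn-kitti-hf \
#       --model_name glpn-kitti \
#       --push_to_hub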
import unittest

import numpy as np
import torch

from diffusers import DDIMPipeline, DDIMScheduler, UNetaDModel
from diffusers.utils.testing_utils import enable_full_determinism, require_torch_gpu, slow, torch_device

from ..pipeline_params import UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS, UNCONDITIONAL_IMAGE_GENERATION_PARAMS
from ..test_pipelines_common import PipelineTesterMixin


enable_full_determinism()


class UpperCAmelCase_ ( lowercase, unittest.TestCase ):
    """simple docstring"""

    UpperCamelCase_ : int =DDIMPipeline
    UpperCamelCase_ : str =UNCONDITIONAL_IMAGE_GENERATION_PARAMS
    UpperCamelCase_ : str =PipelineTesterMixin.required_optional_params - {
        'num_images_per_prompt',
        'latents',
        'callback',
        'callback_steps',
    }
    UpperCamelCase_ : Optional[Any] =UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS
    UpperCamelCase_ : List[str] =False

    def UpperCAmelCase ( self ) -> Any:
        torch.manual_seed(0 )
        UpperCamelCase :Optional[int] = UNetaDModel(
            block_out_channels=(32, 64) ,
            layers_per_block=2 ,
            sample_size=32 ,
            in_channels=3 ,
            out_channels=3 ,
            down_block_types=('''DownBlock2D''', '''AttnDownBlock2D''') ,
            up_block_types=('''AttnUpBlock2D''', '''UpBlock2D''') ,
        )
        UpperCamelCase :Dict = DDIMScheduler()
        UpperCamelCase :Any = {'''unet''': unet, '''scheduler''': scheduler}
        return components

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=0 ) -> Any:
        if str(SCREAMING_SNAKE_CASE_ ).startswith('''mps''' ):
            UpperCamelCase :List[Any] = torch.manual_seed(SCREAMING_SNAKE_CASE_ )
        else:
            UpperCamelCase :List[Any] = torch.Generator(device=SCREAMING_SNAKE_CASE_ ).manual_seed(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Union[str, Any] = {
            '''batch_size''': 1,
            '''generator''': generator,
            '''num_inference_steps''': 2,
            '''output_type''': '''numpy''',
        }
        return inputs

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase :Optional[int] = '''cpu'''
        UpperCamelCase :Union[str, Any] = self.get_dummy_components()
        UpperCamelCase :Optional[Any] = self.pipeline_class(**SCREAMING_SNAKE_CASE_ )
        pipe.to(SCREAMING_SNAKE_CASE_ )
        pipe.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Union[str, Any] = self.get_dummy_inputs(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Union[str, Any] = pipe(**SCREAMING_SNAKE_CASE_ ).images
        UpperCamelCase :str = image[0, -3:, -3:, -1]

        self.assertEqual(image.shape , (1, 32, 32, 3) )
        UpperCamelCase :Tuple = np.array(
            [1.000e00, 5.717e-01, 4.717e-01, 1.000e00, 0.000e00, 1.000e00, 3.000e-04, 0.000e00, 9.000e-04] )
        UpperCamelCase :List[str] = np.abs(image_slice.flatten() - expected_slice ).max()
        self.assertLessEqual(SCREAMING_SNAKE_CASE_ , 1e-3 )

    def UpperCAmelCase ( self ) -> int:
        super().test_dict_tuple_outputs_equivalent(expected_max_difference=3e-3 )

    def UpperCAmelCase ( self ) -> Optional[int]:
        super().test_save_load_local(expected_max_difference=3e-3 )

    def UpperCAmelCase ( self ) -> Any:
        super().test_save_load_optional_components(expected_max_difference=3e-3 )

    def UpperCAmelCase ( self ) -> str:
        super().test_inference_batch_single_identical(expected_max_diff=3e-3 )


@slow
@require_torch_gpu
class UpperCAmelCase_ ( unittest.TestCase ):
    """simple docstring"""

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        UpperCamelCase :int = '''google/ddpm-cifar10-32'''
        UpperCamelCase :Union[str, Any] = UNetaDModel.from_pretrained(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Dict = DDIMScheduler()

        UpperCamelCase :Tuple = DDIMPipeline(unet=SCREAMING_SNAKE_CASE_ , scheduler=SCREAMING_SNAKE_CASE_ )
        ddim.to(SCREAMING_SNAKE_CASE_ )
        ddim.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Dict = torch.manual_seed(0 )
        UpperCamelCase :Optional[int] = ddim(generator=SCREAMING_SNAKE_CASE_ , eta=0.0 , output_type='''numpy''' ).images

        UpperCamelCase :int = image[0, -3:, -3:, -1]

        assert image.shape == (1, 32, 32, 3)
        UpperCamelCase :Tuple = np.array([0.1723, 0.1617, 0.1600, 0.1626, 0.1497, 0.1513, 0.1505, 0.1442, 0.1453] )

        assert np.abs(image_slice.flatten() - expected_slice ).max() < 1e-2

    def UpperCAmelCase ( self ) -> int:
        UpperCamelCase :Optional[Any] = '''google/ddpm-ema-bedroom-256'''
        UpperCamelCase :Any = UNetaDModel.from_pretrained(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :int = DDIMScheduler.from_pretrained(SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Optional[int] = DDIMPipeline(unet=SCREAMING_SNAKE_CASE_ , scheduler=SCREAMING_SNAKE_CASE_ )
        ddpm.to(SCREAMING_SNAKE_CASE_ )
        ddpm.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Optional[int] = torch.manual_seed(0 )
        UpperCamelCase :Optional[int] = ddpm(generator=SCREAMING_SNAKE_CASE_ , output_type='''numpy''' ).images

        UpperCamelCase :Optional[int] = image[0, -3:, -3:, -1]

        assert image.shape == (1, 256, 256, 3)
        UpperCamelCase :Dict = np.array([0.0060, 0.0201, 0.0344, 0.0024, 0.0018, 0.0002, 0.0022, 0.0000, 0.0069] )

        assert np.abs(image_slice.flatten() - expected_slice ).max() < 1e-2
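
# For context, a minimal sketch of the pipeline these tests exercise, written
# against the public diffusers API (UNet2DModel is the un-obfuscated name of
# UNetaDModel above; the checkpoint id matches the first slow test):
#
#   import torch
#   from diffusers import DDIMPipeline, DDIMScheduler, UNet2DModel
#
#   unet = UNet2DModel.from_pretrained("google/ddpm-cifar10-32")
#   pipe = DDIMPipeline(unet=unet, scheduler=DDIMScheduler())
#   image = pipe(generator=torch.manual_seed(0), eta=0.0, num_inference_steps=50).images[0]
#   image.save("ddim_sample.png")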
import warnings
from collections import OrderedDict
from typing import Any, Mapping, Optional

from ... import PreTrainedTokenizer
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig, OnnxConfigWithPast, OnnxSeqaSeqConfigWithPast
from ...onnx.utils import compute_effective_axis_dimension
from ...utils import TensorType, is_torch_available, logging


__snake_case = logging.get_logger(__name__)

__snake_case = {
    """facebook/bart-large""": """https://huggingface.co/facebook/bart-large/resolve/main/config.json""",
    # See all BART models at https://huggingface.co/models?filter=bart
}


class UpperCAmelCase_ ( lowercase ):
    """simple docstring"""

    UpperCamelCase_ : Union[str, Any] ='bart'
    UpperCamelCase_ : Tuple =['past_key_values']
    UpperCamelCase_ : List[Any] ={'num_attention_heads': 'encoder_attention_heads', 'hidden_size': 'd_model'}

    def __init__( self , SCREAMING_SNAKE_CASE_=5_0265 , SCREAMING_SNAKE_CASE_=1024 , SCREAMING_SNAKE_CASE_=12 , SCREAMING_SNAKE_CASE_=4096 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=12 , SCREAMING_SNAKE_CASE_=4096 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=0.0 , SCREAMING_SNAKE_CASE_=0.0 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=1024 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.0 , SCREAMING_SNAKE_CASE_=0.0 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=0.0 , SCREAMING_SNAKE_CASE_=False , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=1 , SCREAMING_SNAKE_CASE_=0 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=2 , **SCREAMING_SNAKE_CASE_ , ) -> Dict:
        UpperCamelCase :Optional[Any] = vocab_size
        UpperCamelCase :Any = max_position_embeddings
        UpperCamelCase :List[str] = d_model
        UpperCamelCase :Union[str, Any] = encoder_ffn_dim
        UpperCamelCase :Optional[int] = encoder_layers
        UpperCamelCase :Tuple = encoder_attention_heads
        UpperCamelCase :Optional[int] = decoder_ffn_dim
        UpperCamelCase :Union[str, Any] = decoder_layers
        UpperCamelCase :str = decoder_attention_heads
        UpperCamelCase :Optional[int] = dropout
        UpperCamelCase :Dict = attention_dropout
        UpperCamelCase :Optional[int] = activation_dropout
        UpperCamelCase :Tuple = activation_function
        UpperCamelCase :Tuple = init_std
        UpperCamelCase :List[str] = encoder_layerdrop
        UpperCamelCase :Dict = decoder_layerdrop
        UpperCamelCase :int = classifier_dropout
        UpperCamelCase :str = use_cache
        UpperCamelCase :Any = encoder_layers
        UpperCamelCase :List[Any] = scale_embedding  # scale factor will be sqrt(d_model) if True

        super().__init__(
            num_labels=SCREAMING_SNAKE_CASE_ ,
            pad_token_id=SCREAMING_SNAKE_CASE_ ,
            bos_token_id=SCREAMING_SNAKE_CASE_ ,
            eos_token_id=SCREAMING_SNAKE_CASE_ ,
            is_encoder_decoder=SCREAMING_SNAKE_CASE_ ,
            decoder_start_token_id=SCREAMING_SNAKE_CASE_ ,
            forced_eos_token_id=SCREAMING_SNAKE_CASE_ ,
            **SCREAMING_SNAKE_CASE_ ,
        )

        # ensure backward compatibility for BART CNN models
        if self.forced_bos_token_id is None and kwargs.get('''force_bos_token_to_be_generated''' , SCREAMING_SNAKE_CASE_ ):
            UpperCamelCase :Dict = self.bos_token_id
            warnings.warn(
                F'''Please make sure the config includes `forced_bos_token_id={self.bos_token_id}` in future versions. '''
                '''The config can simply be saved and uploaded again to be fixed.''' )


class UpperCAmelCase_ ( lowercase ):
    """simple docstring"""

    @property
    def UpperCAmelCase ( self ) -> Mapping[str, Mapping[int, str]]:
        if self.task in ["default", "seq2seq-lm"]:
            UpperCamelCase :Tuple = OrderedDict(
                [
                    ('''input_ids''', {0: '''batch''', 1: '''encoder_sequence'''}),
                    ('''attention_mask''', {0: '''batch''', 1: '''encoder_sequence'''}),
                ] )

            if self.use_past:
                UpperCamelCase :Optional[int] = {0: '''batch'''}
                UpperCamelCase :int = {0: '''batch''', 1: '''past_decoder_sequence + sequence'''}
            else:
                UpperCamelCase :str = {0: '''batch''', 1: '''decoder_sequence'''}
                UpperCamelCase :str = {0: '''batch''', 1: '''decoder_sequence'''}

            if self.use_past:
                self.fill_with_past_key_values_(SCREAMING_SNAKE_CASE_ , direction='''inputs''' )
        elif self.task == "causal-lm":
            # TODO: figure this case out.
            UpperCamelCase :Optional[int] = OrderedDict(
                [
                    ('''input_ids''', {0: '''batch''', 1: '''encoder_sequence'''}),
                    ('''attention_mask''', {0: '''batch''', 1: '''encoder_sequence'''}),
                ] )
            if self.use_past:
                UpperCamelCase , UpperCamelCase :Union[str, Any] = self.num_layers
                for i in range(SCREAMING_SNAKE_CASE_ ):
                    UpperCamelCase :List[Any] = {0: '''batch''', 2: '''past_sequence + sequence'''}
                    UpperCamelCase :Any = {0: '''batch''', 2: '''past_sequence + sequence'''}
        else:
            UpperCamelCase :int = OrderedDict(
                [
                    ('''input_ids''', {0: '''batch''', 1: '''encoder_sequence'''}),
                    ('''attention_mask''', {0: '''batch''', 1: '''encoder_sequence'''}),
                    ('''decoder_input_ids''', {0: '''batch''', 1: '''decoder_sequence'''}),
                    ('''decoder_attention_mask''', {0: '''batch''', 1: '''decoder_sequence'''}),
                ] )

        return common_inputs

    @property
    def UpperCAmelCase ( self ) -> Mapping[str, Mapping[int, str]]:
        if self.task in ["default", "seq2seq-lm"]:
            UpperCamelCase :List[str] = super().outputs
        else:
            UpperCamelCase :Dict = super(SCREAMING_SNAKE_CASE_ , self ).outputs
            if self.use_past:
                UpperCamelCase , UpperCamelCase :List[Any] = self.num_layers
                for i in range(SCREAMING_SNAKE_CASE_ ):
                    UpperCamelCase :str = {0: '''batch''', 2: '''past_sequence + sequence'''}
                    UpperCamelCase :Tuple = {0: '''batch''', 2: '''past_sequence + sequence'''}
        return common_outputs

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = -1 , SCREAMING_SNAKE_CASE_ = -1 , SCREAMING_SNAKE_CASE_ = False , SCREAMING_SNAKE_CASE_ = None , ) -> Mapping[str, Any]:
        UpperCamelCase :Union[str, Any] = self._generate_dummy_inputs_for_sequence_classification_and_question_answering(
            SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

        # Generate decoder inputs
        UpperCamelCase :Optional[int] = seq_length if not self.use_past else 1
        UpperCamelCase :Dict = self._generate_dummy_inputs_for_sequence_classification_and_question_answering(
            SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Tuple = {F'''decoder_{name}''': tensor for name, tensor in decoder_inputs.items()}
        UpperCamelCase :int = dict(**SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ )

        if self.use_past:
            if not is_torch_available():
                raise ValueError('''Cannot generate dummy past_keys inputs without PyTorch installed.''' )
            else:
                import torch
            UpperCamelCase , UpperCamelCase :Optional[int] = common_inputs['''input_ids'''].shape
            UpperCamelCase :Union[str, Any] = common_inputs['''decoder_input_ids'''].shape[1]
            UpperCamelCase , UpperCamelCase :Optional[Any] = self.num_attention_heads
            UpperCamelCase :Optional[int] = (
                batch,
                num_encoder_attention_heads,
                encoder_seq_length,
                self._config.hidden_size // num_encoder_attention_heads,
            )
            UpperCamelCase :str = decoder_seq_length + 3
            UpperCamelCase :Tuple = (
                batch,
                num_decoder_attention_heads,
                decoder_past_length,
                self._config.hidden_size // num_decoder_attention_heads,
            )

            UpperCamelCase :Optional[int] = torch.cat(
                [common_inputs['''decoder_attention_mask'''], torch.ones(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )] , dim=1 )

            UpperCamelCase :List[str] = []
            # If the number of encoder and decoder layers are present in the model configuration, both are considered
            UpperCamelCase , UpperCamelCase :Any = self.num_layers
            UpperCamelCase :Optional[Any] = min(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :List[Any] = max(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) - min_num_layers
            UpperCamelCase :Union[str, Any] = '''encoder''' if num_encoder_layers > num_decoder_layers else '''decoder'''

            for _ in range(SCREAMING_SNAKE_CASE_ ):
                common_inputs["past_key_values"].append(
                    (
                        torch.zeros(SCREAMING_SNAKE_CASE_ ),
                        torch.zeros(SCREAMING_SNAKE_CASE_ ),
                        torch.zeros(SCREAMING_SNAKE_CASE_ ),
                        torch.zeros(SCREAMING_SNAKE_CASE_ ),
                    ) )
            # TODO: test this.
            UpperCamelCase :Dict = encoder_shape if remaining_side_name == '''encoder''' else decoder_shape
            for _ in range(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ):
                common_inputs["past_key_values"].append((torch.zeros(SCREAMING_SNAKE_CASE_ ), torch.zeros(SCREAMING_SNAKE_CASE_ )) )
        return common_inputs

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = -1 , SCREAMING_SNAKE_CASE_ = -1 , SCREAMING_SNAKE_CASE_ = False , SCREAMING_SNAKE_CASE_ = None , ) -> Mapping[str, Any]:
        UpperCamelCase :Any = self._generate_dummy_inputs_for_sequence_classification_and_question_answering(
            SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

        if self.use_past:
            if not is_torch_available():
                raise ValueError('''Cannot generate dummy past_keys inputs without PyTorch installed.''' )
            else:
                import torch
            UpperCamelCase , UpperCamelCase :Optional[Any] = common_inputs['''input_ids'''].shape
            # Not using the same length for past_key_values
            UpperCamelCase :List[str] = seqlen + 2
            UpperCamelCase , UpperCamelCase :Tuple = self.num_layers
            UpperCamelCase , UpperCamelCase :Tuple = self.num_attention_heads
            UpperCamelCase :int = (
                batch,
                num_encoder_attention_heads,
                past_key_values_length,
                self._config.hidden_size // num_encoder_attention_heads,
            )

            UpperCamelCase :List[str] = common_inputs['''attention_mask'''].dtype
            UpperCamelCase :Union[str, Any] = torch.cat(
                [common_inputs['''attention_mask'''], torch.ones(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , dtype=SCREAMING_SNAKE_CASE_ )] , dim=1 )
            UpperCamelCase :Any = [
                (torch.zeros(SCREAMING_SNAKE_CASE_ ), torch.zeros(SCREAMING_SNAKE_CASE_ )) for _ in range(SCREAMING_SNAKE_CASE_ )
            ]
        return common_inputs

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = -1 , SCREAMING_SNAKE_CASE_ = -1 , SCREAMING_SNAKE_CASE_ = False , SCREAMING_SNAKE_CASE_ = None , ) -> Mapping[str, Any]:
        # Copied from OnnxConfig.generate_dummy_inputs
        # Did not use super(OnnxConfigWithPast, self).generate_dummy_inputs for code clarity.
        # If dynamic axis (-1) we forward with a fixed dimension of 2 samples to avoid optimizations made by ONNX
        UpperCamelCase :Tuple = compute_effective_axis_dimension(
            SCREAMING_SNAKE_CASE_ , fixed_dimension=OnnxConfig.default_fixed_batch , num_token_to_add=0 )

        # If dynamic axis (-1) we forward with a fixed dimension of 8 tokens to avoid optimizations made by ONNX
        UpperCamelCase :Any = tokenizer.num_special_tokens_to_add(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :List[Any] = compute_effective_axis_dimension(
            SCREAMING_SNAKE_CASE_ , fixed_dimension=OnnxConfig.default_fixed_sequence , num_token_to_add=SCREAMING_SNAKE_CASE_ )

        # Generate dummy inputs according to compute batch and sequence
        UpperCamelCase :Dict = [''' '''.join([tokenizer.unk_token] ) * seq_length] * batch_size
        UpperCamelCase :Union[str, Any] = dict(tokenizer(SCREAMING_SNAKE_CASE_ , return_tensors=SCREAMING_SNAKE_CASE_ ) )
        return common_inputs

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = -1 , SCREAMING_SNAKE_CASE_ = -1 , SCREAMING_SNAKE_CASE_ = False , SCREAMING_SNAKE_CASE_ = None , ) -> Mapping[str, Any]:
        if self.task in ["default", "seq2seq-lm"]:
            UpperCamelCase :List[str] = self._generate_dummy_inputs_for_default_and_seqaseq_lm(
                SCREAMING_SNAKE_CASE_ , batch_size=SCREAMING_SNAKE_CASE_ , seq_length=SCREAMING_SNAKE_CASE_ , is_pair=SCREAMING_SNAKE_CASE_ , framework=SCREAMING_SNAKE_CASE_ )
        elif self.task == "causal-lm":
            UpperCamelCase :Dict = self._generate_dummy_inputs_for_causal_lm(
                SCREAMING_SNAKE_CASE_ , batch_size=SCREAMING_SNAKE_CASE_ , seq_length=SCREAMING_SNAKE_CASE_ , is_pair=SCREAMING_SNAKE_CASE_ , framework=SCREAMING_SNAKE_CASE_ )
        else:
            UpperCamelCase :Dict = self._generate_dummy_inputs_for_sequence_classification_and_question_answering(
                SCREAMING_SNAKE_CASE_ , batch_size=SCREAMING_SNAKE_CASE_ , seq_length=SCREAMING_SNAKE_CASE_ , is_pair=SCREAMING_SNAKE_CASE_ , framework=SCREAMING_SNAKE_CASE_ )

        return common_inputs

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]:
        if self.task in ["default", "seq2seq-lm"]:
            UpperCamelCase :Union[str, Any] = super()._flatten_past_key_values_(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
        else:
            UpperCamelCase :List[str] = super(SCREAMING_SNAKE_CASE_ , self )._flatten_past_key_values_(
                SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
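
# A minimal sketch of how a seq2seq ONNX config like the one above is typically
# driven (standard transformers.onnx usage; "facebook/bart-base" is just an
# example checkpoint, and BartOnnxConfig is assumed to be the un-obfuscated
# name of the config class defined above):
#
#   from transformers import AutoTokenizer, BartConfig, TensorType
#   from transformers.models.bart.configuration_bart import BartOnnxConfig
#
#   config = BartConfig.from_pretrained("facebook/bart-base")
#   onnx_config = BartOnnxConfig(config, task="seq2seq-lm")
#   tokenizer = AutoTokenizer.from_pretrained("facebook/bart-base")
#   dummy_inputs = onnx_config.generate_dummy_inputs(tokenizer, framework=TensorType.PYTORCH)
#   print(list(onnx_config.inputs), list(dummy_inputs))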
from json import JSONDecodeError  # Workaround for requests.exceptions.JSONDecodeError

import requests


def _A ( SCREAMING_SNAKE_CASE__ : str = "isbn/0140328726" ):
    UpperCamelCase :Optional[int] = olid.strip().strip('''/''' )  # Remove leading/trailing whitespace & slashes
    if new_olid.count('''/''' ) != 1:
        UpperCamelCase :str = F'''{olid} is not a valid Open Library olid'''
        raise ValueError(SCREAMING_SNAKE_CASE__ )
    return requests.get(F'''https://openlibrary.org/{new_olid}.json''' ).json()


def _A ( SCREAMING_SNAKE_CASE__ : dict ):
    UpperCamelCase :str = {
        '''title''': '''Title''',
        '''publish_date''': '''Publish date''',
        '''authors''': '''Authors''',
        '''number_of_pages''': '''Number of pages:''',
        '''first_sentence''': '''First sentence''',
        '''isbn_10''': '''ISBN (10)''',
        '''isbn_13''': '''ISBN (13)''',
    }
    UpperCamelCase :Optional[Any] = {better_key: ol_book_data[key] for key, better_key in desired_keys.items()}
    UpperCamelCase :List[str] = [
        get_openlibrary_data(author['''key'''] )['''name'''] for author in data['''Authors''']
    ]
    UpperCamelCase :int = data['''First sentence''']['''value''']
    for key, value in data.items():
        if isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
            UpperCamelCase :List[str] = ''', '''.join(SCREAMING_SNAKE_CASE__ )
    return data


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    while True:
        __snake_case = input("""\nEnter the ISBN code to search (or 'quit' to stop): """).strip()
        if isbn.lower() in ("", "q", "quit", "exit", "stop"):
            break

        if len(isbn) not in (10, 13) or not isbn.isdigit():
            print(f'''Sorry, {isbn} is not a valid ISBN. Please, input a valid ISBN.''')
            continue

        print(f'''\nSearching Open Library for ISBN: {isbn}...\n''')

        try:
            __snake_case = summarize_book(get_openlibrary_data(f'''isbn/{isbn}'''))
            print("""\n""".join(f'''{key}: {value}''' for key, value in book_summary.items()))
        except JSONDecodeError:  # Workaround for requests.exceptions.RequestException:
            print(f'''Sorry, there are no results for ISBN: {isbn}.''')
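
# Example of the two helpers above on a known ISBN (this performs a live call to
# the Open Library API, so the exact values depend on the current records;
# get_openlibrary_data / summarize_book are the un-obfuscated function names):
#
#   book = summarize_book(get_openlibrary_data("isbn/0140328726"))
#   print(book["Title"])    # e.g. "Fantastic Mr Fox"
#   print(book["Authors"])  # comma-joined author names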
import argparse

import torch

from transformers import YosoConfig, YosoForMaskedLM


def _A ( SCREAMING_SNAKE_CASE__ : Tuple ):
    if "model" in orig_key:
        UpperCamelCase :Union[str, Any] = orig_key.replace('''model.''' , '''''' )
    if "norm1" in orig_key:
        UpperCamelCase :List[str] = orig_key.replace('''norm1''' , '''attention.output.LayerNorm''' )
    if "norm2" in orig_key:
        UpperCamelCase :int = orig_key.replace('''norm2''' , '''output.LayerNorm''' )
    if "norm" in orig_key:
        UpperCamelCase :List[str] = orig_key.replace('''norm''' , '''LayerNorm''' )
    if "transformer" in orig_key:
        UpperCamelCase :Optional[Any] = orig_key.split('''.''' )[0].split('''_''' )[-1]
        UpperCamelCase :Optional[int] = orig_key.replace(F'''transformer_{layer_num}''' , F'''encoder.layer.{layer_num}''' )
    if "mha.attn" in orig_key:
        UpperCamelCase :List[Any] = orig_key.replace('''mha.attn''' , '''attention.self''' )
    if "mha" in orig_key:
        UpperCamelCase :Optional[int] = orig_key.replace('''mha''' , '''attention''' )
    if "W_q" in orig_key:
        UpperCamelCase :List[Any] = orig_key.replace('''W_q''' , '''self.query''' )
    if "W_k" in orig_key:
        UpperCamelCase :Tuple = orig_key.replace('''W_k''' , '''self.key''' )
    if "W_v" in orig_key:
        UpperCamelCase :Optional[int] = orig_key.replace('''W_v''' , '''self.value''' )
    if "ff1" in orig_key:
        UpperCamelCase :Tuple = orig_key.replace('''ff1''' , '''intermediate.dense''' )
    if "ff2" in orig_key:
        UpperCamelCase :List[str] = orig_key.replace('''ff2''' , '''output.dense''' )
    if "ff" in orig_key:
        UpperCamelCase :Union[str, Any] = orig_key.replace('''ff''' , '''output.dense''' )
    if "mlm_class" in orig_key:
        UpperCamelCase :Tuple = orig_key.replace('''mlm.mlm_class''' , '''cls.predictions.decoder''' )
    if "mlm" in orig_key:
        UpperCamelCase :Dict = orig_key.replace('''mlm''' , '''cls.predictions.transform''' )
    if "cls" not in orig_key:
        UpperCamelCase :str = '''yoso.''' + orig_key

    return orig_key


def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[Any] ):
    for key in orig_state_dict.copy().keys():
        UpperCamelCase :str = orig_state_dict.pop(SCREAMING_SNAKE_CASE__ )
        if ("pooler" in key) or ("sen_class" in key):
            continue
        else:
            UpperCamelCase :Union[str, Any] = val

    UpperCamelCase :List[str] = orig_state_dict['''cls.predictions.decoder.bias''']
    UpperCamelCase :Optional[Any] = torch.arange(SCREAMING_SNAKE_CASE__ ).expand((1, -1) ) + 2

    return orig_state_dict


def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : str ):
    UpperCamelCase :List[Any] = torch.load(SCREAMING_SNAKE_CASE__ , map_location='''cpu''' )['''model_state_dict''']
    UpperCamelCase :Union[str, Any] = YosoConfig.from_json_file(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :Tuple = YosoForMaskedLM(SCREAMING_SNAKE_CASE__ )

    UpperCamelCase :int = convert_checkpoint_helper(config.max_position_embeddings , SCREAMING_SNAKE_CASE__ )

    print(model.load_state_dict(SCREAMING_SNAKE_CASE__ ) )
    model.eval()
    model.save_pretrained(SCREAMING_SNAKE_CASE__ )

    print(F'''Checkpoint successfully converted. Model saved at {pytorch_dump_path}''' )


if __name__ == "__main__":
    __snake_case = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument(
        """--pytorch_model_path""", default=None, type=str, required=True, help="""Path to YOSO pytorch checkpoint."""
    )
    parser.add_argument(
        """--config_file""",
        default=None,
        type=str,
        required=True,
        help="""The json file for YOSO model config.""",
    )
    parser.add_argument(
        """--pytorch_dump_path""", default=None, type=str, required=True, help="""Path to the output PyTorch model."""
    )

    __snake_case = parser.parse_args()
    convert_yoso_checkpoint(args.pytorch_model_path, args.config_file, args.pytorch_dump_path)
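
# A hypothetical invocation of the conversion script above; all three paths are
# placeholders for a locally trained YOSO checkpoint and its JSON config:
#
#   python convert_yoso_checkpoint.py \
#       --pytorch_model_path ./yoso.ckpt \
#       --config_file ./yoso_config.json \
#       --pytorch_dump_path ./yoso-hf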
import inspect
import tempfile
import unittest

from huggingface_hub import hf_hub_download

from transformers import is_torch_available
from transformers.testing_utils import is_flaky, require_torch, slow, torch_device

from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor
from ...test_pipeline_mixin import PipelineTesterMixin


__snake_case = 1E-4

if is_torch_available():
    import torch

    from transformers import AutoformerConfig, AutoformerForPrediction, AutoformerModel
    from transformers.models.autoformer.modeling_autoformer import AutoformerDecoder, AutoformerEncoder


@require_torch
class UpperCAmelCase_ :
    """simple docstring"""

    def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=13 , SCREAMING_SNAKE_CASE_=7 , SCREAMING_SNAKE_CASE_=14 , SCREAMING_SNAKE_CASE_=10 , SCREAMING_SNAKE_CASE_=19 , SCREAMING_SNAKE_CASE_=5 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=[1, 2, 3, 4, 5] , SCREAMING_SNAKE_CASE_=25 , SCREAMING_SNAKE_CASE_=5 , ) -> str:
        UpperCamelCase :Any = d_model
        UpperCamelCase :List[str] = parent
        UpperCamelCase :List[Any] = batch_size
        UpperCamelCase :str = prediction_length
        UpperCamelCase :str = context_length
        UpperCamelCase :int = cardinality
        UpperCamelCase :Optional[Any] = num_time_features
        UpperCamelCase :Optional[Any] = lags_sequence
        UpperCamelCase :str = embedding_dimension
        UpperCamelCase :str = is_training
        UpperCamelCase :Optional[int] = hidden_size
        UpperCamelCase :List[Any] = num_hidden_layers
        UpperCamelCase :int = num_attention_heads
        UpperCamelCase :Tuple = intermediate_size
        UpperCamelCase :List[str] = hidden_act
        UpperCamelCase :List[str] = hidden_dropout_prob
        UpperCamelCase :List[Any] = attention_probs_dropout_prob

        UpperCamelCase :Optional[int] = context_length
        UpperCamelCase :Tuple = prediction_length + label_length
        UpperCamelCase :Optional[Any] = label_length

        UpperCamelCase :Optional[int] = moving_average
        UpperCamelCase :Union[str, Any] = autocorrelation_factor

    def UpperCAmelCase ( self ) -> Optional[int]:
        return AutoformerConfig(
            d_model=self.d_model ,
            encoder_layers=self.num_hidden_layers ,
            decoder_layers=self.num_hidden_layers ,
            encoder_attention_heads=self.num_attention_heads ,
            decoder_attention_heads=self.num_attention_heads ,
            encoder_ffn_dim=self.intermediate_size ,
            decoder_ffn_dim=self.intermediate_size ,
            dropout=self.hidden_dropout_prob ,
            attention_dropout=self.attention_probs_dropout_prob ,
            prediction_length=self.prediction_length ,
            context_length=self.context_length ,
            label_length=self.label_length ,
            lags_sequence=self.lags_sequence ,
            num_time_features=self.num_time_features ,
            num_static_categorical_features=1 ,
            cardinality=[self.cardinality] ,
            embedding_dimension=[self.embedding_dimension] ,
            moving_average=self.moving_average ,
        )

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> List[str]:
        UpperCamelCase :Optional[Any] = config.context_length + max(config.lags_sequence )

        UpperCamelCase :Union[str, Any] = ids_tensor([self.batch_size, 1] , config.cardinality[0] )
        UpperCamelCase :List[str] = floats_tensor([self.batch_size, _past_length, config.num_time_features] )
        UpperCamelCase :Union[str, Any] = floats_tensor([self.batch_size, _past_length] )
        UpperCamelCase :Any = floats_tensor([self.batch_size, _past_length] ) > 0.5

        # decoder inputs
        UpperCamelCase :Tuple = floats_tensor([self.batch_size, config.prediction_length, config.num_time_features] )
        UpperCamelCase :int = floats_tensor([self.batch_size, config.prediction_length] )

        UpperCamelCase :Union[str, Any] = {
            '''past_values''': past_values,
            '''static_categorical_features''': static_categorical_features,
            '''past_time_features''': past_time_features,
            '''past_observed_mask''': past_observed_mask,
            '''future_time_features''': future_time_features,
            '''future_values''': future_values,
        }
        return inputs_dict

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase :int = self.get_config()
        UpperCamelCase :Union[str, Any] = self.prepare_autoformer_inputs_dict(SCREAMING_SNAKE_CASE_ )
        return config, inputs_dict

    def UpperCAmelCase ( self ) -> Any:
        UpperCamelCase , UpperCamelCase :Optional[int] = self.prepare_config_and_inputs()
        return config, inputs_dict

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[Any]:
        UpperCamelCase :int = AutoformerModel(config=SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ ).eval()
        UpperCamelCase :Any = model(**SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :str = outputs.encoder_last_hidden_state
        UpperCamelCase :str = outputs.last_hidden_state

        with tempfile.TemporaryDirectory() as tmpdirname:
            UpperCamelCase :Any = model.get_encoder()
            encoder.save_pretrained(SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :Any = AutoformerEncoder.from_pretrained(SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ )

        UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :int = model.create_network_inputs(**SCREAMING_SNAKE_CASE_ )
        UpperCamelCase , UpperCamelCase :Tuple = model.decomposition_layer(transformer_inputs[:, : config.context_length, ...] )

        UpperCamelCase :Tuple = torch.cat(
            (transformer_inputs[:, : config.context_length, ...], feature[:, : config.context_length, ...]) ,
            dim=-1 ,
        )

        UpperCamelCase :Optional[Any] = encoder(inputs_embeds=SCREAMING_SNAKE_CASE_ )[0]
        self.parent.assertTrue((encoder_last_hidden_state_a - encoder_last_hidden_state).abs().max().item() < 1e-3 )

        UpperCamelCase :Optional[Any] = (
            torch.mean(transformer_inputs[:, : config.context_length, ...] , dim=1 )
            .unsqueeze(1 )
            .repeat(1 , config.prediction_length , 1 )
        )
        UpperCamelCase :Union[str, Any] = torch.zeros(
            [transformer_inputs.shape[0], config.prediction_length, transformer_inputs.shape[2]] ,
            device=enc_input.device ,
        )

        UpperCamelCase :Tuple = torch.cat(
            (
                torch.cat((seasonal_input[:, -config.label_length :, ...], zeros) , dim=1 ),
                feature[:, config.context_length - config.label_length :, ...],
            ) ,
            dim=-1 ,
        )
        UpperCamelCase :Optional[Any] = torch.cat(
            (
                torch.cat((trend_input[:, -config.label_length :, ...], mean) , dim=1 ),
                feature[:, config.context_length - config.label_length :, ...],
            ) ,
            dim=-1 ,
        )

        with tempfile.TemporaryDirectory() as tmpdirname:
            UpperCamelCase :Union[str, Any] = model.get_decoder()
            decoder.save_pretrained(SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :Optional[Any] = AutoformerDecoder.from_pretrained(SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :str = decoder(
            trend=SCREAMING_SNAKE_CASE_ ,
            inputs_embeds=SCREAMING_SNAKE_CASE_ ,
            encoder_hidden_states=SCREAMING_SNAKE_CASE_ ,
        )[0]

        self.parent.assertTrue((last_hidden_state_a - last_hidden_state).abs().max().item() < 1e-3 )


@require_torch
class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ):
    """simple docstring"""

    UpperCamelCase_ : List[str] =(AutoformerModel, AutoformerForPrediction) if is_torch_available() else ()
    UpperCamelCase_ : List[str] =(AutoformerForPrediction,) if is_torch_available() else ()
    UpperCamelCase_ : Optional[Any] ={'feature-extraction': AutoformerModel} if is_torch_available() else {}
    UpperCamelCase_ : Any =False
    UpperCamelCase_ : List[str] =False
    UpperCamelCase_ : Dict =False
    UpperCamelCase_ : Dict =False
    UpperCamelCase_ : int =False
    UpperCamelCase_ : Optional[int] =False

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase :str = AutoformerModelTester(self )
        UpperCamelCase :int = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , has_text_modality=SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        self.config_tester.run_common_tests()

    def UpperCAmelCase ( self ) -> Optional[Any]:
        UpperCamelCase , UpperCamelCase :str = self.model_tester.prepare_config_and_inputs()
        for model_class in self.all_model_classes:
            UpperCamelCase :Optional[int] = model_class(SCREAMING_SNAKE_CASE_ )

            with tempfile.TemporaryDirectory() as tmpdirname:
                model.save_pretrained(SCREAMING_SNAKE_CASE_ )
                UpperCamelCase , UpperCamelCase :List[str] = model_class.from_pretrained(SCREAMING_SNAKE_CASE_ , output_loading_info=SCREAMING_SNAKE_CASE_ )
            self.assertEqual(info['''missing_keys'''] , [] )

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs_for_common()
        self.model_tester.check_encoder_decoder_model_standalone(*SCREAMING_SNAKE_CASE_ )

    @unittest.skip(reason='''Model has no tokens embeddings''' )
    def UpperCAmelCase ( self ) -> int:
        pass

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase :str = inspect.signature(getattr(SCREAMING_SNAKE_CASE_ , '''forward''' ) )
        # The main input is the name of the argument after `self`
        UpperCamelCase :List[str] = list(model_signature.parameters.keys() )[1]
        self.assertEqual(AutoformerModel.main_input_name , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase , UpperCamelCase :Optional[Any] = self.model_tester.prepare_config_and_inputs_for_common()

        for model_class in self.all_model_classes:
            UpperCamelCase :List[Any] = model_class(SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :Optional[Any] = inspect.signature(model.forward )
            # signature.parameters is an OrderedDict => so arg_names order is deterministic
            UpperCamelCase :Tuple = [*signature.parameters.keys()]

            UpperCamelCase :Optional[Any] = [
                '''past_values''',
                '''past_time_features''',
                '''past_observed_mask''',
                '''static_categorical_features''',
                '''static_real_features''',
                '''future_values''',
                '''future_time_features''',
            ]

            if model.__class__.__name__ in ["AutoformerForPrediction"]:
                expected_arg_names.append('''future_observed_mask''' )

            expected_arg_names.extend(
                [
                    '''decoder_attention_mask''',
                    '''head_mask''',
                    '''decoder_head_mask''',
                    '''cross_attn_head_mask''',
                    '''encoder_outputs''',
                    '''past_key_values''',
                    '''output_hidden_states''',
                    '''output_attentions''',
                    '''use_cache''',
                    '''return_dict''',
                ] )

            self.assertListEqual(arg_names[: len(SCREAMING_SNAKE_CASE_ )] , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase , UpperCamelCase :List[Any] = self.model_tester.prepare_config_and_inputs_for_common()
        UpperCamelCase :Dict = True
        UpperCamelCase :Dict = getattr(self.model_tester , '''seq_length''' , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Any = getattr(self.model_tester , '''decoder_seq_length''' , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Union[str, Any] = getattr(self.model_tester , '''encoder_seq_length''' , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :int = getattr(self.model_tester , '''d_model''' , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Tuple = getattr(self.model_tester , '''num_attention_heads''' , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Any = d_model // num_attention_heads

        for model_class in self.all_model_classes:
            UpperCamelCase :Tuple = True
            UpperCamelCase :Tuple = False
            UpperCamelCase :Any = True
            UpperCamelCase :List[Any] = model_class(SCREAMING_SNAKE_CASE_ )
            model.to(SCREAMING_SNAKE_CASE_ )
            model.eval()

            with torch.no_grad():
                UpperCamelCase :int = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) )
            UpperCamelCase :Union[str, Any] = outputs.encoder_attentions if config.is_encoder_decoder else outputs.attentions
            self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers )

            # check that output_attentions also work using config
            del inputs_dict["output_attentions"]
            UpperCamelCase :Dict = True
            UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ )
            model.to(SCREAMING_SNAKE_CASE_ )
            model.eval()

            with torch.no_grad():
                UpperCamelCase :Optional[Any] = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) )
            UpperCamelCase :List[str] = outputs.encoder_attentions
            self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers )
            self.assertListEqual(
                list(attentions[0].shape[-3:] ) ,
                [self.model_tester.num_attention_heads, encoder_seq_length, dim] ,
            )
            UpperCamelCase :List[str] = len(SCREAMING_SNAKE_CASE_ )

            UpperCamelCase :List[Any] = 7

            if "last_hidden_state" in outputs:
                correct_outlen += 1
            if "trend" in outputs:
                correct_outlen += 1
            if "past_key_values" in outputs:
                correct_outlen += 1  # past_key_values have been returned
            if "loss" in outputs:
                correct_outlen += 1
            if "params" in outputs:
                correct_outlen += 1

            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

            # decoder attentions
            UpperCamelCase :Union[str, Any] = outputs.decoder_attentions
            self.assertIsInstance(SCREAMING_SNAKE_CASE_ , (list, tuple) )
            self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers )
            self.assertListEqual(
                list(decoder_attentions[0].shape[-3:] ) ,
                [self.model_tester.num_attention_heads, decoder_seq_length, dim] ,
            )

            # cross attentions
            UpperCamelCase :Union[str, Any] = outputs.cross_attentions
            self.assertIsInstance(SCREAMING_SNAKE_CASE_ , (list, tuple) )
            self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers )
            self.assertListEqual(
                list(cross_attentions[0].shape[-3:] ) ,
                [self.model_tester.num_attention_heads, decoder_seq_length, dim] ,
            )

        # Check attention is always last and order is fine
        UpperCamelCase :Any = True
        UpperCamelCase :int = True
        UpperCamelCase :Any = model_class(SCREAMING_SNAKE_CASE_ )
        model.to(SCREAMING_SNAKE_CASE_ )
        model.eval()

        with torch.no_grad():
            UpperCamelCase :Optional[Any] = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) )

        self.assertEqual(out_len + 2 , len(SCREAMING_SNAKE_CASE_ ) )

        UpperCamelCase :List[str] = outputs.encoder_attentions if config.is_encoder_decoder else outputs.attentions

        self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers )
        self.assertListEqual(
            list(self_attentions[0].shape[-3:] ) ,
            [self.model_tester.num_attention_heads, encoder_seq_length, dim] ,
        )

    @is_flaky()
    def UpperCAmelCase ( self ) -> List[Any]:
        super().test_retain_grad_hidden_states_attentions()


def _A ( SCREAMING_SNAKE_CASE__ : int="train-batch.pt" ):
    UpperCamelCase :Union[str, Any] = hf_hub_download(repo_id='''hf-internal-testing/tourism-monthly-batch''' , filename=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' )
    UpperCamelCase :Tuple = torch.load(SCREAMING_SNAKE_CASE__ , map_location=SCREAMING_SNAKE_CASE__ )
    return batch


@require_torch
@slow
class UpperCAmelCase_ ( unittest.TestCase ):
    """simple docstring"""

    def UpperCAmelCase ( self ) -> List[Any]:
        UpperCamelCase :int = AutoformerModel.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Dict = prepare_batch()

        with torch.no_grad():
            UpperCamelCase :Optional[Any] = model(
                past_values=batch['''past_values'''] ,
                past_time_features=batch['''past_time_features'''] ,
                past_observed_mask=batch['''past_observed_mask'''] ,
                static_categorical_features=batch['''static_categorical_features'''] ,
                future_values=batch['''future_values'''] ,
                future_time_features=batch['''future_time_features'''] ,
            )[0]

        UpperCamelCase :Union[str, Any] = torch.Size(
            (64, model.config.prediction_length + model.config.label_length, model.config.feature_size) )
        self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Tuple = torch.tensor(
            [[0.3593, -1.3398, 0.6330], [0.2279, 1.5396, -0.1792], [0.0450, 1.3225, -0.2335]] , device=SCREAMING_SNAKE_CASE_ )
        self.assertTrue(torch.allclose(output[0, :3, :3] , SCREAMING_SNAKE_CASE_ , atol=SCREAMING_SNAKE_CASE_ ) )

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        UpperCamelCase :Any = AutoformerForPrediction.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Union[str, Any] = prepare_batch('''val-batch.pt''' )
        with torch.no_grad():
            UpperCamelCase :Dict = model(
                past_values=batch['''past_values'''] ,
                past_time_features=batch['''past_time_features'''] ,
                past_observed_mask=batch['''past_observed_mask'''] ,
                static_categorical_features=batch['''static_categorical_features'''] ,
            ).encoder_last_hidden_state

        UpperCamelCase :Union[str, Any] = torch.Size((64, model.config.context_length, model.config.d_model) )
        self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Any = torch.tensor(
            [[-0.0734, -0.9036, 0.8358], [4.7186, 2.4113, 1.9581], [1.7953, 2.3558, 1.2970]] , device=SCREAMING_SNAKE_CASE_ )
        self.assertTrue(torch.allclose(output[0, :3, :3] , SCREAMING_SNAKE_CASE_ , atol=SCREAMING_SNAKE_CASE_ ) )

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase :Optional[int] = AutoformerForPrediction.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Optional[int] = prepare_batch('''val-batch.pt''' )
        with torch.no_grad():
            UpperCamelCase :Tuple = model.generate(
                static_categorical_features=batch['''static_categorical_features'''] ,
                past_time_features=batch['''past_time_features'''] ,
                past_values=batch['''past_values'''] ,
                future_time_features=batch['''future_time_features'''] ,
                past_observed_mask=batch['''past_observed_mask'''] ,
            )

        UpperCamelCase :Optional[int] = torch.Size((64, model.config.num_parallel_samples, model.config.prediction_length) )
        self.assertEqual(outputs.sequences.shape , SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :str = torch.tensor([3130.6763, 4056.5293, 7053.0786] , device=SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :int = outputs.sequences.mean(dim=1 )
        self.assertTrue(torch.allclose(mean_prediction[0, -3:] , SCREAMING_SNAKE_CASE_ , rtol=1e-1 ) )
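
# For context, a minimal sketch of the generation path the integration tests
# above cover, using the same checkpoint (standard transformers usage; here
# prepare_batch stands for the batch-loading helper defined in this file, as
# named in the original test module):
#
#   import torch
#   from transformers import AutoformerForPrediction
#
#   model = AutoformerForPrediction.from_pretrained("huggingface/autoformer-tourism-monthly")
#   batch = prepare_batch("val-batch.pt")
#   with torch.no_grad():
#       outputs = model.generate(
#           past_values=batch["past_values"],
#           past_time_features=batch["past_time_features"],
#           past_observed_mask=batch["past_observed_mask"],
#           static_categorical_features=batch["static_categorical_features"],
#           future_time_features=batch["future_time_features"],
#       )
#   point_forecast = outputs.sequences.mean(dim=1)  # average over parallel sample paths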
from typing import Optional, Tuple, Union

import torch
from einops import rearrange, reduce

from diffusers import DDIMScheduler, DDPMScheduler, DiffusionPipeline, ImagePipelineOutput, UNetaDConditionModel
from diffusers.schedulers.scheduling_ddim import DDIMSchedulerOutput
from diffusers.schedulers.scheduling_ddpm import DDPMSchedulerOutput


__snake_case = 8


def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : Dict=BITS ):
    UpperCamelCase :str = x.device

    UpperCamelCase :Dict = (x * 255).int().clamp(0 , 255 )

    UpperCamelCase :Any = 2 ** torch.arange(bits - 1 , -1 , -1 , device=SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :Dict = rearrange(SCREAMING_SNAKE_CASE__ , '''d -> d 1 1''' )
    UpperCamelCase :str = rearrange(SCREAMING_SNAKE_CASE__ , '''b c h w -> b c 1 h w''' )

    UpperCamelCase :Dict = ((x & mask) != 0).float()
    UpperCamelCase :Any = rearrange(SCREAMING_SNAKE_CASE__ , '''b c d h w -> b (c d) h w''' )
    UpperCamelCase :Optional[int] = bits * 2 - 1
    return bits


def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Dict=BITS ):
    UpperCamelCase :Tuple = x.device

    UpperCamelCase :Optional[Any] = (x > 0).int()
    UpperCamelCase :str = 2 ** torch.arange(bits - 1 , -1 , -1 , device=SCREAMING_SNAKE_CASE__ , dtype=torch.intaa )

    UpperCamelCase :Dict = rearrange(SCREAMING_SNAKE_CASE__ , '''d -> d 1 1''' )
    UpperCamelCase :Optional[Any] = rearrange(SCREAMING_SNAKE_CASE__ , '''b (c d) h w -> b c d h w''' , d=8 )
    UpperCamelCase :int = reduce(x * mask , '''b c d h w -> b c h w''' , '''sum''' )
    return (dec / 255).clamp(0.0 , 1.0 )


def _A ( self : Optional[Any] , SCREAMING_SNAKE_CASE__ : torch.FloatTensor , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : torch.FloatTensor , SCREAMING_SNAKE_CASE__ : float = 0.0 , SCREAMING_SNAKE_CASE__ : bool = True , SCREAMING_SNAKE_CASE__ : Optional[Any]=None , SCREAMING_SNAKE_CASE__ : bool = True , ):
    if self.num_inference_steps is None:
        raise ValueError(
            '''Number of inference steps is \'None\', you need to run \'set_timesteps\' after creating the scheduler''' )

    # See formulas (12) and (16) of DDIM paper https://arxiv.org/pdf/2010.02502.pdf
    # Ideally, read DDIM paper in-detail understanding

    # Notation (<variable name> -> <name in paper>
    # - pred_noise_t -> e_theta(x_t, t)
    # - pred_original_sample -> f_theta(x_t, t) or x_0
    # - std_dev_t -> sigma_t
    # - eta -> η
    # - pred_sample_direction -> "direction pointing to x_t"
    # - pred_prev_sample -> "x_t-1"

    # 1. get previous step value (=t-1)
    UpperCamelCase :Tuple = timestep - self.config.num_train_timesteps // self.num_inference_steps

    # 2. compute alphas, betas
    UpperCamelCase :List[Any] = self.alphas_cumprod[timestep]
    UpperCamelCase :List[str] = self.alphas_cumprod[prev_timestep] if prev_timestep >= 0 else self.final_alpha_cumprod

    UpperCamelCase :str = 1 - alpha_prod_t

    # 3. compute predicted original sample from predicted noise also called
    # "predicted x_0" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf
    UpperCamelCase :str = (sample - beta_prod_t ** 0.5 * model_output) / alpha_prod_t ** 0.5

    # 4. Clip "predicted x_0"
    UpperCamelCase :str = self.bit_scale
    if self.config.clip_sample:
        UpperCamelCase :int = torch.clamp(SCREAMING_SNAKE_CASE__ , -scale , SCREAMING_SNAKE_CASE__ )

    # 5. compute variance: "sigma_t(η)" -> see formula (16)
    # σ_t = sqrt((1 − α_t−1)/(1 − α_t)) * sqrt(1 − α_t/α_t−1)
    UpperCamelCase :str = self._get_variance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :Optional[int] = eta * variance ** 0.5

    if use_clipped_model_output:
        # the model_output is always re-derived from the clipped x_0 in Glide
        UpperCamelCase :List[Any] = (sample - alpha_prod_t ** 0.5 * pred_original_sample) / beta_prod_t ** 0.5

    # 6. compute "direction pointing to x_t" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf
    UpperCamelCase :Any = (1 - alpha_prod_t_prev - std_dev_t**2) ** 0.5 * model_output

    # 7. compute x_t without "random noise" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf
    UpperCamelCase :Union[str, Any] = alpha_prod_t_prev ** 0.5 * pred_original_sample + pred_sample_direction

    if eta > 0:
        # randn_like does not support generator https://github.com/pytorch/pytorch/issues/27072
        UpperCamelCase :Union[str, Any] = model_output.device if torch.is_tensor(SCREAMING_SNAKE_CASE__ ) else '''cpu'''
        UpperCamelCase :Any = torch.randn(model_output.shape , dtype=model_output.dtype , generator=SCREAMING_SNAKE_CASE__ ).to(SCREAMING_SNAKE_CASE__ )
        UpperCamelCase :List[Any] = self._get_variance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) ** 0.5 * eta * noise

        UpperCamelCase :Any = prev_sample + variance

    if not return_dict:
        return (prev_sample,)

    return DDIMSchedulerOutput(prev_sample=SCREAMING_SNAKE_CASE__ , pred_original_sample=SCREAMING_SNAKE_CASE__ )


def _A ( self : Optional[Any] , SCREAMING_SNAKE_CASE__ : torch.FloatTensor , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : torch.FloatTensor , SCREAMING_SNAKE_CASE__ : Optional[Any]="epsilon" , SCREAMING_SNAKE_CASE__ : Optional[Any]=None , SCREAMING_SNAKE_CASE__ : bool = True , ):
    UpperCamelCase :Optional[int] = timestep

    if model_output.shape[1] == sample.shape[1] * 2 and self.variance_type in ["learned", "learned_range"]:
        UpperCamelCase , UpperCamelCase :Any = torch.split(SCREAMING_SNAKE_CASE__ , sample.shape[1] , dim=1 )
    else:
        UpperCamelCase :Union[str, Any] = None

    # 1. compute alphas, betas
    UpperCamelCase :Union[str, Any] = self.alphas_cumprod[t]
    UpperCamelCase :Tuple = self.alphas_cumprod[t - 1] if t > 0 else self.one
    UpperCamelCase :Any = 1 - alpha_prod_t
    UpperCamelCase :List[Any] = 1 - alpha_prod_t_prev

    # 2. compute predicted original sample from predicted noise also called
    # "predicted x_0" of formula (15) from https://arxiv.org/pdf/2006.11239.pdf
    if prediction_type == "epsilon":
        UpperCamelCase :Optional[int] = (sample - beta_prod_t ** 0.5 * model_output) / alpha_prod_t ** 0.5
    elif prediction_type == "sample":
        UpperCamelCase :Optional[int] = model_output
    else:
        raise ValueError(F'''Unsupported prediction_type {prediction_type}.''' )

    # 3. Clip "predicted x_0"
    UpperCamelCase :Optional[int] = self.bit_scale
    if self.config.clip_sample:
        UpperCamelCase :List[Any] = torch.clamp(SCREAMING_SNAKE_CASE__ , -scale , SCREAMING_SNAKE_CASE__ )

    # 4. Compute coefficients for pred_original_sample x_0 and current sample x_t
    # See formula (7) from https://arxiv.org/pdf/2006.11239.pdf
    UpperCamelCase :str = (alpha_prod_t_prev ** 0.5 * self.betas[t]) / beta_prod_t
    UpperCamelCase :Union[str, Any] = self.alphas[t] ** 0.5 * beta_prod_t_prev / beta_prod_t

    # 5. Compute predicted previous sample µ_t
    # See formula (7) from https://arxiv.org/pdf/2006.11239.pdf
    UpperCamelCase :Optional[Any] = pred_original_sample_coeff * pred_original_sample + current_sample_coeff * sample

    # 6. Add noise
    UpperCamelCase :List[Any] = 0
    if t > 0:
        UpperCamelCase :Optional[Any] = torch.randn(
            model_output.size() , dtype=model_output.dtype , layout=model_output.layout , generator=SCREAMING_SNAKE_CASE__ ).to(model_output.device )
        UpperCamelCase :str = (self._get_variance(SCREAMING_SNAKE_CASE__ , predicted_variance=SCREAMING_SNAKE_CASE__ ) ** 0.5) * noise

    UpperCamelCase :Dict = pred_prev_sample + variance

    if not return_dict:
        return (pred_prev_sample,)

    return DDPMSchedulerOutput(prev_sample=SCREAMING_SNAKE_CASE__ , pred_original_sample=SCREAMING_SNAKE_CASE__ )


class UpperCAmelCase_ ( lowercase ):
    """simple docstring"""

    def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = 1.0 , ) -> Union[str, Any]:
        super().__init__()
        UpperCamelCase :Tuple = bit_scale
        UpperCamelCase :Union[str, Any] = (
            ddim_bit_scheduler_step if isinstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) else ddpm_bit_scheduler_step
        )

        self.register_modules(unet=SCREAMING_SNAKE_CASE_ , scheduler=SCREAMING_SNAKE_CASE_ )

    @torch.no_grad()
    def __call__( self , SCREAMING_SNAKE_CASE_ = 256 , SCREAMING_SNAKE_CASE_ = 256 , SCREAMING_SNAKE_CASE_ = 50 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , SCREAMING_SNAKE_CASE_ = "pil" , SCREAMING_SNAKE_CASE_ = True , **SCREAMING_SNAKE_CASE_ , ) -> Union[Tuple, ImagePipelineOutput]:
        UpperCamelCase :Any = torch.randn(
            (batch_size, self.unet.config.in_channels, height, width) ,
            generator=SCREAMING_SNAKE_CASE_ ,
        )
        UpperCamelCase :Dict = decimal_to_bits(SCREAMING_SNAKE_CASE_ ) * self.bit_scale
        UpperCamelCase :Any = latents.to(self.device )

        self.scheduler.set_timesteps(SCREAMING_SNAKE_CASE_ )

        for t in self.progress_bar(self.scheduler.timesteps ):
            # predict the noise residual
            UpperCamelCase :Any = self.unet(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ).sample

            # compute the previous noisy sample x_t -> x_t-1
            UpperCamelCase :Union[str, Any] = self.scheduler.step(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ).prev_sample

        UpperCamelCase :str = bits_to_decimal(SCREAMING_SNAKE_CASE_ )

        if output_type == "pil":
            UpperCamelCase :Dict = self.numpy_to_pil(SCREAMING_SNAKE_CASE_ )

        if not return_dict:
            return (image,)

        return ImagePipelineOutput(images=SCREAMING_SNAKE_CASE_ )
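
# Quick sanity check of the bit codec defined above, assuming the encoder and
# decoder are bound to the names decimal_to_bits / bits_to_decimal as in the
# original community pipeline: encoding an image tensor in [0, 1] to ±1
# bit-planes and decoding back should round-trip 8-bit pixel values exactly.
#
#   import torch
#
#   x = torch.rand(1, 3, 8, 8)      # (batch, channels, height, width)
#   bits = decimal_to_bits(x)       # (1, 3 * 8, 8, 8), entries in {-1, 1}
#   recon = bits_to_decimal(bits)   # back to [0, 1]
#   assert ((recon * 255).round().int() == (x * 255).int()).all()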
import inspect
import logging
import os
import random
import shutil
import tempfile
import unittest

import pytest
import torch
from torch import nn
from torch.utils.data import DataLoader, TensorDataset

from accelerate import Accelerator
from accelerate.test_utils import execute_subprocess_async, require_cuda
from accelerate.utils import ProjectConfiguration, set_seed


__snake_case = logging.getLogger(__name__)


def _A ( SCREAMING_SNAKE_CASE__ : Dict=2 , SCREAMING_SNAKE_CASE__ : Dict=3 , SCREAMING_SNAKE_CASE__ : Any=16 , SCREAMING_SNAKE_CASE__ : int = 10 , SCREAMING_SNAKE_CASE__ : int = 2 ):
    def get_dataset(SCREAMING_SNAKE_CASE__ : List[Any] ):
        UpperCamelCase :Union[str, Any] = torch.randn(batch_size * n_batches , 1 )
        return TensorDataset(SCREAMING_SNAKE_CASE__ , a * x + b + 0.1 * torch.randn(batch_size * n_batches , 1 ) )

    UpperCamelCase :str = get_dataset(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :Any = get_dataset(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :Any = DataLoader(SCREAMING_SNAKE_CASE__ , shuffle=SCREAMING_SNAKE_CASE__ , batch_size=SCREAMING_SNAKE_CASE__ , num_workers=4 )
    UpperCamelCase :Any = DataLoader(SCREAMING_SNAKE_CASE__ , shuffle=SCREAMING_SNAKE_CASE__ , batch_size=SCREAMING_SNAKE_CASE__ , num_workers=4 )
    return (train_dataloader, valid_dataloader)


def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Any=None ):
    UpperCamelCase :Dict = []
    for epoch in range(SCREAMING_SNAKE_CASE__ ):
        # Train quickly
        model.train()
        for batch in dataloader:
            UpperCamelCase , UpperCamelCase :Optional[Any] = batch
            UpperCamelCase :int = model(SCREAMING_SNAKE_CASE__ )
            UpperCamelCase :Optional[int] = torch.nn.functional.mse_loss(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ )
            accelerator.backward(SCREAMING_SNAKE_CASE__ )
            optimizer.step()
            optimizer.zero_grad()
            rands.append(random.random() )  # Introduce some randomness
        if scheduler is not None:
            scheduler.step()
    return rands


class UpperCAmelCase_ ( nn.Module ):
    """simple docstring"""

    def __init__( self ) -> str:
        super().__init__()
        UpperCamelCase :Optional[int] = nn.Parameter(torch.randn(1 ) )
        UpperCamelCase :int = nn.Parameter(torch.randn(1 ) )

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> int:
        return x * self.a + self.b


class UpperCAmelCase_ ( unittest.TestCase ):
    """simple docstring"""

    def UpperCAmelCase ( self ) -> Dict:
        with tempfile.TemporaryDirectory() as tmpdir:
            set_seed(42 )
            UpperCamelCase :Optional[Any] = DummyModel()
            UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 )
            UpperCamelCase , UpperCamelCase :Tuple = dummy_dataloaders()
            UpperCamelCase :Tuple = ProjectConfiguration(total_limit=1 , project_dir=SCREAMING_SNAKE_CASE_ , automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ )
            # Train baseline
            UpperCamelCase :Dict = Accelerator(project_config=SCREAMING_SNAKE_CASE_ )
            UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Union[str, Any] = accelerator.prepare(
                SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            # Save initial
            accelerator.save_state()

            # Save second state
            accelerator.save_state()
            self.assertEqual(len(os.listdir(accelerator.project_dir ) ) , 1 )

    def UpperCAmelCase ( self ) -> str:
        with tempfile.TemporaryDirectory() as tmpdir:
            set_seed(42 )
            UpperCamelCase :List[str] = DummyModel()
            UpperCamelCase :Union[str, Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 )
            UpperCamelCase , UpperCamelCase :Dict = dummy_dataloaders()
            # Train baseline
            UpperCamelCase :Dict = Accelerator()
            UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :int = accelerator.prepare(
                SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            # Save initial
            UpperCamelCase :int = os.path.join(SCREAMING_SNAKE_CASE_ , '''initial''' )
            accelerator.save_state(SCREAMING_SNAKE_CASE_ )
            ((UpperCamelCase) , (UpperCamelCase)) :Optional[Any] = model.a.item(), model.b.item()
            UpperCamelCase :Optional[int] = optimizer.state_dict()
            UpperCamelCase :Optional[int] = train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            ((UpperCamelCase) , (UpperCamelCase)) :Dict = model.a.item(), model.b.item()
            UpperCamelCase :Optional[Any] = optimizer.state_dict()

            # Train partially
            set_seed(42 )
            UpperCamelCase :Any = DummyModel()
            UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 )
            UpperCamelCase , UpperCamelCase :List[Any] = dummy_dataloaders()
            UpperCamelCase :List[str] = Accelerator()
            UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Tuple = accelerator.prepare(
                SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            accelerator.load_state(SCREAMING_SNAKE_CASE_ )
            ((UpperCamelCase) , (UpperCamelCase)) :Tuple = model.a.item(), model.b.item()
            UpperCamelCase :Tuple = optimizer.state_dict()
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

            UpperCamelCase :Optional[int] = train(2 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

            # Save everything
            UpperCamelCase :Optional[int] = os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoint''' )
            accelerator.save_state(SCREAMING_SNAKE_CASE_ )

            # Load everything back in and make sure all states work
            accelerator.load_state(SCREAMING_SNAKE_CASE_ )
            test_rands += train(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            ((UpperCamelCase) , (UpperCamelCase)) :Union[str, Any] = model.a.item(), model.b.item()
            UpperCamelCase :Optional[Any] = optimizer.state_dict()
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> List[Any]:
        with tempfile.TemporaryDirectory() as tmpdir:
            set_seed(42 )
            UpperCamelCase :List[Any] = DummyModel()
            UpperCamelCase :Optional[int] = torch.optim.Adam(params=model.parameters() , lr=1e-3 )
            UpperCamelCase , UpperCamelCase :int = dummy_dataloaders()
            UpperCamelCase :int = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ )
            # Train baseline
            UpperCamelCase :Union[str, Any] = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ )
            UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Optional[Any] = accelerator.prepare(
                SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            # Save initial
            accelerator.save_state()
            ((UpperCamelCase) , (UpperCamelCase)) :List[str] = model.a.item(), model.b.item()
            UpperCamelCase :Dict = optimizer.state_dict()
            UpperCamelCase :Any = train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            ((UpperCamelCase) , (UpperCamelCase)) :Optional[int] = model.a.item(), model.b.item()
            UpperCamelCase :Any = optimizer.state_dict()

            # Train partially
            set_seed(42 )
            UpperCamelCase :Union[str, Any] = DummyModel()
            UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 )
            UpperCamelCase , UpperCamelCase :Tuple = dummy_dataloaders()
            UpperCamelCase :Optional[Any] = ProjectConfiguration(iteration=1 , automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :Union[str, Any] = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ )
            UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[str] = accelerator.prepare(
                SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) )
            ((UpperCamelCase) , (UpperCamelCase)) :Dict = model.a.item(), model.b.item()
            UpperCamelCase :Dict = optimizer.state_dict()
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

            UpperCamelCase :Any = train(2 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

            # Save everything
            accelerator.save_state()

            # Load everything back in and make sure all states work
            accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_1''' ) )
            test_rands += train(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            ((UpperCamelCase) , (UpperCamelCase)) :Optional[Any] = model.a.item(), model.b.item()
            UpperCamelCase :str = optimizer.state_dict()
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        UpperCamelCase :List[Any] = torch.tensor([1, 2, 3] )
        UpperCamelCase :Any = torch.tensor([2, 3, 4] )
        UpperCamelCase :Optional[Any] = DummyModel()
        UpperCamelCase :Optional[Any] = torch.optim.Adam(net.parameters() )
        UpperCamelCase :Optional[Any] = Accelerator()
        with self.assertRaises(SCREAMING_SNAKE_CASE_ ) as ve:
            accelerator.register_for_checkpointing(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Optional[Any] = str(ve.exception )
        self.assertTrue('''Item at index 0''' in message )
        self.assertTrue('''Item at index 1''' in message )
        self.assertFalse('''Item at index 2''' in message )
        self.assertFalse('''Item at index 3''' in message )

    def UpperCAmelCase ( self ) -> Any:
        with tempfile.TemporaryDirectory() as tmpdir:
            set_seed(42 )
            UpperCamelCase :List[Any] = DummyModel()
            UpperCamelCase :List[str] = torch.optim.Adam(params=model.parameters() , lr=1e-3 )
            UpperCamelCase :Any = torch.optim.lr_scheduler.StepLR(SCREAMING_SNAKE_CASE_ , step_size=1 , gamma=0.99 )
            UpperCamelCase , UpperCamelCase :Any = dummy_dataloaders()
            UpperCamelCase :Optional[int] = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ )
            # Train baseline
            UpperCamelCase :str = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ )
            UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Optional[int] = accelerator.prepare(
                SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
:Tuple = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() UpperCamelCase :int = scheduler.state_dict() train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertNotEqual(SCREAMING_SNAKE_CASE_ , scheduler.state_dict() ) # Load everything back in and make sure all states work accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) self.assertEqual(SCREAMING_SNAKE_CASE_ , scheduler.state_dict() ) def UpperCAmelCase ( self ) -> Union[str, Any]: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :int = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ , total_limit=2 ) # Train baseline UpperCamelCase :Tuple = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = accelerator.prepare(SCREAMING_SNAKE_CASE_ ) # Save 3 states: for _ in range(11 ): accelerator.save_state() self.assertTrue(not os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) ) self.assertTrue(os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_9''' ) ) ) self.assertTrue(os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_10''' ) ) ) @require_cuda def UpperCAmelCase ( self ) -> int: UpperCamelCase :int = ['''torchrun''', F'''--nproc_per_node={torch.cuda.device_count()}''', inspect.getfile(self.__class__ )] execute_subprocess_async(SCREAMING_SNAKE_CASE_ , env=os.environ.copy() ) if __name__ == "__main__": __snake_case = """/tmp/accelerate/state_checkpointing""" __snake_case = DummyModel() __snake_case = torch.optim.Adam(params=model.parameters(), lr=1E-3) __snake_case = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.9_9) __snake_case , __snake_case = dummy_dataloaders() __snake_case = ProjectConfiguration(automatic_checkpoint_naming=True) # Train baseline __snake_case = Accelerator(project_dir=savedir, project_config=project_config, mixed_precision="""no""") if accelerator.process_index == 0: if os.path.exists(savedir): shutil.rmtree(savedir) os.makedirs(savedir) __snake_case , __snake_case , __snake_case , __snake_case , __snake_case = accelerator.prepare( model, optimizer, train_dataloader, valid_dataloader, scheduler ) __snake_case , __snake_case = accelerator.prepare(model, optimizer) train(3, model, train_dataloader, optimizer, accelerator, scheduler) # Check that the intial optimizer is loaded on the GPU for group in optimizer.param_groups: __snake_case = group["""params"""][0].device break assert param_device.type == accelerator.device.type __snake_case = model.cpu() accelerator.wait_for_everyone() accelerator.save_state() accelerator.wait_for_everyone() # Check CPU state accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""cpu""") for group in optimizer.param_groups: __snake_case = group["""params"""][0].device break assert ( param_device.type == torch.device("""cpu""").type ), f"Loaded optimizer states did not match, expected to be loaded on the CPU but got {param_device}" # Check device state model.to(accelerator.device) accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""on_device""") for group in 
optimizer.param_groups: __snake_case = group["""params"""][0].device break assert ( param_device.type == accelerator.device.type ), f"Loaded optimizer states did not match, expected to be loaded on {accelerator.device} but got {param_device}" # Check error with pytest.raises(TypeError, match="""Unsupported optimizer map location passed"""): accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""invalid""") accelerator.wait_for_everyone() if accelerator.process_index == 0: shutil.rmtree(savedir) accelerator.wait_for_everyone()
def nand_gate(input_1: int, input_2: int) -> int:
    # NAND is 0 only when both inputs are 1, i.e. 1 whenever any input is 0.
    return int((input_1, input_2).count(0) != 0)


def test_nand_gate() -> None:
    assert nand_gate(0, 0) == 1
    assert nand_gate(0, 1) == 1
    assert nand_gate(1, 0) == 1
    assert nand_gate(1, 1) == 0


if __name__ == "__main__":
    print(nand_gate(0, 0))
    print(nand_gate(0, 1))
    print(nand_gate(1, 0))
    print(nand_gate(1, 1))
import numpy as np

SQUARE = [
    ["a", "b", "c", "d", "e"],
    ["f", "g", "h", "i", "k"],
    ["l", "m", "n", "o", "p"],
    ["q", "r", "s", "t", "u"],
    ["v", "w", "x", "y", "z"],
]


class BifidCipher:
    """Bifid cipher over a fixed 5x5 Polybius square ("j" is merged into "i")."""

    def __init__(self) -> None:
        self.SQUARE = np.array(SQUARE)

    def letter_to_numbers(self, letter: str) -> np.ndarray:
        # Return the 1-based (row, column) coordinates of `letter` in the square.
        index_1, index_2 = np.where(letter == self.SQUARE)
        return np.concatenate([index_1 + 1, index_2 + 1])

    def numbers_to_letter(self, index_1: int, index_2: int) -> str:
        # Map 1-based (row, column) coordinates back to a letter.
        return self.SQUARE[index_1 - 1, index_2 - 1]

    def encode(self, message: str) -> str:
        message = message.lower().replace(" ", "").replace("j", "i")

        # Write the row indices on one line and the column indices below them.
        first_step = np.empty((2, len(message)))
        for letter_index in range(len(message)):
            numbers = self.letter_to_numbers(message[letter_index])
            first_step[0, letter_index] = numbers[0]
            first_step[1, letter_index] = numbers[1]

        # Read the grid row by row and regroup the digits into pairs.
        second_step = first_step.reshape(2 * len(message))
        encoded_message = ""
        for numbers_index in range(len(message)):
            index_1 = int(second_step[numbers_index * 2])
            index_2 = int(second_step[numbers_index * 2 + 1])
            encoded_message += self.numbers_to_letter(index_1, index_2)

        return encoded_message

    def decode(self, message: str) -> str:
        message = message.lower().replace(" ", "")

        # Flatten the coordinate pairs of the ciphertext into one sequence.
        first_step = np.empty(2 * len(message))
        for letter_index in range(len(message)):
            numbers = self.letter_to_numbers(message[letter_index])
            first_step[letter_index * 2] = numbers[0]
            first_step[letter_index * 2 + 1] = numbers[1]

        # Split the sequence back into a row line and a column line.
        second_step = first_step.reshape((2, len(message)))
        decoded_message = ""
        for numbers_index in range(len(message)):
            index_1 = int(second_step[0, numbers_index])
            index_2 = int(second_step[1, numbers_index])
            decoded_message += self.numbers_to_letter(index_1, index_2)

        return decoded_message
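# A minimal round-trip sketch for the BifidCipher above. Note the assumed
# normalization: encoding drops spaces and folds "j" into "i", so
# decode(encode(m)) matches m only up to those substitutions.
if __name__ == "__main__":
    cipher = BifidCipher()
    encoded = cipher.encode("testmessage")
    decoded = cipher.decode(encoded)
    print(encoded)  # a ciphertext of the same length over the 5x5 square
    print(decoded)  # -> "testmessage"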
from collections import OrderedDict from ...utils import logging from .auto_factory import _BaseAutoModelClass, _LazyAutoMapping, auto_class_update from .configuration_auto import CONFIG_MAPPING_NAMES __snake_case = logging.get_logger(__name__) __snake_case = OrderedDict( [ # Base model mapping ("""albert""", """FlaxAlbertModel"""), ("""bart""", """FlaxBartModel"""), ("""beit""", """FlaxBeitModel"""), ("""bert""", """FlaxBertModel"""), ("""big_bird""", """FlaxBigBirdModel"""), ("""blenderbot""", """FlaxBlenderbotModel"""), ("""blenderbot-small""", """FlaxBlenderbotSmallModel"""), ("""clip""", """FlaxCLIPModel"""), ("""distilbert""", """FlaxDistilBertModel"""), ("""electra""", """FlaxElectraModel"""), ("""gpt-sw3""", """FlaxGPT2Model"""), ("""gpt2""", """FlaxGPT2Model"""), ("""gpt_neo""", """FlaxGPTNeoModel"""), ("""gptj""", """FlaxGPTJModel"""), ("""longt5""", """FlaxLongT5Model"""), ("""marian""", """FlaxMarianModel"""), ("""mbart""", """FlaxMBartModel"""), ("""mt5""", """FlaxMT5Model"""), ("""opt""", """FlaxOPTModel"""), ("""pegasus""", """FlaxPegasusModel"""), ("""regnet""", """FlaxRegNetModel"""), ("""resnet""", """FlaxResNetModel"""), ("""roberta""", """FlaxRobertaModel"""), ("""roberta-prelayernorm""", """FlaxRobertaPreLayerNormModel"""), ("""roformer""", """FlaxRoFormerModel"""), ("""t5""", """FlaxT5Model"""), ("""vision-text-dual-encoder""", """FlaxVisionTextDualEncoderModel"""), ("""vit""", """FlaxViTModel"""), ("""wav2vec2""", """FlaxWav2Vec2Model"""), ("""whisper""", """FlaxWhisperModel"""), ("""xglm""", """FlaxXGLMModel"""), ("""xlm-roberta""", """FlaxXLMRobertaModel"""), ] ) __snake_case = OrderedDict( [ # Model for pre-training mapping ("""albert""", """FlaxAlbertForPreTraining"""), ("""bart""", """FlaxBartForConditionalGeneration"""), ("""bert""", """FlaxBertForPreTraining"""), ("""big_bird""", """FlaxBigBirdForPreTraining"""), ("""electra""", """FlaxElectraForPreTraining"""), ("""longt5""", """FlaxLongT5ForConditionalGeneration"""), ("""mbart""", """FlaxMBartForConditionalGeneration"""), ("""mt5""", """FlaxMT5ForConditionalGeneration"""), ("""roberta""", """FlaxRobertaForMaskedLM"""), ("""roberta-prelayernorm""", """FlaxRobertaPreLayerNormForMaskedLM"""), ("""roformer""", """FlaxRoFormerForMaskedLM"""), ("""t5""", """FlaxT5ForConditionalGeneration"""), ("""wav2vec2""", """FlaxWav2Vec2ForPreTraining"""), ("""whisper""", """FlaxWhisperForConditionalGeneration"""), ("""xlm-roberta""", """FlaxXLMRobertaForMaskedLM"""), ] ) __snake_case = OrderedDict( [ # Model for Masked LM mapping ("""albert""", """FlaxAlbertForMaskedLM"""), ("""bart""", """FlaxBartForConditionalGeneration"""), ("""bert""", """FlaxBertForMaskedLM"""), ("""big_bird""", """FlaxBigBirdForMaskedLM"""), ("""distilbert""", """FlaxDistilBertForMaskedLM"""), ("""electra""", """FlaxElectraForMaskedLM"""), ("""mbart""", """FlaxMBartForConditionalGeneration"""), ("""roberta""", """FlaxRobertaForMaskedLM"""), ("""roberta-prelayernorm""", """FlaxRobertaPreLayerNormForMaskedLM"""), ("""roformer""", """FlaxRoFormerForMaskedLM"""), ("""xlm-roberta""", """FlaxXLMRobertaForMaskedLM"""), ] ) __snake_case = OrderedDict( [ # Model for Seq2Seq Causal LM mapping ("""bart""", """FlaxBartForConditionalGeneration"""), ("""blenderbot""", """FlaxBlenderbotForConditionalGeneration"""), ("""blenderbot-small""", """FlaxBlenderbotSmallForConditionalGeneration"""), ("""encoder-decoder""", """FlaxEncoderDecoderModel"""), ("""longt5""", """FlaxLongT5ForConditionalGeneration"""), ("""marian""", """FlaxMarianMTModel"""), ("""mbart""", 
"""FlaxMBartForConditionalGeneration"""), ("""mt5""", """FlaxMT5ForConditionalGeneration"""), ("""pegasus""", """FlaxPegasusForConditionalGeneration"""), ("""t5""", """FlaxT5ForConditionalGeneration"""), ] ) __snake_case = OrderedDict( [ # Model for Image-classsification ("""beit""", """FlaxBeitForImageClassification"""), ("""regnet""", """FlaxRegNetForImageClassification"""), ("""resnet""", """FlaxResNetForImageClassification"""), ("""vit""", """FlaxViTForImageClassification"""), ] ) __snake_case = OrderedDict( [ ("""vision-encoder-decoder""", """FlaxVisionEncoderDecoderModel"""), ] ) __snake_case = OrderedDict( [ # Model for Causal LM mapping ("""bart""", """FlaxBartForCausalLM"""), ("""bert""", """FlaxBertForCausalLM"""), ("""big_bird""", """FlaxBigBirdForCausalLM"""), ("""electra""", """FlaxElectraForCausalLM"""), ("""gpt-sw3""", """FlaxGPT2LMHeadModel"""), ("""gpt2""", """FlaxGPT2LMHeadModel"""), ("""gpt_neo""", """FlaxGPTNeoForCausalLM"""), ("""gptj""", """FlaxGPTJForCausalLM"""), ("""opt""", """FlaxOPTForCausalLM"""), ("""roberta""", """FlaxRobertaForCausalLM"""), ("""roberta-prelayernorm""", """FlaxRobertaPreLayerNormForCausalLM"""), ("""xglm""", """FlaxXGLMForCausalLM"""), ("""xlm-roberta""", """FlaxXLMRobertaForCausalLM"""), ] ) __snake_case = OrderedDict( [ # Model for Sequence Classification mapping ("""albert""", """FlaxAlbertForSequenceClassification"""), ("""bart""", """FlaxBartForSequenceClassification"""), ("""bert""", """FlaxBertForSequenceClassification"""), ("""big_bird""", """FlaxBigBirdForSequenceClassification"""), ("""distilbert""", """FlaxDistilBertForSequenceClassification"""), ("""electra""", """FlaxElectraForSequenceClassification"""), ("""mbart""", """FlaxMBartForSequenceClassification"""), ("""roberta""", """FlaxRobertaForSequenceClassification"""), ("""roberta-prelayernorm""", """FlaxRobertaPreLayerNormForSequenceClassification"""), ("""roformer""", """FlaxRoFormerForSequenceClassification"""), ("""xlm-roberta""", """FlaxXLMRobertaForSequenceClassification"""), ] ) __snake_case = OrderedDict( [ # Model for Question Answering mapping ("""albert""", """FlaxAlbertForQuestionAnswering"""), ("""bart""", """FlaxBartForQuestionAnswering"""), ("""bert""", """FlaxBertForQuestionAnswering"""), ("""big_bird""", """FlaxBigBirdForQuestionAnswering"""), ("""distilbert""", """FlaxDistilBertForQuestionAnswering"""), ("""electra""", """FlaxElectraForQuestionAnswering"""), ("""mbart""", """FlaxMBartForQuestionAnswering"""), ("""roberta""", """FlaxRobertaForQuestionAnswering"""), ("""roberta-prelayernorm""", """FlaxRobertaPreLayerNormForQuestionAnswering"""), ("""roformer""", """FlaxRoFormerForQuestionAnswering"""), ("""xlm-roberta""", """FlaxXLMRobertaForQuestionAnswering"""), ] ) __snake_case = OrderedDict( [ # Model for Token Classification mapping ("""albert""", """FlaxAlbertForTokenClassification"""), ("""bert""", """FlaxBertForTokenClassification"""), ("""big_bird""", """FlaxBigBirdForTokenClassification"""), ("""distilbert""", """FlaxDistilBertForTokenClassification"""), ("""electra""", """FlaxElectraForTokenClassification"""), ("""roberta""", """FlaxRobertaForTokenClassification"""), ("""roberta-prelayernorm""", """FlaxRobertaPreLayerNormForTokenClassification"""), ("""roformer""", """FlaxRoFormerForTokenClassification"""), ("""xlm-roberta""", """FlaxXLMRobertaForTokenClassification"""), ] ) __snake_case = OrderedDict( [ # Model for Multiple Choice mapping ("""albert""", """FlaxAlbertForMultipleChoice"""), ("""bert""", """FlaxBertForMultipleChoice"""), ("""big_bird""", 
"""FlaxBigBirdForMultipleChoice"""), ("""distilbert""", """FlaxDistilBertForMultipleChoice"""), ("""electra""", """FlaxElectraForMultipleChoice"""), ("""roberta""", """FlaxRobertaForMultipleChoice"""), ("""roberta-prelayernorm""", """FlaxRobertaPreLayerNormForMultipleChoice"""), ("""roformer""", """FlaxRoFormerForMultipleChoice"""), ("""xlm-roberta""", """FlaxXLMRobertaForMultipleChoice"""), ] ) __snake_case = OrderedDict( [ ("""bert""", """FlaxBertForNextSentencePrediction"""), ] ) __snake_case = OrderedDict( [ ("""speech-encoder-decoder""", """FlaxSpeechEncoderDecoderModel"""), ("""whisper""", """FlaxWhisperForConditionalGeneration"""), ] ) __snake_case = OrderedDict( [ ("""whisper""", """FlaxWhisperForAudioClassification"""), ] ) __snake_case = _LazyAutoMapping(CONFIG_MAPPING_NAMES, FLAX_MODEL_MAPPING_NAMES) __snake_case = _LazyAutoMapping(CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_PRETRAINING_MAPPING_NAMES) __snake_case = _LazyAutoMapping(CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_MASKED_LM_MAPPING_NAMES) __snake_case = _LazyAutoMapping( CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING_NAMES ) __snake_case = _LazyAutoMapping( CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING_NAMES ) __snake_case = _LazyAutoMapping(CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_VISION_2_SEQ_MAPPING_NAMES) __snake_case = _LazyAutoMapping(CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_CAUSAL_LM_MAPPING_NAMES) __snake_case = _LazyAutoMapping( CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING_NAMES ) __snake_case = _LazyAutoMapping( CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_QUESTION_ANSWERING_MAPPING_NAMES ) __snake_case = _LazyAutoMapping( CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING_NAMES ) __snake_case = _LazyAutoMapping( CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_MULTIPLE_CHOICE_MAPPING_NAMES ) __snake_case = _LazyAutoMapping( CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING_NAMES ) __snake_case = _LazyAutoMapping( CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_SPEECH_SEQ_2_SEQ_MAPPING_NAMES ) __snake_case = _LazyAutoMapping( CONFIG_MAPPING_NAMES, FLAX_MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING_NAMES ) class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : Optional[int] =FLAX_MODEL_MAPPING __snake_case = auto_class_update(FlaxAutoModel) class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : List[Any] =FLAX_MODEL_FOR_PRETRAINING_MAPPING __snake_case = auto_class_update(FlaxAutoModelForPreTraining, head_doc="""pretraining""") class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : Tuple =FLAX_MODEL_FOR_CAUSAL_LM_MAPPING __snake_case = auto_class_update(FlaxAutoModelForCausalLM, head_doc="""causal language modeling""") class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : Any =FLAX_MODEL_FOR_MASKED_LM_MAPPING __snake_case = auto_class_update(FlaxAutoModelForMaskedLM, head_doc="""masked language modeling""") class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : Optional[int] =FLAX_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING __snake_case = auto_class_update( FlaxAutoModelForSeqaSeqLM, head_doc="""sequence-to-sequence language modeling""", checkpoint_for_example="""t5-base""" ) class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : Optional[int] =FLAX_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING __snake_case = auto_class_update( FlaxAutoModelForSequenceClassification, 
head_doc="""sequence classification""" ) class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : List[Any] =FLAX_MODEL_FOR_QUESTION_ANSWERING_MAPPING __snake_case = auto_class_update(FlaxAutoModelForQuestionAnswering, head_doc="""question answering""") class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : List[Any] =FLAX_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING __snake_case = auto_class_update( FlaxAutoModelForTokenClassification, head_doc="""token classification""" ) class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : Optional[int] =FLAX_MODEL_FOR_MULTIPLE_CHOICE_MAPPING __snake_case = auto_class_update(FlaxAutoModelForMultipleChoice, head_doc="""multiple choice""") class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : List[Any] =FLAX_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING __snake_case = auto_class_update( FlaxAutoModelForNextSentencePrediction, head_doc="""next sentence prediction""" ) class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : str =FLAX_MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING __snake_case = auto_class_update( FlaxAutoModelForImageClassification, head_doc="""image classification""" ) class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : Dict =FLAX_MODEL_FOR_VISION_2_SEQ_MAPPING __snake_case = auto_class_update(FlaxAutoModelForVisionaSeq, head_doc="""vision-to-text modeling""") class UpperCAmelCase_ ( _BaseAutoModelClass ): """simple docstring""" UpperCamelCase_ : List[Any] =FLAX_MODEL_FOR_SPEECH_SEQ_2_SEQ_MAPPING __snake_case = auto_class_update( FlaxAutoModelForSpeechSeqaSeq, head_doc="""sequence-to-sequence speech-to-text modeling""" )
import argparse import collections import numpy as np import torch from flax import traverse_util from tax import checkpoints from transformers import MTaConfig, UMTaEncoderModel, UMTaForConditionalGeneration from transformers.utils import logging logging.set_verbosity_info() def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Tuple ): return params[F'''{prefix}/{prefix}/relpos_bias/rel_embedding'''][:, i, :] def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : Any="attention" ): UpperCamelCase :str = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/key/kernel'''][:, i, :, :] ) UpperCamelCase :Optional[Any] = k_tmp.reshape(k_tmp.shape[0] , k_tmp.shape[1] * k_tmp.shape[2] ) UpperCamelCase :Optional[int] = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/out/kernel'''][:, i, :, :] ) UpperCamelCase :List[Any] = o_tmp.reshape(o_tmp.shape[0] * o_tmp.shape[1] , o_tmp.shape[2] ) UpperCamelCase :Union[str, Any] = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/query/kernel'''][:, i, :, :] ) UpperCamelCase :Any = q_tmp.reshape(q_tmp.shape[0] , q_tmp.shape[1] * q_tmp.shape[2] ) UpperCamelCase :str = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/value/kernel'''][:, i, :, :] ) UpperCamelCase :str = v_tmp.reshape(v_tmp.shape[0] , v_tmp.shape[1] * v_tmp.shape[2] ) return k, o, q, v def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : List[str]=False ): if split_mlp_wi: UpperCamelCase :List[Any] = params[F'''{prefix}/{prefix}/mlp/wi_0/kernel'''][:, i, :] UpperCamelCase :int = params[F'''{prefix}/{prefix}/mlp/wi_1/kernel'''][:, i, :] UpperCamelCase :str = (wi_a, wi_a) else: UpperCamelCase :Optional[Any] = params[F'''{prefix}/{prefix}/mlp/wi/kernel'''][:, i, :] UpperCamelCase :Optional[int] = params[F'''{prefix}/{prefix}/mlp/wo/kernel'''][:, i, :] return wi, wo def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Optional[int] ): return params[F'''{prefix}/{prefix}/{layer_name}/scale'''][:, i] def _A ( SCREAMING_SNAKE_CASE__ : dict , *, SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : bool , SCREAMING_SNAKE_CASE__ : bool = False ): UpperCamelCase :Tuple = traverse_util.flatten_dict(variables['''target'''] ) UpperCamelCase :List[Any] = {'''/'''.join(SCREAMING_SNAKE_CASE__ ): v for k, v in old.items()} # v1.1 models have a gated GeLU with wi_0 and wi_1 instead of wi UpperCamelCase :int = '''encoder/encoder/mlp/wi_0/kernel''' in old print('''Split MLP:''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = collections.OrderedDict() # Shared embeddings. UpperCamelCase :int = old['''token_embedder/embedding'''] # Encoder. for i in range(SCREAMING_SNAKE_CASE__ ): # Block i, layer 0 (Self Attention). UpperCamelCase :str = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''pre_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[str] = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''attention''' ) UpperCamelCase :str = layer_norm UpperCamelCase :Dict = k.T UpperCamelCase :Optional[Any] = o.T UpperCamelCase :int = q.T UpperCamelCase :Any = v.T # Block i, layer 1 (MLP). 
UpperCamelCase :Tuple = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''pre_mlp_layer_norm''' ) UpperCamelCase , UpperCamelCase :Any = tax_mlp_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Tuple = layer_norm if split_mlp_wi: UpperCamelCase :List[Any] = wi[0].T UpperCamelCase :Tuple = wi[1].T else: UpperCamelCase :Optional[Any] = wi.T UpperCamelCase :Dict = wo.T if scalable_attention: # convert the rel_embedding of each layer UpperCamelCase :List[str] = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' ).T UpperCamelCase :Optional[Any] = old['''encoder/encoder_norm/scale'''] if not scalable_attention: UpperCamelCase :str = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , 0 , '''encoder''' ).T UpperCamelCase :Any = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , 0 , '''decoder''' ).T if not is_encoder_only: # Decoder. for i in range(SCREAMING_SNAKE_CASE__ ): # Block i, layer 0 (Self Attention). UpperCamelCase :Union[str, Any] = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_self_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Dict = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''self_attention''' ) UpperCamelCase :str = layer_norm UpperCamelCase :int = k.T UpperCamelCase :Optional[int] = o.T UpperCamelCase :Tuple = q.T UpperCamelCase :List[str] = v.T # Block i, layer 1 (Cross Attention). UpperCamelCase :str = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_cross_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[Any] = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''encoder_decoder_attention''' ) UpperCamelCase :Tuple = layer_norm UpperCamelCase :Optional[Any] = k.T UpperCamelCase :List[str] = o.T UpperCamelCase :List[str] = q.T UpperCamelCase :str = v.T # Block i, layer 2 (MLP). UpperCamelCase :List[str] = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_mlp_layer_norm''' ) UpperCamelCase , UpperCamelCase :Optional[int] = tax_mlp_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Tuple = layer_norm if split_mlp_wi: UpperCamelCase :List[str] = wi[0].T UpperCamelCase :str = wi[1].T else: UpperCamelCase :Dict = wi.T UpperCamelCase :Optional[Any] = wo.T if scalable_attention: # convert the rel_embedding of each layer UpperCamelCase :Tuple = tax_relpos_bias_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' ).T UpperCamelCase :Union[str, Any] = old['''decoder/decoder_norm/scale'''] # LM Head (only in v1.1 checkpoints, in v1.0 embeddings are used instead) if "decoder/logits_dense/kernel" in old: UpperCamelCase :Union[str, Any] = old['''decoder/logits_dense/kernel'''].T return new def _A ( SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : bool ): UpperCamelCase :Optional[int] = collections.OrderedDict([(k, torch.from_numpy(v.copy() )) for (k, v) in converted_params.items()] ) # Add what is missing. 
if "encoder.embed_tokens.weight" not in state_dict: UpperCamelCase :Dict = state_dict['''shared.weight'''] if not is_encoder_only: if "decoder.embed_tokens.weight" not in state_dict: UpperCamelCase :Dict = state_dict['''shared.weight'''] if "lm_head.weight" not in state_dict: # For old 1.0 models. print('''Using shared word embeddings as lm_head.''' ) UpperCamelCase :List[Any] = state_dict['''shared.weight'''] return state_dict def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Dict = checkpoints.load_tax_checkpoint(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :str = convert_tax_to_pytorch( SCREAMING_SNAKE_CASE__ , num_layers=config.num_layers , is_encoder_only=SCREAMING_SNAKE_CASE__ , scalable_attention=SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Dict = make_state_dict(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) model.load_state_dict(SCREAMING_SNAKE_CASE__ , strict=SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : bool = False , SCREAMING_SNAKE_CASE__ : bool = False , ): UpperCamelCase :Any = MTaConfig.from_json_file(SCREAMING_SNAKE_CASE__ ) print(F'''Building PyTorch model from configuration: {config}''' ) # Non-v1.1 checkpoints could also use T5Model, but this works for all. # The v1.0 checkpoints will simply have an LM head that is the word embeddings. if is_encoder_only: UpperCamelCase :List[str] = UMTaEncoderModel(SCREAMING_SNAKE_CASE__ ) else: UpperCamelCase :Any = UMTaForConditionalGeneration(SCREAMING_SNAKE_CASE__ ) # Load weights from tf checkpoint load_tax_weights_in_ta(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # Save pytorch-model print(F'''Save PyTorch model to {pytorch_dump_path}''' ) model.save_pretrained(SCREAMING_SNAKE_CASE__ ) # Verify that we can load the checkpoint. model.from_pretrained(SCREAMING_SNAKE_CASE__ ) print('''Done''' ) if __name__ == "__main__": __snake_case = argparse.ArgumentParser(description="""Converts a native T5X checkpoint into a PyTorch checkpoint.""") # Required parameters parser.add_argument( """--t5x_checkpoint_path""", default=None, type=str, required=True, help="""Path to the T5X checkpoint.""" ) parser.add_argument( """--config_file""", default=None, type=str, required=True, help="""The config json file corresponding to the pre-trained T5 model.\nThis specifies the model architecture.""", ) parser.add_argument( """--pytorch_dump_path""", default=None, type=str, required=True, help="""Path to the output PyTorch model.""" ) parser.add_argument( """--is_encoder_only""", action="""store_true""", help="""Check if the model is encoder-decoder model""", default=False ) parser.add_argument( """--scalable_attention""", action="""store_true""", help="""Whether the model uses scaled attention (umt5 model)""", default=False, ) __snake_case = parser.parse_args() convert_tax_checkpoint_to_pytorch( args.tax_checkpoint_path, args.config_file, args.pytorch_dump_path, args.is_encoder_only, args.scalable_attention, )
# flake8: noqa # Lint as: python3 from typing import Dict, List, Optional, Type from .. import config from ..utils import logging from .formatting import ( ArrowFormatter, CustomFormatter, Formatter, PandasFormatter, PythonFormatter, TensorFormatter, format_table, query_table, ) from .np_formatter import NumpyFormatter __snake_case = logging.get_logger(__name__) __snake_case = {} __snake_case = {} __snake_case = {} def _A ( SCREAMING_SNAKE_CASE__ : type , SCREAMING_SNAKE_CASE__ : Optional[str] , SCREAMING_SNAKE_CASE__ : Optional[List[str]] = None , ): UpperCamelCase :int = aliases if aliases is not None else [] if format_type in _FORMAT_TYPES: logger.warning( F'''Overwriting format type \'{format_type}\' ({_FORMAT_TYPES[format_type].__name__} -> {formatter_cls.__name__})''' ) UpperCamelCase :Tuple = formatter_cls for alias in set(aliases + [format_type] ): if alias in _FORMAT_TYPES_ALIASES: logger.warning( F'''Overwriting format type alias \'{alias}\' ({_FORMAT_TYPES_ALIASES[alias]} -> {format_type})''' ) UpperCamelCase :List[str] = format_type def _A ( SCREAMING_SNAKE_CASE__ : Exception , SCREAMING_SNAKE_CASE__ : Optional[str] , SCREAMING_SNAKE_CASE__ : Optional[List[str]] = None ): UpperCamelCase :List[str] = aliases if aliases is not None else [] for alias in set(aliases + [format_type] ): UpperCamelCase :List[Any] = unavailable_error # Here we define all the available formatting functions that can be used by `Dataset.set_format` _register_formatter(PythonFormatter, None, aliases=["""python"""]) _register_formatter(ArrowFormatter, """arrow""", aliases=["""pa""", """pyarrow"""]) _register_formatter(NumpyFormatter, """numpy""", aliases=["""np"""]) _register_formatter(PandasFormatter, """pandas""", aliases=["""pd"""]) _register_formatter(CustomFormatter, """custom""") if config.TORCH_AVAILABLE: from .torch_formatter import TorchFormatter _register_formatter(TorchFormatter, """torch""", aliases=["""pt""", """pytorch"""]) else: __snake_case = ValueError("""PyTorch needs to be installed to be able to return PyTorch tensors.""") _register_unavailable_formatter(_torch_error, """torch""", aliases=["""pt""", """pytorch"""]) if config.TF_AVAILABLE: from .tf_formatter import TFFormatter _register_formatter(TFFormatter, """tensorflow""", aliases=["""tf"""]) else: __snake_case = ValueError("""Tensorflow needs to be installed to be able to return Tensorflow tensors.""") _register_unavailable_formatter(_tf_error, """tensorflow""", aliases=["""tf"""]) if config.JAX_AVAILABLE: from .jax_formatter import JaxFormatter _register_formatter(JaxFormatter, """jax""", aliases=[]) else: __snake_case = ValueError("""JAX needs to be installed to be able to return JAX arrays.""") _register_unavailable_formatter(_jax_error, """jax""", aliases=[]) def _A ( SCREAMING_SNAKE_CASE__ : Optional[str] ): if format_type in _FORMAT_TYPES_ALIASES: return _FORMAT_TYPES_ALIASES[format_type] else: return format_type def _A ( SCREAMING_SNAKE_CASE__ : Optional[str] , **SCREAMING_SNAKE_CASE__ : List[str] ): UpperCamelCase :int = get_format_type_from_alias(SCREAMING_SNAKE_CASE__ ) if format_type in _FORMAT_TYPES: return _FORMAT_TYPES[format_type](**SCREAMING_SNAKE_CASE__ ) if format_type in _FORMAT_TYPES_ALIASES_UNAVAILABLE: raise _FORMAT_TYPES_ALIASES_UNAVAILABLE[format_type] else: raise ValueError( F'''Return type should be None or selected in {list(type for type in _FORMAT_TYPES.keys() if type != None )}, but got \'{format_type}\'''' )
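# A minimal usage sketch for the formatter registry above (an assumption: this
# file mirrors `datasets.formatting`, which exports the final helper here as
# `get_formatter`):
from datasets.formatting import get_formatter

formatter = get_formatter("np")  # the alias "np" resolves to the "numpy" formatter
print(type(formatter).__name__)  # -> NumpyFormatter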
def print_max_activities(start: list[int], finish: list[int]) -> None:
    """Greedy activity selection; assumes activities are sorted by finish time."""
    n = len(finish)
    print("The following activities are selected:")
    # The first activity is always selected
    i = 0
    print(i, end=",")
    # Consider the rest of the activities
    for j in range(n):
        # If this activity starts at or after the finish time of the
        # previously selected activity, select it
        if start[j] >= finish[i]:
            print(j, end=",")
            i = j


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    start = [1, 3, 0, 5, 8, 5]
    finish = [2, 4, 6, 7, 9, 9]
    print_max_activities(start, finish)
import inspect import tempfile from collections import OrderedDict, UserDict from collections.abc import MutableMapping from contextlib import ExitStack, contextmanager from dataclasses import fields from enum import Enum from typing import Any, ContextManager, List, Tuple import numpy as np from .import_utils import is_flax_available, is_tf_available, is_torch_available, is_torch_fx_proxy if is_flax_available(): import jax.numpy as jnp class UpperCAmelCase_ ( lowercase ): """simple docstring""" def __get__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=None ) -> List[str]: # See docs.python.org/3/howto/descriptor.html#properties if obj is None: return self if self.fget is None: raise AttributeError('''unreadable attribute''' ) UpperCamelCase :Any = '''__cached_''' + self.fget.__name__ UpperCamelCase :str = getattr(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) if cached is None: UpperCamelCase :Any = self.fget(SCREAMING_SNAKE_CASE_ ) setattr(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) return cached def _A ( SCREAMING_SNAKE_CASE__ : Dict ): UpperCamelCase :Optional[int] = val.lower() if val in {"y", "yes", "t", "true", "on", "1"}: return 1 if val in {"n", "no", "f", "false", "off", "0"}: return 0 raise ValueError(F'''invalid truth value {val!r}''' ) def _A ( SCREAMING_SNAKE_CASE__ : Optional[Any] ): if is_torch_fx_proxy(SCREAMING_SNAKE_CASE__ ): return True if is_torch_available(): import torch if isinstance(SCREAMING_SNAKE_CASE__ , torch.Tensor ): return True if is_tf_available(): import tensorflow as tf if isinstance(SCREAMING_SNAKE_CASE__ , tf.Tensor ): return True if is_flax_available(): import jax.numpy as jnp from jax.core import Tracer if isinstance(SCREAMING_SNAKE_CASE__ , (jnp.ndarray, Tracer) ): return True return isinstance(SCREAMING_SNAKE_CASE__ , np.ndarray ) def _A ( SCREAMING_SNAKE_CASE__ : List[Any] ): return isinstance(SCREAMING_SNAKE_CASE__ , np.ndarray ) def _A ( SCREAMING_SNAKE_CASE__ : Dict ): return _is_numpy(SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] ): import torch return isinstance(SCREAMING_SNAKE_CASE__ , torch.Tensor ) def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): return False if not is_torch_available() else _is_torch(SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] ): import torch return isinstance(SCREAMING_SNAKE_CASE__ , torch.device ) def _A ( SCREAMING_SNAKE_CASE__ : Any ): return False if not is_torch_available() else _is_torch_device(SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : List[Any] ): import torch if isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ): if hasattr(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Dict = getattr(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) else: return False return isinstance(SCREAMING_SNAKE_CASE__ , torch.dtype ) def _A ( SCREAMING_SNAKE_CASE__ : int ): return False if not is_torch_available() else _is_torch_dtype(SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : List[str] ): import tensorflow as tf return isinstance(SCREAMING_SNAKE_CASE__ , tf.Tensor ) def _A ( SCREAMING_SNAKE_CASE__ : str ): return False if not is_tf_available() else _is_tensorflow(SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] ): import tensorflow as tf # the `is_symbolic_tensor` predicate is only available starting with TF 2.14 if hasattr(SCREAMING_SNAKE_CASE__ , '''is_symbolic_tensor''' ): return tf.is_symbolic_tensor(SCREAMING_SNAKE_CASE__ ) return 
type(SCREAMING_SNAKE_CASE__ ) == tf.Tensor def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): return False if not is_tf_available() else _is_tf_symbolic_tensor(SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): import jax.numpy as jnp # noqa: F811 return isinstance(SCREAMING_SNAKE_CASE__ , jnp.ndarray ) def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): return False if not is_flax_available() else _is_jax(SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): if isinstance(SCREAMING_SNAKE_CASE__ , (dict, UserDict) ): return {k: to_py_obj(SCREAMING_SNAKE_CASE__ ) for k, v in obj.items()} elif isinstance(SCREAMING_SNAKE_CASE__ , (list, tuple) ): return [to_py_obj(SCREAMING_SNAKE_CASE__ ) for o in obj] elif is_tf_tensor(SCREAMING_SNAKE_CASE__ ): return obj.numpy().tolist() elif is_torch_tensor(SCREAMING_SNAKE_CASE__ ): return obj.detach().cpu().tolist() elif is_jax_tensor(SCREAMING_SNAKE_CASE__ ): return np.asarray(SCREAMING_SNAKE_CASE__ ).tolist() elif isinstance(SCREAMING_SNAKE_CASE__ , (np.ndarray, np.number) ): # tolist also works on 0d np arrays return obj.tolist() else: return obj def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): if isinstance(SCREAMING_SNAKE_CASE__ , (dict, UserDict) ): return {k: to_numpy(SCREAMING_SNAKE_CASE__ ) for k, v in obj.items()} elif isinstance(SCREAMING_SNAKE_CASE__ , (list, tuple) ): return np.array(SCREAMING_SNAKE_CASE__ ) elif is_tf_tensor(SCREAMING_SNAKE_CASE__ ): return obj.numpy() elif is_torch_tensor(SCREAMING_SNAKE_CASE__ ): return obj.detach().cpu().numpy() elif is_jax_tensor(SCREAMING_SNAKE_CASE__ ): return np.asarray(SCREAMING_SNAKE_CASE__ ) else: return obj class UpperCAmelCase_ ( lowercase ): """simple docstring""" def UpperCAmelCase ( self ) -> List[str]: UpperCamelCase :Any = fields(self ) # Safety and consistency checks if not len(SCREAMING_SNAKE_CASE_ ): raise ValueError(F'''{self.__class__.__name__} has no fields.''' ) if not all(field.default is None for field in class_fields[1:] ): raise ValueError(F'''{self.__class__.__name__} should not have more than one required field.''' ) UpperCamelCase :Optional[int] = getattr(self , class_fields[0].name ) UpperCamelCase :str = all(getattr(self , field.name ) is None for field in class_fields[1:] ) if other_fields_are_none and not is_tensor(SCREAMING_SNAKE_CASE_ ): if isinstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ): UpperCamelCase :List[Any] = first_field.items() UpperCamelCase :Dict = True else: try: UpperCamelCase :Any = iter(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = True except TypeError: UpperCamelCase :List[Any] = False # if we provided an iterator as first field and the iterator is a (key, value) iterator # set the associated fields if first_field_iterator: for idx, element in enumerate(SCREAMING_SNAKE_CASE_ ): if ( not isinstance(SCREAMING_SNAKE_CASE_ , (list, tuple) ) or not len(SCREAMING_SNAKE_CASE_ ) == 2 or not isinstance(element[0] , SCREAMING_SNAKE_CASE_ ) ): if idx == 0: # If we do not have an iterator of key/values, set it as attribute UpperCamelCase :Union[str, Any] = first_field else: # If we have a mixed iterator, raise an error raise ValueError( F'''Cannot set key/value for {element}. 
It needs to be a tuple (key, value).''' ) break setattr(self , element[0] , element[1] ) if element[1] is not None: UpperCamelCase :str = element[1] elif first_field is not None: UpperCamelCase :Optional[Any] = first_field else: for field in class_fields: UpperCamelCase :List[Any] = getattr(self , field.name ) if v is not None: UpperCamelCase :List[str] = v def __delitem__( self , *SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) -> List[Any]: raise Exception(F'''You cannot use ``__delitem__`` on a {self.__class__.__name__} instance.''' ) def UpperCAmelCase ( self , *SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) -> Optional[Any]: raise Exception(F'''You cannot use ``setdefault`` on a {self.__class__.__name__} instance.''' ) def UpperCAmelCase ( self , *SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) -> Optional[int]: raise Exception(F'''You cannot use ``pop`` on a {self.__class__.__name__} instance.''' ) def UpperCAmelCase ( self , *SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) -> Tuple: raise Exception(F'''You cannot use ``update`` on a {self.__class__.__name__} instance.''' ) def __getitem__( self , SCREAMING_SNAKE_CASE_ ) -> Any: if isinstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ): UpperCamelCase :Union[str, Any] = dict(self.items() ) return inner_dict[k] else: return self.to_tuple()[k] def __setattr__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> str: if name in self.keys() and value is not None: # Don't call self.__setitem__ to avoid recursion errors super().__setitem__(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) super().__setattr__(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def __setitem__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Dict: # Will raise a KeyException if needed super().__setitem__(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Don't call self.__setattr__ to avoid recursion errors super().__setattr__(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple[Any]: return tuple(self[k] for k in self.keys() ) class UpperCAmelCase_ ( lowercase, lowercase ): """simple docstring""" @classmethod def UpperCAmelCase ( cls , SCREAMING_SNAKE_CASE_ ) -> int: raise ValueError( F'''{value} is not a valid {cls.__name__}, please select one of {list(cls._valueamember_map_.keys() )}''' ) class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : Tuple ='longest' UpperCamelCase_ : Any ='max_length' UpperCamelCase_ : Optional[int] ='do_not_pad' class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : Any ='pt' UpperCamelCase_ : Optional[Any] ='tf' UpperCamelCase_ : str ='np' UpperCamelCase_ : List[str] ='jax' class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ ) -> Dict: UpperCamelCase :Union[str, Any] = context_managers UpperCamelCase :str = ExitStack() def __enter__( self ) -> List[str]: for context_manager in self.context_managers: self.stack.enter_context(SCREAMING_SNAKE_CASE_ ) def __exit__( self , *SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) -> Optional[int]: self.stack.__exit__(*SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def _A ( SCREAMING_SNAKE_CASE__ : List[str] ): UpperCamelCase :Any = infer_framework(SCREAMING_SNAKE_CASE__ ) if framework == "tf": UpperCamelCase :Union[str, Any] = inspect.signature(model_class.call ) # TensorFlow models elif framework == "pt": UpperCamelCase :Any = inspect.signature(model_class.forward ) # PyTorch models else: UpperCamelCase :Optional[int] = 
inspect.signature(model_class.__call__ ) # Flax models for p in signature.parameters: if p == "return_loss" and signature.parameters[p].default is True: return True return False def _A ( SCREAMING_SNAKE_CASE__ : Optional[Any] ): UpperCamelCase :List[Any] = model_class.__name__ UpperCamelCase :Optional[int] = infer_framework(SCREAMING_SNAKE_CASE__ ) if framework == "tf": UpperCamelCase :Any = inspect.signature(model_class.call ) # TensorFlow models elif framework == "pt": UpperCamelCase :Dict = inspect.signature(model_class.forward ) # PyTorch models else: UpperCamelCase :Tuple = inspect.signature(model_class.__call__ ) # Flax models if "QuestionAnswering" in model_name: return [p for p in signature.parameters if "label" in p or p in ("start_positions", "end_positions")] else: return [p for p in signature.parameters if "label" in p] def _A ( SCREAMING_SNAKE_CASE__ : MutableMapping , SCREAMING_SNAKE_CASE__ : str = "" , SCREAMING_SNAKE_CASE__ : str = "." ): def _flatten_dict(SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : str="" , SCREAMING_SNAKE_CASE__ : Optional[Any]="." ): for k, v in d.items(): UpperCamelCase :Optional[int] = str(SCREAMING_SNAKE_CASE__ ) + delimiter + str(SCREAMING_SNAKE_CASE__ ) if parent_key else k if v and isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ): yield from flatten_dict(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , delimiter=SCREAMING_SNAKE_CASE__ ).items() else: yield key, v return dict(_flatten_dict(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) ) @contextmanager def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : bool = False ): if use_temp_dir: with tempfile.TemporaryDirectory() as tmp_dir: yield tmp_dir else: yield working_dir def _A ( SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : Tuple=None ): if is_numpy_array(SCREAMING_SNAKE_CASE__ ): return np.transpose(SCREAMING_SNAKE_CASE__ , axes=SCREAMING_SNAKE_CASE__ ) elif is_torch_tensor(SCREAMING_SNAKE_CASE__ ): return array.T if axes is None else array.permute(*SCREAMING_SNAKE_CASE__ ) elif is_tf_tensor(SCREAMING_SNAKE_CASE__ ): import tensorflow as tf return tf.transpose(SCREAMING_SNAKE_CASE__ , perm=SCREAMING_SNAKE_CASE__ ) elif is_jax_tensor(SCREAMING_SNAKE_CASE__ ): return jnp.transpose(SCREAMING_SNAKE_CASE__ , axes=SCREAMING_SNAKE_CASE__ ) else: raise ValueError(F'''Type not supported for transpose: {type(SCREAMING_SNAKE_CASE__ )}.''' ) def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Dict ): if is_numpy_array(SCREAMING_SNAKE_CASE__ ): return np.reshape(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif is_torch_tensor(SCREAMING_SNAKE_CASE__ ): return array.reshape(*SCREAMING_SNAKE_CASE__ ) elif is_tf_tensor(SCREAMING_SNAKE_CASE__ ): import tensorflow as tf return tf.reshape(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif is_jax_tensor(SCREAMING_SNAKE_CASE__ ): return jnp.reshape(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) else: raise ValueError(F'''Type not supported for reshape: {type(SCREAMING_SNAKE_CASE__ )}.''' ) def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Any=None ): if is_numpy_array(SCREAMING_SNAKE_CASE__ ): return np.squeeze(SCREAMING_SNAKE_CASE__ , axis=SCREAMING_SNAKE_CASE__ ) elif is_torch_tensor(SCREAMING_SNAKE_CASE__ ): return array.squeeze() if axis is None else array.squeeze(dim=SCREAMING_SNAKE_CASE__ ) elif is_tf_tensor(SCREAMING_SNAKE_CASE__ ): import tensorflow as tf return tf.squeeze(SCREAMING_SNAKE_CASE__ , axis=SCREAMING_SNAKE_CASE__ ) elif 
is_jax_tensor(SCREAMING_SNAKE_CASE__ ): return jnp.squeeze(SCREAMING_SNAKE_CASE__ , axis=SCREAMING_SNAKE_CASE__ ) else: raise ValueError(F'''Type not supported for squeeze: {type(SCREAMING_SNAKE_CASE__ )}.''' ) def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Optional[int] ): if is_numpy_array(SCREAMING_SNAKE_CASE__ ): return np.expand_dims(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif is_torch_tensor(SCREAMING_SNAKE_CASE__ ): return array.unsqueeze(dim=SCREAMING_SNAKE_CASE__ ) elif is_tf_tensor(SCREAMING_SNAKE_CASE__ ): import tensorflow as tf return tf.expand_dims(SCREAMING_SNAKE_CASE__ , axis=SCREAMING_SNAKE_CASE__ ) elif is_jax_tensor(SCREAMING_SNAKE_CASE__ ): return jnp.expand_dims(SCREAMING_SNAKE_CASE__ , axis=SCREAMING_SNAKE_CASE__ ) else: raise ValueError(F'''Type not supported for expand_dims: {type(SCREAMING_SNAKE_CASE__ )}.''' ) def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] ): if is_numpy_array(SCREAMING_SNAKE_CASE__ ): return np.size(SCREAMING_SNAKE_CASE__ ) elif is_torch_tensor(SCREAMING_SNAKE_CASE__ ): return array.numel() elif is_tf_tensor(SCREAMING_SNAKE_CASE__ ): import tensorflow as tf return tf.size(SCREAMING_SNAKE_CASE__ ) elif is_jax_tensor(SCREAMING_SNAKE_CASE__ ): return array.size else: raise ValueError(F'''Type not supported for expand_dims: {type(SCREAMING_SNAKE_CASE__ )}.''' ) def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : str ): for key, value in auto_map.items(): if isinstance(SCREAMING_SNAKE_CASE__ , (tuple, list) ): UpperCamelCase :Optional[Any] = [F'''{repo_id}--{v}''' if (v is not None and '''--''' not in v) else v for v in value] elif value is not None and "--" not in value: UpperCamelCase :List[str] = F'''{repo_id}--{value}''' return auto_map def _A ( SCREAMING_SNAKE_CASE__ : str ): for base_class in inspect.getmro(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :List[str] = base_class.__module__ UpperCamelCase :str = base_class.__name__ if module.startswith('''tensorflow''' ) or module.startswith('''keras''' ) or name == "TFPreTrainedModel": return "tf" elif module.startswith('''torch''' ) or name == "PreTrainedModel": return "pt" elif module.startswith('''flax''' ) or module.startswith('''jax''' ) or name == "FlaxPreTrainedModel": return "flax" else: raise TypeError(F'''Could not infer framework from class {model_class}.''' )
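# A minimal sketch of the output-dataclass pattern defined above (an
# assumption: this file mirrors `transformers.utils.generic`, whose dict-like
# base class is exported as `ModelOutput`):
from dataclasses import dataclass
from typing import Optional

import torch
from transformers.utils import ModelOutput


@dataclass
class ToyOutput(ModelOutput):
    logits: Optional[torch.FloatTensor] = None
    hidden_states: Optional[torch.FloatTensor] = None


output = ToyOutput(logits=torch.zeros(2, 3))
print(output.logits.shape)                # attribute access: torch.Size([2, 3])
print(output["logits"] is output.logits)  # dict-style access returns the same tensor
print(output.to_tuple())                  # only the fields that are not None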
from __future__ import annotations

from typing import Generic, TypeVar

T = TypeVar("T")


class DisjointSetTreeNode(Generic[T]):
    # Disjoint Set Node to store the parent and rank
    def __init__(self, data: T) -> None:
        self.data = data
        self.parent = self
        self.rank = 0


class DisjointSetTree(Generic[T]):
    # Disjoint Set data structure
    def __init__(self) -> None:
        # map from node name to the node object
        self.map: dict[T, DisjointSetTreeNode[T]] = {}

    def make_set(self, data: T) -> None:
        # create a new set with x as its member
        self.map[data] = DisjointSetTreeNode(data)

    def find_set(self, data: T) -> DisjointSetTreeNode[T]:
        # find the set x belongs to (with path-compression)
        elem_ref = self.map[data]
        if elem_ref != elem_ref.parent:
            elem_ref.parent = self.find_set(elem_ref.parent.data)
        return elem_ref.parent

    def link(self, node1: DisjointSetTreeNode[T], node2: DisjointSetTreeNode[T]) -> None:
        # helper function for union operation
        if node1.rank > node2.rank:
            node2.parent = node1
        else:
            node1.parent = node2
            if node1.rank == node2.rank:
                node2.rank += 1

    def union(self, data1: T, data2: T) -> None:
        # merge 2 disjoint sets
        self.link(self.find_set(data1), self.find_set(data2))


class GraphUndirectedWeighted(Generic[T]):
    def __init__(self) -> None:
        # connections: map from the node to the neighbouring nodes (with weights)
        self.connections: dict[T, dict[T, int]] = {}

    def add_node(self, node: T) -> None:
        # add a node ONLY if its not present in the graph
        if node not in self.connections:
            self.connections[node] = {}

    def add_edge(self, node1: T, node2: T, weight: int) -> None:
        # add an edge with the given weight
        self.add_node(node1)
        self.add_node(node2)
        self.connections[node1][node2] = weight
        self.connections[node2][node1] = weight

    def kruskal(self) -> GraphUndirectedWeighted[T]:
        # Kruskal's algorithm: sort the edges, then greedily add any edge that
        # connects two different components until |V| - 1 edges are chosen.
        edges = []
        seen = set()
        for start in self.connections:
            for end in self.connections[start]:
                if (start, end) not in seen:
                    seen.add((end, start))
                    edges.append((start, end, self.connections[start][end]))
        edges.sort(key=lambda x: x[2])

        # creating the disjoint set
        disjoint_set = DisjointSetTree[T]()
        for node in self.connections:
            disjoint_set.make_set(node)

        # MST generation
        num_edges = 0
        index = 0
        graph = GraphUndirectedWeighted[T]()
        while num_edges < len(self.connections) - 1:
            u, v, w = edges[index]
            index += 1
            parent_u = disjoint_set.find_set(u)
            parent_v = disjoint_set.find_set(v)
            if parent_u != parent_v:
                num_edges += 1
                graph.add_edge(u, v, w)
                disjoint_set.union(u, v)
        return graph
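# --- Usage sketch (added for illustration; assumes the method names as
# reconstructed above, e.g. `kruskal`) ---
if __name__ == "__main__":
    g = GraphUndirectedWeighted[int]()
    g.add_edge(1, 2, 1)
    g.add_edge(2, 3, 2)
    g.add_edge(1, 3, 3)  # heaviest edge: Kruskal drops it to avoid a cycle
    mst = g.kruskal()
    # keeps the two lightest edges:
    # {1: {2: 1}, 2: {1: 1, 3: 2}, 3: {2: 2}}
    print(mst.connections)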
import time from contextlib import contextmanager from pathlib import Path import pytest import requests from huggingface_hub.hf_api import HfApi, HfFolder __snake_case = """__DUMMY_TRANSFORMERS_USER__""" __snake_case = """Dummy User""" __snake_case = """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""" __snake_case = """https://hub-ci.huggingface.co""" __snake_case = CI_HUB_ENDPOINT + """/datasets/{repo_id}/resolve/{revision}/{path}""" __snake_case = CI_HUB_ENDPOINT + """/{repo_id}/resolve/{revision}/{filename}""" __snake_case = Path("""~/.huggingface/hub_ci_token""").expanduser() @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): monkeypatch.setattr( '''huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any ): monkeypatch.setattr('''datasets.config.HF_ENDPOINT''' , SCREAMING_SNAKE_CASE__ ) monkeypatch.setattr('''datasets.config.HUB_DATASETS_URL''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : List[str] ): monkeypatch.setattr('''huggingface_hub.hf_api.HfFolder.path_token''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : List[Any] ): HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield HfFolder.delete_token() @pytest.fixture(scope='''session''' ) def _A ( ): return HfApi(endpoint=SCREAMING_SNAKE_CASE__ ) @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi ): UpperCamelCase :Tuple = HfFolder.get_token() HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield CI_HUB_USER_TOKEN if previous_token is not None: HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Dict ): def _cleanup_repo(SCREAMING_SNAKE_CASE__ : Tuple ): hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) return _cleanup_repo @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): @contextmanager def _temporary_repo(SCREAMING_SNAKE_CASE__ : Any ): try: yield repo_id finally: cleanup_repo(SCREAMING_SNAKE_CASE__ ) return _temporary_repo @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Optional[Any] ): UpperCamelCase :Union[str, Any] = F'''repo_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :int = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data/text_data.txt''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Dict ): return hf_private_dataset_repo_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Optional[int] = F'''repo_zipped_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Any = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , 
private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str] ): return hf_private_dataset_repo_zipped_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] ): UpperCamelCase :Dict = F'''repo_zipped_img_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Dict = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Tuple ): return hf_private_dataset_repo_zipped_img_data_
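# --- Usage sketch (added for illustration; the test name and body are
# hypothetical) ---
# A test requests a wrapper fixture by name; pytest then creates the private
# repo once per session and deletes it afterwards:
#
#   def test_private_txt_repo_exists(hf_private_dataset_repo_txt_data):
#       assert hf_private_dataset_repo_txt_data.startswith(CI_HUB_USER)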
from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

CANINE_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "google/canine-s": "https://huggingface.co/google/canine-s/resolve/main/config.json",
    # See all CANINE models at https://huggingface.co/models?filter=canine
}


class CanineConfig(PretrainedConfig):
    model_type = "canine"

    def __init__(
        self,
        hidden_size=768,
        num_hidden_layers=12,
        num_attention_heads=12,
        intermediate_size=3072,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        max_position_embeddings=16384,
        type_vocab_size=16,
        initializer_range=0.02,
        layer_norm_eps=1e-12,
        pad_token_id=0,
        bos_token_id=0xE000,
        eos_token_id=0xE001,
        downsampling_rate=4,
        upsampling_kernel_size=4,
        num_hash_functions=8,
        num_hash_buckets=16384,
        local_transformer_stride=128,
        **kwargs,
    ):
        super().__init__(pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id, **kwargs)

        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.initializer_range = initializer_range
        self.type_vocab_size = type_vocab_size
        self.layer_norm_eps = layer_norm_eps

        # Character config:
        self.downsampling_rate = downsampling_rate
        self.upsampling_kernel_size = upsampling_kernel_size
        self.num_hash_functions = num_hash_functions
        self.num_hash_buckets = num_hash_buckets
        self.local_transformer_stride = local_transformer_stride
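# --- Usage sketch (added for illustration; assumes the class is exported as
# `CanineConfig`, as in the transformers library this file mirrors) ---
#
#   config = CanineConfig()    # google/canine-s style defaults
#   config.hidden_size         # 768
#   config.downsampling_rate   # 4 -- characters pooled per downsampled position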
from __future__ import annotations import unittest from transformers import RoFormerConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask from ...test_pipeline_mixin import PipelineTesterMixin if is_tf_available(): import tensorflow as tf from transformers import ( TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForMultipleChoice, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerModel, ) from transformers.models.roformer.modeling_tf_roformer import ( TFRoFormerSelfAttention, TFRoFormerSinusoidalPositionalEmbedding, ) class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=13 , SCREAMING_SNAKE_CASE_=7 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=99 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=37 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=512 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=None , ) -> Dict: UpperCamelCase :Any = parent UpperCamelCase :Dict = 13 UpperCamelCase :List[Any] = 7 UpperCamelCase :List[Any] = True UpperCamelCase :Dict = True UpperCamelCase :Union[str, Any] = True UpperCamelCase :List[str] = True UpperCamelCase :Dict = 99 UpperCamelCase :Any = 32 UpperCamelCase :Tuple = 2 UpperCamelCase :Union[str, Any] = 4 UpperCamelCase :List[str] = 37 UpperCamelCase :Dict = '''gelu''' UpperCamelCase :Dict = 0.1 UpperCamelCase :Tuple = 0.1 UpperCamelCase :Dict = 512 UpperCamelCase :str = 16 UpperCamelCase :Optional[Any] = 2 UpperCamelCase :Dict = 0.02 UpperCamelCase :Optional[int] = 3 UpperCamelCase :int = 4 UpperCamelCase :Dict = None def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size, self.seq_length] , self.vocab_size ) UpperCamelCase :Optional[int] = None if self.use_input_mask: UpperCamelCase :Dict = random_attention_mask([self.batch_size, self.seq_length] ) UpperCamelCase :Dict = None if self.use_token_type_ids: UpperCamelCase :List[Any] = ids_tensor([self.batch_size, self.seq_length] , self.type_vocab_size ) UpperCamelCase :Union[str, Any] = None UpperCamelCase :Optional[int] = None UpperCamelCase :Any = None if self.use_labels: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size] , self.type_sequence_label_size ) UpperCamelCase :Optional[Any] = ids_tensor([self.batch_size, self.seq_length] , self.num_labels ) UpperCamelCase :int = ids_tensor([self.batch_size] , self.num_choices ) UpperCamelCase :Union[str, Any] = RoFormerConfig( vocab_size=self.vocab_size , hidden_size=self.hidden_size , num_hidden_layers=self.num_hidden_layers , num_attention_heads=self.num_attention_heads , intermediate_size=self.intermediate_size , hidden_act=self.hidden_act , hidden_dropout_prob=self.hidden_dropout_prob , attention_probs_dropout_prob=self.attention_probs_dropout_prob , max_position_embeddings=self.max_position_embeddings , type_vocab_size=self.type_vocab_size , initializer_range=self.initializer_range , return_dict=SCREAMING_SNAKE_CASE_ , ) return config, input_ids, token_type_ids, input_mask, sequence_labels, 
token_labels, choice_labels def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: UpperCamelCase :Optional[Any] = TFRoFormerModel(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = {'''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids} UpperCamelCase :int = [input_ids, input_mask] UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.last_hidden_state.shape , (self.batch_size, self.seq_length, self.hidden_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :List[Any] = True UpperCamelCase :Union[str, Any] = TFRoFormerForCausalLM(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Any = model(SCREAMING_SNAKE_CASE_ )['''logits'''] self.parent.assertListEqual( list(prediction_scores.numpy().shape ) , [self.batch_size, self.seq_length, self.vocab_size] ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :str = TFRoFormerForMaskedLM(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.seq_length, self.vocab_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> List[str]: UpperCamelCase :List[Any] = self.num_labels UpperCamelCase :int = TFRoFormerForSequenceClassification(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Optional[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_labels) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :List[Any] = self.num_choices UpperCamelCase :Any = TFRoFormerForMultipleChoice(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :int = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :Any = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :List[Any] = { '''input_ids''': multiple_choice_inputs_ids, '''attention_mask''': multiple_choice_input_mask, '''token_type_ids''': multiple_choice_token_type_ids, } UpperCamelCase :Dict = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_choices) ) def UpperCAmelCase ( self , 
SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :Union[str, Any] = self.num_labels UpperCamelCase :Dict = TFRoFormerForTokenClassification(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Tuple = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.seq_length, self.num_labels) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :Union[str, Any] = TFRoFormerForQuestionAnswering(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.start_logits.shape , (self.batch_size, self.seq_length) ) self.parent.assertEqual(result.end_logits.shape , (self.batch_size, self.seq_length) ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = self.prepare_config_and_inputs() ( ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ) :Union[str, Any] = config_and_inputs UpperCamelCase :Any = {'''input_ids''': input_ids, '''token_type_ids''': token_type_ids, '''attention_mask''': input_mask} return config, inputs_dict @require_tf class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : str =( ( TFRoFormerModel, TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerForMultipleChoice, ) if is_tf_available() else () ) UpperCamelCase_ : Tuple =( { 'feature-extraction': TFRoFormerModel, 'fill-mask': TFRoFormerForMaskedLM, 'question-answering': TFRoFormerForQuestionAnswering, 'text-classification': TFRoFormerForSequenceClassification, 'text-generation': TFRoFormerForCausalLM, 'token-classification': TFRoFormerForTokenClassification, 'zero-shot': TFRoFormerForSequenceClassification, } if is_tf_available() else {} ) UpperCamelCase_ : Tuple =False UpperCamelCase_ : Optional[Any] =False def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: if pipeline_test_casse_name == "TextGenerationPipelineTests": return True return False def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Any = TFRoFormerModelTester(self ) UpperCamelCase :Optional[int] = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , hidden_size=37 ) def UpperCAmelCase ( self ) -> List[str]: self.config_tester.run_common_tests() def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_masked_lm(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> str: UpperCamelCase :Union[str, Any] = 
self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_lm_head(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Optional[Any] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_multiple_choice(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_question_answering(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_sequence_classification(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_token_classification(*SCREAMING_SNAKE_CASE_ ) @slow def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = TFRoFormerModel.from_pretrained('''junnyu/roformer_chinese_base''' ) self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" @slow def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Tuple = TFRoFormerForMaskedLM.from_pretrained('''junnyu/roformer_chinese_base''' ) UpperCamelCase :Union[str, Any] = tf.constant([[0, 1, 2, 3, 4, 5]] ) UpperCamelCase :str = model(SCREAMING_SNAKE_CASE_ )[0] # TODO Replace vocab size UpperCamelCase :Tuple = 5_0000 UpperCamelCase :Optional[Any] = [1, 6, vocab_size] self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ ) print(output[:, :3, :3] ) # TODO Replace values below with what was printed above. UpperCamelCase :int = tf.constant( [ [ [-0.1205_3341, -1.026_4901, 0.2922_1946], [-1.513_3783, 0.19_7433, 0.1519_0607], [-5.013_5403, -3.90_0256, -0.8403_8764], ] ] ) tf.debugging.assert_near(output[:, :3, :3] , SCREAMING_SNAKE_CASE_ , atol=1e-4 ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Optional[int] =1E-4 def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :str = tf.constant([[4, 10]] ) UpperCamelCase :List[Any] = TFRoFormerSinusoidalPositionalEmbedding(num_positions=6 , embedding_dim=6 ) UpperCamelCase :str = emba(input_ids.shape ) UpperCamelCase :List[str] = tf.constant( [[0.0000, 0.0000, 0.0000, 1.0000, 1.0000, 1.0000], [0.8415, 0.0464, 0.0022, 0.5403, 0.9989, 1.0000]] ) tf.debugging.assert_near(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Dict = tf.constant( [ [0.0000, 0.0000, 0.0000, 0.0000, 0.0000], [0.8415, 0.8219, 0.8020, 0.7819, 0.7617], [0.9093, 0.9364, 0.9581, 0.9749, 0.9870], ] ) UpperCamelCase :Dict = TFRoFormerSinusoidalPositionalEmbedding(num_positions=512 , embedding_dim=512 ) emba([2, 16, 512] ) UpperCamelCase :Any = emba.weight[:3, :5] tf.debugging.assert_near(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : List[Any] =1E-4 def UpperCAmelCase ( self ) -> List[str]: # 2,12,16,64 UpperCamelCase :List[Any] = tf.reshape(tf.range(2 * 12 * 16 * 64 , dtype=tf.floataa ) , shape=(2, 12, 16, 64) ) / 100 UpperCamelCase :List[Any] = -tf.reshape(tf.range(2 * 12 * 16 * 64 , dtype=tf.floataa ) , shape=(2, 12, 16, 64) ) / 100 UpperCamelCase :List[Any] = TFRoFormerSinusoidalPositionalEmbedding(num_positions=32 , embedding_dim=64 ) UpperCamelCase :int = 
embed_positions([2, 16, 768] )[None, None, :, :] UpperCamelCase , UpperCamelCase :List[str] = TFRoFormerSelfAttention.apply_rotary_position_embeddings( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = tf.constant( [ [0.0000, 0.0100, 0.0200, 0.0300, 0.0400, 0.0500, 0.0600, 0.0700], [-0.2012, 0.8897, 0.0263, 0.9401, 0.2074, 0.9463, 0.3481, 0.9343], [-1.7057, 0.6271, -1.2145, 1.3897, -0.6303, 1.7647, -0.1173, 1.8985], [-2.1731, -1.6397, -2.7358, 0.2854, -2.1840, 1.7183, -1.3018, 2.4871], [0.2717, -3.6173, -2.9206, -2.1988, -3.6638, 0.3858, -2.9155, 2.2980], [3.9859, -2.1580, -0.7984, -4.4904, -4.1181, -2.0252, -4.4782, 1.1253], ] ) UpperCamelCase :Optional[int] = tf.constant( [ [0.0000, -0.0100, -0.0200, -0.0300, -0.0400, -0.0500, -0.0600, -0.0700], [0.2012, -0.8897, -0.0263, -0.9401, -0.2074, -0.9463, -0.3481, -0.9343], [1.7057, -0.6271, 1.2145, -1.3897, 0.6303, -1.7647, 0.1173, -1.8985], [2.1731, 1.6397, 2.7358, -0.2854, 2.1840, -1.7183, 1.3018, -2.4871], [-0.2717, 3.6173, 2.9206, 2.1988, 3.6638, -0.3858, 2.9155, -2.2980], [-3.9859, 2.1580, 0.7984, 4.4904, 4.1181, 2.0252, 4.4782, -1.1253], ] ) tf.debugging.assert_near(query_layer[0, 0, :6, :8] , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) tf.debugging.assert_near(key_layer[0, 0, :6, :8] , SCREAMING_SNAKE_CASE_ , atol=self.tolerance )
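# --- Note (added for illustration) ---
# apply_rotary_position_embeddings applies the standard rotary rotation to
# each (even, odd) feature pair at position m:
#   (x_even, x_odd) -> (x_even * cos(m * theta) - x_odd * sin(m * theta),
#                       x_odd * cos(m * theta) + x_even * sin(m * theta))
# Since the key input above is the elementwise negative of the query input
# and the rotation is linear, the expected key tensor is exactly the negative
# of the expected query tensor.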
from . import ( albert, align, altclip, audio_spectrogram_transformer, auto, autoformer, bark, bart, barthez, bartpho, beit, bert, bert_generation, bert_japanese, bertweet, big_bird, bigbird_pegasus, biogpt, bit, blenderbot, blenderbot_small, blip, blip_a, bloom, bridgetower, byta, camembert, canine, chinese_clip, clap, clip, clipseg, codegen, conditional_detr, convbert, convnext, convnextva, cpm, cpmant, ctrl, cvt, dataavec, deberta, deberta_va, decision_transformer, deformable_detr, deit, deprecated, deta, detr, dialogpt, dinat, distilbert, dit, donut, dpr, dpt, efficientformer, efficientnet, electra, encodec, encoder_decoder, ernie, ernie_m, esm, falcon, flaubert, flava, fnet, focalnet, fsmt, funnel, git, glpn, gpta, gpt_bigcode, gpt_neo, gpt_neox, gpt_neox_japanese, gpt_swa, gptj, gptsan_japanese, graphormer, groupvit, herbert, hubert, ibert, imagegpt, informer, instructblip, jukebox, layoutlm, layoutlmva, layoutlmva, layoutxlm, led, levit, lilt, llama, longformer, longta, luke, lxmert, mam_aaa, marian, markuplm, maskaformer, maskformer, mbart, mbartaa, mega, megatron_bert, megatron_gpta, mgp_str, mluke, mobilebert, mobilenet_va, mobilenet_va, mobilevit, mobilevitva, mpnet, mra, mta, musicgen, mvp, nat, nezha, nllb, nllb_moe, nystromformer, oneformer, open_llama, openai, opt, owlvit, pegasus, pegasus_x, perceiver, phobert, pixastruct, plbart, poolformer, prophetnet, qdqbert, rag, realm, reformer, regnet, rembert, resnet, roberta, roberta_prelayernorm, roc_bert, roformer, rwkv, sam, segformer, sew, sew_d, speech_encoder_decoder, speech_to_text, speech_to_text_a, speechta, splinter, squeezebert, swiftformer, swin, swinasr, swinva, switch_transformers, ta, table_transformer, tapas, time_series_transformer, timesformer, timm_backbone, transfo_xl, trocr, tvlt, umta, unispeech, unispeech_sat, upernet, videomae, vilt, vision_encoder_decoder, vision_text_dual_encoder, visual_bert, vit, vit_hybrid, vit_mae, vit_msn, vivit, wavaveca, wavaveca_conformer, wavaveca_phoneme, wavaveca_with_lm, wavlm, whisper, x_clip, xglm, xlm, xlm_prophetnet, xlm_roberta, xlm_roberta_xl, xlnet, xmod, yolos, yoso, )
import inspect import unittest from transformers import DPTConfig from transformers.file_utils import is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, require_vision, slow, torch_device from ...test_configuration_common import ConfigTester from ...test_modeling_common import ModelTesterMixin, _config_zero_init, floats_tensor, ids_tensor from ...test_pipeline_mixin import PipelineTesterMixin if is_torch_available(): import torch from torch import nn from transformers import MODEL_MAPPING, DPTForDepthEstimation, DPTForSemanticSegmentation, DPTModel from transformers.models.dpt.modeling_dpt import DPT_PRETRAINED_MODEL_ARCHIVE_LIST if is_vision_available(): from PIL import Image from transformers import DPTImageProcessor class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=[0, 1, 2, 3] , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=37 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=[1, 384, 24, 24] , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=None , ) -> int: UpperCamelCase :List[Any] = parent UpperCamelCase :List[str] = batch_size UpperCamelCase :Optional[Any] = image_size UpperCamelCase :Optional[Any] = patch_size UpperCamelCase :Optional[Any] = num_channels UpperCamelCase :Union[str, Any] = is_training UpperCamelCase :Dict = use_labels UpperCamelCase :List[Any] = hidden_size UpperCamelCase :Optional[int] = num_hidden_layers UpperCamelCase :Any = backbone_out_indices UpperCamelCase :int = num_attention_heads UpperCamelCase :Union[str, Any] = intermediate_size UpperCamelCase :List[str] = hidden_act UpperCamelCase :Optional[int] = hidden_dropout_prob UpperCamelCase :int = attention_probs_dropout_prob UpperCamelCase :Optional[Any] = initializer_range UpperCamelCase :List[Any] = num_labels UpperCamelCase :Any = backbone_featmap_shape UpperCamelCase :Optional[int] = scope UpperCamelCase :Optional[int] = is_hybrid # sequence length of DPT = num_patches + 1 (we add 1 for the [CLS] token) UpperCamelCase :Tuple = (image_size // patch_size) ** 2 UpperCamelCase :int = num_patches + 1 def UpperCAmelCase ( self ) -> str: UpperCamelCase :Tuple = floats_tensor([self.batch_size, self.num_channels, self.image_size, self.image_size] ) UpperCamelCase :int = None if self.use_labels: UpperCamelCase :str = ids_tensor([self.batch_size, self.image_size, self.image_size] , self.num_labels ) UpperCamelCase :Any = self.get_config() return config, pixel_values, labels def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Tuple = { '''global_padding''': '''same''', '''layer_type''': '''bottleneck''', '''depths''': [3, 4, 9], '''out_features''': ['''stage1''', '''stage2''', '''stage3'''], '''embedding_dynamic_padding''': True, '''hidden_sizes''': [96, 192, 384, 768], '''num_groups''': 2, } return DPTConfig( image_size=self.image_size , patch_size=self.patch_size , num_channels=self.num_channels , hidden_size=self.hidden_size , num_hidden_layers=self.num_hidden_layers , backbone_out_indices=self.backbone_out_indices , num_attention_heads=self.num_attention_heads , intermediate_size=self.intermediate_size , 
hidden_act=self.hidden_act , hidden_dropout_prob=self.hidden_dropout_prob , attention_probs_dropout_prob=self.attention_probs_dropout_prob , is_decoder=SCREAMING_SNAKE_CASE_ , initializer_range=self.initializer_range , is_hybrid=self.is_hybrid , backbone_config=SCREAMING_SNAKE_CASE_ , backbone_featmap_shape=self.backbone_featmap_shape , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: UpperCamelCase :Optional[int] = DPTModel(config=SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :Optional[int] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.last_hidden_state.shape , (self.batch_size, self.seq_length, self.hidden_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :Tuple = self.num_labels UpperCamelCase :Any = DPTForDepthEstimation(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :Union[str, Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.predicted_depth.shape , (self.batch_size, self.image_size, self.image_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :int = self.num_labels UpperCamelCase :str = DPTForSemanticSegmentation(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :List[str] = model(SCREAMING_SNAKE_CASE_ , labels=SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual( result.logits.shape , (self.batch_size, self.num_labels, self.image_size, self.image_size) ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :List[Any] = self.prepare_config_and_inputs() UpperCamelCase , UpperCamelCase , UpperCamelCase :Optional[Any] = config_and_inputs UpperCamelCase :List[Any] = {'''pixel_values''': pixel_values} return config, inputs_dict @require_torch class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Tuple =(DPTModel, DPTForDepthEstimation, DPTForSemanticSegmentation) if is_torch_available() else () UpperCamelCase_ : Optional[Any] =( { 'depth-estimation': DPTForDepthEstimation, 'feature-extraction': DPTModel, 'image-segmentation': DPTForSemanticSegmentation, } if is_torch_available() else {} ) UpperCamelCase_ : List[Any] =False UpperCamelCase_ : Optional[int] =False UpperCamelCase_ : Union[str, Any] =False def UpperCAmelCase ( self ) -> int: UpperCamelCase :Optional[Any] = DPTModelTester(self ) UpperCamelCase :List[Any] = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , has_text_modality=SCREAMING_SNAKE_CASE_ , hidden_size=37 ) def UpperCAmelCase ( self ) -> Union[str, Any]: self.config_tester.run_common_tests() @unittest.skip(reason='''DPT does not use inputs_embeds''' ) def UpperCAmelCase ( self ) -> int: pass def UpperCAmelCase ( self ) -> Optional[int]: UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ ) self.assertIsInstance(model.get_input_embeddings() , (nn.Module) ) UpperCamelCase :Optional[int] = model.get_output_embeddings() self.assertTrue(x is None or isinstance(SCREAMING_SNAKE_CASE_ , nn.Linear ) ) def UpperCAmelCase ( self ) -> int: UpperCamelCase , UpperCamelCase :Union[str, Any] = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase 
:Optional[Any] = model_class(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = inspect.signature(model.forward ) # signature.parameters is an OrderedDict => so arg_names order is deterministic UpperCamelCase :Tuple = [*signature.parameters.keys()] UpperCamelCase :Any = ['''pixel_values'''] self.assertListEqual(arg_names[:1] , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Any = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_depth_estimation(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :int = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_semantic_segmentation(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Any: for model_class in self.all_model_classes: if model_class.__name__ == "DPTForDepthEstimation": continue UpperCamelCase , UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :int = True if model_class in get_values(SCREAMING_SNAKE_CASE_ ): continue UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.train() UpperCamelCase :Union[str, Any] = self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_labels=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = model(**SCREAMING_SNAKE_CASE_ ).loss loss.backward() def UpperCAmelCase ( self ) -> Optional[int]: for model_class in self.all_model_classes: if model_class.__name__ == "DPTForDepthEstimation": continue UpperCamelCase , UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Union[str, Any] = False UpperCamelCase :Dict = True if model_class in get_values(SCREAMING_SNAKE_CASE_ ) or not model_class.supports_gradient_checkpointing: continue UpperCamelCase :Tuple = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.gradient_checkpointing_enable() model.train() UpperCamelCase :List[Any] = self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_labels=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = model(**SCREAMING_SNAKE_CASE_ ).loss loss.backward() def UpperCAmelCase ( self ) -> Dict: UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Dict = _config_zero_init(SCREAMING_SNAKE_CASE_ ) for model_class in self.all_model_classes: UpperCamelCase :Tuple = model_class(config=SCREAMING_SNAKE_CASE_ ) # Skip the check for the backbone UpperCamelCase :List[str] = [] for name, module in model.named_modules(): if module.__class__.__name__ == "DPTViTHybridEmbeddings": UpperCamelCase :Tuple = [F'''{name}.{key}''' for key in module.state_dict().keys()] break for name, param in model.named_parameters(): if param.requires_grad: if name in backbone_params: continue self.assertIn( ((param.data.mean() * 1e9).round() / 1e9).item() , [0.0, 1.0] , msg=F'''Parameter {name} of model {model_class} seems not properly initialized''' , ) @unittest.skip('''Will be fixed soon by reducing the size of the model used for common tests.''' ) def UpperCAmelCase ( self ) -> Tuple: pass @slow def UpperCAmelCase ( self ) -> Any: for model_name in DPT_PRETRAINED_MODEL_ARCHIVE_LIST[1:]: UpperCamelCase :int = DPTModel.from_pretrained(SCREAMING_SNAKE_CASE_ ) 
self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: # We do this test only for DPTForDepthEstimation since it is the only model that uses readout_type UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Optional[Any] = '''add''' with self.assertRaises(SCREAMING_SNAKE_CASE_ ): UpperCamelCase :int = DPTForDepthEstimation(SCREAMING_SNAKE_CASE_ ) def _A ( ): UpperCamelCase :List[Any] = Image.open('''./tests/fixtures/tests_samples/COCO/000000039769.png''' ) return image @require_torch @require_vision @slow class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> str: UpperCamelCase :Any = DPTImageProcessor.from_pretrained('''Intel/dpt-hybrid-midas''' ) UpperCamelCase :int = DPTForDepthEstimation.from_pretrained('''Intel/dpt-hybrid-midas''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = prepare_img() UpperCamelCase :Union[str, Any] = image_processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''pt''' ).to(SCREAMING_SNAKE_CASE_ ) # forward pass with torch.no_grad(): UpperCamelCase :Union[str, Any] = model(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = outputs.predicted_depth # verify the predicted depth UpperCamelCase :List[str] = torch.Size((1, 384, 384) ) self.assertEqual(predicted_depth.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.tensor( [[[5.6437, 5.6146, 5.6511], [5.4371, 5.5649, 5.5958], [5.5215, 5.5184, 5.5293]]] ).to(SCREAMING_SNAKE_CASE_ ) self.assertTrue(torch.allclose(outputs.predicted_depth[:3, :3, :3] / 100 , SCREAMING_SNAKE_CASE_ , atol=1e-4 ) )
import math
from datetime import datetime, timedelta


def gauss_easter(year: int) -> datetime:
    """Calculation of the Easter date for a given year, using Gauss's algorithm."""
    metonic_cycle = year % 19
    julian_leap_year = year % 4
    non_leap_year = year % 7
    leap_day_inhibits = math.floor(year / 100)
    lunar_orbit_correction = math.floor((13 + 8 * leap_day_inhibits) / 25)
    leap_day_reinstall_number = leap_day_inhibits / 4
    secular_moon_shift = (
        15 - lunar_orbit_correction + leap_day_inhibits - leap_day_reinstall_number
    ) % 30
    century_starting_point = (4 + leap_day_inhibits - leap_day_reinstall_number) % 7

    # days to be added to March 21
    days_to_add = (19 * metonic_cycle + secular_moon_shift) % 30

    # PHM -> Paschal Full Moon
    days_from_phm_to_sunday = (
        2 * julian_leap_year + 4 * non_leap_year + 6 * days_to_add + century_starting_point
    ) % 7

    if days_to_add == 29 and days_from_phm_to_sunday == 6:
        return datetime(year, 4, 19)
    elif days_to_add == 28 and days_from_phm_to_sunday == 6:
        return datetime(year, 4, 18)
    else:
        return datetime(year, 3, 22) + timedelta(days=int(days_to_add + days_from_phm_to_sunday))


if __name__ == "__main__":
    for year in (1994, 2000, 2010, 2021, 2023):
        tense = "will be" if year > datetime.now().year else "was"
        print(f"Easter in {year} {tense} {gauss_easter(year)}")
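# --- Worked check (added for illustration) ---
# For year = 2021: metonic_cycle = 7, days_to_add = 7 and
# days_from_phm_to_sunday = 6, so the function returns
# March 22 + 13 days = datetime(2021, 4, 4), matching the Western
# Easter date of April 4, 2021.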
def triangle_number_generator():
    """Generates the sequence of triangle numbers: 1, 3, 6, 10, ..."""
    for n in range(1, 1000000):
        yield n * (n + 1) // 2


def count_divisors(n: int) -> int:
    """Counts the divisors of n via its prime factorisation."""
    divisors_count = 1
    i = 2
    while i * i <= n:
        multiplicity = 0
        while n % i == 0:
            n //= i
            multiplicity += 1
        divisors_count *= multiplicity + 1
        i += 1
    if n > 1:
        divisors_count *= 2
    return divisors_count


def solution():
    """Returns the first triangle number with more than 500 divisors."""
    return next(i for i in triangle_number_generator() if count_divisors(i) > 500)


if __name__ == "__main__":
    print(solution())
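# --- Worked check (added for illustration) ---
# count_divisors multiplies (exponent + 1) over the prime factorisation.
# For 28 = 2**2 * 7: (2 + 1) * (1 + 1) = 6 divisors (1, 2, 4, 7, 14, 28),
# which makes 28 the first triangle number with more than five divisors --
# the small case of the Project Euler problem solved above.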
import random import unittest import torch from diffusers import IFInpaintingSuperResolutionPipeline from diffusers.utils import floats_tensor from diffusers.utils.import_utils import is_xformers_available from diffusers.utils.testing_utils import skip_mps, torch_device from ..pipeline_params import ( TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS, TEXT_GUIDED_IMAGE_INPAINTING_PARAMS, ) from ..test_pipelines_common import PipelineTesterMixin from . import IFPipelineTesterMixin @skip_mps class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : str =IFInpaintingSuperResolutionPipeline UpperCamelCase_ : Optional[Any] =TEXT_GUIDED_IMAGE_INPAINTING_PARAMS - {'width', 'height'} UpperCamelCase_ : Dict =TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS.union({'original_image'} ) UpperCamelCase_ : str =PipelineTesterMixin.required_optional_params - {'latents'} def UpperCAmelCase ( self ) -> str: return self._get_superresolution_dummy_components() def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=0 ) -> List[str]: if str(SCREAMING_SNAKE_CASE_ ).startswith('''mps''' ): UpperCamelCase :Optional[int] = torch.manual_seed(SCREAMING_SNAKE_CASE_ ) else: UpperCamelCase :str = torch.Generator(device=SCREAMING_SNAKE_CASE_ ).manual_seed(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = floats_tensor((1, 3, 16, 16) , rng=random.Random(SCREAMING_SNAKE_CASE_ ) ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = floats_tensor((1, 3, 32, 32) , rng=random.Random(SCREAMING_SNAKE_CASE_ ) ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = floats_tensor((1, 3, 32, 32) , rng=random.Random(SCREAMING_SNAKE_CASE_ ) ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = { '''prompt''': '''A painting of a squirrel eating a burger''', '''image''': image, '''original_image''': original_image, '''mask_image''': mask_image, '''generator''': generator, '''num_inference_steps''': 2, '''output_type''': '''numpy''', } return inputs @unittest.skipIf( torch_device != '''cuda''' or not is_xformers_available() , reason='''XFormers attention is only available with CUDA and `xformers` installed''' , ) def UpperCAmelCase ( self ) -> Union[str, Any]: self._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3 ) def UpperCAmelCase ( self ) -> str: self._test_save_load_optional_components() @unittest.skipIf(torch_device != '''cuda''' , reason='''float16 requires CUDA''' ) def UpperCAmelCase ( self ) -> Union[str, Any]: # Due to non-determinism in save load of the hf-internal-testing/tiny-random-t5 text encoder super().test_save_load_floataa(expected_max_diff=1e-1 ) def UpperCAmelCase ( self ) -> int: self._test_attention_slicing_forward_pass(expected_max_diff=1e-2 ) def UpperCAmelCase ( self ) -> int: self._test_save_load_local() def UpperCAmelCase ( self ) -> Any: self._test_inference_batch_single_identical( expected_max_diff=1e-2 , )
def bfs(graph, source, sink, parents):
    # Return True if there is a node that has not been iterated, i.e. the
    # sink is still reachable in the residual graph.
    visited = [False] * len(graph)
    queue = []
    queue.append(source)
    visited[source] = True
    while queue:
        u = queue.pop(0)
        for ind in range(len(graph[u])):
            if visited[ind] is False and graph[u][ind] > 0:
                queue.append(ind)
                visited[ind] = True
                parents[ind] = u
    return visited[sink]


def ford_fulkerson(graph, source, sink):
    # This array is filled by BFS and used to store the augmenting path
    parent = [-1] * (len(graph))
    max_flow = 0
    while bfs(graph, source, sink, parent):
        path_flow = float("Inf")
        s = sink
        while s != source:
            # Find the minimum residual capacity along the selected path
            path_flow = min(path_flow, graph[parent[s]][s])
            s = parent[s]
        max_flow += path_flow
        v = sink
        while v != source:
            u = parent[v]
            graph[u][v] -= path_flow
            graph[v][u] += path_flow
            v = parent[v]
    return max_flow


graph = [
    [0, 16, 13, 0, 0, 0],
    [0, 0, 10, 12, 0, 0],
    [0, 4, 0, 0, 14, 0],
    [0, 0, 9, 0, 0, 20],
    [0, 0, 0, 7, 0, 4],
    [0, 0, 0, 0, 0, 0],
]
source, sink = 0, 5
print(ford_fulkerson(graph, source, sink))
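# --- Note (added for illustration) ---
# Because the augmenting path is found with BFS, this is the Edmonds-Karp
# variant of Ford-Fulkerson. The adjacency matrix above is the classic
# six-node textbook network, for which the script prints a maximum flow of 23.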
from typing import Dict, Optional

import numpy as np

import datasets


_DESCRIPTION = """
IoU is the area of overlap between the predicted segmentation and the ground truth divided by the area of union
between the predicted segmentation and the ground truth. For binary (two classes) or multi-class segmentation,
the mean IoU of the image is calculated by taking the IoU of each class and averaging them.
"""

_KWARGS_DESCRIPTION = """
Args:
    predictions (`List[ndarray]`):
        List of predicted segmentation maps, each of shape (height, width). Each segmentation map can be of a different size.
    references (`List[ndarray]`):
        List of ground truth segmentation maps, each of shape (height, width). Each segmentation map can be of a different size.
    num_labels (`int`):
        Number of classes (categories).
    ignore_index (`int`):
        Index that will be ignored during evaluation.
    nan_to_num (`int`, *optional*):
        If specified, NaN values will be replaced by the number defined by the user.
    label_map (`dict`, *optional*):
        If specified, dictionary mapping old label indices to new label indices.
    reduce_labels (`bool`, *optional*, defaults to `False`):
        Whether or not to reduce all label values of segmentation maps by 1. Usually used for datasets where 0 is used
        for background, and background itself is not included in all classes of a dataset (e.g. ADE20k). The background
        label will be replaced by 255.

Returns:
    `Dict[str, float | ndarray]` comprising various elements:
    - *mean_iou* (`float`):
        Mean Intersection-over-Union (IoU averaged over all categories).
    - *mean_accuracy* (`float`):
        Mean accuracy (averaged over all categories).
    - *overall_accuracy* (`float`):
        Overall accuracy on all images.
    - *per_category_accuracy* (`ndarray` of shape `(num_labels,)`):
        Per category accuracy.
    - *per_category_iou* (`ndarray` of shape `(num_labels,)`):
        Per category IoU.

Examples:

    >>> import numpy as np

    >>> mean_iou = datasets.load_metric("mean_iou")

    >>> # suppose one has 3 different segmentation maps predicted
    >>> predicted_1 = np.array([[1, 2], [3, 4], [5, 255]])
    >>> actual_1 = np.array([[0, 3], [5, 4], [6, 255]])

    >>> predicted_2 = np.array([[2, 7], [9, 2], [3, 6]])
    >>> actual_2 = np.array([[1, 7], [9, 2], [3, 6]])

    >>> predicted_3 = np.array([[2, 2, 3], [8, 2, 4], [3, 255, 2]])
    >>> actual_3 = np.array([[1, 2, 2], [8, 2, 1], [3, 255, 1]])

    >>> predicted = [predicted_1, predicted_2, predicted_3]
    >>> ground_truth = [actual_1, actual_2, actual_3]

    >>> results = mean_iou.compute(predictions=predicted, references=ground_truth, num_labels=10, ignore_index=255, reduce_labels=False)
    >>> print(results) # doctest: +NORMALIZE_WHITESPACE
    {'mean_iou': 0.47750000000000004, 'mean_accuracy': 0.5916666666666666, 'overall_accuracy': 0.5263157894736842, 'per_category_iou': array([0.   , 0.   , 0.375, 0.4  , 0.5  , 0.   , 0.5  , 1.   , 1.   , 1.   ]), 'per_category_accuracy': array([0.        , 0.        , 0.75      , 0.66666667, 1.        , 0.        , 0.5       , 1.        , 1.        , 1.        ])}
"""

_CITATION = """\
@software{MMSegmentation_Contributors_OpenMMLab_Semantic_Segmentation_2020,
    author = {{MMSegmentation Contributors}},
    license = {Apache-2.0},
    month = {7},
    title = {{OpenMMLab Semantic Segmentation Toolbox and Benchmark}},
    url = {https://github.com/open-mmlab/mmsegmentation},
    year = {2020}
}"""


def intersect_and_union(
    pred_label,
    label,
    num_labels,
    ignore_index: int,
    label_map: Optional[Dict[int, int]] = None,
    reduce_labels: bool = False,
):
    """Calculate intersection and union between a prediction and a ground truth map."""
    if label_map is not None:
        for old_id, new_id in label_map.items():
            label[label == old_id] = new_id

    # turn into Numpy arrays
    pred_label = np.array(pred_label)
    label = np.array(label)

    if reduce_labels:
        label[label == 0] = 255
        label = label - 1
        label[label == 254] = 255

    mask = np.not_equal(label, ignore_index)
    pred_label = pred_label[mask]
    label = np.array(label)[mask]

    intersect = pred_label[pred_label == label]

    area_intersect = np.histogram(intersect, bins=num_labels, range=(0, num_labels - 1))[0]
    area_pred_label = np.histogram(pred_label, bins=num_labels, range=(0, num_labels - 1))[0]
    area_label = np.histogram(label, bins=num_labels, range=(0, num_labels - 1))[0]

    area_union = area_pred_label + area_label - area_intersect

    return area_intersect, area_union, area_pred_label, area_label


def total_intersect_and_union(
    results,
    gt_seg_maps,
    num_labels,
    ignore_index: int,
    label_map: Optional[Dict[int, int]] = None,
    reduce_labels: bool = False,
):
    """Accumulate intersection and union over a list of prediction / ground truth pairs."""
    total_area_intersect = np.zeros((num_labels,), dtype=np.float64)
    total_area_union = np.zeros((num_labels,), dtype=np.float64)
    total_area_pred_label = np.zeros((num_labels,), dtype=np.float64)
    total_area_label = np.zeros((num_labels,), dtype=np.float64)
    for result, gt_seg_map in zip(results, gt_seg_maps):
        area_intersect, area_union, area_pred_label, area_label = intersect_and_union(
            result, gt_seg_map, num_labels, ignore_index, label_map, reduce_labels
        )
        total_area_intersect += area_intersect
        total_area_union += area_union
        total_area_pred_label += area_pred_label
        total_area_label += area_label
    return total_area_intersect, total_area_union, total_area_pred_label, total_area_label


def mean_iou(
    results,
    gt_seg_maps,
    num_labels,
    ignore_index: int,
    nan_to_num: Optional[int] = None,
    label_map: Optional[Dict[int, int]] = None,
    reduce_labels: bool = False,
):
    """Compute mean IoU, mean accuracy and overall accuracy over a list of images."""
    total_area_intersect, total_area_union, total_area_pred_label, total_area_label = total_intersect_and_union(
        results, gt_seg_maps, num_labels, ignore_index, label_map, reduce_labels
    )

    # compute metrics
    metrics = {}

    all_acc = total_area_intersect.sum() / total_area_label.sum()
    iou = total_area_intersect / total_area_union
    acc = total_area_intersect / total_area_label

    metrics["mean_iou"] = np.nanmean(iou)
    metrics["mean_accuracy"] = np.nanmean(acc)
    metrics["overall_accuracy"] = all_acc
    metrics["per_category_iou"] = iou
    metrics["per_category_accuracy"] = acc

    if nan_to_num is not None:
        metrics = {metric: np.nan_to_num(metric_value, nan=nan_to_num) for metric, metric_value in metrics.items()}

    return metrics


@datasets.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
class MeanIoU(datasets.Metric):
    def _info(self):
        return datasets.MetricInfo(
            description=_DESCRIPTION,
            citation=_CITATION,
            inputs_description=_KWARGS_DESCRIPTION,
            features=datasets.Features(
                # 1st Seq - height dim, 2nd - width dim
                {
                    "predictions": datasets.Sequence(datasets.Sequence(datasets.Value("uint16"))),
                    "references": datasets.Sequence(datasets.Sequence(datasets.Value("uint16"))),
                }
            ),
            reference_urls=[
                "https://github.com/open-mmlab/mmsegmentation/blob/71c201b1813267d78764f306a297ca717827c4bf/mmseg/core/evaluation/metrics.py"
            ],
        )

    def _compute(
        self,
        predictions,
        references,
        num_labels,
        ignore_index,
        nan_to_num=None,
        label_map=None,
        reduce_labels=False,
    ):
        iou_result = mean_iou(
            results=predictions,
            gt_seg_maps=references,
            num_labels=num_labels,
            ignore_index=ignore_index,
            nan_to_num=nan_to_num,
            label_map=label_map,
            reduce_labels=reduce_labels,
        )
        return iou_result
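# Illustrative sanity check (not part of the metric): per-class IoU for a single
# prediction/ground-truth pair, computed with explicit boolean masks, should
# match the histogram-based counting in `intersect_and_union`. Values are made up.
import numpy as np

pred = np.array([[1, 2], [3, 4], [5, 255]])
gt = np.array([[0, 3], [5, 4], [6, 255]])
num_labels, ignore_index = 10, 255

valid = gt != ignore_index  # drop ignored pixels, as intersect_and_union does
pred, gt = pred[valid], gt[valid]

for cls in range(num_labels):
    inter = np.sum((pred == cls) & (gt == cls))
    union = np.sum(pred == cls) + np.sum(gt == cls) - inter
    if union:  # classes absent from both maps yield NaN in the metric (skipped by nanmean)
        print(cls, inter / union)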
from __future__ import annotations

from typing import Any


def generate_all_subsequences(sequence: list[Any]) -> None:
    create_state_space_tree(sequence, [], 0)


def create_state_space_tree(sequence: list[Any], current_subsequence: list[Any], index: int) -> None:
    if index == len(sequence):
        print(current_subsequence)
        return

    # Branch 1: skip the element at `index`
    create_state_space_tree(sequence, current_subsequence, index + 1)
    # Branch 2: include the element at `index`
    current_subsequence.append(sequence[index])
    create_state_space_tree(sequence, current_subsequence, index + 1)
    current_subsequence.pop()


if __name__ == "__main__":
    seq: list[Any] = [3, 1, 2, 4]
    generate_all_subsequences(seq)

    seq.clear()
    seq.extend(["A", "B", "C"])
    generate_all_subsequences(seq)
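# Illustrative cross-check: the backtracking above emits exactly 2**n
# subsequences (every element is either skipped or included), which can be
# verified against itertools.combinations.
from itertools import combinations

seq = [3, 1, 2, 4]
subsequences = [list(c) for r in range(len(seq) + 1) for c in combinations(seq, r)]
assert len(subsequences) == 2 ** len(seq)  # 16 for a length-4 sequence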
from typing import Any, Callable, Dict, List, Optional, Union import torch from transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer from diffusers import ( AutoencoderKL, DDIMScheduler, DiffusionPipeline, LMSDiscreteScheduler, PNDMScheduler, StableDiffusionPipeline, UNetaDConditionModel, ) from diffusers.pipelines.stable_diffusion import StableDiffusionPipelineOutput from diffusers.pipelines.stable_diffusion.safety_checker import StableDiffusionSafetyChecker __snake_case = """CompVis/stable-diffusion-v1-1""" __snake_case = """CompVis/stable-diffusion-v1-2""" __snake_case = """CompVis/stable-diffusion-v1-3""" __snake_case = """CompVis/stable-diffusion-v1-4""" class UpperCAmelCase_ ( lowercase ): """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = True , ) -> Optional[int]: super()._init_() UpperCamelCase :List[Any] = StableDiffusionPipeline.from_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = StableDiffusionPipeline.from_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Tuple = StableDiffusionPipeline.from_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = StableDiffusionPipeline( vae=SCREAMING_SNAKE_CASE_ , text_encoder=SCREAMING_SNAKE_CASE_ , tokenizer=SCREAMING_SNAKE_CASE_ , unet=SCREAMING_SNAKE_CASE_ , scheduler=SCREAMING_SNAKE_CASE_ , safety_checker=SCREAMING_SNAKE_CASE_ , feature_extractor=SCREAMING_SNAKE_CASE_ , requires_safety_checker=SCREAMING_SNAKE_CASE_ , ) self.register_modules(pipelinea=self.pipea , pipelinea=self.pipea , pipelinea=self.pipea , pipelinea=self.pipea ) @property def UpperCAmelCase ( self ) -> Dict[str, Any]: return {k: getattr(self , SCREAMING_SNAKE_CASE_ ) for k in self.config.keys() if not k.startswith('''_''' )} def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ = "auto" ) -> Dict: if slice_size == "auto": # half the attention head size is usually a good trade-off between # speed and memory UpperCamelCase :List[str] = self.unet.config.attention_head_dim // 2 self.unet.set_attention_slice(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: self.enable_attention_slicing(SCREAMING_SNAKE_CASE_ ) @torch.no_grad() def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = 512 , SCREAMING_SNAKE_CASE_ = 512 , SCREAMING_SNAKE_CASE_ = 50 , SCREAMING_SNAKE_CASE_ = 7.5 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , SCREAMING_SNAKE_CASE_ = 0.0 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = "pil" , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , **SCREAMING_SNAKE_CASE_ , ) -> Optional[int]: return self.pipea( prompt=SCREAMING_SNAKE_CASE_ , height=SCREAMING_SNAKE_CASE_ , width=SCREAMING_SNAKE_CASE_ , num_inference_steps=SCREAMING_SNAKE_CASE_ , guidance_scale=SCREAMING_SNAKE_CASE_ , negative_prompt=SCREAMING_SNAKE_CASE_ , num_images_per_prompt=SCREAMING_SNAKE_CASE_ , eta=SCREAMING_SNAKE_CASE_ , generator=SCREAMING_SNAKE_CASE_ , latents=SCREAMING_SNAKE_CASE_ , output_type=SCREAMING_SNAKE_CASE_ , return_dict=SCREAMING_SNAKE_CASE_ , callback=SCREAMING_SNAKE_CASE_ , callback_steps=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ , ) @torch.no_grad() def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = 512 , SCREAMING_SNAKE_CASE_ = 512 , SCREAMING_SNAKE_CASE_ = 50 , SCREAMING_SNAKE_CASE_ = 7.5 , 
SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , SCREAMING_SNAKE_CASE_ = 0.0 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = "pil" , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , **SCREAMING_SNAKE_CASE_ , ) -> List[str]: return self.pipea( prompt=SCREAMING_SNAKE_CASE_ , height=SCREAMING_SNAKE_CASE_ , width=SCREAMING_SNAKE_CASE_ , num_inference_steps=SCREAMING_SNAKE_CASE_ , guidance_scale=SCREAMING_SNAKE_CASE_ , negative_prompt=SCREAMING_SNAKE_CASE_ , num_images_per_prompt=SCREAMING_SNAKE_CASE_ , eta=SCREAMING_SNAKE_CASE_ , generator=SCREAMING_SNAKE_CASE_ , latents=SCREAMING_SNAKE_CASE_ , output_type=SCREAMING_SNAKE_CASE_ , return_dict=SCREAMING_SNAKE_CASE_ , callback=SCREAMING_SNAKE_CASE_ , callback_steps=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ , ) @torch.no_grad() def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = 512 , SCREAMING_SNAKE_CASE_ = 512 , SCREAMING_SNAKE_CASE_ = 50 , SCREAMING_SNAKE_CASE_ = 7.5 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , SCREAMING_SNAKE_CASE_ = 0.0 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = "pil" , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , **SCREAMING_SNAKE_CASE_ , ) -> Union[str, Any]: return self.pipea( prompt=SCREAMING_SNAKE_CASE_ , height=SCREAMING_SNAKE_CASE_ , width=SCREAMING_SNAKE_CASE_ , num_inference_steps=SCREAMING_SNAKE_CASE_ , guidance_scale=SCREAMING_SNAKE_CASE_ , negative_prompt=SCREAMING_SNAKE_CASE_ , num_images_per_prompt=SCREAMING_SNAKE_CASE_ , eta=SCREAMING_SNAKE_CASE_ , generator=SCREAMING_SNAKE_CASE_ , latents=SCREAMING_SNAKE_CASE_ , output_type=SCREAMING_SNAKE_CASE_ , return_dict=SCREAMING_SNAKE_CASE_ , callback=SCREAMING_SNAKE_CASE_ , callback_steps=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ , ) @torch.no_grad() def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = 512 , SCREAMING_SNAKE_CASE_ = 512 , SCREAMING_SNAKE_CASE_ = 50 , SCREAMING_SNAKE_CASE_ = 7.5 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , SCREAMING_SNAKE_CASE_ = 0.0 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = "pil" , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , **SCREAMING_SNAKE_CASE_ , ) -> int: return self.pipea( prompt=SCREAMING_SNAKE_CASE_ , height=SCREAMING_SNAKE_CASE_ , width=SCREAMING_SNAKE_CASE_ , num_inference_steps=SCREAMING_SNAKE_CASE_ , guidance_scale=SCREAMING_SNAKE_CASE_ , negative_prompt=SCREAMING_SNAKE_CASE_ , num_images_per_prompt=SCREAMING_SNAKE_CASE_ , eta=SCREAMING_SNAKE_CASE_ , generator=SCREAMING_SNAKE_CASE_ , latents=SCREAMING_SNAKE_CASE_ , output_type=SCREAMING_SNAKE_CASE_ , return_dict=SCREAMING_SNAKE_CASE_ , callback=SCREAMING_SNAKE_CASE_ , callback_steps=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ , ) @torch.no_grad() def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = 512 , SCREAMING_SNAKE_CASE_ = 512 , SCREAMING_SNAKE_CASE_ = 50 , SCREAMING_SNAKE_CASE_ = 7.5 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , SCREAMING_SNAKE_CASE_ = 0.0 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = "pil" , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = 1 , **SCREAMING_SNAKE_CASE_ , ) -> Any: UpperCamelCase :str = '''cuda''' if torch.cuda.is_available() else '''cpu''' 
self.to(SCREAMING_SNAKE_CASE_ ) # Checks if the height and width are divisible by 8 or not if height % 8 != 0 or width % 8 != 0: raise ValueError(F'''`height` and `width` must be divisible by 8 but are {height} and {width}.''' ) # Get first result from Stable Diffusion Checkpoint v1.1 UpperCamelCase :str = self.textaimg_sda_a( prompt=SCREAMING_SNAKE_CASE_ , height=SCREAMING_SNAKE_CASE_ , width=SCREAMING_SNAKE_CASE_ , num_inference_steps=SCREAMING_SNAKE_CASE_ , guidance_scale=SCREAMING_SNAKE_CASE_ , negative_prompt=SCREAMING_SNAKE_CASE_ , num_images_per_prompt=SCREAMING_SNAKE_CASE_ , eta=SCREAMING_SNAKE_CASE_ , generator=SCREAMING_SNAKE_CASE_ , latents=SCREAMING_SNAKE_CASE_ , output_type=SCREAMING_SNAKE_CASE_ , return_dict=SCREAMING_SNAKE_CASE_ , callback=SCREAMING_SNAKE_CASE_ , callback_steps=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ , ) # Get first result from Stable Diffusion Checkpoint v1.2 UpperCamelCase :int = self.textaimg_sda_a( prompt=SCREAMING_SNAKE_CASE_ , height=SCREAMING_SNAKE_CASE_ , width=SCREAMING_SNAKE_CASE_ , num_inference_steps=SCREAMING_SNAKE_CASE_ , guidance_scale=SCREAMING_SNAKE_CASE_ , negative_prompt=SCREAMING_SNAKE_CASE_ , num_images_per_prompt=SCREAMING_SNAKE_CASE_ , eta=SCREAMING_SNAKE_CASE_ , generator=SCREAMING_SNAKE_CASE_ , latents=SCREAMING_SNAKE_CASE_ , output_type=SCREAMING_SNAKE_CASE_ , return_dict=SCREAMING_SNAKE_CASE_ , callback=SCREAMING_SNAKE_CASE_ , callback_steps=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ , ) # Get first result from Stable Diffusion Checkpoint v1.3 UpperCamelCase :Tuple = self.textaimg_sda_a( prompt=SCREAMING_SNAKE_CASE_ , height=SCREAMING_SNAKE_CASE_ , width=SCREAMING_SNAKE_CASE_ , num_inference_steps=SCREAMING_SNAKE_CASE_ , guidance_scale=SCREAMING_SNAKE_CASE_ , negative_prompt=SCREAMING_SNAKE_CASE_ , num_images_per_prompt=SCREAMING_SNAKE_CASE_ , eta=SCREAMING_SNAKE_CASE_ , generator=SCREAMING_SNAKE_CASE_ , latents=SCREAMING_SNAKE_CASE_ , output_type=SCREAMING_SNAKE_CASE_ , return_dict=SCREAMING_SNAKE_CASE_ , callback=SCREAMING_SNAKE_CASE_ , callback_steps=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ , ) # Get first result from Stable Diffusion Checkpoint v1.4 UpperCamelCase :Union[str, Any] = self.textaimg_sda_a( prompt=SCREAMING_SNAKE_CASE_ , height=SCREAMING_SNAKE_CASE_ , width=SCREAMING_SNAKE_CASE_ , num_inference_steps=SCREAMING_SNAKE_CASE_ , guidance_scale=SCREAMING_SNAKE_CASE_ , negative_prompt=SCREAMING_SNAKE_CASE_ , num_images_per_prompt=SCREAMING_SNAKE_CASE_ , eta=SCREAMING_SNAKE_CASE_ , generator=SCREAMING_SNAKE_CASE_ , latents=SCREAMING_SNAKE_CASE_ , output_type=SCREAMING_SNAKE_CASE_ , return_dict=SCREAMING_SNAKE_CASE_ , callback=SCREAMING_SNAKE_CASE_ , callback_steps=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ , ) # Get all result images into a single list and pass it via StableDiffusionPipelineOutput for final result return StableDiffusionPipelineOutput([resa[0], resa[0], resa[0], resa[0]] )
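# Hedged usage sketch for the comparison pipeline above. Loading it as a
# diffusers community pipeline is an assumption (the registered name
# "stable_diffusion_comparison" may differ in your diffusers version); the call
# fans one prompt out to the four v1.x checkpoints and returns one image each.
import torch
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained(
    "CompVis/stable-diffusion-v1-4",
    custom_pipeline="stable_diffusion_comparison",
    torch_dtype=torch.float16,
)
pipe.to("cuda")
pipe.enable_attention_slicing()

output = pipe(prompt="an astronaut riding a horse on mars", num_inference_steps=50)
images = output.images  # StableDiffusionPipelineOutput with one image per checkpoint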
from typing import Dict, List, Optional, Union

import numpy as np

from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict
from ...image_transforms import (
    center_crop,
    get_resize_output_image_size,
    normalize,
    rescale,
    resize,
    to_channel_dimension_format,
)
from ...image_utils import (
    IMAGENET_DEFAULT_MEAN,
    IMAGENET_DEFAULT_STD,
    ChannelDimension,
    ImageInput,
    PILImageResampling,
    is_batched,
    to_numpy_array,
    valid_images,
)
from ...utils import TensorType, logging


logger = logging.get_logger(__name__)


class EfficientFormerImageProcessor(BaseImageProcessor):
    model_input_names = ["pixel_values"]

    def __init__(
        self,
        do_resize: bool = True,
        size: Optional[Dict[str, int]] = None,
        resample: PILImageResampling = PILImageResampling.BICUBIC,
        do_center_crop: bool = True,
        do_rescale: bool = True,
        rescale_factor: Union[int, float] = 1 / 255,
        crop_size: Optional[Dict[str, int]] = None,
        do_normalize: bool = True,
        image_mean: Optional[Union[float, List[float]]] = None,
        image_std: Optional[Union[float, List[float]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        size = size if size is not None else {"height": 224, "width": 224}
        size = get_size_dict(size)
        crop_size = crop_size if crop_size is not None else {"height": 224, "width": 224}
        crop_size = get_size_dict(crop_size, default_to_square=True, param_name="crop_size")

        self.do_resize = do_resize
        self.do_rescale = do_rescale
        self.do_normalize = do_normalize
        self.do_center_crop = do_center_crop
        self.crop_size = crop_size
        self.size = size
        self.resample = resample
        self.rescale_factor = rescale_factor
        self.image_mean = image_mean if image_mean is not None else IMAGENET_DEFAULT_MEAN
        self.image_std = image_std if image_std is not None else IMAGENET_DEFAULT_STD

    def resize(
        self,
        image: np.ndarray,
        size: Dict[str, int],
        resample: PILImageResampling = PILImageResampling.BILINEAR,
        data_format: Optional[Union[str, ChannelDimension]] = None,
        **kwargs,
    ) -> np.ndarray:
        size = get_size_dict(size)

        if "shortest_edge" in size:
            size = get_resize_output_image_size(image, size=size["shortest_edge"], default_to_square=False)
            # size = get_resize_output_image_size(image, size["shortest_edge"], size["longest_edge"])
        elif "height" in size and "width" in size:
            size = (size["height"], size["width"])
        else:
            raise ValueError(f"Size must contain 'height' and 'width' keys or 'shortest_edge' key. Got {size.keys()}")
        return resize(image, size=size, resample=resample, data_format=data_format, **kwargs)

    def center_crop(
        self,
        image: np.ndarray,
        size: Dict[str, int],
        data_format: Optional[Union[str, ChannelDimension]] = None,
        **kwargs,
    ) -> np.ndarray:
        size = get_size_dict(size)
        if "height" not in size or "width" not in size:
            raise ValueError(f"The `size` parameter must contain the keys (height, width). Got {size.keys()}")
        return center_crop(image, size=(size["height"], size["width"]), data_format=data_format, **kwargs)

    def rescale(
        self,
        image: np.ndarray,
        scale: Union[int, float],
        data_format: Optional[Union[str, ChannelDimension]] = None,
        **kwargs,
    ) -> np.ndarray:
        return rescale(image, scale=scale, data_format=data_format, **kwargs)

    def normalize(
        self,
        image: np.ndarray,
        mean: Union[float, List[float]],
        std: Union[float, List[float]],
        data_format: Optional[Union[str, ChannelDimension]] = None,
        **kwargs,
    ) -> np.ndarray:
        return normalize(image, mean=mean, std=std, data_format=data_format, **kwargs)

    def preprocess(
        self,
        images: ImageInput,
        do_resize: Optional[bool] = None,
        size: Optional[Dict[str, int]] = None,
        resample: PILImageResampling = None,
        do_center_crop: Optional[bool] = None,
        crop_size: Optional[Dict[str, int]] = None,
        do_rescale: Optional[bool] = None,
        rescale_factor: Optional[float] = None,
        do_normalize: Optional[bool] = None,
        image_mean: Optional[Union[float, List[float]]] = None,
        image_std: Optional[Union[float, List[float]]] = None,
        return_tensors: Optional[Union[str, TensorType]] = None,
        data_format: ChannelDimension = ChannelDimension.FIRST,
        **kwargs,
    ) -> BatchFeature:
        do_resize = do_resize if do_resize is not None else self.do_resize
        do_rescale = do_rescale if do_rescale is not None else self.do_rescale
        do_normalize = do_normalize if do_normalize is not None else self.do_normalize
        do_center_crop = do_center_crop if do_center_crop is not None else self.do_center_crop
        crop_size = crop_size if crop_size is not None else self.crop_size
        crop_size = get_size_dict(crop_size, param_name="crop_size", default_to_square=True)
        resample = resample if resample is not None else self.resample
        rescale_factor = rescale_factor if rescale_factor is not None else self.rescale_factor
        image_mean = image_mean if image_mean is not None else self.image_mean
        image_std = image_std if image_std is not None else self.image_std

        size = size if size is not None else self.size
        size_dict = get_size_dict(size)

        if not is_batched(images):
            images = [images]

        if not valid_images(images):
            raise ValueError(
                "Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, "
                "torch.Tensor, tf.Tensor or jax.ndarray."
            )

        if do_resize and size is None:
            raise ValueError("Size must be specified if do_resize is True.")

        if do_center_crop and crop_size is None:
            raise ValueError("Crop size must be specified if do_center_crop is True.")

        if do_rescale and rescale_factor is None:
            raise ValueError("Rescale factor must be specified if do_rescale is True.")

        # All transformations expect numpy arrays.
        images = [to_numpy_array(image) for image in images]

        if do_resize:
            images = [self.resize(image=image, size=size_dict, resample=resample) for image in images]

        if do_center_crop:
            images = [self.center_crop(image=image, size=crop_size) for image in images]

        if do_rescale:
            images = [self.rescale(image=image, scale=rescale_factor) for image in images]

        if do_normalize:
            images = [self.normalize(image=image, mean=image_mean, std=image_std) for image in images]

        images = [to_channel_dimension_format(image, data_format) for image in images]

        data = {"pixel_values": images}
        return BatchFeature(data=data, tensor_type=return_tensors)
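# Standalone usage sketch (illustrative): the defaults above resize to 224x224,
# center-crop, rescale by 1/255 and normalize with ImageNet statistics.
# `EfficientFormerImageProcessor` is assumed to be the exported name of the class.
import numpy as np
from PIL import Image
from transformers import EfficientFormerImageProcessor

processor = EfficientFormerImageProcessor()
image = Image.fromarray(np.random.randint(0, 256, (480, 640, 3), dtype=np.uint8))
batch = processor(images=image, return_tensors="np")
print(batch["pixel_values"].shape)  # (1, 3, 224, 224) with the defaults above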
import argparse import json import os import pickle import shutil import numpy as np import torch from distiller import Distiller from lm_seqs_dataset import LmSeqsDataset from transformers import ( BertConfig, BertForMaskedLM, BertTokenizer, DistilBertConfig, DistilBertForMaskedLM, DistilBertTokenizer, GPTaConfig, GPTaLMHeadModel, GPTaTokenizer, RobertaConfig, RobertaForMaskedLM, RobertaTokenizer, ) from utils import git_log, init_gpu_params, logger, set_seed __snake_case = { """distilbert""": (DistilBertConfig, DistilBertForMaskedLM, DistilBertTokenizer), """roberta""": (RobertaConfig, RobertaForMaskedLM, RobertaTokenizer), """bert""": (BertConfig, BertForMaskedLM, BertTokenizer), """gpt2""": (GPTaConfig, GPTaLMHeadModel, GPTaTokenizer), } def _A ( SCREAMING_SNAKE_CASE__ : int ): assert (args.mlm and args.alpha_mlm > 0.0) or (not args.mlm and args.alpha_mlm == 0.0) assert (args.alpha_mlm > 0.0 and args.alpha_clm == 0.0) or (args.alpha_mlm == 0.0 and args.alpha_clm > 0.0) if args.mlm: assert os.path.isfile(args.token_counts ) assert (args.student_type in ["roberta", "distilbert"]) and (args.teacher_type in ["roberta", "bert"]) else: assert (args.student_type in ["gpt2"]) and (args.teacher_type in ["gpt2"]) assert args.teacher_type == args.student_type or ( args.student_type == "distilbert" and args.teacher_type == "bert" ) assert os.path.isfile(args.student_config ) if args.student_pretrained_weights is not None: assert os.path.isfile(args.student_pretrained_weights ) if args.freeze_token_type_embds: assert args.student_type in ["roberta"] assert args.alpha_ce >= 0.0 assert args.alpha_mlm >= 0.0 assert args.alpha_clm >= 0.0 assert args.alpha_mse >= 0.0 assert args.alpha_cos >= 0.0 assert args.alpha_ce + args.alpha_mlm + args.alpha_clm + args.alpha_mse + args.alpha_cos > 0.0 def _A ( SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : Union[str, Any] ): if args.student_type == "roberta": UpperCamelCase :Optional[Any] = False elif args.student_type == "gpt2": UpperCamelCase :List[str] = False def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str] ): if args.student_type == "roberta": UpperCamelCase :List[str] = False def _A ( ): UpperCamelCase :List[str] = argparse.ArgumentParser(description='''Training''' ) parser.add_argument('''--force''' , action='''store_true''' , help='''Overwrite dump_path if it already exists.''' ) parser.add_argument( '''--dump_path''' , type=SCREAMING_SNAKE_CASE__ , required=SCREAMING_SNAKE_CASE__ , help='''The output directory (log, checkpoints, parameters, etc.)''' ) parser.add_argument( '''--data_file''' , type=SCREAMING_SNAKE_CASE__ , required=SCREAMING_SNAKE_CASE__ , help='''The binarized file (tokenized + tokens_to_ids) and grouped by sequence.''' , ) parser.add_argument( '''--student_type''' , type=SCREAMING_SNAKE_CASE__ , choices=['''distilbert''', '''roberta''', '''gpt2'''] , required=SCREAMING_SNAKE_CASE__ , help='''The student type (DistilBERT, RoBERTa).''' , ) parser.add_argument('''--student_config''' , type=SCREAMING_SNAKE_CASE__ , required=SCREAMING_SNAKE_CASE__ , help='''Path to the student configuration.''' ) parser.add_argument( '''--student_pretrained_weights''' , default=SCREAMING_SNAKE_CASE__ , type=SCREAMING_SNAKE_CASE__ , help='''Load student initialization checkpoint.''' ) parser.add_argument( '''--teacher_type''' , choices=['''bert''', '''roberta''', '''gpt2'''] , required=SCREAMING_SNAKE_CASE__ , help='''Teacher type (BERT, RoBERTa).''' ) parser.add_argument('''--teacher_name''' , 
type=SCREAMING_SNAKE_CASE__ , required=SCREAMING_SNAKE_CASE__ , help='''The teacher model.''' ) parser.add_argument('''--temperature''' , default=2.0 , type=SCREAMING_SNAKE_CASE__ , help='''Temperature for the softmax temperature.''' ) parser.add_argument( '''--alpha_ce''' , default=0.5 , type=SCREAMING_SNAKE_CASE__ , help='''Linear weight for the distillation loss. Must be >=0.''' ) parser.add_argument( '''--alpha_mlm''' , default=0.0 , type=SCREAMING_SNAKE_CASE__ , help='''Linear weight for the MLM loss. Must be >=0. Should be used in conjunction with `mlm` flag.''' , ) parser.add_argument('''--alpha_clm''' , default=0.5 , type=SCREAMING_SNAKE_CASE__ , help='''Linear weight for the CLM loss. Must be >=0.''' ) parser.add_argument('''--alpha_mse''' , default=0.0 , type=SCREAMING_SNAKE_CASE__ , help='''Linear weight of the MSE loss. Must be >=0.''' ) parser.add_argument( '''--alpha_cos''' , default=0.0 , type=SCREAMING_SNAKE_CASE__ , help='''Linear weight of the cosine embedding loss. Must be >=0.''' ) parser.add_argument( '''--mlm''' , action='''store_true''' , help='''The LM step: MLM or CLM. If `mlm` is True, the MLM is used over CLM.''' ) parser.add_argument( '''--mlm_mask_prop''' , default=0.15 , type=SCREAMING_SNAKE_CASE__ , help='''Proportion of tokens for which we need to make a prediction.''' , ) parser.add_argument('''--word_mask''' , default=0.8 , type=SCREAMING_SNAKE_CASE__ , help='''Proportion of tokens to mask out.''' ) parser.add_argument('''--word_keep''' , default=0.1 , type=SCREAMING_SNAKE_CASE__ , help='''Proportion of tokens to keep.''' ) parser.add_argument('''--word_rand''' , default=0.1 , type=SCREAMING_SNAKE_CASE__ , help='''Proportion of tokens to randomly replace.''' ) parser.add_argument( '''--mlm_smoothing''' , default=0.7 , type=SCREAMING_SNAKE_CASE__ , help='''Smoothing parameter to emphasize more rare tokens (see XLM, similar to word2vec).''' , ) parser.add_argument('''--token_counts''' , type=SCREAMING_SNAKE_CASE__ , help='''The token counts in the data_file for MLM.''' ) parser.add_argument( '''--restrict_ce_to_mask''' , action='''store_true''' , help='''If true, compute the distillation loss only the [MLM] prediction distribution.''' , ) parser.add_argument( '''--freeze_pos_embs''' , action='''store_true''' , help='''Freeze positional embeddings during distillation. For student_type in [\'roberta\', \'gpt2\'] only.''' , ) parser.add_argument( '''--freeze_token_type_embds''' , action='''store_true''' , help='''Freeze token type embeddings during distillation if existent. For student_type in [\'roberta\'] only.''' , ) parser.add_argument('''--n_epoch''' , type=SCREAMING_SNAKE_CASE__ , default=3 , help='''Number of pass on the whole dataset.''' ) parser.add_argument('''--batch_size''' , type=SCREAMING_SNAKE_CASE__ , default=5 , help='''Batch size (for each process).''' ) parser.add_argument( '''--group_by_size''' , action='''store_false''' , help='''If true, group sequences that have similar length into the same batch. 
Default is true.''' , ) parser.add_argument( '''--gradient_accumulation_steps''' , type=SCREAMING_SNAKE_CASE__ , default=50 , help='''Gradient accumulation for larger training batches.''' , ) parser.add_argument('''--warmup_prop''' , default=0.05 , type=SCREAMING_SNAKE_CASE__ , help='''Linear warmup proportion.''' ) parser.add_argument('''--weight_decay''' , default=0.0 , type=SCREAMING_SNAKE_CASE__ , help='''Weight decay if we apply some.''' ) parser.add_argument('''--learning_rate''' , default=5e-4 , type=SCREAMING_SNAKE_CASE__ , help='''The initial learning rate for Adam.''' ) parser.add_argument('''--adam_epsilon''' , default=1e-6 , type=SCREAMING_SNAKE_CASE__ , help='''Epsilon for Adam optimizer.''' ) parser.add_argument('''--max_grad_norm''' , default=5.0 , type=SCREAMING_SNAKE_CASE__ , help='''Max gradient norm.''' ) parser.add_argument('''--initializer_range''' , default=0.02 , type=SCREAMING_SNAKE_CASE__ , help='''Random initialization range.''' ) parser.add_argument( '''--fp16''' , action='''store_true''' , help='''Whether to use 16-bit (mixed) precision (through NVIDIA apex) instead of 32-bit''' , ) parser.add_argument( '''--fp16_opt_level''' , type=SCREAMING_SNAKE_CASE__ , default='''O1''' , help=( '''For fp16: Apex AMP optimization level selected in [\'O0\', \'O1\', \'O2\', and \'O3\'].''' '''See details at https://nvidia.github.io/apex/amp.html''' ) , ) parser.add_argument('''--n_gpu''' , type=SCREAMING_SNAKE_CASE__ , default=1 , help='''Number of GPUs in the node.''' ) parser.add_argument('''--local_rank''' , type=SCREAMING_SNAKE_CASE__ , default=-1 , help='''Distributed training - Local rank''' ) parser.add_argument('''--seed''' , type=SCREAMING_SNAKE_CASE__ , default=56 , help='''Random seed''' ) parser.add_argument('''--log_interval''' , type=SCREAMING_SNAKE_CASE__ , default=500 , help='''Tensorboard logging interval.''' ) parser.add_argument('''--checkpoint_interval''' , type=SCREAMING_SNAKE_CASE__ , default=4000 , help='''Checkpoint interval.''' ) UpperCamelCase :int = parser.parse_args() sanity_checks(SCREAMING_SNAKE_CASE__ ) # ARGS # init_gpu_params(SCREAMING_SNAKE_CASE__ ) set_seed(SCREAMING_SNAKE_CASE__ ) if args.is_master: if os.path.exists(args.dump_path ): if not args.force: raise ValueError( F'''Serialization dir {args.dump_path} already exists, but you have not precised wheter to overwrite''' ''' itUse `--force` if you want to overwrite it''' ) else: shutil.rmtree(args.dump_path ) if not os.path.exists(args.dump_path ): os.makedirs(args.dump_path ) logger.info(F'''Experiment will be dumped and logged in {args.dump_path}''' ) # SAVE PARAMS # logger.info(F'''Param: {args}''' ) with open(os.path.join(args.dump_path , '''parameters.json''' ) , '''w''' ) as f: json.dump(vars(SCREAMING_SNAKE_CASE__ ) , SCREAMING_SNAKE_CASE__ , indent=4 ) git_log(args.dump_path ) UpperCamelCase , UpperCamelCase , UpperCamelCase :List[Any] = MODEL_CLASSES[args.student_type] UpperCamelCase , UpperCamelCase , UpperCamelCase :Dict = MODEL_CLASSES[args.teacher_type] # TOKENIZER # UpperCamelCase :List[str] = teacher_tokenizer_class.from_pretrained(args.teacher_name ) UpperCamelCase :Dict = {} for tok_name, tok_symbol in tokenizer.special_tokens_map.items(): UpperCamelCase :Tuple = tokenizer.all_special_tokens.index(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Union[str, Any] = tokenizer.all_special_ids[idx] logger.info(F'''Special tokens {special_tok_ids}''' ) UpperCamelCase :Tuple = special_tok_ids UpperCamelCase :List[Any] = tokenizer.max_model_input_sizes[args.teacher_name] # DATA LOADER # 
logger.info(F'''Loading data from {args.data_file}''' ) with open(args.data_file , '''rb''' ) as fp: UpperCamelCase :Tuple = pickle.load(SCREAMING_SNAKE_CASE__ ) if args.mlm: logger.info(F'''Loading token counts from {args.token_counts} (already pre-computed)''' ) with open(args.token_counts , '''rb''' ) as fp: UpperCamelCase :Optional[Any] = pickle.load(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :str = np.maximum(SCREAMING_SNAKE_CASE__ , 1 ) ** -args.mlm_smoothing for idx in special_tok_ids.values(): UpperCamelCase :Optional[Any] = 0.0 # do not predict special tokens UpperCamelCase :Optional[int] = torch.from_numpy(SCREAMING_SNAKE_CASE__ ) else: UpperCamelCase :int = None UpperCamelCase :Dict = LmSeqsDataset(params=SCREAMING_SNAKE_CASE__ , data=SCREAMING_SNAKE_CASE__ ) logger.info('''Data loader created.''' ) # STUDENT # logger.info(F'''Loading student config from {args.student_config}''' ) UpperCamelCase :List[str] = student_config_class.from_pretrained(args.student_config ) UpperCamelCase :Union[str, Any] = True if args.student_pretrained_weights is not None: logger.info(F'''Loading pretrained weights from {args.student_pretrained_weights}''' ) UpperCamelCase :Optional[Any] = student_model_class.from_pretrained(args.student_pretrained_weights , config=SCREAMING_SNAKE_CASE__ ) else: UpperCamelCase :Any = student_model_class(SCREAMING_SNAKE_CASE__ ) if args.n_gpu > 0: student.to(F'''cuda:{args.local_rank}''' ) logger.info('''Student loaded.''' ) # TEACHER # UpperCamelCase :Dict = teacher_model_class.from_pretrained(args.teacher_name , output_hidden_states=SCREAMING_SNAKE_CASE__ ) if args.n_gpu > 0: teacher.to(F'''cuda:{args.local_rank}''' ) logger.info(F'''Teacher loaded from {args.teacher_name}.''' ) # FREEZING # if args.freeze_pos_embs: freeze_pos_embeddings(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) if args.freeze_token_type_embds: freeze_token_type_embeddings(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # SANITY CHECKS # assert student.config.vocab_size == teacher.config.vocab_size assert student.config.hidden_size == teacher.config.hidden_size assert student.config.max_position_embeddings == teacher.config.max_position_embeddings if args.mlm: assert token_probs.size(0 ) == stu_architecture_config.vocab_size # DISTILLER # torch.cuda.empty_cache() UpperCamelCase :Optional[Any] = Distiller( params=SCREAMING_SNAKE_CASE__ , dataset=SCREAMING_SNAKE_CASE__ , token_probs=SCREAMING_SNAKE_CASE__ , student=SCREAMING_SNAKE_CASE__ , teacher=SCREAMING_SNAKE_CASE__ ) distiller.train() logger.info('''Let\'s go get some drinks.''' ) if __name__ == "__main__": main()
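# Hedged invocation sketch assembled only from the flags defined above (file
# paths are placeholders, not real assets). Per `sanity_checks`, MLM
# distillation of BERT into DistilBERT needs `--mlm`, a non-zero `--alpha_mlm`,
# a zero `--alpha_clm` and a `--token_counts` file:
#
#   python train.py \
#       --force \
#       --dump_path serialization_dir/my_distillation \
#       --data_file data/binarized_text.pickle \
#       --token_counts data/token_counts.pickle \
#       --student_type distilbert \
#       --student_config training_configs/distilbert-base-uncased.json \
#       --teacher_type bert \
#       --teacher_name bert-base-uncased \
#       --mlm --alpha_mlm 2.0 --alpha_clm 0.0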
import os
import sys
import tempfile

import torch

from .state import AcceleratorState
from .utils import PrecisionType, PrepareForLaunch, is_mps_available, patch_environment


def notebook_launcher(function, args=(), num_processes=None, mixed_precision="no", use_port="29500"):
    """Launch a training function inside a notebook, on CPU, GPU(s) or TPU."""
    in_colab = False
    in_kaggle = False
    if any(key.startswith("KAGGLE") for key in os.environ.keys()):
        in_kaggle = True
    elif "IPython" in sys.modules:
        in_colab = "google.colab" in str(sys.modules["IPython"].get_ipython())

    try:
        mixed_precision = PrecisionType(mixed_precision.lower())
    except ValueError:
        raise ValueError(
            f"Unknown mixed_precision mode: {mixed_precision.lower()}. Choose between {PrecisionType.list()}."
        )

    if (in_colab or in_kaggle) and (os.environ.get("TPU_NAME", None) is not None):
        # TPU launch
        import torch_xla.distributed.xla_multiprocessing as xmp

        if len(AcceleratorState._shared_state) > 0:
            raise ValueError(
                "To train on TPU in Colab or Kaggle Kernel, the `Accelerator` should only be initialized inside "
                "your training function. Restart your notebook and make sure no cells initializes an "
                "`Accelerator`."
            )
        if num_processes is None:
            num_processes = 8

        launcher = PrepareForLaunch(function, distributed_type="TPU")
        print(f"Launching a training on {num_processes} TPU cores.")
        xmp.spawn(launcher, args=args, nprocs=num_processes, start_method="fork")
    elif in_colab:
        # No need for a distributed launch otherwise as it's either CPU or one GPU.
        if torch.cuda.is_available():
            print("Launching training on one GPU.")
        else:
            print("Launching training on one CPU.")
        function(*args)
    else:
        if num_processes is None:
            raise ValueError(
                "You have to specify the number of GPUs you would like to use, add `num_processes=...` to your call."
            )

        if num_processes > 1:
            # Multi-GPU launch
            from torch.multiprocessing import start_processes
            from torch.multiprocessing.spawn import ProcessRaisedException

            if len(AcceleratorState._shared_state) > 0:
                raise ValueError(
                    "To launch a multi-GPU training from your notebook, the `Accelerator` should only be initialized "
                    "inside your training function. Restart your notebook and make sure no cells initializes an "
                    "`Accelerator`."
                )

            if torch.cuda.is_initialized():
                raise ValueError(
                    "To launch a multi-GPU training from your notebook, you need to avoid running any instruction "
                    "using `torch.cuda` in any cell. Restart your notebook and make sure no cells use any CUDA "
                    "function."
                )

            # torch.distributed will expect a few environment variables to be here. We set the ones common to each
            # process here (the other ones will be set by the launcher).
            with patch_environment(
                world_size=num_processes,
                master_addr="127.0.0.1",
                master_port=use_port,
                mixed_precision=mixed_precision,
            ):
                launcher = PrepareForLaunch(function, distributed_type="MULTI_GPU")

                print(f"Launching training on {num_processes} GPUs.")
                try:
                    start_processes(launcher, args=args, nprocs=num_processes, start_method="fork")
                except ProcessRaisedException as e:
                    if "Cannot re-initialize CUDA in forked subprocess" in e.args[0]:
                        raise RuntimeError(
                            "CUDA has been initialized before the `notebook_launcher` could create a forked subprocess. "
                            "This likely stems from an outside import causing issues once the `notebook_launcher()` is called. "
                            "Please review your imports and test them when running the `notebook_launcher()` to identify "
                            "which one is problematic."
                        ) from e
        else:
            # No need for a distributed launch otherwise as it's either CPU, GPU or MPS.
            if is_mps_available():
                os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = "1"
                print("Launching training on MPS.")
            elif torch.cuda.is_available():
                print("Launching training on one GPU.")
            else:
                print("Launching training on CPU.")
            function(*args)


def debug_launcher(function, args=(), num_processes=2):
    """Launch a training function using several processes on CPU for debugging purposes."""
    from torch.multiprocessing import start_processes

    with tempfile.NamedTemporaryFile() as tmp_file:
        # torch.distributed will expect a few environment variables to be here. We set the ones common to each
        # process here (the other ones will be set by the launcher).
        with patch_environment(
            world_size=num_processes,
            master_addr="127.0.0.1",
            master_port="29500",
            accelerate_mixed_precision="no",
            accelerate_debug_rdv_file=tmp_file.name,
            accelerate_use_cpu="yes",
        ):
            launcher = PrepareForLaunch(function, debug=True)
            start_processes(launcher, args=args, nprocs=num_processes, start_method="fork")
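# Hedged usage sketch: in a notebook the training function is defined first and
# handed to the launcher. Outside Colab/Kaggle, `num_processes` must be given
# explicitly, and the Accelerator must be created *inside* the launched
# function, per the guards above.
from accelerate import Accelerator, notebook_launcher

def training_function():
    accelerator = Accelerator()
    accelerator.print(f"Process {accelerator.process_index} of {accelerator.num_processes}")

notebook_launcher(training_function, args=(), num_processes=2)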
from __future__ import annotations


def depth_first_search(
    possible_board: list[int],
    diagonal_right_collisions: list[int],
    diagonal_left_collisions: list[int],
    boards: list[list[str]],
    n: int,
) -> None:
    # Get the next row in the current board (possible_board) to fill with a queen
    row = len(possible_board)

    # If row equals the size of the board, there is a queen in each row of the
    # current board (possible_board)
    if row == n:
        # We convert the variable possible_board that looks like this: [1, 3, 0, 2] to
        # this: ['. Q . . ', '. . . Q ', 'Q . . . ', '. . Q . ']
        boards.append([". " * i + "Q " + ". " * (n - 1 - i) for i in possible_board])
        return

    # We iterate over each column in the row to find all possible results
    for col in range(n):
        # First we check that the current board (possible_board) does not already
        # contain this column value; if it does, there is a vertical collision.
        # Then we apply the two diagonal formulas:
        #
        # 45º: y - x = b or 45: row - col = b
        # 135º: y + x = b or row + col = b.
        #
        # And we verify that the results of these two formulas do not already exist
        # in their respective collision lists (diagonal_right_collisions,
        # diagonal_left_collisions).
        #
        # If any of these checks is True there is a collision, so we continue to the
        # next value in the for loop.
        if (
            col in possible_board
            or row - col in diagonal_right_collisions
            or row + col in diagonal_left_collisions
        ):
            continue

        # If all checks pass we call the dfs function again with updated inputs
        depth_first_search(
            [*possible_board, col],
            [*diagonal_right_collisions, row - col],
            [*diagonal_left_collisions, row + col],
            boards,
            n,
        )


def n_queens_solution(n: int) -> None:
    boards: list[list[str]] = []
    depth_first_search([], [], [], boards, n)

    # Print all the boards
    for board in boards:
        for column in board:
            print(column)
        print("")

    print(len(boards), "solutions were found.")


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    n_queens_solution(4)
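# Illustrative cross-check: each queen sits in a distinct row and column, so
# every solution is a permutation of column indices; brute-forcing permutations
# gives the same counts as the DFS (2 solutions for n=4, 92 for n=8).
from itertools import permutations

def count_n_queens(n: int) -> int:
    return sum(
        all(abs(perm[i] - perm[j]) != j - i for i in range(n) for j in range(i + 1, n))
        for perm in permutations(range(n))
    )

print(count_n_queens(4))  # 2
print(count_n_queens(8))  # 92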
import sys


def matrix_chain_order(array):
    n = len(array)
    matrix = [[0 for x in range(n)] for x in range(n)]
    sol = [[0 for x in range(n)] for x in range(n)]

    for chain_length in range(2, n):
        for a in range(1, n - chain_length + 1):
            b = a + chain_length - 1

            matrix[a][b] = sys.maxsize
            for c in range(a, b):
                cost = matrix[a][c] + matrix[c + 1][b] + array[a - 1] * array[c] * array[b]
                if cost < matrix[a][b]:
                    matrix[a][b] = cost
                    sol[a][b] = c
    return matrix, sol


def print_optimal_solution(optimal_solution, i, j):
    """Print the parenthesization that achieves the optimal cost."""
    if i == j:
        print("A" + str(i), end=" ")
    else:
        print("(", end=" ")
        print_optimal_solution(optimal_solution, i, optimal_solution[i][j])
        print_optimal_solution(optimal_solution, optimal_solution[i][j] + 1, j)
        print(")", end=" ")


def main():
    array = [30, 35, 15, 5, 10, 20, 25]
    n = len(array)
    # Size of matrix created from above array will be
    # 30*35 35*15 15*5 5*10 10*20 20*25
    matrix, optimal_solution = matrix_chain_order(array)

    print("No. of Operations required: " + str(matrix[1][n - 1]))
    print_optimal_solution(optimal_solution, 1, n - 1)


if __name__ == "__main__":
    main()
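# The DP above fills matrix[a][b] with the recurrence
#   matrix[a][b] = min over c in [a, b) of
#       matrix[a][c] + matrix[c + 1][b] + array[a - 1] * array[c] * array[b].
# A short memoized variant of the same recurrence, handy for cross-checking the
# table (for the dimensions in main() the optimal cost is 15125):
from functools import lru_cache

def matrix_chain_cost(dims: list) -> int:
    @lru_cache(maxsize=None)
    def best(i: int, j: int) -> int:
        # Cost of multiplying A_i..A_j, where A_k has shape dims[k-1] x dims[k].
        if i == j:
            return 0
        return min(
            best(i, c) + best(c + 1, j) + dims[i - 1] * dims[c] * dims[j]
            for c in range(i, j)
        )

    return best(1, len(dims) - 1)

print(matrix_chain_cost([30, 35, 15, 5, 10, 20, 25]))  # 15125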
import random import unittest import numpy as np import torch from transformers import CLIPTextConfig, CLIPTextModel, CLIPTokenizer from diffusers import ( AutoencoderKL, DDIMScheduler, UNetaDConditionModel, VideoToVideoSDPipeline, ) from diffusers.utils import floats_tensor, is_xformers_available, skip_mps from diffusers.utils.testing_utils import enable_full_determinism, slow, torch_device from ..pipeline_params import ( TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS, ) from ..test_pipelines_common import PipelineTesterMixin enable_full_determinism() @skip_mps class UpperCAmelCase_ ( lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : List[Any] =VideoToVideoSDPipeline UpperCamelCase_ : Dict =TEXT_GUIDED_IMAGE_VARIATION_PARAMS.union({'video'} ) - {'image', 'width', 'height'} UpperCamelCase_ : Tuple =TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS.union({'video'} ) - {'image'} UpperCamelCase_ : str =PipelineTesterMixin.required_optional_params - {'latents'} UpperCamelCase_ : List[str] =False # No `output_type`. UpperCamelCase_ : List[str] =frozenset( [ 'num_inference_steps', 'generator', 'latents', 'return_dict', 'callback', 'callback_steps', ] ) def UpperCAmelCase ( self ) -> List[Any]: torch.manual_seed(0 ) UpperCamelCase :Optional[int] = UNetaDConditionModel( block_out_channels=(32, 64, 64, 64) , layers_per_block=2 , sample_size=32 , in_channels=4 , out_channels=4 , down_block_types=('''CrossAttnDownBlock3D''', '''CrossAttnDownBlock3D''', '''CrossAttnDownBlock3D''', '''DownBlock3D''') , up_block_types=('''UpBlock3D''', '''CrossAttnUpBlock3D''', '''CrossAttnUpBlock3D''', '''CrossAttnUpBlock3D''') , cross_attention_dim=32 , attention_head_dim=4 , ) UpperCamelCase :str = DDIMScheduler( beta_start=0.0_0085 , beta_end=0.012 , beta_schedule='''scaled_linear''' , clip_sample=SCREAMING_SNAKE_CASE_ , set_alpha_to_one=SCREAMING_SNAKE_CASE_ , ) torch.manual_seed(0 ) UpperCamelCase :int = AutoencoderKL( block_out_channels=[32, 64] , in_channels=3 , out_channels=3 , down_block_types=['''DownEncoderBlock2D''', '''DownEncoderBlock2D'''] , up_block_types=['''UpDecoderBlock2D''', '''UpDecoderBlock2D'''] , latent_channels=4 , sample_size=128 , ) torch.manual_seed(0 ) UpperCamelCase :List[str] = CLIPTextConfig( bos_token_id=0 , eos_token_id=2 , hidden_size=32 , intermediate_size=37 , layer_norm_eps=1e-05 , num_attention_heads=4 , num_hidden_layers=5 , pad_token_id=1 , vocab_size=1000 , hidden_act='''gelu''' , projection_dim=512 , ) UpperCamelCase :Union[str, Any] = CLIPTextModel(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = CLIPTokenizer.from_pretrained('''hf-internal-testing/tiny-random-clip''' ) UpperCamelCase :int = { '''unet''': unet, '''scheduler''': scheduler, '''vae''': vae, '''text_encoder''': text_encoder, '''tokenizer''': tokenizer, } return components def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=0 ) -> Tuple: # 3 frames UpperCamelCase :Union[str, Any] = floats_tensor((1, 3, 3, 32, 32) , rng=random.Random(SCREAMING_SNAKE_CASE_ ) ).to(SCREAMING_SNAKE_CASE_ ) if str(SCREAMING_SNAKE_CASE_ ).startswith('''mps''' ): UpperCamelCase :Tuple = torch.manual_seed(SCREAMING_SNAKE_CASE_ ) else: UpperCamelCase :List[Any] = torch.Generator(device=SCREAMING_SNAKE_CASE_ ).manual_seed(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = { '''prompt''': '''A painting of a squirrel eating a burger''', '''video''': video, '''generator''': generator, '''num_inference_steps''': 2, '''guidance_scale''': 6.0, '''output_type''': '''pt''', } 
return inputs def UpperCAmelCase ( self ) -> Any: UpperCamelCase :Optional[int] = '''cpu''' # ensure determinism for the device-dependent torch.Generator UpperCamelCase :List[str] = self.get_dummy_components() UpperCamelCase :Any = VideoToVideoSDPipeline(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = sd_pipe.to(SCREAMING_SNAKE_CASE_ ) sd_pipe.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = self.get_dummy_inputs(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = '''np''' UpperCamelCase :Optional[int] = sd_pipe(**SCREAMING_SNAKE_CASE_ ).frames UpperCamelCase :Optional[Any] = frames[0][-3:, -3:, -1] assert frames[0].shape == (32, 32, 3) UpperCamelCase :List[str] = np.array([106, 117, 113, 174, 137, 112, 148, 151, 131] ) assert np.abs(image_slice.flatten() - expected_slice ).max() < 1e-2 @unittest.skipIf( torch_device != '''cuda''' or not is_xformers_available() , reason='''XFormers attention is only available with CUDA and `xformers` installed''' , ) def UpperCAmelCase ( self ) -> Union[str, Any]: self._test_xformers_attention_forwardGenerator_pass(test_mean_pixel_difference=SCREAMING_SNAKE_CASE_ , expected_max_diff=5e-3 ) @unittest.skip(reason='''Batching needs to be properly figured out first for this pipeline.''' ) def UpperCAmelCase ( self ) -> List[str]: pass @unittest.skip(reason='''Batching needs to be properly figured out first for this pipeline.''' ) def UpperCAmelCase ( self ) -> List[str]: pass @unittest.skip(reason='''`num_images_per_prompt` argument is not supported for this pipeline.''' ) def UpperCAmelCase ( self ) -> Any: pass def UpperCAmelCase ( self ) -> Tuple: return super().test_progress_bar() @slow @skip_mps class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Union[str, Any] = VideoToVideoSDPipeline.from_pretrained('''cerspense/zeroscope_v2_XL''' , torch_dtype=torch.floataa ) pipe.enable_model_cpu_offload() # 10 frames UpperCamelCase :Union[str, Any] = torch.Generator(device='''cpu''' ).manual_seed(0 ) UpperCamelCase :str = torch.randn((1, 10, 3, 1024, 576) , generator=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = video.to('''cuda''' ) UpperCamelCase :int = '''Spiderman is surfing''' UpperCamelCase :Optional[int] = pipe(SCREAMING_SNAKE_CASE_ , video=SCREAMING_SNAKE_CASE_ , generator=SCREAMING_SNAKE_CASE_ , num_inference_steps=3 , output_type='''pt''' ).frames UpperCamelCase :Tuple = np.array([-1.045_8984, -1.127_9297, -0.966_3086, -0.9150_3906, -0.7509_7656] ) assert np.abs(video_frames.cpu().numpy()[0, 0, 0, 0, -5:] - expected_array ).sum() < 1e-2
import argparse import json import os from pathlib import Path import requests import torch from transformers import JukeboxConfig, JukeboxModel from transformers.utils import logging logging.set_verbosity_info() __snake_case = logging.get_logger(__name__) __snake_case = """https://openaipublic.azureedge.net/jukebox/models/""" __snake_case = { """jukebox-1b-lyrics""": [ """5b/vqvae.pth.tar""", """5b/prior_level_0.pth.tar""", """5b/prior_level_1.pth.tar""", """1b_lyrics/prior_level_2.pth.tar""", ], """jukebox-5b-lyrics""": [ """5b/vqvae.pth.tar""", """5b/prior_level_0.pth.tar""", """5b/prior_level_1.pth.tar""", """5b_lyrics/prior_level_2.pth.tar""", ], } def _A ( SCREAMING_SNAKE_CASE__ : List[Any] ): if key.endswith('''.model.1.bias''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :int = key.replace('''.model.1.bias''' , '''.conv1d_1.bias''' ) elif key.endswith('''.model.1.weight''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Union[str, Any] = key.replace('''.model.1.weight''' , '''.conv1d_1.weight''' ) elif key.endswith('''.model.3.bias''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Optional[Any] = key.replace('''.model.3.bias''' , '''.conv1d_2.bias''' ) elif key.endswith('''.model.3.weight''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Optional[int] = key.replace('''.model.3.weight''' , '''.conv1d_2.weight''' ) if "conditioner_blocks.0." in key: UpperCamelCase :Any = key.replace('''conditioner_blocks.0''' , '''conditioner_blocks''' ) if "prime_prior" in key: UpperCamelCase :int = key.replace('''prime_prior''' , '''encoder''' ) if ".emb." in key and "total" not in key and "absolute" not in key and "relative" not in key: UpperCamelCase :Any = key.replace('''.emb.''' , '''.''' ) if key.endswith('''k''' ): # replace vqvae.X.k with vqvae.X.codebook return key.replace('''.k''' , '''.codebook''' ) if "y_emb." in key: return key.replace('''y_emb.''' , '''metadata_embedding.''' ) if "x_emb.emb." 
in key: UpperCamelCase :str = key.replace('''0.x_emb.emb''' , '''embed_tokens''' ) if "prime_state_ln" in key: return key.replace('''prime_state_ln''' , '''encoder.final_layer_norm''' ) if ".ln" in key: return key.replace('''.ln''' , '''.layer_norm''' ) if "_ln" in key: return key.replace('''_ln''' , '''_layer_norm''' ) if "prime_state_proj" in key: return key.replace('''prime_state_proj''' , '''encoder.proj_in''' ) if "prime_x_out" in key: return key.replace('''prime_x_out''' , '''encoder.lm_head''' ) if "prior.x_out" in key: return key.replace('''x_out''' , '''fc_proj_out''' ) if "x_emb" in key: return key.replace('''x_emb''' , '''embed_tokens''' ) return key def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : str ): UpperCamelCase :Optional[int] = {} import re UpperCamelCase :int = re.compile(R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :str = re.compile( R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :int = re.compile(R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Tuple = re.compile(R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :int = re.compile( R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Optional[int] = re.compile(R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Optional[Any] = re.compile(R'''conditioner_blocks.(\d*).cond.model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :int = re.compile( R'''conditioner_blocks.(\d*).cond.model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Tuple = re.compile(R'''conditioner_blocks.(\d*).cond.model.(\d*).(bias|weight)''' ) for original_key, value in state_dict.items(): # rename vqvae.encoder keys if re_encoder_block_conv_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = re_encoder_block_conv_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = regex_match.groups() UpperCamelCase :List[str] = int(groups[2] ) * 2 + int(groups[3] ) UpperCamelCase :List[Any] = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.downsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :int = re_encoder_block_conv_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_encoder_block_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_encoder_block_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[Any] = regex_match.groups() UpperCamelCase :Any = int(groups[2] ) * 2 + int(groups[3] ) UpperCamelCase :Any = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :str = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.downsample_block.{block_index}.''' UpperCamelCase :List[str] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Union[str, Any] = prefix + resnet_block UpperCamelCase :str = re_encoder_block_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_encoder_block_proj_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[int] = re_encoder_block_proj_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :int = regex_match.groups() UpperCamelCase :int = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.proj_out.{groups[-1]}''' UpperCamelCase :str = re_encoder_block_proj_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # rename vqvae.decoder 
keys elif re_decoder_block_conv_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_decoder_block_conv_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = regex_match.groups() UpperCamelCase :str = int(groups[2] ) * 2 + int(groups[3] ) - 2 UpperCamelCase :List[Any] = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.upsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :Union[str, Any] = re_decoder_block_conv_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_decoder_block_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_decoder_block_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = regex_match.groups() UpperCamelCase :List[str] = int(groups[2] ) * 2 + int(groups[3] ) - 2 UpperCamelCase :Optional[int] = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :Any = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.upsample_block.{block_index}.''' UpperCamelCase :Optional[int] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Any = prefix + resnet_block UpperCamelCase :Optional[int] = re_decoder_block_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_decoder_block_proj_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[int] = re_decoder_block_proj_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[Any] = regex_match.groups() UpperCamelCase :List[Any] = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.proj_in.{groups[-1]}''' UpperCamelCase :Any = re_decoder_block_proj_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # rename prior cond.model to upsampler.upsample_block and resnet elif re_prior_cond_conv_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_prior_cond_conv_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = regex_match.groups() UpperCamelCase :str = int(groups[1] ) * 2 + int(groups[2] ) - 2 UpperCamelCase :Tuple = F'''conditioner_blocks.upsampler.upsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :int = re_prior_cond_conv_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_prior_cond_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = re_prior_cond_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = regex_match.groups() UpperCamelCase :Optional[Any] = int(groups[1] ) * 2 + int(groups[2] ) - 2 UpperCamelCase :int = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :Tuple = F'''conditioner_blocks.upsampler.upsample_block.{block_index}.''' UpperCamelCase :List[Any] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Any = prefix + resnet_block UpperCamelCase :Dict = re_prior_cond_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_prior_cond_proj_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :List[str] = re_prior_cond_proj_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = regex_match.groups() UpperCamelCase :Dict = F'''conditioner_blocks.upsampler.proj_in.{groups[-1]}''' UpperCamelCase :Any = re_prior_cond_proj_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # keep original key else: UpperCamelCase :List[str] = original_key UpperCamelCase :Any = replace_key(SCREAMING_SNAKE_CASE__ ) if F'''{key_prefix}.{key}''' not in model_state_dict or key is None: print(F'''failed converting {original_key} to {key}, does not match''' ) # handle missmatched shape elif value.shape != model_state_dict[F'''{key_prefix}.{key}'''].shape: UpperCamelCase :Union[str, Any] = 
model_state_dict[F'''{key_prefix}.{key}'''] print(F'''{original_key}-> {key} : \nshape {val.shape} and { value.shape}, do not match''' ) UpperCamelCase :List[Any] = original_key UpperCamelCase :Any = original_key UpperCamelCase :Optional[int] = value return new_dict @torch.no_grad() def _A ( SCREAMING_SNAKE_CASE__ : List[str]=None , SCREAMING_SNAKE_CASE__ : Dict=None ): for file in MODEL_MAPPING[model_name]: if not os.path.isfile(F'''{pytorch_dump_folder_path}/{file.split("/" )[-1]}''' ): UpperCamelCase :Dict = requests.get(F'''{PREFIX}{file}''' , allow_redirects=SCREAMING_SNAKE_CASE__ ) os.makedirs(F'''{pytorch_dump_folder_path}/''' , exist_ok=SCREAMING_SNAKE_CASE__ ) open(F'''{pytorch_dump_folder_path}/{file.split("/" )[-1]}''' , '''wb''' ).write(r.content ) UpperCamelCase :Optional[int] = MODEL_MAPPING[model_name.split('''/''' )[-1]] UpperCamelCase :Any = JukeboxConfig.from_pretrained(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = JukeboxModel(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Dict = [] UpperCamelCase :List[Any] = {} for i, dict_name in enumerate(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = torch.load(F'''{pytorch_dump_folder_path}/{dict_name.split("/" )[-1]}''' )['''model'''] UpperCamelCase :Tuple = {} for k in old_dic.keys(): if k.endswith('''.b''' ): UpperCamelCase :Optional[int] = old_dic[k] elif k.endswith('''.w''' ): UpperCamelCase :Optional[Any] = old_dic[k] elif "level_2" not in dict_name and "cond.model." in k: UpperCamelCase :Optional[Any] = old_dic[k] else: UpperCamelCase :Any = old_dic[k] UpperCamelCase :Any = '''vqvae''' if i == 0 else F'''priors.{3 - i}''' UpperCamelCase :Dict = fix_jukebox_keys(SCREAMING_SNAKE_CASE__ , model.state_dict() , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) weight_dict.append(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = weight_dict.pop(0 ) model.vqvae.load_state_dict(SCREAMING_SNAKE_CASE__ ) for i in range(len(SCREAMING_SNAKE_CASE__ ) ): model.priors[i].load_state_dict(weight_dict[2 - i] ) Path(SCREAMING_SNAKE_CASE__ ).mkdir(exist_ok=SCREAMING_SNAKE_CASE__ ) with open(F'''{pytorch_dump_folder_path}/mapping.json''' , '''w''' ) as txtfile: json.dump(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) print(F'''Saving model {model_name} to {pytorch_dump_folder_path}''' ) model.save_pretrained(SCREAMING_SNAKE_CASE__ ) return weight_dict if __name__ == "__main__": __snake_case = argparse.ArgumentParser() # Required parameters parser.add_argument( """--model_name""", default="""jukebox-5b-lyrics""", type=str, help="""Name of the model you'd like to convert.""", ) parser.add_argument( """--pytorch_dump_folder_path""", default="""jukebox-5b-lyrics-converted""", type=str, help="""Path to the output PyTorch model directory.""", ) __snake_case = parser.parse_args() convert_openai_checkpoint(args.model_name, args.pytorch_dump_folder_path)
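# Hedged invocation sketch, assuming this file is saved as convert_jukebox.py;
# the argparse defaults above already point at jukebox-5b-lyrics. Checkpoints
# are downloaded from PREFIX on first run, so this needs network access and
# substantial disk space:
#
#   python convert_jukebox.py \
#       --model_name jukebox-5b-lyrics \
#       --pytorch_dump_folder_path jukebox-5b-lyrics-converted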
259
1
import argparse
import json
from pathlib import Path

import requests
import torch
from huggingface_hub import hf_hub_download
from PIL import Image

from transformers import (
    SwiftFormerConfig,
    SwiftFormerForImageClassification,
    ViTImageProcessor,
)
from transformers.utils import logging


logging.set_verbosity_info()
__snake_case = logging.get_logger(__name__)

__snake_case = torch.device("""cpu""")


def _A ( ):
    UpperCamelCase :Tuple = '''http://images.cocodataset.org/val2017/000000039769.jpg'''
    UpperCamelCase :int = Image.open(requests.get(SCREAMING_SNAKE_CASE__ , stream=SCREAMING_SNAKE_CASE__ ).raw )
    return im


def _A ( SCREAMING_SNAKE_CASE__ : int ):
    if swiftformer_name == "swiftformer_xs":
        return torch.tensor([-2.1_7_0_3e0_0, 2.1_1_0_7e0_0, -2.0_8_1_1e0_0, 8.8_6_8_5e-0_1, 2.4_3_6_0e-0_1] )
    elif swiftformer_name == "swiftformer_s":
        return torch.tensor([3.9_6_3_6e-0_1, 2.3_4_7_8e-0_1, -1.6_9_6_3e0_0, -1.7_3_8_1e0_0, -8.6_3_3_7e-0_1] )
    elif swiftformer_name == "swiftformer_l1":
        return torch.tensor([-4.2_7_6_8e-0_1, -4.7_4_2_9e-0_1, -1.0_8_9_7e0_0, -1.0_2_4_8e0_0, 3.5_5_2_3e-0_2] )
    elif swiftformer_name == "swiftformer_l3":
        return torch.tensor([-2.5_3_3_0e-0_1, 2.4_2_1_1e-0_1, -6.0_1_8_5e-0_1, -8.2_7_8_9e-0_1, -6.0_4_4_6e-0_2] )


def _A ( SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Tuple ):
    UpperCamelCase :Optional[int] = dct.pop(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :Tuple = val


def _A ( SCREAMING_SNAKE_CASE__ : List[Any] ):
    UpperCamelCase :Dict = []
    for k in state_dict.keys():
        UpperCamelCase :Dict = k
        if ".pwconv" in k:
            UpperCamelCase :int = k_new.replace('''.pwconv''' , '''.point_wise_conv''' )
        if ".dwconv" in k:
            UpperCamelCase :int = k_new.replace('''.dwconv''' , '''.depth_wise_conv''' )
        if ".Proj." in k:
            UpperCamelCase :Union[str, Any] = k_new.replace('''.Proj.''' , '''.proj.''' )
        if "patch_embed" in k_new:
            UpperCamelCase :Tuple = k_new.replace('''patch_embed''' , '''swiftformer.patch_embed.patch_embedding''' )
        if "network" in k_new:
            UpperCamelCase :Union[str, Any] = k_new.split('''.''' )
            if ls[2].isdigit():
                UpperCamelCase :int = '''swiftformer.encoder.network.''' + ls[1] + '''.blocks.''' + ls[2] + '''.''' + '''.'''.join(ls[3:] )
            else:
                UpperCamelCase :int = k_new.replace('''network''' , '''swiftformer.encoder.network''' )
        rename_keys.append((k, k_new) )
    return rename_keys


@torch.no_grad()
def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Any ):
    UpperCamelCase :Union[str, Any] = SwiftFormerConfig()

    # dataset (ImageNet-21k only or also fine-tuned on ImageNet 2012), patch_size and image_size
    UpperCamelCase :str = 1000
    UpperCamelCase :Union[str, Any] = '''huggingface/label-files'''
    UpperCamelCase :Dict = '''imagenet-1k-id2label.json'''
    UpperCamelCase :Any = json.load(open(hf_hub_download(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) , '''r''' ) )
    UpperCamelCase :List[str] = {int(SCREAMING_SNAKE_CASE__ ): v for k, v in idalabel.items()}
    UpperCamelCase :Any = idalabel
    UpperCamelCase :str = {v: k for k, v in idalabel.items()}

    # size of the architecture
    if swiftformer_name == "swiftformer_xs":
        UpperCamelCase :Dict = [3, 3, 6, 4]
        UpperCamelCase :Optional[int] = [48, 56, 112, 220]
    elif swiftformer_name == "swiftformer_s":
        UpperCamelCase :Dict = [3, 3, 9, 6]
        UpperCamelCase :Union[str, Any] = [48, 64, 168, 224]
    elif swiftformer_name == "swiftformer_l1":
        UpperCamelCase :Union[str, Any] = [4, 3, 10, 5]
        UpperCamelCase :Any = [48, 96, 192, 384]
    elif swiftformer_name == "swiftformer_l3":
        UpperCamelCase :int = [4, 4, 12, 6]
        UpperCamelCase :List[Any] = [64, 128, 320, 512]

    # load state_dict of original model, remove and rename some keys
    if original_ckpt:
        if original_ckpt.startswith('''https''' ):
            UpperCamelCase :Tuple = torch.hub.load_state_dict_from_url(SCREAMING_SNAKE_CASE__ , map_location='''cpu''' , check_hash=SCREAMING_SNAKE_CASE__ )
        else:
            UpperCamelCase :str = torch.load(SCREAMING_SNAKE_CASE__ , map_location='''cpu''' )
    UpperCamelCase :Any = checkpoint

    UpperCamelCase :int = create_rename_keys(SCREAMING_SNAKE_CASE__ )
    for rename_key_src, rename_key_dest in rename_keys:
        rename_key(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ )

    # load HuggingFace model
    UpperCamelCase :Any = SwiftFormerForImageClassification(SCREAMING_SNAKE_CASE__ ).eval()
    hf_model.load_state_dict(SCREAMING_SNAKE_CASE__ )

    # prepare test inputs
    UpperCamelCase :int = prepare_img()
    UpperCamelCase :Any = ViTImageProcessor.from_pretrained('''preprocessor_config''' )
    UpperCamelCase :str = processor(images=SCREAMING_SNAKE_CASE__ , return_tensors='''pt''' )

    # compare outputs from both models
    UpperCamelCase :Dict = get_expected_output(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :str = hf_model(inputs['''pixel_values'''] ).logits

    assert hf_logits.shape == torch.Size([1, 1000] )
    assert torch.allclose(hf_logits[0, 0:5] , SCREAMING_SNAKE_CASE__ , atol=1e-3 )

    Path(SCREAMING_SNAKE_CASE__ ).mkdir(exist_ok=SCREAMING_SNAKE_CASE__ )
    print(F'''Saving model {swiftformer_name} to {pytorch_dump_folder_path}''' )
    hf_model.save_pretrained(SCREAMING_SNAKE_CASE__ )


if __name__ == "__main__":
    __snake_case = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument(
        """--swiftformer_name""",
        default="""swiftformer_xs""",
        choices=["""swiftformer_xs""", """swiftformer_s""", """swiftformer_l1""", """swiftformer_l3"""],
        type=str,
        help="""Name of the SwiftFormer model you'd like to convert.""",
    )
    parser.add_argument(
        """--pytorch_dump_folder_path""",
        default="""./converted_outputs/""",
        type=str,
        help="""Path to the output PyTorch model directory.""",
    )
    parser.add_argument("""--original_ckpt""", default=None, type=str, help="""Path to the original model checkpoint.""")

    __snake_case = parser.parse_args()
    convert_swiftformer_checkpoint(args.swiftformer_name, args.pytorch_dump_folder_path, args.original_ckpt)
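# A minimal CLI sketch for the converter above (assumptions: the script file
# name convert_swiftformer_original_to_hf.py and the local checkpoint path are
# hypothetical; the flag names and defaults come from the argparse block):
#
#   python convert_swiftformer_original_to_hf.py --swiftformer_name swiftformer_xs \
#       --pytorch_dump_folder_path ./converted_outputs/ --original_ckpt ./swiftformer_xs.pth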
259
import json
import os
import shutil
import tempfile
import unittest

import numpy as np
import pytest

from transformers import MgpstrTokenizer
from transformers.models.mgp_str.tokenization_mgp_str import VOCAB_FILES_NAMES
from transformers.testing_utils import require_torch, require_vision
from transformers.utils import IMAGE_PROCESSOR_NAME, is_torch_available, is_vision_available


if is_torch_available():
    import torch

if is_vision_available():
    from PIL import Image

    from transformers import MgpstrProcessor, ViTImageProcessor


@require_torch
@require_vision
class UpperCAmelCase_ ( unittest.TestCase ):
    """simple docstring"""

    UpperCamelCase_ : Union[str, Any] =ViTImageProcessor if is_vision_available() else None

    @property
    def UpperCAmelCase ( self ) -> Dict:
        return self.image_processor_tester.prepare_image_processor_dict()

    def UpperCAmelCase ( self ) -> int:
        UpperCamelCase :Union[str, Any] = (3, 32, 128)
        UpperCamelCase :Any = tempfile.mkdtemp()

        # fmt: off
        UpperCamelCase :int = ['''[GO]''', '''[s]''', '''0''', '''1''', '''2''', '''3''', '''4''', '''5''', '''6''', '''7''', '''8''', '''9''', '''a''', '''b''', '''c''', '''d''', '''e''', '''f''', '''g''', '''h''', '''i''', '''j''', '''k''', '''l''', '''m''', '''n''', '''o''', '''p''', '''q''', '''r''', '''s''', '''t''', '''u''', '''v''', '''w''', '''x''', '''y''', '''z''']
        # fmt: on
        UpperCamelCase :Optional[int] = dict(zip(SCREAMING_SNAKE_CASE_ , range(len(SCREAMING_SNAKE_CASE_ ) ) ) )

        UpperCamelCase :Optional[int] = os.path.join(self.tmpdirname , VOCAB_FILES_NAMES['''vocab_file'''] )
        with open(self.vocab_file , '''w''' , encoding='''utf-8''' ) as fp:
            fp.write(json.dumps(SCREAMING_SNAKE_CASE_ ) + '''\n''' )

        UpperCamelCase :Tuple = {
            '''do_normalize''': False,
            '''do_resize''': True,
            '''image_processor_type''': '''ViTImageProcessor''',
            '''resample''': 3,
            '''size''': {'''height''': 32, '''width''': 128},
        }
        UpperCamelCase :str = os.path.join(self.tmpdirname , SCREAMING_SNAKE_CASE_ )
        with open(self.image_processor_file , '''w''' , encoding='''utf-8''' ) as fp:
            json.dump(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> int:
        return MgpstrTokenizer.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]:
        return ViTImageProcessor.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> str:
        shutil.rmtree(self.tmpdirname )

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase :Dict = np.random.randint(255 , size=(3, 30, 400) , dtype=np.uinta )

        UpperCamelCase :List[Any] = Image.fromarray(np.moveaxis(SCREAMING_SNAKE_CASE_ , 0 , -1 ) )

        return image_input

    def UpperCAmelCase ( self ) -> str:
        UpperCamelCase :str = self.get_tokenizer()
        UpperCamelCase :Union[str, Any] = self.get_image_processor()

        UpperCamelCase :List[Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ )
        processor.save_pretrained(self.tmpdirname )
        UpperCamelCase :Dict = MgpstrProcessor.from_pretrained(self.tmpdirname , use_fast=SCREAMING_SNAKE_CASE_ )

        self.assertEqual(processor.char_tokenizer.get_vocab() , tokenizer.get_vocab() )
        self.assertIsInstance(processor.char_tokenizer , SCREAMING_SNAKE_CASE_ )

        self.assertEqual(processor.image_processor.to_json_string() , image_processor.to_json_string() )
        self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> int:
        UpperCamelCase :Optional[int] = self.get_tokenizer()
        UpperCamelCase :Dict = self.get_image_processor()

        UpperCamelCase :List[Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ )
        processor.save_pretrained(self.tmpdirname )

        UpperCamelCase :Optional[int] = self.get_tokenizer(bos_token='''(BOS)''' , eos_token='''(EOS)''' )
        UpperCamelCase :Optional[Any] = self.get_image_processor(do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 )

        UpperCamelCase :int = MgpstrProcessor.from_pretrained(
            self.tmpdirname , bos_token='''(BOS)''' , eos_token='''(EOS)''' , do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 )

        self.assertEqual(processor.char_tokenizer.get_vocab() , tokenizer_add_kwargs.get_vocab() )
        self.assertIsInstance(processor.char_tokenizer , SCREAMING_SNAKE_CASE_ )

        self.assertEqual(processor.image_processor.to_json_string() , image_processor_add_kwargs.to_json_string() )
        self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        UpperCamelCase :Tuple = self.get_image_processor()
        UpperCamelCase :List[str] = self.get_tokenizer()

        UpperCamelCase :str = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :List[str] = self.prepare_image_inputs()

        UpperCamelCase :List[str] = image_processor(SCREAMING_SNAKE_CASE_ , return_tensors='''np''' )
        UpperCamelCase :Optional[Any] = processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''np''' )

        for key in input_image_proc.keys():
            self.assertAlmostEqual(input_image_proc[key].sum() , input_processor[key].sum() , delta=1e-2 )

    def UpperCAmelCase ( self ) -> Any:
        UpperCamelCase :Optional[Any] = self.get_image_processor()
        UpperCamelCase :Union[str, Any] = self.get_tokenizer()

        UpperCamelCase :int = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :int = '''test'''

        UpperCamelCase :Optional[int] = processor(text=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :List[Any] = tokenizer(SCREAMING_SNAKE_CASE_ )

        for key in encoded_tok.keys():
            self.assertListEqual(encoded_tok[key] , encoded_processor[key] )

    def UpperCAmelCase ( self ) -> Optional[Any]:
        UpperCamelCase :List[str] = self.get_image_processor()
        UpperCamelCase :Tuple = self.get_tokenizer()

        UpperCamelCase :Union[str, Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :str = '''test'''
        UpperCamelCase :str = self.prepare_image_inputs()

        UpperCamelCase :Dict = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ )

        self.assertListEqual(list(inputs.keys() ) , ['''pixel_values''', '''labels'''] )

        # test if it raises when no input is passed
        with pytest.raises(SCREAMING_SNAKE_CASE_ ):
            processor()

    def UpperCAmelCase ( self ) -> List[Any]:
        UpperCamelCase :Optional[Any] = self.get_image_processor()
        UpperCamelCase :Any = self.get_tokenizer()

        UpperCamelCase :Union[str, Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Optional[Any] = [[1, 4, 5, 8, 1, 0, 8], [3, 4, 3, 1, 1, 8, 9], [3, 4, 3, 1, 1, 8, 9]]

        UpperCamelCase :Union[str, Any] = processor.char_decode(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Optional[int] = tokenizer.batch_decode(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Dict = [seq.replace(''' ''' , '''''' ) for seq in decoded_tok]

        self.assertListEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase :List[Any] = self.get_image_processor()
        UpperCamelCase :Optional[Any] = self.get_tokenizer()

        UpperCamelCase :Any = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :List[Any] = None
        UpperCamelCase :List[Any] = self.prepare_image_inputs()

        UpperCamelCase :Union[str, Any] = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ )

        self.assertListEqual(list(inputs.keys() ) , processor.model_input_names )

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase :str = self.get_image_processor()
        UpperCamelCase :Tuple = self.get_tokenizer()

        UpperCamelCase :Optional[int] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :str = torch.randn(1 , 27 , 38 )
        UpperCamelCase :Union[str, Any] = torch.randn(1 , 27 , 5_0257 )
        UpperCamelCase :Optional[Any] = torch.randn(1 , 27 , 3_0522 )

        UpperCamelCase :Optional[Any] = processor.batch_decode([char_input, bpe_input, wp_input] )

        self.assertListEqual(list(results.keys() ) , ['''generated_text''', '''scores''', '''char_preds''', '''bpe_preds''', '''wp_preds'''] )
259
1
from typing import TYPE_CHECKING

from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available


__snake_case = {
    """configuration_tapas""": ["""TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP""", """TapasConfig"""],
    """tokenization_tapas""": ["""TapasTokenizer"""],
}

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    __snake_case = [
        """TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST""",
        """TapasForMaskedLM""",
        """TapasForQuestionAnswering""",
        """TapasForSequenceClassification""",
        """TapasModel""",
        """TapasPreTrainedModel""",
        """load_tf_weights_in_tapas""",
    ]

try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    __snake_case = [
        """TF_TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST""",
        """TFTapasForMaskedLM""",
        """TFTapasForQuestionAnswering""",
        """TFTapasForSequenceClassification""",
        """TFTapasModel""",
        """TFTapasPreTrainedModel""",
    ]


if TYPE_CHECKING:
    from .configuration_tapas import TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP, TapasConfig
    from .tokenization_tapas import TapasTokenizer

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tapas import (
            TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST,
            TapasForMaskedLM,
            TapasForQuestionAnswering,
            TapasForSequenceClassification,
            TapasModel,
            TapasPreTrainedModel,
            load_tf_weights_in_tapas,
        )

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_tapas import (
            TF_TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFTapasForMaskedLM,
            TFTapasForQuestionAnswering,
            TFTapasForSequenceClassification,
            TFTapasModel,
            TFTapasPreTrainedModel,
        )

else:
    import sys

    __snake_case = _LazyModule(__name__, globals()["""__file__"""], _import_structure, module_spec=__spec__)
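# A minimal usage sketch of the lazy-import pattern above (assumption: run
# against an installed transformers with torch available): the _LazyModule
# indirection defers the heavy modeling imports until a symbol is actually
# accessed, so importing only the config stays cheap.
#
#   from transformers import TapasConfig   # cheap: configuration module only
#   from transformers import TapasModel    # first access triggers the torch-backed import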
259
import math


def _A ( SCREAMING_SNAKE_CASE__ : int = 100 ):
    UpperCamelCase :Dict = sum(i * i for i in range(1 , n + 1 ) )
    UpperCamelCase :List[str] = int(math.pow(sum(range(1 , n + 1 ) ) , 2 ) )
    return square_of_sum - sum_of_squares


if __name__ == "__main__":
    print(f'''{solution() = }''')
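# An equivalent O(1) sketch of the same Project Euler computation, using the
# closed-form identities sum(i) = n(n+1)/2 and sum(i^2) = n(n+1)(2n+1)/6
# (assumption: the helper name solution_closed_form is hypothetical and not
# part of the record above).
def solution_closed_form(n: int = 100) -> int:
    sum_of_squares = n * (n + 1) * (2 * n + 1) // 6
    square_of_sum = (n * (n + 1) // 2) ** 2
    return square_of_sum - sum_of_squares


# For n = 10 both formulations give 55**2 - 385 = 2640.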
259
1
import json
import sys
import tempfile
import unittest
from pathlib import Path

import transformers
from transformers import (
    CONFIG_MAPPING,
    IMAGE_PROCESSOR_MAPPING,
    AutoConfig,
    AutoImageProcessor,
    CLIPConfig,
    CLIPImageProcessor,
)
from transformers.testing_utils import DUMMY_UNKNOWN_IDENTIFIER


sys.path.append(str(Path(__file__).parent.parent.parent.parent / """utils"""))

from test_module.custom_configuration import CustomConfig  # noqa E402
from test_module.custom_image_processing import CustomImageProcessor  # noqa E402


class UpperCAmelCase_ ( unittest.TestCase ):
    """simple docstring"""

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase :Optional[Any] = 0

    def UpperCAmelCase ( self ) -> int:
        UpperCamelCase :List[Any] = AutoImageProcessor.from_pretrained('''openai/clip-vit-base-patch32''' )
        self.assertIsInstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> int:
        with tempfile.TemporaryDirectory() as tmpdirname:
            UpperCamelCase :Tuple = Path(SCREAMING_SNAKE_CASE_ ) / '''preprocessor_config.json'''
            UpperCamelCase :List[Any] = Path(SCREAMING_SNAKE_CASE_ ) / '''config.json'''
            json.dump(
                {'''image_processor_type''': '''CLIPImageProcessor''', '''processor_class''': '''CLIPProcessor'''} ,
                open(SCREAMING_SNAKE_CASE_ , '''w''' ) ,
            )
            json.dump({'''model_type''': '''clip'''} , open(SCREAMING_SNAKE_CASE_ , '''w''' ) )

            UpperCamelCase :Optional[int] = AutoImageProcessor.from_pretrained(SCREAMING_SNAKE_CASE_ )
            self.assertIsInstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Optional[int]:
        # Ensure we can load the image processor from the feature extractor config
        with tempfile.TemporaryDirectory() as tmpdirname:
            UpperCamelCase :Optional[Any] = Path(SCREAMING_SNAKE_CASE_ ) / '''preprocessor_config.json'''
            UpperCamelCase :str = Path(SCREAMING_SNAKE_CASE_ ) / '''config.json'''
            json.dump(
                {'''feature_extractor_type''': '''CLIPFeatureExtractor''', '''processor_class''': '''CLIPProcessor'''} ,
                open(SCREAMING_SNAKE_CASE_ , '''w''' ) ,
            )
            json.dump({'''model_type''': '''clip'''} , open(SCREAMING_SNAKE_CASE_ , '''w''' ) )

            UpperCamelCase :str = AutoImageProcessor.from_pretrained(SCREAMING_SNAKE_CASE_ )
            self.assertIsInstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Any:
        with tempfile.TemporaryDirectory() as tmpdirname:
            UpperCamelCase :Optional[Any] = CLIPConfig()

            # Create a dummy config file with image_processor_type
            UpperCamelCase :Optional[int] = Path(SCREAMING_SNAKE_CASE_ ) / '''preprocessor_config.json'''
            UpperCamelCase :Optional[Any] = Path(SCREAMING_SNAKE_CASE_ ) / '''config.json'''
            json.dump(
                {'''image_processor_type''': '''CLIPImageProcessor''', '''processor_class''': '''CLIPProcessor'''} ,
                open(SCREAMING_SNAKE_CASE_ , '''w''' ) ,
            )
            json.dump({'''model_type''': '''clip'''} , open(SCREAMING_SNAKE_CASE_ , '''w''' ) )

            # remove image_processor_type to make sure config.json alone is enough to load image processor locally
            UpperCamelCase :List[Any] = AutoImageProcessor.from_pretrained(SCREAMING_SNAKE_CASE_ ).to_dict()

            config_dict.pop('''image_processor_type''' )
            UpperCamelCase :Dict = CLIPImageProcessor(**SCREAMING_SNAKE_CASE_ )

            # save in new folder
            model_config.save_pretrained(SCREAMING_SNAKE_CASE_ )
            config.save_pretrained(SCREAMING_SNAKE_CASE_ )

            UpperCamelCase :Union[str, Any] = AutoImageProcessor.from_pretrained(SCREAMING_SNAKE_CASE_ )

            # make sure private variable is not incorrectly saved
            UpperCamelCase :Optional[int] = json.loads(config.to_json_string() )
            self.assertTrue('''_processor_class''' not in dict_as_saved )

            self.assertIsInstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Any:
        with tempfile.TemporaryDirectory() as tmpdirname:
            UpperCamelCase :Optional[Any] = Path(SCREAMING_SNAKE_CASE_ ) / '''preprocessor_config.json'''
            json.dump(
                {'''image_processor_type''': '''CLIPImageProcessor''', '''processor_class''': '''CLIPProcessor'''} ,
                open(SCREAMING_SNAKE_CASE_ , '''w''' ) ,
            )

            UpperCamelCase :str = AutoImageProcessor.from_pretrained(SCREAMING_SNAKE_CASE_ )
            self.assertIsInstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        with self.assertRaisesRegex(
            SCREAMING_SNAKE_CASE_ , '''clip-base is not a local folder and is not a valid model identifier''' ):
            UpperCamelCase :Dict = AutoImageProcessor.from_pretrained('''clip-base''' )

    def UpperCAmelCase ( self ) -> Optional[Any]:
        with self.assertRaisesRegex(
            SCREAMING_SNAKE_CASE_ , r'''aaaaaa is not a valid git identifier \(branch name, tag name or commit id\)''' ):
            UpperCamelCase :List[Any] = AutoImageProcessor.from_pretrained(SCREAMING_SNAKE_CASE_ , revision='''aaaaaa''' )

    def UpperCAmelCase ( self ) -> str:
        with self.assertRaisesRegex(
            SCREAMING_SNAKE_CASE_ ,
            '''hf-internal-testing/config-no-model does not appear to have a file named preprocessor_config.json.''' ,
        ):
            UpperCamelCase :Union[str, Any] = AutoImageProcessor.from_pretrained('''hf-internal-testing/config-no-model''' )

    def UpperCAmelCase ( self ) -> Dict:
        # If remote code is not set, we will time out when asking whether to load the model.
        with self.assertRaises(SCREAMING_SNAKE_CASE_ ):
            UpperCamelCase :Dict = AutoImageProcessor.from_pretrained('''hf-internal-testing/test_dynamic_image_processor''' )
        # If remote code is disabled, we can't load this config.
        with self.assertRaises(SCREAMING_SNAKE_CASE_ ):
            UpperCamelCase :Tuple = AutoImageProcessor.from_pretrained(
                '''hf-internal-testing/test_dynamic_image_processor''' , trust_remote_code=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Optional[Any] = AutoImageProcessor.from_pretrained(
            '''hf-internal-testing/test_dynamic_image_processor''' , trust_remote_code=SCREAMING_SNAKE_CASE_ )
        self.assertEqual(image_processor.__class__.__name__ , '''NewImageProcessor''' )

        # Test image processor can be reloaded.
        with tempfile.TemporaryDirectory() as tmp_dir:
            image_processor.save_pretrained(SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :Optional[Any] = AutoImageProcessor.from_pretrained(SCREAMING_SNAKE_CASE_ , trust_remote_code=SCREAMING_SNAKE_CASE_ )
        self.assertEqual(reloaded_image_processor.__class__.__name__ , '''NewImageProcessor''' )

    def UpperCAmelCase ( self ) -> int:
        try:
            AutoConfig.register('''custom''' , SCREAMING_SNAKE_CASE_ )
            AutoImageProcessor.register(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            # Trying to register something existing in the Transformers library will raise an error
            with self.assertRaises(SCREAMING_SNAKE_CASE_ ):
                AutoImageProcessor.register(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

            with tempfile.TemporaryDirectory() as tmpdirname:
                UpperCamelCase :List[str] = Path(SCREAMING_SNAKE_CASE_ ) / '''preprocessor_config.json'''
                UpperCamelCase :Optional[Any] = Path(SCREAMING_SNAKE_CASE_ ) / '''config.json'''
                json.dump(
                    {'''feature_extractor_type''': '''CLIPFeatureExtractor''', '''processor_class''': '''CLIPProcessor'''} ,
                    open(SCREAMING_SNAKE_CASE_ , '''w''' ) ,
                )
                json.dump({'''model_type''': '''clip'''} , open(SCREAMING_SNAKE_CASE_ , '''w''' ) )

                UpperCamelCase :str = CustomImageProcessor.from_pretrained(SCREAMING_SNAKE_CASE_ )

            # Now that the config is registered, it can be used as any other config with the auto-API
            with tempfile.TemporaryDirectory() as tmp_dir:
                image_processor.save_pretrained(SCREAMING_SNAKE_CASE_ )
                UpperCamelCase :Optional[Any] = AutoImageProcessor.from_pretrained(SCREAMING_SNAKE_CASE_ )
                self.assertIsInstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

        finally:
            if "custom" in CONFIG_MAPPING._extra_content:
                del CONFIG_MAPPING._extra_content["custom"]
            if CustomConfig in IMAGE_PROCESSOR_MAPPING._extra_content:
                del IMAGE_PROCESSOR_MAPPING._extra_content[CustomConfig]

    def UpperCAmelCase ( self ) -> Optional[Any]:
        class UpperCAmelCase_ ( lowercase ):
            """simple docstring"""

            UpperCamelCase_ : List[Any] =True

        try:
            AutoConfig.register('''custom''' , SCREAMING_SNAKE_CASE_ )
            AutoImageProcessor.register(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            # If remote code is not set, the default is to use local
            UpperCamelCase :int = AutoImageProcessor.from_pretrained('''hf-internal-testing/test_dynamic_image_processor''' )
            self.assertEqual(image_processor.__class__.__name__ , '''NewImageProcessor''' )
            self.assertTrue(image_processor.is_local )

            # If remote code is disabled, we load the local one.
            UpperCamelCase :Dict = AutoImageProcessor.from_pretrained(
                '''hf-internal-testing/test_dynamic_image_processor''' , trust_remote_code=SCREAMING_SNAKE_CASE_ )
            self.assertEqual(image_processor.__class__.__name__ , '''NewImageProcessor''' )
            self.assertTrue(image_processor.is_local )

            # If remote is enabled, we load from the Hub
            UpperCamelCase :List[Any] = AutoImageProcessor.from_pretrained(
                '''hf-internal-testing/test_dynamic_image_processor''' , trust_remote_code=SCREAMING_SNAKE_CASE_ )
            self.assertEqual(image_processor.__class__.__name__ , '''NewImageProcessor''' )
            self.assertTrue(not hasattr(SCREAMING_SNAKE_CASE_ , '''is_local''' ) )

        finally:
            if "custom" in CONFIG_MAPPING._extra_content:
                del CONFIG_MAPPING._extra_content["custom"]
            if CustomConfig in IMAGE_PROCESSOR_MAPPING._extra_content:
                del IMAGE_PROCESSOR_MAPPING._extra_content[CustomConfig]
259
def _A ( SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : str ):
    UpperCamelCase :Any = len(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :str = len(SCREAMING_SNAKE_CASE__ )
    UpperCamelCase :int = [[False for _ in range(m + 1 )] for _ in range(n + 1 )]
    UpperCamelCase :List[str] = True

    for i in range(SCREAMING_SNAKE_CASE__ ):
        for j in range(m + 1 ):
            if dp[i][j]:
                if j < m and a[i].upper() == b[j]:
                    UpperCamelCase :List[Any] = True
                if a[i].islower():
                    UpperCamelCase :List[Any] = True
    return dp[n][m]


if __name__ == "__main__":
    import doctest

    doctest.testmod()
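# A minimal usage sketch for the DP above (hypothetical inputs, left as
# comments because the obfuscated parameter names keep the module from running
# as-is): it checks whether string a can be turned into string b by
# upper-casing a subset of its letters and deleting the remaining lowercase
# ones, e.g. "daBcd" can become "ABC" (upper-case the first d... no, the a and
# the c, keep B, drop both d's), while "dBcd" cannot since it has no 'a'.
#
#   print(_A("daBcd", "ABC"))  # True
#   print(_A("dBcd", "ABC"))   # False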
259
1
import re


def _A ( SCREAMING_SNAKE_CASE__ : str ):
    return [char.split() for char in re.split(R'''[^ a-z A-Z 0-9 \s]''' , str_ )]


def _A ( SCREAMING_SNAKE_CASE__ : str ):
    UpperCamelCase :Any = split_input(str_ )
    return "".join(
        [''''''.join([char.capitalize() for char in sub_str] ) for sub_str in string_split] )


def _A ( SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : bool , SCREAMING_SNAKE_CASE__ : str ):
    try:
        UpperCamelCase :str = split_input(SCREAMING_SNAKE_CASE__ )
        if upper:
            UpperCamelCase :List[Any] = ''''''.join(
                [
                    separator.join([char.upper() for char in sub_str] )
                    for sub_str in string_split
                ] )
        else:
            UpperCamelCase :Dict = ''''''.join(
                [
                    separator.join([char.lower() for char in sub_str] )
                    for sub_str in string_split
                ] )
        return res_str
    except IndexError:
        return "not valid string"


def _A ( SCREAMING_SNAKE_CASE__ : str ):
    return to_simple_case(SCREAMING_SNAKE_CASE__ )


def _A ( SCREAMING_SNAKE_CASE__ : str ):
    try:
        UpperCamelCase :Dict = to_simple_case(SCREAMING_SNAKE_CASE__ )
        return res_str[0].lower() + res_str[1:]
    except IndexError:
        return "not valid string"


def _A ( SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : bool ):
    return to_complex_case(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''_''' )


def _A ( SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : bool ):
    return to_complex_case(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''-''' )


if __name__ == "__main__":
    __import__("""doctest""").testmod()
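# A minimal usage sketch for the helpers above (hypothetical input, left as
# comments): the splitter keeps alphanumerics and whitespace, so
# to_complex_case joins the resulting words with the requested separator,
# upper- or lower-casing each word.
#
#   print(to_complex_case("one two 31235three4four", True, "_"))
#   # -> ONE_TWO_31235THREE4FOUR
#   print(to_complex_case("one two 31235three4four", False, "-"))
#   # -> one-two-31235three4four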
259
from math import factorial


__snake_case = {str(digit): factorial(digit) for digit in range(10)}


def _A ( SCREAMING_SNAKE_CASE__ : int ):
    if not isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
        raise TypeError('''Parameter number must be int''' )

    if number < 0:
        raise ValueError('''Parameter number must be greater than or equal to 0''' )

    # Converts number in string to iterate on its digits and adds its factorial.
    return sum(DIGIT_FACTORIAL[digit] for digit in str(SCREAMING_SNAKE_CASE__ ) )


def _A ( SCREAMING_SNAKE_CASE__ : int = 60 , SCREAMING_SNAKE_CASE__ : int = 1000000 ):
    if not isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) or not isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
        raise TypeError('''Parameters chain_length and number_limit must be int''' )

    if chain_length <= 0 or number_limit <= 0:
        raise ValueError(
            '''Parameters chain_length and number_limit must be greater than 0''' )

    # the counter for the chains with the exact desired length
    UpperCamelCase :Any = 0
    # the cached sizes of the previous chains
    UpperCamelCase :dict[int, int] = {}

    for start_chain_element in range(1 , SCREAMING_SNAKE_CASE__ ):
        # The temporary set will contain the elements of the chain
        UpperCamelCase :List[Any] = set()
        UpperCamelCase :Any = 0

        # Stop computing the chain when you find a cached size, a repeating item or the
        # length is greater then the desired one.
        UpperCamelCase :Optional[Any] = start_chain_element
        while (
            chain_element not in chain_sets_lengths
            and chain_element not in chain_set
            and chain_set_length <= chain_length
        ):
            chain_set.add(SCREAMING_SNAKE_CASE__ )
            chain_set_length += 1
            UpperCamelCase :List[Any] = digit_factorial_sum(SCREAMING_SNAKE_CASE__ )

        if chain_element in chain_sets_lengths:
            chain_set_length += chain_sets_lengths[chain_element]

        UpperCamelCase :Any = chain_set_length

        # If chain contains the exact amount of elements increase the counter
        if chain_set_length == chain_length:
            chains_counter += 1

    return chains_counter


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    print(f'''{solution()}''')
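# A minimal usage sketch for the chain counter above (left as a comment): the
# second _A definition counts starting numbers below number_limit whose digit
# factorial chain contains exactly chain_length non-repeating terms; with the
# defaults (60, 1_000_000) this is Project Euler problem 74, whose published
# answer is 402.
#
#   print(_A(60, 1_000_000))  # expected: 402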
259
1
from __future__ import annotations


def _A ( SCREAMING_SNAKE_CASE__ : int ):
    UpperCamelCase :Union[str, Any] = str(SCREAMING_SNAKE_CASE__ )
    return len(SCREAMING_SNAKE_CASE__ ) == 9 and set(SCREAMING_SNAKE_CASE__ ) == set('''123456789''' )


def _A ( ):
    for base_num in range(9999 , 4999 , -1 ):
        UpperCamelCase :Any = 100002 * base_num
        if is_9_pandigital(SCREAMING_SNAKE_CASE__ ):
            return candidate

    for base_num in range(333 , 99 , -1 ):
        UpperCamelCase :List[Any] = 1002003 * base_num
        if is_9_pandigital(SCREAMING_SNAKE_CASE__ ):
            return candidate

    return None


if __name__ == "__main__":
    print(f'''{solution() = }''')
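# A short note on the magic constants above (Project Euler 38): for a 4-digit
# n whose double has 5 digits, concatenating n and 2n equals
# n * 10**5 + 2n = 100002 * n; for a 3-digit n with 3-digit 2n and 3n,
# concatenating n, 2n and 3n equals n * 10**6 + 2n * 10**3 + 3n = 1002003 * n.
# Scanning each multiplier range downwards therefore yields the largest 1-9
# pandigital concatenated product first.
#
#   print(_A())  # first (largest) pandigital candidate found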
259
import unittest

import numpy as np
import torch

from diffusers import DDIMPipeline, DDIMScheduler, UNetaDModel
from diffusers.utils.testing_utils import enable_full_determinism, require_torch_gpu, slow, torch_device

from ..pipeline_params import UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS, UNCONDITIONAL_IMAGE_GENERATION_PARAMS
from ..test_pipelines_common import PipelineTesterMixin


enable_full_determinism()


class UpperCAmelCase_ ( lowercase, unittest.TestCase ):
    """simple docstring"""

    UpperCamelCase_ : int =DDIMPipeline
    UpperCamelCase_ : str =UNCONDITIONAL_IMAGE_GENERATION_PARAMS
    UpperCamelCase_ : str =PipelineTesterMixin.required_optional_params - {
        'num_images_per_prompt',
        'latents',
        'callback',
        'callback_steps',
    }
    UpperCamelCase_ : Optional[Any] =UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS
    UpperCamelCase_ : List[str] =False

    def UpperCAmelCase ( self ) -> Any:
        torch.manual_seed(0 )
        UpperCamelCase :Optional[int] = UNetaDModel(
            block_out_channels=(32, 64) ,
            layers_per_block=2 ,
            sample_size=32 ,
            in_channels=3 ,
            out_channels=3 ,
            down_block_types=('''DownBlock2D''', '''AttnDownBlock2D''') ,
            up_block_types=('''AttnUpBlock2D''', '''UpBlock2D''') ,
        )
        UpperCamelCase :Dict = DDIMScheduler()
        UpperCamelCase :Any = {'''unet''': unet, '''scheduler''': scheduler}
        return components

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=0 ) -> Any:
        if str(SCREAMING_SNAKE_CASE_ ).startswith('''mps''' ):
            UpperCamelCase :List[Any] = torch.manual_seed(SCREAMING_SNAKE_CASE_ )
        else:
            UpperCamelCase :List[Any] = torch.Generator(device=SCREAMING_SNAKE_CASE_ ).manual_seed(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Union[str, Any] = {
            '''batch_size''': 1,
            '''generator''': generator,
            '''num_inference_steps''': 2,
            '''output_type''': '''numpy''',
        }
        return inputs

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase :Optional[int] = '''cpu'''
        UpperCamelCase :Union[str, Any] = self.get_dummy_components()
        UpperCamelCase :Optional[Any] = self.pipeline_class(**SCREAMING_SNAKE_CASE_ )
        pipe.to(SCREAMING_SNAKE_CASE_ )
        pipe.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Union[str, Any] = self.get_dummy_inputs(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Union[str, Any] = pipe(**SCREAMING_SNAKE_CASE_ ).images
        UpperCamelCase :str = image[0, -3:, -3:, -1]

        self.assertEqual(image.shape , (1, 32, 32, 3) )
        UpperCamelCase :Tuple = np.array(
            [1.000e00, 5.717e-01, 4.717e-01, 1.000e00, 0.000e00, 1.000e00, 3.000e-04, 0.000e00, 9.000e-04] )
        UpperCamelCase :List[str] = np.abs(image_slice.flatten() - expected_slice ).max()
        self.assertLessEqual(SCREAMING_SNAKE_CASE_ , 1e-3 )

    def UpperCAmelCase ( self ) -> int:
        super().test_dict_tuple_outputs_equivalent(expected_max_difference=3e-3 )

    def UpperCAmelCase ( self ) -> Optional[int]:
        super().test_save_load_local(expected_max_difference=3e-3 )

    def UpperCAmelCase ( self ) -> Any:
        super().test_save_load_optional_components(expected_max_difference=3e-3 )

    def UpperCAmelCase ( self ) -> str:
        super().test_inference_batch_single_identical(expected_max_diff=3e-3 )


@slow
@require_torch_gpu
class UpperCAmelCase_ ( unittest.TestCase ):
    """simple docstring"""

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        UpperCamelCase :int = '''google/ddpm-cifar10-32'''

        UpperCamelCase :Union[str, Any] = UNetaDModel.from_pretrained(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Dict = DDIMScheduler()

        UpperCamelCase :Tuple = DDIMPipeline(unet=SCREAMING_SNAKE_CASE_ , scheduler=SCREAMING_SNAKE_CASE_ )
        ddim.to(SCREAMING_SNAKE_CASE_ )
        ddim.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Dict = torch.manual_seed(0 )
        UpperCamelCase :Optional[int] = ddim(generator=SCREAMING_SNAKE_CASE_ , eta=0.0 , output_type='''numpy''' ).images

        UpperCamelCase :int = image[0, -3:, -3:, -1]

        assert image.shape == (1, 32, 32, 3)
        UpperCamelCase :Tuple = np.array([0.1723, 0.1617, 0.1600, 0.1626, 0.1497, 0.1513, 0.1505, 0.1442, 0.1453] )

        assert np.abs(image_slice.flatten() - expected_slice ).max() < 1e-2

    def UpperCAmelCase ( self ) -> int:
        UpperCamelCase :Optional[Any] = '''google/ddpm-ema-bedroom-256'''

        UpperCamelCase :Any = UNetaDModel.from_pretrained(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :int = DDIMScheduler.from_pretrained(SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Optional[int] = DDIMPipeline(unet=SCREAMING_SNAKE_CASE_ , scheduler=SCREAMING_SNAKE_CASE_ )
        ddpm.to(SCREAMING_SNAKE_CASE_ )
        ddpm.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Optional[int] = torch.manual_seed(0 )
        UpperCamelCase :Optional[int] = ddpm(generator=SCREAMING_SNAKE_CASE_ , output_type='''numpy''' ).images

        UpperCamelCase :Optional[int] = image[0, -3:, -3:, -1]

        assert image.shape == (1, 256, 256, 3)
        UpperCamelCase :Dict = np.array([0.0060, 0.0201, 0.0344, 0.0024, 0.0018, 0.0002, 0.0022, 0.0000, 0.0069] )

        assert np.abs(image_slice.flatten() - expected_slice ).max() < 1e-2
259
1
import shutil
import tempfile
import unittest

import numpy as np

from transformers.testing_utils import (
    is_pt_tf_cross_test,
    require_tf,
    require_torch,
    require_torchvision,
    require_vision,
)
from transformers.utils import is_tf_available, is_torch_available, is_vision_available


if is_vision_available():
    from PIL import Image

    from transformers import AutoProcessor, SamImageProcessor, SamProcessor

if is_torch_available():
    import torch

if is_tf_available():
    import tensorflow as tf


@require_vision
@require_torchvision
class UpperCAmelCase_ ( unittest.TestCase ):
    """simple docstring"""

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        UpperCamelCase :Optional[Any] = tempfile.mkdtemp()
        UpperCamelCase :Optional[Any] = SamImageProcessor()
        UpperCamelCase :Dict = SamProcessor(SCREAMING_SNAKE_CASE_ )
        processor.save_pretrained(self.tmpdirname )

    def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> Optional[Any]:
        return AutoProcessor.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ).image_processor

    def UpperCAmelCase ( self ) -> str:
        shutil.rmtree(self.tmpdirname )

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase :List[str] = [np.random.randint(255 , size=(3, 30, 400) , dtype=np.uinta )]

        UpperCamelCase :List[str] = [Image.fromarray(np.moveaxis(SCREAMING_SNAKE_CASE_ , 0 , -1 ) ) for x in image_inputs]

        return image_inputs

    def UpperCAmelCase ( self ) -> Optional[Any]:
        UpperCamelCase :Any = SamProcessor(image_processor=self.get_image_processor() )
        processor.save_pretrained(self.tmpdirname )

        UpperCamelCase :List[Any] = self.get_image_processor(do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 )
        UpperCamelCase :List[Any] = SamProcessor.from_pretrained(self.tmpdirname , do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 )

        self.assertEqual(processor.image_processor.to_json_string() , image_processor_add_kwargs.to_json_string() )
        self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Any:
        UpperCamelCase :List[Any] = self.get_image_processor()
        UpperCamelCase :Optional[int] = SamProcessor(image_processor=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Tuple = self.prepare_image_inputs()

        UpperCamelCase :int = image_processor(SCREAMING_SNAKE_CASE_ , return_tensors='''np''' )
        UpperCamelCase :Union[str, Any] = processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''np''' )

        input_feat_extract.pop('''original_sizes''' )  # pop original_sizes as it is popped in the processor
        input_feat_extract.pop('''reshaped_input_sizes''' )  # pop reshaped_input_sizes as it is popped in the processor

        for key in input_feat_extract.keys():
            self.assertAlmostEqual(input_feat_extract[key].sum() , input_processor[key].sum() , delta=1e-2 )

    @require_torch
    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase :str = self.get_image_processor()
        UpperCamelCase :Union[str, Any] = SamProcessor(image_processor=SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Optional[int] = [torch.ones((1, 3, 5, 5) )]
        UpperCamelCase :Any = [[1764, 2646]]
        UpperCamelCase :str = [[683, 1024]]
        UpperCamelCase :Optional[int] = processor.post_process_masks(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
        self.assertEqual(masks[0].shape , (1, 3, 1764, 2646) )

        UpperCamelCase :Dict = processor.post_process_masks(
            SCREAMING_SNAKE_CASE_ , torch.tensor(SCREAMING_SNAKE_CASE_ ) , torch.tensor(SCREAMING_SNAKE_CASE_ ) )
        self.assertEqual(masks[0].shape , (1, 3, 1764, 2646) )

        # should also work with np
        UpperCamelCase :Any = [np.ones((1, 3, 5, 5) )]
        UpperCamelCase :List[Any] = processor.post_process_masks(SCREAMING_SNAKE_CASE_ , np.array(SCREAMING_SNAKE_CASE_ ) , np.array(SCREAMING_SNAKE_CASE_ ) )

        self.assertEqual(masks[0].shape , (1, 3, 1764, 2646) )

        UpperCamelCase :int = [[1, 0], [0, 1]]
        with self.assertRaises(SCREAMING_SNAKE_CASE_ ):
            UpperCamelCase :Any = processor.post_process_masks(SCREAMING_SNAKE_CASE_ , np.array(SCREAMING_SNAKE_CASE_ ) , np.array(SCREAMING_SNAKE_CASE_ ) )


@require_vision
@require_tf
class UpperCAmelCase_ ( unittest.TestCase ):
    """simple docstring"""

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase :str = tempfile.mkdtemp()
        UpperCamelCase :Optional[int] = SamImageProcessor()
        UpperCamelCase :str = SamProcessor(SCREAMING_SNAKE_CASE_ )
        processor.save_pretrained(self.tmpdirname )

    def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> Optional[Any]:
        return AutoProcessor.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ).image_processor

    def UpperCAmelCase ( self ) -> List[Any]:
        shutil.rmtree(self.tmpdirname )

    def UpperCAmelCase ( self ) -> Optional[int]:
        UpperCamelCase :str = [np.random.randint(255 , size=(3, 30, 400) , dtype=np.uinta )]

        UpperCamelCase :Union[str, Any] = [Image.fromarray(np.moveaxis(SCREAMING_SNAKE_CASE_ , 0 , -1 ) ) for x in image_inputs]

        return image_inputs

    def UpperCAmelCase ( self ) -> int:
        UpperCamelCase :List[Any] = SamProcessor(image_processor=self.get_image_processor() )
        processor.save_pretrained(self.tmpdirname )

        UpperCamelCase :Any = self.get_image_processor(do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 )
        UpperCamelCase :Any = SamProcessor.from_pretrained(self.tmpdirname , do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 )

        self.assertEqual(processor.image_processor.to_json_string() , image_processor_add_kwargs.to_json_string() )
        self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase :Dict = self.get_image_processor()
        UpperCamelCase :int = SamProcessor(image_processor=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :str = self.prepare_image_inputs()

        UpperCamelCase :str = image_processor(SCREAMING_SNAKE_CASE_ , return_tensors='''np''' )
        UpperCamelCase :Union[str, Any] = processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''np''' )

        input_feat_extract.pop('''original_sizes''' )  # pop original_sizes as it is popped in the processor
        input_feat_extract.pop('''reshaped_input_sizes''' )  # pop reshaped_input_sizes as it is popped in the processor

        for key in input_feat_extract.keys():
            self.assertAlmostEqual(input_feat_extract[key].sum() , input_processor[key].sum() , delta=1e-2 )

    @require_tf
    def UpperCAmelCase ( self ) -> Any:
        UpperCamelCase :Dict = self.get_image_processor()
        UpperCamelCase :List[str] = SamProcessor(image_processor=SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Tuple = [tf.ones((1, 3, 5, 5) )]
        UpperCamelCase :Optional[Any] = [[1764, 2646]]
        UpperCamelCase :int = [[683, 1024]]
        UpperCamelCase :Optional[Any] = processor.post_process_masks(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_tensors='''tf''' )
        self.assertEqual(masks[0].shape , (1, 3, 1764, 2646) )

        UpperCamelCase :List[Any] = processor.post_process_masks(
            SCREAMING_SNAKE_CASE_ ,
            tf.convert_to_tensor(SCREAMING_SNAKE_CASE_ ) ,
            tf.convert_to_tensor(SCREAMING_SNAKE_CASE_ ) ,
            return_tensors='''tf''' ,
        )
        self.assertEqual(masks[0].shape , (1, 3, 1764, 2646) )

        # should also work with np
        UpperCamelCase :Optional[Any] = [np.ones((1, 3, 5, 5) )]
        UpperCamelCase :List[str] = processor.post_process_masks(
            SCREAMING_SNAKE_CASE_ , np.array(SCREAMING_SNAKE_CASE_ ) , np.array(SCREAMING_SNAKE_CASE_ ) , return_tensors='''tf''' )

        self.assertEqual(masks[0].shape , (1, 3, 1764, 2646) )

        UpperCamelCase :List[str] = [[1, 0], [0, 1]]
        with self.assertRaises(tf.errors.InvalidArgumentError ):
            UpperCamelCase :List[Any] = processor.post_process_masks(
                SCREAMING_SNAKE_CASE_ , np.array(SCREAMING_SNAKE_CASE_ ) , np.array(SCREAMING_SNAKE_CASE_ ) , return_tensors='''tf''' )


@require_vision
@require_torchvision
class UpperCAmelCase_ ( unittest.TestCase ):
    """simple docstring"""

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        UpperCamelCase :List[str] = tempfile.mkdtemp()
        UpperCamelCase :Optional[Any] = SamImageProcessor()
        UpperCamelCase :Any = SamProcessor(SCREAMING_SNAKE_CASE_ )
        processor.save_pretrained(self.tmpdirname )

    def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> List[Any]:
        return AutoProcessor.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ).image_processor

    def UpperCAmelCase ( self ) -> Any:
        shutil.rmtree(self.tmpdirname )

    def UpperCAmelCase ( self ) -> List[Any]:
        UpperCamelCase :Dict = [np.random.randint(255 , size=(3, 30, 400) , dtype=np.uinta )]

        UpperCamelCase :Optional[int] = [Image.fromarray(np.moveaxis(SCREAMING_SNAKE_CASE_ , 0 , -1 ) ) for x in image_inputs]

        return image_inputs

    @is_pt_tf_cross_test
    def UpperCAmelCase ( self ) -> Any:
        UpperCamelCase :Tuple = self.get_image_processor()
        UpperCamelCase :List[str] = SamProcessor(image_processor=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :List[str] = np.random.randint(0 , 2 , size=(1, 3, 5, 5) ).astype(np.floataa )
        UpperCamelCase :List[str] = [tf.convert_to_tensor(SCREAMING_SNAKE_CASE_ )]
        UpperCamelCase :List[Any] = [torch.tensor(SCREAMING_SNAKE_CASE_ )]

        UpperCamelCase :str = [[1764, 2646]]
        UpperCamelCase :Optional[Any] = [[683, 1024]]

        UpperCamelCase :Dict = processor.post_process_masks(
            SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_tensors='''tf''' )
        UpperCamelCase :Optional[Any] = processor.post_process_masks(
            SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_tensors='''pt''' )

        self.assertTrue(np.all(tf_masks[0].numpy() == pt_masks[0].numpy() ) )

    @is_pt_tf_cross_test
    def UpperCAmelCase ( self ) -> Union[str, Any]:
        UpperCamelCase :List[Any] = self.get_image_processor()
        UpperCamelCase :Tuple = SamProcessor(image_processor=SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Any = self.prepare_image_inputs()

        UpperCamelCase :str = image_processor(SCREAMING_SNAKE_CASE_ , return_tensors='''pt''' )['''pixel_values'''].numpy()
        UpperCamelCase :Optional[int] = processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''pt''' )['''pixel_values'''].numpy()

        UpperCamelCase :List[str] = image_processor(SCREAMING_SNAKE_CASE_ , return_tensors='''tf''' )['''pixel_values'''].numpy()
        UpperCamelCase :Optional[Any] = processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''tf''' )['''pixel_values'''].numpy()

        self.assertTrue(np.allclose(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) )
        self.assertTrue(np.allclose(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) )
        self.assertTrue(np.allclose(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) )
259
from json import JSONDecodeError  # Workaround for requests.exceptions.JSONDecodeError

import requests


def _A ( SCREAMING_SNAKE_CASE__ : str = "isbn/0140328726" ):
    UpperCamelCase :Optional[int] = olid.strip().strip('''/''' )  # Remove leading/trailing whitespace & slashes
    if new_olid.count('''/''' ) != 1:
        UpperCamelCase :str = F'''{olid} is not a valid Open Library olid'''
        raise ValueError(SCREAMING_SNAKE_CASE__ )
    return requests.get(F'''https://openlibrary.org/{new_olid}.json''' ).json()


def _A ( SCREAMING_SNAKE_CASE__ : dict ):
    UpperCamelCase :str = {
        '''title''': '''Title''',
        '''publish_date''': '''Publish date''',
        '''authors''': '''Authors''',
        '''number_of_pages''': '''Number of pages:''',
        '''first_sentence''': '''First sentence''',
        '''isbn_10''': '''ISBN (10)''',
        '''isbn_13''': '''ISBN (13)''',
    }
    UpperCamelCase :Optional[Any] = {better_key: ol_book_data[key] for key, better_key in desired_keys.items()}
    UpperCamelCase :List[str] = [
        get_openlibrary_data(author['''key'''] )['''name'''] for author in data['''Authors''']
    ]
    UpperCamelCase :int = data['''First sentence''']['''value''']
    for key, value in data.items():
        if isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
            UpperCamelCase :List[str] = ''', '''.join(SCREAMING_SNAKE_CASE__ )
    return data


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    while True:
        __snake_case = input("""\nEnter the ISBN code to search (or 'quit' to stop): """).strip()
        if isbn.lower() in ("", "q", "quit", "exit", "stop"):
            break

        if len(isbn) not in (10, 13) or not isbn.isdigit():
            print(f'''Sorry, {isbn} is not a valid ISBN. Please, input a valid ISBN.''')
            continue

        print(f'''\nSearching Open Library for ISBN: {isbn}...\n''')

        try:
            __snake_case = summarize_book(get_openlibrary_data(f'''isbn/{isbn}'''))
            print("""\n""".join(f'''{key}: {value}''' for key, value in book_summary.items()))
        except JSONDecodeError:  # Workaround for requests.exceptions.RequestException:
            print(f'''Sorry, there are no results for ISBN: {isbn}.''')
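# A minimal usage sketch (network-dependent, so left as a comment; the
# function names get_openlibrary_data and summarize_book come from the
# record's own __main__ block, and the ISBN below is the module's default,
# which Open Library resolves to Roald Dahl's "Matilda"):
#
#   data = get_openlibrary_data("isbn/0140328726")
#   print(summarize_book(data)["Title"])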
259
1
import os
import socket
from contextlib import contextmanager

import torch

from ..commands.config.default import write_basic_config  # noqa: F401
from ..state import PartialState
from .dataclasses import DistributedType
from .imports import is_deepspeed_available, is_tpu_available
from .transformer_engine import convert_model
from .versions import is_torch_version


if is_deepspeed_available():
    from deepspeed import DeepSpeedEngine

if is_tpu_available(check_device=False):
    import torch_xla.core.xla_model as xm


def _A ( SCREAMING_SNAKE_CASE__ : Dict ):
    if is_torch_version('''<''' , '''2.0.0''' ) or not hasattr(SCREAMING_SNAKE_CASE__ , '''_dynamo''' ):
        return False
    return isinstance(SCREAMING_SNAKE_CASE__ , torch._dynamo.eval_frame.OptimizedModule )


def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : bool = True ):
    UpperCamelCase :int = (torch.nn.parallel.DistributedDataParallel, torch.nn.DataParallel)
    UpperCamelCase :Dict = is_compiled_module(SCREAMING_SNAKE_CASE__ )
    if is_compiled:
        UpperCamelCase :List[str] = model
        UpperCamelCase :Dict = model._orig_mod

    if is_deepspeed_available():
        options += (DeepSpeedEngine,)

    while isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
        UpperCamelCase :Union[str, Any] = model.module

    if not keep_fpaa_wrapper:
        UpperCamelCase :Dict = getattr(SCREAMING_SNAKE_CASE__ , '''forward''' )
        UpperCamelCase :Optional[int] = model.__dict__.pop('''_original_forward''' , SCREAMING_SNAKE_CASE__ )
        if original_forward is not None:
            while hasattr(SCREAMING_SNAKE_CASE__ , '''__wrapped__''' ):
                UpperCamelCase :Union[str, Any] = forward.__wrapped__
                if forward == original_forward:
                    break
            UpperCamelCase :Dict = forward
    if getattr(SCREAMING_SNAKE_CASE__ , '''_converted_to_transformer_engine''' , SCREAMING_SNAKE_CASE__ ):
        convert_model(SCREAMING_SNAKE_CASE__ , to_transformer_engine=SCREAMING_SNAKE_CASE__ )

    if is_compiled:
        UpperCamelCase :str = model
        UpperCamelCase :int = compiled_model

    return model


def _A ( ):
    PartialState().wait_for_everyone()


def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Tuple ):
    if PartialState().distributed_type == DistributedType.TPU:
        xm.save(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ )
    elif PartialState().local_process_index == 0:
        torch.save(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ )


@contextmanager
def _A ( **SCREAMING_SNAKE_CASE__ : Dict ):
    for key, value in kwargs.items():
        UpperCamelCase :Any = str(SCREAMING_SNAKE_CASE__ )

    yield

    for key in kwargs:
        if key.upper() in os.environ:
            del os.environ[key.upper()]


def _A ( SCREAMING_SNAKE_CASE__ : int ):
    if not hasattr(SCREAMING_SNAKE_CASE__ , '''__qualname__''' ) and not hasattr(SCREAMING_SNAKE_CASE__ , '''__name__''' ):
        UpperCamelCase :Union[str, Any] = getattr(SCREAMING_SNAKE_CASE__ , '''__class__''' , SCREAMING_SNAKE_CASE__ )
    if hasattr(SCREAMING_SNAKE_CASE__ , '''__qualname__''' ):
        return obj.__qualname__
    if hasattr(SCREAMING_SNAKE_CASE__ , '''__name__''' ):
        return obj.__name__
    return str(SCREAMING_SNAKE_CASE__ )


def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Tuple ):
    for key, value in source.items():
        if isinstance(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
            UpperCamelCase :str = destination.setdefault(SCREAMING_SNAKE_CASE__ , {} )
            merge_dicts(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ )
        else:
            UpperCamelCase :Any = value

    return destination


def _A ( SCREAMING_SNAKE_CASE__ : int = None ):
    if port is None:
        UpperCamelCase :Any = 29500
    with socket.socket(socket.AF_INET , socket.SOCK_STREAM ) as s:
        return s.connect_ex(('''localhost''', port) ) == 0
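# A minimal usage sketch for the @contextmanager-decorated helper above
# (assumption: its pre-obfuscation name in accelerate is patch_environment;
# the sketch stays a comment because the obfuscated name _A is reused by
# every function in this record): each keyword argument is written to
# os.environ as an upper-cased string key for the duration of the block and
# removed again on exit.
#
#   with patch_environment(master_port=29501):
#       assert os.environ["MASTER_PORT"] == "29501"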
259
import inspect
import tempfile
import unittest

from huggingface_hub import hf_hub_download

from transformers import is_torch_available
from transformers.testing_utils import is_flaky, require_torch, slow, torch_device

from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor
from ...test_pipeline_mixin import PipelineTesterMixin


__snake_case = 1E-4

if is_torch_available():
    import torch

    from transformers import AutoformerConfig, AutoformerForPrediction, AutoformerModel
    from transformers.models.autoformer.modeling_autoformer import AutoformerDecoder, AutoformerEncoder


@require_torch
class UpperCAmelCase_ :
    """simple docstring"""

    def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=13 , SCREAMING_SNAKE_CASE_=7 , SCREAMING_SNAKE_CASE_=14 , SCREAMING_SNAKE_CASE_=10 , SCREAMING_SNAKE_CASE_=19 , SCREAMING_SNAKE_CASE_=5 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=[1, 2, 3, 4, 5] , SCREAMING_SNAKE_CASE_=25 , SCREAMING_SNAKE_CASE_=5 , ) -> str:
        UpperCamelCase :Any = d_model
        UpperCamelCase :List[str] = parent
        UpperCamelCase :List[Any] = batch_size
        UpperCamelCase :str = prediction_length
        UpperCamelCase :str = context_length
        UpperCamelCase :int = cardinality
        UpperCamelCase :Optional[Any] = num_time_features
        UpperCamelCase :Optional[Any] = lags_sequence
        UpperCamelCase :str = embedding_dimension
        UpperCamelCase :str = is_training
        UpperCamelCase :Optional[int] = hidden_size
        UpperCamelCase :List[Any] = num_hidden_layers
        UpperCamelCase :int = num_attention_heads
        UpperCamelCase :Tuple = intermediate_size
        UpperCamelCase :List[str] = hidden_act
        UpperCamelCase :List[str] = hidden_dropout_prob
        UpperCamelCase :List[Any] = attention_probs_dropout_prob

        UpperCamelCase :Optional[int] = context_length
        UpperCamelCase :Tuple = prediction_length + label_length
        UpperCamelCase :Optional[Any] = label_length

        UpperCamelCase :Optional[int] = moving_average
        UpperCamelCase :Union[str, Any] = autocorrelation_factor

    def UpperCAmelCase ( self ) -> Optional[int]:
        return AutoformerConfig(
            d_model=self.d_model ,
            encoder_layers=self.num_hidden_layers ,
            decoder_layers=self.num_hidden_layers ,
            encoder_attention_heads=self.num_attention_heads ,
            decoder_attention_heads=self.num_attention_heads ,
            encoder_ffn_dim=self.intermediate_size ,
            decoder_ffn_dim=self.intermediate_size ,
            dropout=self.hidden_dropout_prob ,
            attention_dropout=self.attention_probs_dropout_prob ,
            prediction_length=self.prediction_length ,
            context_length=self.context_length ,
            label_length=self.label_length ,
            lags_sequence=self.lags_sequence ,
            num_time_features=self.num_time_features ,
            num_static_categorical_features=1 ,
            cardinality=[self.cardinality] ,
            embedding_dimension=[self.embedding_dimension] ,
            moving_average=self.moving_average ,
        )

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> List[str]:
        UpperCamelCase :Optional[Any] = config.context_length + max(config.lags_sequence )

        UpperCamelCase :Union[str, Any] = ids_tensor([self.batch_size, 1] , config.cardinality[0] )
        UpperCamelCase :List[str] = floats_tensor([self.batch_size, _past_length, config.num_time_features] )
        UpperCamelCase :Union[str, Any] = floats_tensor([self.batch_size, _past_length] )
        UpperCamelCase :Any = floats_tensor([self.batch_size, _past_length] ) > 0.5

        # decoder inputs
        UpperCamelCase :Tuple = floats_tensor([self.batch_size, config.prediction_length, config.num_time_features] )
        UpperCamelCase :int = floats_tensor([self.batch_size, config.prediction_length] )

        UpperCamelCase :Union[str, Any] = {
            '''past_values''': past_values,
            '''static_categorical_features''': static_categorical_features,
            '''past_time_features''': past_time_features,
            '''past_observed_mask''': past_observed_mask,
            '''future_time_features''': future_time_features,
            '''future_values''': future_values,
        }
        return inputs_dict

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase :int = self.get_config()
        UpperCamelCase :Union[str, Any] = self.prepare_autoformer_inputs_dict(SCREAMING_SNAKE_CASE_ )
        return config, inputs_dict

    def UpperCAmelCase ( self ) -> Any:
        UpperCamelCase , UpperCamelCase :Optional[int] = self.prepare_config_and_inputs()
        return config, inputs_dict

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[Any]:
        UpperCamelCase :int = AutoformerModel(config=SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ ).eval()
        UpperCamelCase :Any = model(**SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :str = outputs.encoder_last_hidden_state
        UpperCamelCase :str = outputs.last_hidden_state

        with tempfile.TemporaryDirectory() as tmpdirname:
            UpperCamelCase :Any = model.get_encoder()
            encoder.save_pretrained(SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :Any = AutoformerEncoder.from_pretrained(SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ )

        UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :int = model.create_network_inputs(**SCREAMING_SNAKE_CASE_ )
        UpperCamelCase , UpperCamelCase :Tuple = model.decomposition_layer(transformer_inputs[:, : config.context_length, ...] )

        UpperCamelCase :Tuple = torch.cat(
            (transformer_inputs[:, : config.context_length, ...], feature[:, : config.context_length, ...]) ,
            dim=-1 ,
        )
        UpperCamelCase :Optional[Any] = encoder(inputs_embeds=SCREAMING_SNAKE_CASE_ )[0]
        self.parent.assertTrue((encoder_last_hidden_state_a - encoder_last_hidden_state).abs().max().item() < 1e-3 )

        UpperCamelCase :Optional[Any] = (
            torch.mean(transformer_inputs[:, : config.context_length, ...] , dim=1 )
            .unsqueeze(1 )
            .repeat(1 , config.prediction_length , 1 )
        )
        UpperCamelCase :Union[str, Any] = torch.zeros(
            [transformer_inputs.shape[0], config.prediction_length, transformer_inputs.shape[2]] ,
            device=enc_input.device ,
        )

        UpperCamelCase :Tuple = torch.cat(
            (
                torch.cat((seasonal_input[:, -config.label_length :, ...], zeros) , dim=1 ),
                feature[:, config.context_length - config.label_length :, ...],
            ) ,
            dim=-1 ,
        )
        UpperCamelCase :Optional[Any] = torch.cat(
            (
                torch.cat((trend_input[:, -config.label_length :, ...], mean) , dim=1 ),
                feature[:, config.context_length - config.label_length :, ...],
            ) ,
            dim=-1 ,
        )

        with tempfile.TemporaryDirectory() as tmpdirname:
            UpperCamelCase :Union[str, Any] = model.get_decoder()
            decoder.save_pretrained(SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :Optional[Any] = AutoformerDecoder.from_pretrained(SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :str = decoder(
            trend=SCREAMING_SNAKE_CASE_ ,
            inputs_embeds=SCREAMING_SNAKE_CASE_ ,
            encoder_hidden_states=SCREAMING_SNAKE_CASE_ ,
        )[0]

        self.parent.assertTrue((last_hidden_state_a - last_hidden_state).abs().max().item() < 1e-3 )


@require_torch
class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ):
    """simple docstring"""

    UpperCamelCase_ : List[str] =(AutoformerModel, AutoformerForPrediction) if is_torch_available() else ()
    UpperCamelCase_ : List[str] =(AutoformerForPrediction,) if is_torch_available() else ()
    UpperCamelCase_ : Optional[Any] ={'feature-extraction': AutoformerModel} if is_torch_available() else {}
    UpperCamelCase_ : Any =False
    UpperCamelCase_ : List[str] =False
    UpperCamelCase_ : Dict =False
    UpperCamelCase_ : Dict =False
    UpperCamelCase_ : int =False
    UpperCamelCase_ : Optional[int] =False

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase :str = AutoformerModelTester(self )
        UpperCamelCase :int = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , has_text_modality=SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        self.config_tester.run_common_tests()

    def UpperCAmelCase ( self ) -> Optional[Any]:
        UpperCamelCase , UpperCamelCase :str = self.model_tester.prepare_config_and_inputs()
        for model_class in self.all_model_classes:
            UpperCamelCase :Optional[int] = model_class(SCREAMING_SNAKE_CASE_ )

            with tempfile.TemporaryDirectory() as tmpdirname:
                model.save_pretrained(SCREAMING_SNAKE_CASE_ )
                UpperCamelCase , UpperCamelCase :List[str] = model_class.from_pretrained(SCREAMING_SNAKE_CASE_ , output_loading_info=SCREAMING_SNAKE_CASE_ )
            self.assertEqual(info['''missing_keys'''] , [] )

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs_for_common()
        self.model_tester.check_encoder_decoder_model_standalone(*SCREAMING_SNAKE_CASE_ )

    @unittest.skip(reason='''Model has no tokens embeddings''' )
    def UpperCAmelCase ( self ) -> int:
        pass

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase :str = inspect.signature(getattr(SCREAMING_SNAKE_CASE_ , '''forward''' ) )
        # The main input is the name of the argument after `self`
        UpperCamelCase :List[str] = list(model_signature.parameters.keys() )[1]
        self.assertEqual(AutoformerModel.main_input_name , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Tuple:
        UpperCamelCase , UpperCamelCase :Optional[Any] = self.model_tester.prepare_config_and_inputs_for_common()

        for model_class in self.all_model_classes:
            UpperCamelCase :List[Any] = model_class(SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :Optional[Any] = inspect.signature(model.forward )
            # signature.parameters is an OrderedDict => so arg_names order is deterministic
            UpperCamelCase :Tuple = [*signature.parameters.keys()]

            UpperCamelCase :Optional[Any] = [
                '''past_values''',
                '''past_time_features''',
                '''past_observed_mask''',
                '''static_categorical_features''',
                '''static_real_features''',
                '''future_values''',
                '''future_time_features''',
            ]

            if model.__class__.__name__ in ["AutoformerForPrediction"]:
                expected_arg_names.append('''future_observed_mask''' )

            expected_arg_names.extend(
                [
                    '''decoder_attention_mask''',
                    '''head_mask''',
                    '''decoder_head_mask''',
                    '''cross_attn_head_mask''',
                    '''encoder_outputs''',
                    '''past_key_values''',
                    '''output_hidden_states''',
                    '''output_attentions''',
                    '''use_cache''',
                    '''return_dict''',
                ] )

            self.assertListEqual(arg_names[: len(SCREAMING_SNAKE_CASE_ )] , SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self ) -> Dict:
        UpperCamelCase , UpperCamelCase :List[Any] = self.model_tester.prepare_config_and_inputs_for_common()
        UpperCamelCase :Dict = True
        UpperCamelCase :Dict = getattr(self.model_tester , '''seq_length''' , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Any = getattr(self.model_tester , '''decoder_seq_length''' , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Union[str, Any] = getattr(self.model_tester , '''encoder_seq_length''' , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :int = getattr(self.model_tester , '''d_model''' , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Tuple = getattr(self.model_tester , '''num_attention_heads''' , SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Any = d_model // num_attention_heads

        for model_class in self.all_model_classes:
            UpperCamelCase :Tuple = True
            UpperCamelCase :Tuple = False
            UpperCamelCase :Any = True
            UpperCamelCase :List[Any] = model_class(SCREAMING_SNAKE_CASE_ )
            model.to(SCREAMING_SNAKE_CASE_ )
            model.eval()

            with torch.no_grad():
                UpperCamelCase :int = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) )
            UpperCamelCase :Union[str, Any] = outputs.encoder_attentions if config.is_encoder_decoder else outputs.attentions
            self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers )

            # check that output_attentions also work using config
            del inputs_dict["output_attentions"]
            UpperCamelCase :Dict = True
            UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ )
            model.to(SCREAMING_SNAKE_CASE_ )
            model.eval()

            with torch.no_grad():
                UpperCamelCase :Optional[Any] = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) )
            UpperCamelCase :List[str] = outputs.encoder_attentions
            self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers )

            self.assertListEqual(
                list(attentions[0].shape[-3:] ) ,
                [self.model_tester.num_attention_heads, encoder_seq_length, dim] ,
            )
            UpperCamelCase :List[str] = len(SCREAMING_SNAKE_CASE_ )

            UpperCamelCase :List[Any] = 7

            if "last_hidden_state" in outputs:
                correct_outlen += 1

            if "trend" in outputs:
                correct_outlen += 1

            if "past_key_values" in outputs:
                correct_outlen += 1  # past_key_values have been returned

            if "loss" in outputs:
                correct_outlen += 1

            if "params" in outputs:
                correct_outlen += 1

            self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )

            # decoder attentions
            UpperCamelCase :Union[str, Any] = outputs.decoder_attentions
            self.assertIsInstance(SCREAMING_SNAKE_CASE_ , (list, tuple) )
            self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers )
            self.assertListEqual(
                list(decoder_attentions[0].shape[-3:] ) ,
                [self.model_tester.num_attention_heads, decoder_seq_length, dim] ,
            )

            # cross attentions
            UpperCamelCase :Union[str, Any] = outputs.cross_attentions
            self.assertIsInstance(SCREAMING_SNAKE_CASE_ , (list, tuple) )
            self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers )
            self.assertListEqual(
                list(cross_attentions[0].shape[-3:] ) ,
                [self.model_tester.num_attention_heads, decoder_seq_length, dim] ,
            )

            # Check attention is always last and order is fine
            UpperCamelCase :Any = True
            UpperCamelCase :int = True
            UpperCamelCase :Any = model_class(SCREAMING_SNAKE_CASE_ )
            model.to(SCREAMING_SNAKE_CASE_ )
            model.eval()

            with torch.no_grad():
                UpperCamelCase :Optional[Any] = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) )

            self.assertEqual(out_len + 2 , len(SCREAMING_SNAKE_CASE_ ) )

            UpperCamelCase :List[str] = outputs.encoder_attentions if config.is_encoder_decoder else outputs.attentions

            self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers )
            self.assertListEqual(
                list(self_attentions[0].shape[-3:] ) ,
                [self.model_tester.num_attention_heads, encoder_seq_length, dim] ,
            )

    @is_flaky()
    def UpperCAmelCase ( self ) -> List[Any]:
        super().test_retain_grad_hidden_states_attentions()


def _A ( SCREAMING_SNAKE_CASE__ : int="train-batch.pt" ):
    UpperCamelCase :Union[str, Any] = hf_hub_download(repo_id='''hf-internal-testing/tourism-monthly-batch''' , filename=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' )
    UpperCamelCase :Tuple = torch.load(SCREAMING_SNAKE_CASE__ , map_location=SCREAMING_SNAKE_CASE__ )
    return batch


@require_torch
@slow
class UpperCAmelCase_ ( unittest.TestCase ):
    """simple docstring"""

    def UpperCAmelCase ( self ) -> List[Any]:
        UpperCamelCase :int = AutoformerModel.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Dict = prepare_batch()

        with torch.no_grad():
            UpperCamelCase :Optional[Any] = model(
                past_values=batch['''past_values'''] ,
                past_time_features=batch['''past_time_features'''] ,
                past_observed_mask=batch['''past_observed_mask'''] ,
                static_categorical_features=batch['''static_categorical_features'''] ,
                future_values=batch['''future_values'''] ,
                future_time_features=batch['''future_time_features'''] ,
            )[0]

        UpperCamelCase :Union[str, Any] = torch.Size(
            (64, model.config.prediction_length + model.config.label_length, model.config.feature_size) )
        self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Tuple = torch.tensor(
            [[0.3593, -1.3398, 0.6330], [0.2279, 1.5396, -0.1792], [0.0450, 1.3225, -0.2335]] , device=SCREAMING_SNAKE_CASE_ )
        self.assertTrue(torch.allclose(output[0, :3, :3] , SCREAMING_SNAKE_CASE_ , atol=SCREAMING_SNAKE_CASE_ ) )

    def UpperCAmelCase ( self ) -> Union[str, Any]:
        UpperCamelCase :Any = AutoformerForPrediction.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ )
        UpperCamelCase :Union[str, Any] = prepare_batch('''val-batch.pt''' )
        with torch.no_grad():
            UpperCamelCase :Dict = model(
                past_values=batch['''past_values'''] ,
                past_time_features=batch['''past_time_features'''] ,
                past_observed_mask=batch['''past_observed_mask'''] ,
                static_categorical_features=batch['''static_categorical_features'''] ,
            ).encoder_last_hidden_state
        UpperCamelCase :Union[str, Any] = torch.Size((64, model.config.context_length, model.config.d_model) )
        self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ )

        UpperCamelCase :Any = torch.tensor(
            [[-0.0734, -0.9036, 0.8358], [4.7186, 2.4113, 1.9581], [1.7953, 2.3558, 1.2970]] , device=SCREAMING_SNAKE_CASE_ )
        self.assertTrue(torch.allclose(output[0, :3, :3] , SCREAMING_SNAKE_CASE_ ,
atol=SCREAMING_SNAKE_CASE_ ) ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = AutoformerForPrediction.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = prepare_batch('''val-batch.pt''' ) with torch.no_grad(): UpperCamelCase :Tuple = model.generate( static_categorical_features=batch['''static_categorical_features'''] , past_time_features=batch['''past_time_features'''] , past_values=batch['''past_values'''] , future_time_features=batch['''future_time_features'''] , past_observed_mask=batch['''past_observed_mask'''] , ) UpperCamelCase :Optional[int] = torch.Size((64, model.config.num_parallel_samples, model.config.prediction_length) ) self.assertEqual(outputs.sequences.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.tensor([3130.6763, 4056.5293, 7053.0786] , device=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = outputs.sequences.mean(dim=1 ) self.assertTrue(torch.allclose(mean_prediction[0, -3:] , SCREAMING_SNAKE_CASE_ , rtol=1e-1 ) )
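# A minimal inference sketch distilled from the integration tests above: load the
# pretrained Autoformer checkpoint and the hosted validation batch, then sample
# forecasts. Repo ids and batch keys are taken verbatim from the tests; taking the
# mean over parallel samples as a point forecast is an illustrative convention.
import torch
from huggingface_hub import hf_hub_download
from transformers import AutoformerForPrediction

model = AutoformerForPrediction.from_pretrained("huggingface/autoformer-tourism-monthly")
file = hf_hub_download(
    repo_id="hf-internal-testing/tourism-monthly-batch", filename="val-batch.pt", repo_type="dataset"
)
batch = torch.load(file, map_location="cpu")
with torch.no_grad():
    outputs = model.generate(
        past_values=batch["past_values"],
        past_time_features=batch["past_time_features"],
        past_observed_mask=batch["past_observed_mask"],
        static_categorical_features=batch["static_categorical_features"],
        future_time_features=batch["future_time_features"],
    )
# outputs.sequences has shape (batch, num_parallel_samples, prediction_length)
point_forecast = outputs.sequences.mean(dim=1)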
import json import os from typing import Optional import numpy as np from ...feature_extraction_utils import BatchFeature from ...processing_utils import ProcessorMixin from ...utils import logging from ...utils.hub import get_file_from_repo from ..auto import AutoTokenizer __snake_case = logging.get_logger(__name__) class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : str ='AutoTokenizer' UpperCamelCase_ : int =['tokenizer'] UpperCamelCase_ : Union[str, Any] ={ 'semantic_prompt': 1, 'coarse_prompt': 2, 'fine_prompt': 2, } def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=None ) -> List[Any]: super().__init__(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = speaker_embeddings @classmethod def UpperCAmelCase ( cls , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_="speaker_embeddings_path.json" , **SCREAMING_SNAKE_CASE_ ) -> List[Any]: if speaker_embeddings_dict_path is not None: UpperCamelCase :Optional[Any] = get_file_from_repo( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , subfolder=kwargs.pop('''subfolder''' , SCREAMING_SNAKE_CASE_ ) , cache_dir=kwargs.pop('''cache_dir''' , SCREAMING_SNAKE_CASE_ ) , force_download=kwargs.pop('''force_download''' , SCREAMING_SNAKE_CASE_ ) , proxies=kwargs.pop('''proxies''' , SCREAMING_SNAKE_CASE_ ) , resume_download=kwargs.pop('''resume_download''' , SCREAMING_SNAKE_CASE_ ) , local_files_only=kwargs.pop('''local_files_only''' , SCREAMING_SNAKE_CASE_ ) , use_auth_token=kwargs.pop('''use_auth_token''' , SCREAMING_SNAKE_CASE_ ) , revision=kwargs.pop('''revision''' , SCREAMING_SNAKE_CASE_ ) , ) if speaker_embeddings_path is None: logger.warning( F'''`{os.path.join(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )}` does not exists , no preloaded speaker embeddings will be used - Make sure to provide a correct path to the json dictionnary if wanted, otherwise set `speaker_embeddings_dict_path=None`.''' ) UpperCamelCase :Optional[int] = None else: with open(SCREAMING_SNAKE_CASE_ ) as speaker_embeddings_json: UpperCamelCase :Optional[Any] = json.load(SCREAMING_SNAKE_CASE_ ) else: UpperCamelCase :Optional[int] = None UpperCamelCase :Union[str, Any] = AutoTokenizer.from_pretrained(SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) return cls(tokenizer=SCREAMING_SNAKE_CASE_ , speaker_embeddings=SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_="speaker_embeddings_path.json" , SCREAMING_SNAKE_CASE_="speaker_embeddings" , SCREAMING_SNAKE_CASE_ = False , **SCREAMING_SNAKE_CASE_ , ) -> Any: if self.speaker_embeddings is not None: os.makedirs(os.path.join(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , '''v2''' ) , exist_ok=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = {} UpperCamelCase :Optional[int] = save_directory for prompt_key in self.speaker_embeddings: if prompt_key != "repo_or_path": UpperCamelCase :Optional[Any] = self._load_voice_preset(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = {} for key in self.speaker_embeddings[prompt_key]: np.save( os.path.join( embeddings_dict['''repo_or_path'''] , SCREAMING_SNAKE_CASE_ , F'''{prompt_key}_{key}''' ) , voice_preset[key] , allow_pickle=SCREAMING_SNAKE_CASE_ , ) UpperCamelCase :str = os.path.join(SCREAMING_SNAKE_CASE_ , F'''{prompt_key}_{key}.npy''' ) UpperCamelCase :List[Any] = tmp_dict with open(os.path.join(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) , '''w''' ) as fp: json.dump(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) super().save_pretrained(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , 
**SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :Dict = self.speaker_embeddings[voice_preset] UpperCamelCase :int = {} for key in ["semantic_prompt", "coarse_prompt", "fine_prompt"]: if key not in voice_preset_paths: raise ValueError( F'''Voice preset unrecognized, missing {key} as a key in self.speaker_embeddings[{voice_preset}].''' ) UpperCamelCase :Optional[int] = get_file_from_repo( self.speaker_embeddings.get('''repo_or_path''' , '''/''' ) , voice_preset_paths[key] , subfolder=kwargs.pop('''subfolder''' , SCREAMING_SNAKE_CASE_ ) , cache_dir=kwargs.pop('''cache_dir''' , SCREAMING_SNAKE_CASE_ ) , force_download=kwargs.pop('''force_download''' , SCREAMING_SNAKE_CASE_ ) , proxies=kwargs.pop('''proxies''' , SCREAMING_SNAKE_CASE_ ) , resume_download=kwargs.pop('''resume_download''' , SCREAMING_SNAKE_CASE_ ) , local_files_only=kwargs.pop('''local_files_only''' , SCREAMING_SNAKE_CASE_ ) , use_auth_token=kwargs.pop('''use_auth_token''' , SCREAMING_SNAKE_CASE_ ) , revision=kwargs.pop('''revision''' , SCREAMING_SNAKE_CASE_ ) , ) if path is None: raise ValueError( F'''`{os.path.join(self.speaker_embeddings.get("repo_or_path" , "/" ) , voice_preset_paths[key] )}` does not exists , no preloaded voice preset will be used - Make sure to provide correct paths to the {voice_preset} embeddings.''' ) UpperCamelCase :int = np.load(SCREAMING_SNAKE_CASE_ ) return voice_preset_dict def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ = None ) -> List[str]: for key in ["semantic_prompt", "coarse_prompt", "fine_prompt"]: if key not in voice_preset: raise ValueError(F'''Voice preset unrecognized, missing {key} as a key.''' ) if not isinstance(voice_preset[key] , np.ndarray ): raise ValueError(F'''{key} voice preset must be a {str(self.preset_shape[key] )}D ndarray.''' ) if len(voice_preset[key].shape ) != self.preset_shape[key]: raise ValueError(F'''{key} voice preset must be a {str(self.preset_shape[key] )}D ndarray.''' ) def __call__( self , SCREAMING_SNAKE_CASE_=None , SCREAMING_SNAKE_CASE_=None , SCREAMING_SNAKE_CASE_="pt" , SCREAMING_SNAKE_CASE_=256 , SCREAMING_SNAKE_CASE_=False , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=False , **SCREAMING_SNAKE_CASE_ , ) -> int: if voice_preset is not None and not isinstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ): if ( isinstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) and self.speaker_embeddings is not None and voice_preset in self.speaker_embeddings ): UpperCamelCase :List[str] = self._load_voice_preset(SCREAMING_SNAKE_CASE_ ) else: if isinstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) and not voice_preset.endswith('''.npz''' ): UpperCamelCase :Tuple = voice_preset + '''.npz''' UpperCamelCase :Optional[Any] = np.load(SCREAMING_SNAKE_CASE_ ) if voice_preset is not None: self._validate_voice_preset_dict(SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = BatchFeature(data=SCREAMING_SNAKE_CASE_ , tensor_type=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = self.tokenizer( SCREAMING_SNAKE_CASE_ , return_tensors=SCREAMING_SNAKE_CASE_ , padding='''max_length''' , max_length=SCREAMING_SNAKE_CASE_ , return_attention_mask=SCREAMING_SNAKE_CASE_ , return_token_type_ids=SCREAMING_SNAKE_CASE_ , add_special_tokens=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ , ) if voice_preset is not None: UpperCamelCase :int = voice_preset return encoded_text
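# A short usage sketch for the processor defined above (it is the Bark processor
# from transformers). The checkpoint id and voice-preset name are assumptions for
# illustration; any checkpoint shipping a matching speaker_embeddings_path.json
# layout works the same way.
from transformers import BarkProcessor

processor = BarkProcessor.from_pretrained("suno/bark-small")  # assumed checkpoint id
# Tokenizes the text and attaches the semantic/coarse/fine prompt arrays of the
# named speaker preset to the returned BatchFeature.
inputs = processor("Hello, my dog is cute", voice_preset="v2/en_speaker_6")  # assumed preset name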
import inspect import logging import os import random import shutil import tempfile import unittest import pytest import torch from torch import nn from torch.utils.data import DataLoader, TensorDataset from accelerate import Accelerator from accelerate.test_utils import execute_subprocess_async, require_cuda from accelerate.utils import ProjectConfiguration, set_seed __snake_case = logging.getLogger(__name__) def _A ( SCREAMING_SNAKE_CASE__ : Dict=2 , SCREAMING_SNAKE_CASE__ : Dict=3 , SCREAMING_SNAKE_CASE__ : Any=16 , SCREAMING_SNAKE_CASE__ : int = 10 , SCREAMING_SNAKE_CASE__ : int = 2 ): def get_dataset(SCREAMING_SNAKE_CASE__ : List[Any] ): UpperCamelCase :Union[str, Any] = torch.randn(batch_size * n_batches , 1 ) return TensorDataset(SCREAMING_SNAKE_CASE__ , a * x + b + 0.1 * torch.randn(batch_size * n_batches , 1 ) ) UpperCamelCase :str = get_dataset(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = get_dataset(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = DataLoader(SCREAMING_SNAKE_CASE__ , shuffle=SCREAMING_SNAKE_CASE__ , batch_size=SCREAMING_SNAKE_CASE__ , num_workers=4 ) UpperCamelCase :Any = DataLoader(SCREAMING_SNAKE_CASE__ , shuffle=SCREAMING_SNAKE_CASE__ , batch_size=SCREAMING_SNAKE_CASE__ , num_workers=4 ) return (train_dataloader, valid_dataloader) def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Any=None ): UpperCamelCase :Dict = [] for epoch in range(SCREAMING_SNAKE_CASE__ ): # Train quickly model.train() for batch in dataloader: UpperCamelCase , UpperCamelCase :Optional[Any] = batch UpperCamelCase :int = model(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = torch.nn.functional.mse_loss(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) accelerator.backward(SCREAMING_SNAKE_CASE__ ) optimizer.step() optimizer.zero_grad() rands.append(random.random() ) # Introduce some randomness if scheduler is not None: scheduler.step() return rands class UpperCAmelCase_ ( nn.Module ): """simple docstring""" def __init__( self ) -> str: super().__init__() UpperCamelCase :Optional[int] = nn.Parameter(torch.randn(1 ) ) UpperCamelCase :int = nn.Parameter(torch.randn(1 ) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> int: return x * self.a + self.b class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> Dict: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :Tuple = dummy_dataloaders() UpperCamelCase :Tuple = ProjectConfiguration(total_limit=1 , project_dir=SCREAMING_SNAKE_CASE_ , automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :Dict = Accelerator(project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Union[str, Any] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() # Save second state accelerator.save_state() self.assertEqual(len(os.listdir(accelerator.project_dir ) ) , 1 ) def UpperCAmelCase ( self ) -> str: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[str] = DummyModel() UpperCamelCase :Union[str, Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase 
, UpperCamelCase :Dict = dummy_dataloaders() # Train baseline UpperCamelCase :Dict = Accelerator() UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :int = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial UpperCamelCase :int = os.path.join(SCREAMING_SNAKE_CASE_ , '''initial''' ) accelerator.save_state(SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[Any] = model.a.item(), model.b.item() UpperCamelCase :Optional[int] = optimizer.state_dict() UpperCamelCase :Optional[int] = train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Dict = model.a.item(), model.b.item() UpperCamelCase :Optional[Any] = optimizer.state_dict() # Train partially set_seed(42 ) UpperCamelCase :Any = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :List[Any] = dummy_dataloaders() UpperCamelCase :List[str] = Accelerator() UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Tuple = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) accelerator.load_state(SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Tuple = model.a.item(), model.b.item() UpperCamelCase :Tuple = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = train(2 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save everything UpperCamelCase :Optional[int] = os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoint''' ) accelerator.save_state(SCREAMING_SNAKE_CASE_ ) # Load everything back in and make sure all states work accelerator.load_state(SCREAMING_SNAKE_CASE_ ) test_rands += train(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Union[str, Any] = model.a.item(), model.b.item() UpperCamelCase :Optional[Any] = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[Any] = DummyModel() UpperCamelCase :Optional[int] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :int = dummy_dataloaders() UpperCamelCase :int = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :Union[str, Any] = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Optional[Any] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() ((UpperCamelCase) , (UpperCamelCase)) :List[str] = model.a.item(), model.b.item() UpperCamelCase :Dict = optimizer.state_dict() UpperCamelCase :Any = train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , 
SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[int] = model.a.item(), model.b.item() UpperCamelCase :Any = optimizer.state_dict() # Train partially set_seed(42 ) UpperCamelCase :Union[str, Any] = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :Tuple = dummy_dataloaders() UpperCamelCase :Optional[Any] = ProjectConfiguration(iteration=1 , automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[str] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) ((UpperCamelCase) , (UpperCamelCase)) :Dict = model.a.item(), model.b.item() UpperCamelCase :Dict = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = train(2 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save everything accelerator.save_state() # Load everything back in and make sure all states work accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_1''' ) ) test_rands += train(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[Any] = model.a.item(), model.b.item() UpperCamelCase :str = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :List[Any] = torch.tensor([1, 2, 3] ) UpperCamelCase :Any = torch.tensor([2, 3, 4] ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :Optional[Any] = torch.optim.Adam(net.parameters() ) UpperCamelCase :Optional[Any] = Accelerator() with self.assertRaises(SCREAMING_SNAKE_CASE_ ) as ve: accelerator.register_for_checkpointing(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = str(ve.exception ) self.assertTrue('''Item at index 0''' in message ) self.assertTrue('''Item at index 1''' in message ) self.assertFalse('''Item at index 2''' in message ) self.assertFalse('''Item at index 3''' in message ) def UpperCAmelCase ( self ) -> Any: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[Any] = DummyModel() UpperCamelCase :List[str] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase :Any = torch.optim.lr_scheduler.StepLR(SCREAMING_SNAKE_CASE_ , step_size=1 , gamma=0.99 ) UpperCamelCase , UpperCamelCase :Any = dummy_dataloaders() UpperCamelCase :Optional[int] = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :str = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase 
:Tuple = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() UpperCamelCase :int = scheduler.state_dict() train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertNotEqual(SCREAMING_SNAKE_CASE_ , scheduler.state_dict() ) # Load everything back in and make sure all states work accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) self.assertEqual(SCREAMING_SNAKE_CASE_ , scheduler.state_dict() ) def UpperCAmelCase ( self ) -> Union[str, Any]: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :int = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ , total_limit=2 ) # Train baseline UpperCamelCase :Tuple = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = accelerator.prepare(SCREAMING_SNAKE_CASE_ ) # Save 3 states: for _ in range(11 ): accelerator.save_state() self.assertTrue(not os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) ) self.assertTrue(os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_9''' ) ) ) self.assertTrue(os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_10''' ) ) ) @require_cuda def UpperCAmelCase ( self ) -> int: UpperCamelCase :int = ['''torchrun''', F'''--nproc_per_node={torch.cuda.device_count()}''', inspect.getfile(self.__class__ )] execute_subprocess_async(SCREAMING_SNAKE_CASE_ , env=os.environ.copy() ) if __name__ == "__main__": __snake_case = """/tmp/accelerate/state_checkpointing""" __snake_case = DummyModel() __snake_case = torch.optim.Adam(params=model.parameters(), lr=1E-3) __snake_case = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.9_9) __snake_case , __snake_case = dummy_dataloaders() __snake_case = ProjectConfiguration(automatic_checkpoint_naming=True) # Train baseline __snake_case = Accelerator(project_dir=savedir, project_config=project_config, mixed_precision="""no""") if accelerator.process_index == 0: if os.path.exists(savedir): shutil.rmtree(savedir) os.makedirs(savedir) __snake_case , __snake_case , __snake_case , __snake_case , __snake_case = accelerator.prepare( model, optimizer, train_dataloader, valid_dataloader, scheduler ) __snake_case , __snake_case = accelerator.prepare(model, optimizer) train(3, model, train_dataloader, optimizer, accelerator, scheduler) # Check that the intial optimizer is loaded on the GPU for group in optimizer.param_groups: __snake_case = group["""params"""][0].device break assert param_device.type == accelerator.device.type __snake_case = model.cpu() accelerator.wait_for_everyone() accelerator.save_state() accelerator.wait_for_everyone() # Check CPU state accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""cpu""") for group in optimizer.param_groups: __snake_case = group["""params"""][0].device break assert ( param_device.type == torch.device("""cpu""").type ), f"Loaded optimizer states did not match, expected to be loaded on the CPU but got {param_device}" # Check device state model.to(accelerator.device) accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""on_device""") for group in 
optimizer.param_groups: __snake_case = group["""params"""][0].device break assert ( param_device.type == accelerator.device.type ), f"Loaded optimizer states did not match, expected to be loaded on {accelerator.device} but got {param_device}" # Check error with pytest.raises(TypeError, match="""Unsupported optimizer map location passed"""): accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""invalid""") accelerator.wait_for_everyone() if accelerator.process_index == 0: shutil.rmtree(savedir) accelerator.wait_for_everyone()
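# A minimal sketch of the save/load cycle the tests above exercise, assuming a
# single process and the automatic checkpoint naming used throughout this file.
import os
import torch
from torch.utils.data import DataLoader, TensorDataset
from accelerate import Accelerator
from accelerate.utils import ProjectConfiguration

net = torch.nn.Linear(1, 1)
opt = torch.optim.Adam(net.parameters(), lr=1e-3)
data = DataLoader(TensorDataset(torch.randn(8, 1), torch.randn(8, 1)), batch_size=4)
accelerator = Accelerator(project_config=ProjectConfiguration(project_dir="ckpts", automatic_checkpoint_naming=True))
net, opt, data = accelerator.prepare(net, opt, data)
accelerator.save_state()  # writes ckpts/checkpoints/checkpoint_0
# ... training steps would go here ...
accelerator.load_state(os.path.join("ckpts", "checkpoints", "checkpoint_0"))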
# DISCLAIMER: This file is strongly influenced by https://github.com/yang-song/score_sde_pytorch import math from dataclasses import dataclass from typing import Optional, Tuple, Union import torch from ..configuration_utils import ConfigMixin, register_to_config from ..utils import BaseOutput, randn_tensor from .scheduling_utils import SchedulerMixin, SchedulerOutput @dataclass class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : torch.FloatTensor UpperCamelCase_ : torch.FloatTensor class UpperCAmelCase_ ( lowercase, lowercase ): """simple docstring""" UpperCamelCase_ : Any =1 @register_to_config def __init__( self , SCREAMING_SNAKE_CASE_ = 2000 , SCREAMING_SNAKE_CASE_ = 0.15 , SCREAMING_SNAKE_CASE_ = 0.01 , SCREAMING_SNAKE_CASE_ = 1348.0 , SCREAMING_SNAKE_CASE_ = 1e-5 , SCREAMING_SNAKE_CASE_ = 1 , ) -> Tuple: # standard deviation of the initial noise distribution UpperCamelCase :Any = sigma_max # setable values UpperCamelCase :Optional[int] = None self.set_sigmas(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None ) -> torch.FloatTensor: return sample def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None ) -> List[str]: UpperCamelCase :Optional[int] = sampling_eps if sampling_eps is not None else self.config.sampling_eps UpperCamelCase :Dict = torch.linspace(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , device=SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None ) -> Union[str, Any]: UpperCamelCase :Any = sigma_min if sigma_min is not None else self.config.sigma_min UpperCamelCase :Tuple = sigma_max if sigma_max is not None else self.config.sigma_max UpperCamelCase :Optional[int] = sampling_eps if sampling_eps is not None else self.config.sampling_eps if self.timesteps is None: self.set_timesteps(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = sigma_min * (sigma_max / sigma_min) ** (self.timesteps / sampling_eps) UpperCamelCase :Optional[Any] = torch.exp(torch.linspace(math.log(SCREAMING_SNAKE_CASE_ ) , math.log(SCREAMING_SNAKE_CASE_ ) , SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :Any = torch.tensor([sigma_min * (sigma_max / sigma_min) ** t for t in self.timesteps] ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> str: return torch.where( timesteps == 0 , torch.zeros_like(t.to(timesteps.device ) ) , self.discrete_sigmas[timesteps - 1].to(timesteps.device ) , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = True , ) -> Union[SdeVeOutput, Tuple]: if self.timesteps is None: raise ValueError( '''`self.timesteps` is not set, you need to run \'set_timesteps\' after creating the scheduler''' ) UpperCamelCase :Any = timestep * torch.ones( sample.shape[0] , device=sample.device ) # torch.repeat_interleave(timestep, sample.shape[0]) UpperCamelCase :int = (timestep * (len(self.timesteps ) - 1)).long() # mps requires indices to be in the same device, so we use cpu as is the default with cuda UpperCamelCase :Any = timesteps.to(self.discrete_sigmas.device ) UpperCamelCase :Dict = self.discrete_sigmas[timesteps].to(sample.device ) UpperCamelCase :Tuple = self.get_adjacent_sigma(SCREAMING_SNAKE_CASE_ , 
SCREAMING_SNAKE_CASE_ ).to(sample.device ) UpperCamelCase :Any = torch.zeros_like(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = (sigma**2 - adjacent_sigma**2) ** 0.5 # equation 6 in the paper: the model_output modeled by the network is grad_x log pt(x) # also equation 47 shows the analog from SDE models to ancestral sampling methods UpperCamelCase :Any = diffusion.flatten() while len(diffusion.shape ) < len(sample.shape ): UpperCamelCase :int = diffusion.unsqueeze(-1 ) UpperCamelCase :Dict = drift - diffusion**2 * model_output # equation 6: sample noise for the diffusion term of UpperCamelCase :Dict = randn_tensor( sample.shape , layout=sample.layout , generator=SCREAMING_SNAKE_CASE_ , device=sample.device , dtype=sample.dtype ) UpperCamelCase :Optional[Any] = sample - drift # subtract because `dt` is a small negative timestep # TODO is the variable diffusion the correct scaling term for the noise? UpperCamelCase :Dict = prev_sample_mean + diffusion * noise # add impact of diffusion field g if not return_dict: return (prev_sample, prev_sample_mean) return SdeVeOutput(prev_sample=SCREAMING_SNAKE_CASE_ , prev_sample_mean=SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = True , ) -> Union[SchedulerOutput, Tuple]: if self.timesteps is None: raise ValueError( '''`self.timesteps` is not set, you need to run \'set_timesteps\' after creating the scheduler''' ) # For small batch sizes, the paper "suggest replacing norm(z) with sqrt(d), where d is the dim. of z" # sample noise for correction UpperCamelCase :int = randn_tensor(sample.shape , layout=sample.layout , generator=SCREAMING_SNAKE_CASE_ ).to(sample.device ) # compute step size from the model_output, the noise, and the snr UpperCamelCase :Optional[int] = torch.norm(model_output.reshape(model_output.shape[0] , -1 ) , dim=-1 ).mean() UpperCamelCase :Union[str, Any] = torch.norm(noise.reshape(noise.shape[0] , -1 ) , dim=-1 ).mean() UpperCamelCase :Optional[Any] = (self.config.snr * noise_norm / grad_norm) ** 2 * 2 UpperCamelCase :Optional[Any] = step_size * torch.ones(sample.shape[0] ).to(sample.device ) # self.repeat_scalar(step_size, sample.shape[0]) # compute corrected sample: model_output term and noise term UpperCamelCase :Optional[Any] = step_size.flatten() while len(step_size.shape ) < len(sample.shape ): UpperCamelCase :Tuple = step_size.unsqueeze(-1 ) UpperCamelCase :Optional[Any] = sample + step_size * model_output UpperCamelCase :List[str] = prev_sample_mean + ((step_size * 2) ** 0.5) * noise if not return_dict: return (prev_sample,) return SchedulerOutput(prev_sample=SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , ) -> torch.FloatTensor: # Make sure sigmas and timesteps have the same device and dtype as original_samples UpperCamelCase :Optional[int] = timesteps.to(original_samples.device ) UpperCamelCase :Optional[int] = self.discrete_sigmas.to(original_samples.device )[timesteps] UpperCamelCase :Dict = ( noise * sigmas[:, None, None, None] if noise is not None else torch.randn_like(SCREAMING_SNAKE_CASE_ ) * sigmas[:, None, None, None] ) UpperCamelCase :Dict = noise + original_samples return noisy_samples def __len__( self ) -> Dict: return self.config.num_train_timesteps
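# A predictor-corrector sampling sketch for the scheduler above (diffusers'
# score-SDE VE scheduler; the obfuscated methods correspond to set_timesteps,
# set_sigmas, step_correct and step_pred). `score_model` is a hypothetical
# stand-in for a trained score network, so the loop structure, not the output,
# is the point here.
import torch
from diffusers import ScoreSdeVeScheduler

score_model = lambda x, sigma: torch.zeros_like(x)  # hypothetical score network

scheduler = ScoreSdeVeScheduler()
scheduler.set_timesteps(num_inference_steps=100)
scheduler.set_sigmas(num_inference_steps=100)
sample = torch.randn(1, 3, 32, 32) * scheduler.init_noise_sigma
for i, t in enumerate(scheduler.timesteps):
    sigma_t = scheduler.sigmas[i] * torch.ones(sample.shape[0])
    for _ in range(scheduler.config.correct_steps):  # Langevin corrector steps
        sample = scheduler.step_correct(score_model(sample, sigma_t), sample).prev_sample
    output = scheduler.step_pred(score_model(sample, sigma_t), t, sample)  # reverse-SDE predictor
    sample, sample_mean = output.prev_sample, output.prev_sample_mean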
import numpy as np

__snake_case = [
    ["""a""", """b""", """c""", """d""", """e"""],
    ["""f""", """g""", """h""", """i""", """k"""],
    ["""l""", """m""", """n""", """o""", """p"""],
    ["""q""", """r""", """s""", """t""", """u"""],
    ["""v""", """w""", """x""", """y""", """z"""],
]


class UpperCAmelCase_ :
    """simple docstring"""

    def __init__( self ) -> None:
        UpperCamelCase :Dict = np.array(SCREAMING_SNAKE_CASE_ )

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> np.ndarray:
        UpperCamelCase , UpperCamelCase :Tuple = np.where(letter == self.SQUARE )
        UpperCamelCase :List[Any] = np.concatenate([indexa + 1, indexa + 1] )
        return indexes

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> str:
        UpperCamelCase :int = self.SQUARE[indexa - 1, indexa - 1]
        return letter

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> str:
        UpperCamelCase :Any = message.lower()
        UpperCamelCase :int = message.replace(''' ''' , '''''' )
        UpperCamelCase :Dict = message.replace('''j''' , '''i''' )
        UpperCamelCase :str = np.empty((2, len(SCREAMING_SNAKE_CASE_ )) )
        for letter_index in range(len(SCREAMING_SNAKE_CASE_ ) ):
            UpperCamelCase :Dict = self.letter_to_numbers(message[letter_index] )
            UpperCamelCase :Union[str, Any] = numbers[0]
            UpperCamelCase :Dict = numbers[1]
        UpperCamelCase :Any = first_step.reshape(2 * len(SCREAMING_SNAKE_CASE_ ) )
        UpperCamelCase :Union[str, Any] = ''''''
        for numbers_index in range(len(SCREAMING_SNAKE_CASE_ ) ):
            UpperCamelCase :Dict = int(second_step[numbers_index * 2] )
            UpperCamelCase :List[str] = int(second_step[(numbers_index * 2) + 1] )
            UpperCamelCase :Tuple = self.numbers_to_letter(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :List[Any] = encoded_message + letter
        return encoded_message

    def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> str:
        UpperCamelCase :Any = message.lower()
        message.replace(''' ''' , '''''' )
        UpperCamelCase :Optional[int] = np.empty(2 * len(SCREAMING_SNAKE_CASE_ ) )
        for letter_index in range(len(SCREAMING_SNAKE_CASE_ ) ):
            UpperCamelCase :List[str] = self.letter_to_numbers(message[letter_index] )
            UpperCamelCase :Dict = numbers[0]
            UpperCamelCase :List[str] = numbers[1]
        UpperCamelCase :int = first_step.reshape((2, len(SCREAMING_SNAKE_CASE_ )) )
        UpperCamelCase :Any = ''''''
        for numbers_index in range(len(SCREAMING_SNAKE_CASE_ ) ):
            UpperCamelCase :Any = int(second_step[0, numbers_index] )
            UpperCamelCase :List[Any] = int(second_step[1, numbers_index] )
            UpperCamelCase :Tuple = self.numbers_to_letter(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ )
            UpperCamelCase :Any = decoded_message + letter
        return decoded_message
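# For reference, a self-contained restatement of the Bifid encoding the class
# above implements over the 5x5 square ('j' folded into 'i'): every letter maps
# to a (row, column) pair, all rows are written out before all columns, and the
# combined digit stream is read back pairwise into letters.
def bifid_encode(message: str) -> str:
    square = ["abcde", "fghik", "lmnop", "qrstu", "vwxyz"]
    pos = {ch: (r, c) for r, row in enumerate(square) for c, ch in enumerate(row)}
    message = message.lower().replace(" ", "").replace("j", "i")
    rows = [pos[ch][0] for ch in message]
    cols = [pos[ch][1] for ch in message]
    stream = rows + cols
    return "".join(square[stream[2 * i]][stream[2 * i + 1]] for i in range(len(message)))


assert bifid_encode("testmessage") == "qtltbdxrxlk"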
import requests

__snake_case = """https://newsapi.org/v1/articles?source=bbc-news&sortBy=top&apiKey="""


def _A ( SCREAMING_SNAKE_CASE__ : str ):
    # fetching a list of articles in json format
    UpperCamelCase :Tuple = requests.get(_NEWS_API + bbc_news_api_key ).json()
    # each article in the list is a dict
    for i, article in enumerate(bbc_news_page['''articles'''] , 1 ):
        print(F'''{i}.) {article["title"]}''' )


if __name__ == "__main__":
    fetch_bbc_news(bbc_news_api_key="""<Your BBC News API key goes here>""")
import argparse import collections import numpy as np import torch from flax import traverse_util from tax import checkpoints from transformers import MTaConfig, UMTaEncoderModel, UMTaForConditionalGeneration from transformers.utils import logging logging.set_verbosity_info() def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Tuple ): return params[F'''{prefix}/{prefix}/relpos_bias/rel_embedding'''][:, i, :] def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : Any="attention" ): UpperCamelCase :str = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/key/kernel'''][:, i, :, :] ) UpperCamelCase :Optional[Any] = k_tmp.reshape(k_tmp.shape[0] , k_tmp.shape[1] * k_tmp.shape[2] ) UpperCamelCase :Optional[int] = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/out/kernel'''][:, i, :, :] ) UpperCamelCase :List[Any] = o_tmp.reshape(o_tmp.shape[0] * o_tmp.shape[1] , o_tmp.shape[2] ) UpperCamelCase :Union[str, Any] = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/query/kernel'''][:, i, :, :] ) UpperCamelCase :Any = q_tmp.reshape(q_tmp.shape[0] , q_tmp.shape[1] * q_tmp.shape[2] ) UpperCamelCase :str = np.ascontiguousarray(params[F'''{prefix}/{prefix}/{layer_name}/value/kernel'''][:, i, :, :] ) UpperCamelCase :str = v_tmp.reshape(v_tmp.shape[0] , v_tmp.shape[1] * v_tmp.shape[2] ) return k, o, q, v def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : List[str]=False ): if split_mlp_wi: UpperCamelCase :List[Any] = params[F'''{prefix}/{prefix}/mlp/wi_0/kernel'''][:, i, :] UpperCamelCase :int = params[F'''{prefix}/{prefix}/mlp/wi_1/kernel'''][:, i, :] UpperCamelCase :str = (wi_a, wi_a) else: UpperCamelCase :Optional[Any] = params[F'''{prefix}/{prefix}/mlp/wi/kernel'''][:, i, :] UpperCamelCase :Optional[int] = params[F'''{prefix}/{prefix}/mlp/wo/kernel'''][:, i, :] return wi, wo def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Optional[int] ): return params[F'''{prefix}/{prefix}/{layer_name}/scale'''][:, i] def _A ( SCREAMING_SNAKE_CASE__ : dict , *, SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : bool , SCREAMING_SNAKE_CASE__ : bool = False ): UpperCamelCase :Tuple = traverse_util.flatten_dict(variables['''target'''] ) UpperCamelCase :List[Any] = {'''/'''.join(SCREAMING_SNAKE_CASE__ ): v for k, v in old.items()} # v1.1 models have a gated GeLU with wi_0 and wi_1 instead of wi UpperCamelCase :int = '''encoder/encoder/mlp/wi_0/kernel''' in old print('''Split MLP:''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = collections.OrderedDict() # Shared embeddings. UpperCamelCase :int = old['''token_embedder/embedding'''] # Encoder. for i in range(SCREAMING_SNAKE_CASE__ ): # Block i, layer 0 (Self Attention). UpperCamelCase :str = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''pre_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[str] = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''attention''' ) UpperCamelCase :str = layer_norm UpperCamelCase :Dict = k.T UpperCamelCase :Optional[Any] = o.T UpperCamelCase :int = q.T UpperCamelCase :Any = v.T # Block i, layer 1 (MLP). 
UpperCamelCase :Tuple = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , '''pre_mlp_layer_norm''' ) UpperCamelCase , UpperCamelCase :Any = tax_mlp_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Tuple = layer_norm if split_mlp_wi: UpperCamelCase :List[Any] = wi[0].T UpperCamelCase :Tuple = wi[1].T else: UpperCamelCase :Optional[Any] = wi.T UpperCamelCase :Dict = wo.T if scalable_attention: # convert the rel_embedding of each layer UpperCamelCase :List[str] = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''encoder''' ).T UpperCamelCase :Optional[Any] = old['''encoder/encoder_norm/scale'''] if not scalable_attention: UpperCamelCase :str = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , 0 , '''encoder''' ).T UpperCamelCase :Any = tax_relpos_bias_lookup( SCREAMING_SNAKE_CASE__ , 0 , '''decoder''' ).T if not is_encoder_only: # Decoder. for i in range(SCREAMING_SNAKE_CASE__ ): # Block i, layer 0 (Self Attention). UpperCamelCase :Union[str, Any] = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_self_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Dict = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''self_attention''' ) UpperCamelCase :str = layer_norm UpperCamelCase :int = k.T UpperCamelCase :Optional[int] = o.T UpperCamelCase :Tuple = q.T UpperCamelCase :List[str] = v.T # Block i, layer 1 (Cross Attention). UpperCamelCase :str = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_cross_attention_layer_norm''' ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[Any] = tax_attention_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''encoder_decoder_attention''' ) UpperCamelCase :Tuple = layer_norm UpperCamelCase :Optional[Any] = k.T UpperCamelCase :List[str] = o.T UpperCamelCase :List[str] = q.T UpperCamelCase :str = v.T # Block i, layer 2 (MLP). UpperCamelCase :List[str] = tax_layer_norm_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , '''pre_mlp_layer_norm''' ) UpperCamelCase , UpperCamelCase :Optional[int] = tax_mlp_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' , SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Tuple = layer_norm if split_mlp_wi: UpperCamelCase :List[str] = wi[0].T UpperCamelCase :str = wi[1].T else: UpperCamelCase :Dict = wi.T UpperCamelCase :Optional[Any] = wo.T if scalable_attention: # convert the rel_embedding of each layer UpperCamelCase :Tuple = tax_relpos_bias_lookup(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , '''decoder''' ).T UpperCamelCase :Union[str, Any] = old['''decoder/decoder_norm/scale'''] # LM Head (only in v1.1 checkpoints, in v1.0 embeddings are used instead) if "decoder/logits_dense/kernel" in old: UpperCamelCase :Union[str, Any] = old['''decoder/logits_dense/kernel'''].T return new def _A ( SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : bool ): UpperCamelCase :Optional[int] = collections.OrderedDict([(k, torch.from_numpy(v.copy() )) for (k, v) in converted_params.items()] ) # Add what is missing. 
if "encoder.embed_tokens.weight" not in state_dict: UpperCamelCase :Dict = state_dict['''shared.weight'''] if not is_encoder_only: if "decoder.embed_tokens.weight" not in state_dict: UpperCamelCase :Dict = state_dict['''shared.weight'''] if "lm_head.weight" not in state_dict: # For old 1.0 models. print('''Using shared word embeddings as lm_head.''' ) UpperCamelCase :List[Any] = state_dict['''shared.weight'''] return state_dict def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Dict = checkpoints.load_tax_checkpoint(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :str = convert_tax_to_pytorch( SCREAMING_SNAKE_CASE__ , num_layers=config.num_layers , is_encoder_only=SCREAMING_SNAKE_CASE__ , scalable_attention=SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Dict = make_state_dict(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) model.load_state_dict(SCREAMING_SNAKE_CASE__ , strict=SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : bool = False , SCREAMING_SNAKE_CASE__ : bool = False , ): UpperCamelCase :Any = MTaConfig.from_json_file(SCREAMING_SNAKE_CASE__ ) print(F'''Building PyTorch model from configuration: {config}''' ) # Non-v1.1 checkpoints could also use T5Model, but this works for all. # The v1.0 checkpoints will simply have an LM head that is the word embeddings. if is_encoder_only: UpperCamelCase :List[str] = UMTaEncoderModel(SCREAMING_SNAKE_CASE__ ) else: UpperCamelCase :Any = UMTaForConditionalGeneration(SCREAMING_SNAKE_CASE__ ) # Load weights from tf checkpoint load_tax_weights_in_ta(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # Save pytorch-model print(F'''Save PyTorch model to {pytorch_dump_path}''' ) model.save_pretrained(SCREAMING_SNAKE_CASE__ ) # Verify that we can load the checkpoint. model.from_pretrained(SCREAMING_SNAKE_CASE__ ) print('''Done''' ) if __name__ == "__main__": __snake_case = argparse.ArgumentParser(description="""Converts a native T5X checkpoint into a PyTorch checkpoint.""") # Required parameters parser.add_argument( """--t5x_checkpoint_path""", default=None, type=str, required=True, help="""Path to the T5X checkpoint.""" ) parser.add_argument( """--config_file""", default=None, type=str, required=True, help="""The config json file corresponding to the pre-trained T5 model.\nThis specifies the model architecture.""", ) parser.add_argument( """--pytorch_dump_path""", default=None, type=str, required=True, help="""Path to the output PyTorch model.""" ) parser.add_argument( """--is_encoder_only""", action="""store_true""", help="""Check if the model is encoder-decoder model""", default=False ) parser.add_argument( """--scalable_attention""", action="""store_true""", help="""Whether the model uses scaled attention (umt5 model)""", default=False, ) __snake_case = parser.parse_args() convert_tax_checkpoint_to_pytorch( args.tax_checkpoint_path, args.config_file, args.pytorch_dump_path, args.is_encoder_only, args.scalable_attention, )
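# Example invocation; the module name and all paths below are placeholders, while
# the flags match the argparse definition above:
#
#   python convert_t5x_checkpoint_to_pytorch.py \
#       --t5x_checkpoint_path /path/to/t5x/checkpoint_1000000 \
#       --config_file /path/to/config.json \
#       --pytorch_dump_path /path/to/pytorch_model \
#       --scalable_attention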
import unittest from transformers import TrOCRConfig from transformers.testing_utils import is_torch_available, require_torch, torch_device from ...generation.test_utils import GenerationTesterMixin from ...test_configuration_common import ConfigTester from ...test_modeling_common import ModelTesterMixin, ids_tensor from ...test_pipeline_mixin import PipelineTesterMixin if is_torch_available(): import torch from transformers.models.trocr.modeling_trocr import TrOCRDecoder, TrOCRForCausalLM @require_torch class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=99 , SCREAMING_SNAKE_CASE_=13 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=7 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=False , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=30 , SCREAMING_SNAKE_CASE_=0 , SCREAMING_SNAKE_CASE_=1 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=None , ) -> List[Any]: UpperCamelCase :Union[str, Any] = parent UpperCamelCase :Dict = batch_size UpperCamelCase :Dict = decoder_seq_length # For common tests UpperCamelCase :str = self.decoder_seq_length UpperCamelCase :Optional[Any] = is_training UpperCamelCase :int = use_attention_mask UpperCamelCase :List[Any] = use_labels UpperCamelCase :Optional[Any] = vocab_size UpperCamelCase :List[Any] = d_model UpperCamelCase :List[str] = d_model UpperCamelCase :List[Any] = decoder_layers UpperCamelCase :Any = decoder_layers UpperCamelCase :Any = decoder_ffn_dim UpperCamelCase :Optional[int] = decoder_attention_heads UpperCamelCase :Dict = decoder_attention_heads UpperCamelCase :Optional[Any] = eos_token_id UpperCamelCase :List[str] = bos_token_id UpperCamelCase :Optional[int] = pad_token_id UpperCamelCase :List[str] = decoder_start_token_id UpperCamelCase :Optional[Any] = use_cache UpperCamelCase :Tuple = max_position_embeddings UpperCamelCase :Tuple = None UpperCamelCase :Optional[int] = decoder_seq_length UpperCamelCase :Any = 2 UpperCamelCase :int = 1 def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :int = ids_tensor([self.batch_size, self.decoder_seq_length] , self.vocab_size ) UpperCamelCase :int = None if self.use_attention_mask: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size, self.decoder_seq_length] , vocab_size=2 ) UpperCamelCase :Tuple = None if self.use_labels: UpperCamelCase :Any = ids_tensor([self.batch_size, self.decoder_seq_length] , self.vocab_size ) UpperCamelCase :Optional[Any] = TrOCRConfig( vocab_size=self.vocab_size , d_model=self.d_model , decoder_layers=self.decoder_layers , decoder_ffn_dim=self.decoder_ffn_dim , decoder_attention_heads=self.decoder_attention_heads , eos_token_id=self.eos_token_id , bos_token_id=self.bos_token_id , use_cache=self.use_cache , pad_token_id=self.pad_token_id , decoder_start_token_id=self.decoder_start_token_id , max_position_embeddings=self.max_position_embeddings , ) return (config, input_ids, attention_mask, lm_labels) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , ) -> int: UpperCamelCase :List[Any] = True UpperCamelCase :Any = TrOCRDecoder(config=SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ ).eval() UpperCamelCase :int = input_ids[:2] input_ids[input_ids == 0] += 1 # first forward pass UpperCamelCase :Optional[Any] = model(SCREAMING_SNAKE_CASE_ , use_cache=SCREAMING_SNAKE_CASE_ 
) UpperCamelCase :Tuple = model(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = model(SCREAMING_SNAKE_CASE_ , use_cache=SCREAMING_SNAKE_CASE_ ) self.parent.assertTrue(len(SCREAMING_SNAKE_CASE_ ) == len(SCREAMING_SNAKE_CASE_ ) ) self.parent.assertTrue(len(SCREAMING_SNAKE_CASE_ ) == len(SCREAMING_SNAKE_CASE_ ) + 1 ) UpperCamelCase :str = outputs['''past_key_values'''] # create hypothetical next token and extent to next_input_ids UpperCamelCase :Union[str, Any] = ids_tensor((2, 1) , config.vocab_size - 1 ) + 1 # append to next input_ids and UpperCamelCase :Optional[int] = torch.cat([input_ids, next_tokens] , dim=-1 ) UpperCamelCase :str = model(SCREAMING_SNAKE_CASE_ )['''last_hidden_state'''] UpperCamelCase :int = model(SCREAMING_SNAKE_CASE_ , past_key_values=SCREAMING_SNAKE_CASE_ )['''last_hidden_state'''] # select random slice UpperCamelCase :Any = ids_tensor((1,) , output_from_past.shape[-1] ).item() UpperCamelCase :Tuple = output_from_no_past[:, next_input_ids.shape[-1] - 1, random_slice_idx].detach() UpperCamelCase :Optional[int] = output_from_past[:, 0, random_slice_idx].detach() # test that outputs are equal for slice assert torch.allclose(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=1e-3 ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Optional[int] = self.prepare_config_and_inputs() UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Union[str, Any] = config_and_inputs UpperCamelCase :List[str] = {'''input_ids''': input_ids, '''attention_mask''': attention_mask} return config, inputs_dict @require_torch class UpperCAmelCase_ ( lowercase, lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Optional[Any] =(TrOCRDecoder, TrOCRForCausalLM) if is_torch_available() else () UpperCamelCase_ : Any =(TrOCRForCausalLM,) if is_torch_available() else () UpperCamelCase_ : List[Any] ={'text-generation': TrOCRForCausalLM} if is_torch_available() else {} UpperCamelCase_ : Union[str, Any] =True UpperCamelCase_ : Any =False def UpperCAmelCase ( self ) -> Any: UpperCamelCase :Union[str, Any] = TrOCRStandaloneDecoderModelTester(self , is_training=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Tuple = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Any: pass def UpperCAmelCase ( self ) -> List[str]: pass def UpperCAmelCase ( self ) -> List[str]: pass def UpperCAmelCase ( self ) -> Dict: self.config_tester.run_common_tests() def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_decoder_model_past(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[Any]: return @unittest.skip('''The model doesn\'t support left padding''' ) # and it's not used enough to be worth fixing :) def UpperCAmelCase ( self ) -> Optional[int]: pass
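# How the decoder exercised above is consumed in practice: TrOCRForCausalLM is
# the text side of a VisionEncoderDecoderModel. A minimal OCR sketch against the
# standard handwritten checkpoint; the blank image is a placeholder input.
from PIL import Image
from transformers import TrOCRProcessor, VisionEncoderDecoderModel

processor = TrOCRProcessor.from_pretrained("microsoft/trocr-base-handwritten")
model = VisionEncoderDecoderModel.from_pretrained("microsoft/trocr-base-handwritten")
pixel_values = processor(images=Image.new("RGB", (384, 384), "white"), return_tensors="pt").pixel_values
generated_ids = model.generate(pixel_values)
print(processor.batch_decode(generated_ids, skip_special_tokens=True)[0])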
def _A ( SCREAMING_SNAKE_CASE__ : list[int] , SCREAMING_SNAKE_CASE__ : list[int] ):
    UpperCamelCase :Tuple = len(SCREAMING_SNAKE_CASE__ )
    print('''The following activities are selected:''' )
    # The first activity is always selected
    UpperCamelCase :Dict = 0
    print(SCREAMING_SNAKE_CASE__ , end=''',''' )
    # Consider rest of the activities
    for j in range(SCREAMING_SNAKE_CASE__ ):
        # If this activity has start time greater than
        # or equal to the finish time of previously
        # selected activity, then select it
        if start[j] >= finish[i]:
            print(SCREAMING_SNAKE_CASE__ , end=''',''' )
            UpperCamelCase :List[str] = j


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    __snake_case = [1, 3, 0, 5, 8, 5]
    __snake_case = [2, 4, 6, 7, 9, 9]
    print_max_activities(start, finish)
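# Tracing the greedy selection on the arrays above: activity 0 (finish 2) is
# taken first; then 3 >= 2 selects 1, 5 >= 4 selects 3, and 8 >= 7 selects 4,
# so the script prints:
#
#   The following activities are selected:
#   0,1,3,4,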
import unittest import numpy as np import torch from diffusers import DDIMPipeline, DDIMScheduler, UNetaDModel from diffusers.utils.testing_utils import enable_full_determinism, require_torch_gpu, slow, torch_device from ..pipeline_params import UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS, UNCONDITIONAL_IMAGE_GENERATION_PARAMS from ..test_pipelines_common import PipelineTesterMixin enable_full_determinism() class UpperCAmelCase_ ( lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : int =DDIMPipeline UpperCamelCase_ : str =UNCONDITIONAL_IMAGE_GENERATION_PARAMS UpperCamelCase_ : str =PipelineTesterMixin.required_optional_params - { 'num_images_per_prompt', 'latents', 'callback', 'callback_steps', } UpperCamelCase_ : Optional[Any] =UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS UpperCamelCase_ : List[str] =False def UpperCAmelCase ( self ) -> Any: torch.manual_seed(0 ) UpperCamelCase :Optional[int] = UNetaDModel( block_out_channels=(32, 64) , layers_per_block=2 , sample_size=32 , in_channels=3 , out_channels=3 , down_block_types=('''DownBlock2D''', '''AttnDownBlock2D''') , up_block_types=('''AttnUpBlock2D''', '''UpBlock2D''') , ) UpperCamelCase :Dict = DDIMScheduler() UpperCamelCase :Any = {'''unet''': unet, '''scheduler''': scheduler} return components def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=0 ) -> Any: if str(SCREAMING_SNAKE_CASE_ ).startswith('''mps''' ): UpperCamelCase :List[Any] = torch.manual_seed(SCREAMING_SNAKE_CASE_ ) else: UpperCamelCase :List[Any] = torch.Generator(device=SCREAMING_SNAKE_CASE_ ).manual_seed(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = { '''batch_size''': 1, '''generator''': generator, '''num_inference_steps''': 2, '''output_type''': '''numpy''', } return inputs def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Optional[int] = '''cpu''' UpperCamelCase :Union[str, Any] = self.get_dummy_components() UpperCamelCase :Optional[Any] = self.pipeline_class(**SCREAMING_SNAKE_CASE_ ) pipe.to(SCREAMING_SNAKE_CASE_ ) pipe.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = self.get_dummy_inputs(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = pipe(**SCREAMING_SNAKE_CASE_ ).images UpperCamelCase :str = image[0, -3:, -3:, -1] self.assertEqual(image.shape , (1, 32, 32, 3) ) UpperCamelCase :Tuple = np.array( [1.000e00, 5.717e-01, 4.717e-01, 1.000e00, 0.000e00, 1.000e00, 3.000e-04, 0.000e00, 9.000e-04] ) UpperCamelCase :List[str] = np.abs(image_slice.flatten() - expected_slice ).max() self.assertLessEqual(SCREAMING_SNAKE_CASE_ , 1e-3 ) def UpperCAmelCase ( self ) -> int: super().test_dict_tuple_outputs_equivalent(expected_max_difference=3e-3 ) def UpperCAmelCase ( self ) -> Optional[int]: super().test_save_load_local(expected_max_difference=3e-3 ) def UpperCAmelCase ( self ) -> Any: super().test_save_load_optional_components(expected_max_difference=3e-3 ) def UpperCAmelCase ( self ) -> str: super().test_inference_batch_single_identical(expected_max_diff=3e-3 ) @slow @require_torch_gpu class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :int = '''google/ddpm-cifar10-32''' UpperCamelCase :Union[str, Any] = UNetaDModel.from_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = DDIMScheduler() UpperCamelCase :Tuple = DDIMPipeline(unet=SCREAMING_SNAKE_CASE_ , scheduler=SCREAMING_SNAKE_CASE_ ) ddim.to(SCREAMING_SNAKE_CASE_ ) ddim.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ ) 
UpperCamelCase :Dict = torch.manual_seed(0 ) UpperCamelCase :Optional[int] = ddim(generator=SCREAMING_SNAKE_CASE_ , eta=0.0 , output_type='''numpy''' ).images UpperCamelCase :int = image[0, -3:, -3:, -1] assert image.shape == (1, 32, 32, 3) UpperCamelCase :Tuple = np.array([0.1723, 0.1617, 0.1600, 0.1626, 0.1497, 0.1513, 0.1505, 0.1442, 0.1453] ) assert np.abs(image_slice.flatten() - expected_slice ).max() < 1e-2 def UpperCAmelCase ( self ) -> int: UpperCamelCase :Optional[Any] = '''google/ddpm-ema-bedroom-256''' UpperCamelCase :Any = UNetaDModel.from_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = DDIMScheduler.from_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = DDIMPipeline(unet=SCREAMING_SNAKE_CASE_ , scheduler=SCREAMING_SNAKE_CASE_ ) ddpm.to(SCREAMING_SNAKE_CASE_ ) ddpm.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = torch.manual_seed(0 ) UpperCamelCase :Optional[int] = ddpm(generator=SCREAMING_SNAKE_CASE_ , output_type='''numpy''' ).images UpperCamelCase :Optional[int] = image[0, -3:, -3:, -1] assert image.shape == (1, 256, 256, 3) UpperCamelCase :Dict = np.array([0.0060, 0.0201, 0.0344, 0.0024, 0.0018, 0.0002, 0.0022, 0.0000, 0.0069] ) assert np.abs(image_slice.flatten() - expected_slice ).max() < 1e-2
import string


def decrypt(message: str) -> None:
    """Brute-force a Caesar cipher by printing the decryption under every key."""
    for key in range(len(string.ascii_uppercase)):
        translated = ""
        for symbol in message:
            if symbol in string.ascii_uppercase:
                num = string.ascii_uppercase.find(symbol)
                num = num - key
                if num < 0:
                    num = num + len(string.ascii_uppercase)
                translated = translated + string.ascii_uppercase[num]
            else:
                translated = translated + symbol
        print(f"Decryption using Key #{key}: {translated}")


def main() -> None:
    message = input("Encrypted message: ")
    message = message.upper()
    decrypt(message)


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    main()
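A quick illustration of the brute-force decryption, assuming decrypt is imported from this module. "KHOOR" is "HELLO" shifted by 3, so the key-3 line is the readable one among the 26 candidates printed:

    decrypt("KHOOR")
    # ...
    # Decryption using Key #3: HELLO
    # ...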
import time from contextlib import contextmanager from pathlib import Path import pytest import requests from huggingface_hub.hf_api import HfApi, HfFolder __snake_case = """__DUMMY_TRANSFORMERS_USER__""" __snake_case = """Dummy User""" __snake_case = """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""" __snake_case = """https://hub-ci.huggingface.co""" __snake_case = CI_HUB_ENDPOINT + """/datasets/{repo_id}/resolve/{revision}/{path}""" __snake_case = CI_HUB_ENDPOINT + """/{repo_id}/resolve/{revision}/{filename}""" __snake_case = Path("""~/.huggingface/hub_ci_token""").expanduser() @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): monkeypatch.setattr( '''huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any ): monkeypatch.setattr('''datasets.config.HF_ENDPOINT''' , SCREAMING_SNAKE_CASE__ ) monkeypatch.setattr('''datasets.config.HUB_DATASETS_URL''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : List[str] ): monkeypatch.setattr('''huggingface_hub.hf_api.HfFolder.path_token''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : List[Any] ): HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield HfFolder.delete_token() @pytest.fixture(scope='''session''' ) def _A ( ): return HfApi(endpoint=SCREAMING_SNAKE_CASE__ ) @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi ): UpperCamelCase :Tuple = HfFolder.get_token() HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield CI_HUB_USER_TOKEN if previous_token is not None: HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Dict ): def _cleanup_repo(SCREAMING_SNAKE_CASE__ : Tuple ): hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) return _cleanup_repo @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): @contextmanager def _temporary_repo(SCREAMING_SNAKE_CASE__ : Any ): try: yield repo_id finally: cleanup_repo(SCREAMING_SNAKE_CASE__ ) return _temporary_repo @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Optional[Any] ): UpperCamelCase :Union[str, Any] = F'''repo_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :int = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data/text_data.txt''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Dict ): return hf_private_dataset_repo_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Optional[int] = F'''repo_zipped_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Any = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , 
private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str] ): return hf_private_dataset_repo_zipped_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] ): UpperCamelCase :Dict = F'''repo_zipped_img_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Dict = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Tuple ): return hf_private_dataset_repo_zipped_img_data_
import argparse import pickle import numpy as np import torch from torch import nn from transformers import ReformerConfig, ReformerModelWithLMHead from transformers.utils import logging logging.set_verbosity_info() def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Union[str, Any]=None ): # set parameter of one layer assert torch_layer.weight.shape == weight.shape, F'''{torch_layer} layer.weight does not match''' UpperCamelCase :str = nn.Parameter(SCREAMING_SNAKE_CASE__ ) if bias is not None: assert torch_layer.bias.shape == bias.shape, F'''{torch_layer} layer.bias does not match''' UpperCamelCase :Tuple = nn.Parameter(SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : Tuple , SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : int ): # set torch weights for 1-to-1 comparison UpperCamelCase :Optional[int] = np.asarray(weights[0] ) UpperCamelCase :Tuple = np.asarray(weights[1] ) UpperCamelCase :Optional[Any] = np.asarray(weights[2] ) set_param( torch_layer.self_attention.query_key , torch.tensor(SCREAMING_SNAKE_CASE__ ).transpose(1 , 2 ).contiguous().view(-1 , SCREAMING_SNAKE_CASE__ ) , ) set_param( torch_layer.self_attention.value , torch.tensor(SCREAMING_SNAKE_CASE__ ).transpose(1 , 2 ).contiguous().view(-1 , SCREAMING_SNAKE_CASE__ ) , ) set_param( torch_layer.output.dense , torch.tensor(SCREAMING_SNAKE_CASE__ ).view(-1 , SCREAMING_SNAKE_CASE__ ).contiguous().transpose(0 , 1 ) , ) def _A ( SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Tuple ): # set torch weights for 1-to-1 comparison UpperCamelCase :int = np.asarray(weights[0] ) UpperCamelCase :Any = np.asarray(weights[1] ) UpperCamelCase :List[str] = np.asarray(weights[2] ) UpperCamelCase :List[str] = np.asarray(weights[3] ) set_param( torch_layer.self_attention.query , torch.tensor(SCREAMING_SNAKE_CASE__ ).transpose(1 , 2 ).contiguous().view(-1 , SCREAMING_SNAKE_CASE__ ) , ) set_param( torch_layer.self_attention.key , torch.tensor(SCREAMING_SNAKE_CASE__ ).transpose(1 , 2 ).contiguous().view(-1 , SCREAMING_SNAKE_CASE__ ) , ) set_param( torch_layer.self_attention.value , torch.tensor(SCREAMING_SNAKE_CASE__ ).transpose(1 , 2 ).contiguous().view(-1 , SCREAMING_SNAKE_CASE__ ) , ) set_param( torch_layer.output.dense , torch.tensor(SCREAMING_SNAKE_CASE__ ).view(-1 , SCREAMING_SNAKE_CASE__ ).contiguous().transpose(0 , 1 ) , ) def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : List[str] ): # layernorm 1 UpperCamelCase :List[Any] = weights[0][0][0] UpperCamelCase :Union[str, Any] = np.asarray(layer_norm_a[0] ) UpperCamelCase :str = np.asarray(layer_norm_a[1] ) set_param( torch_block.attention.layer_norm , torch.tensor(SCREAMING_SNAKE_CASE__ ) , torch.tensor(SCREAMING_SNAKE_CASE__ ) , ) # lsh weights + output UpperCamelCase :int = weights[0][1] if len(SCREAMING_SNAKE_CASE__ ) < 4: set_layer_weights_in_torch_lsh(SCREAMING_SNAKE_CASE__ , torch_block.attention , SCREAMING_SNAKE_CASE__ ) else: set_layer_weights_in_torch_local(SCREAMING_SNAKE_CASE__ , torch_block.attention , SCREAMING_SNAKE_CASE__ ) # intermediate weighs UpperCamelCase :Dict = weights[2][0][1][2] # Chunked Feed Forward if len(SCREAMING_SNAKE_CASE__ ) == 4: UpperCamelCase :List[str] = intermediate_weights[2] # layernorm 2 UpperCamelCase :Dict = np.asarray(intermediate_weights[0][0] ) UpperCamelCase :Optional[Any] = np.asarray(intermediate_weights[0][1] ) set_param( torch_block.feed_forward.layer_norm , 
torch.tensor(SCREAMING_SNAKE_CASE__ ) , torch.tensor(SCREAMING_SNAKE_CASE__ ) , ) # intermediate dense UpperCamelCase :str = np.asarray(intermediate_weights[1][0] ) UpperCamelCase :Tuple = np.asarray(intermediate_weights[1][1] ) set_param( torch_block.feed_forward.dense.dense , torch.tensor(SCREAMING_SNAKE_CASE__ ).transpose(0 , 1 ).contiguous() , torch.tensor(SCREAMING_SNAKE_CASE__ ) , ) # intermediate out UpperCamelCase :List[str] = np.asarray(intermediate_weights[4][0] ) UpperCamelCase :Optional[int] = np.asarray(intermediate_weights[4][1] ) set_param( torch_block.feed_forward.output.dense , torch.tensor(SCREAMING_SNAKE_CASE__ ).transpose(0 , 1 ).contiguous() , torch.tensor(SCREAMING_SNAKE_CASE__ ) , ) def _A ( SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[int] ): # reformer model UpperCamelCase :Any = torch_model.reformer # word embeds UpperCamelCase :List[str] = np.asarray(weights[1] ) set_param( torch_model_reformer.embeddings.word_embeddings , torch.tensor(SCREAMING_SNAKE_CASE__ ) , ) if isinstance(weights[3] , SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Union[str, Any] = torch_model_reformer.embeddings.position_embeddings for emb_idx in range(len(position_embeddings.weights ) ): UpperCamelCase :Any = np.asarray(weights[3][emb_idx][0] ) assert ( position_embeddings.weights[emb_idx].shape == emb_weights.shape ), F'''{position_embeddings[emb_idx]} emb does not match''' UpperCamelCase :List[Any] = nn.Parameter(torch.tensor(SCREAMING_SNAKE_CASE__ ) ) UpperCamelCase :str = weights[5] assert len(torch_model_reformer.encoder.layers ) * 4 == len( SCREAMING_SNAKE_CASE__ ), "HF and trax model do not have the same number of layers" for layer_idx, layer in enumerate(torch_model_reformer.encoder.layers ): UpperCamelCase :List[Any] = trax_layer_weights[4 * layer_idx : 4 * (layer_idx + 1)] set_block_weights_in_torch(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # output layer norm UpperCamelCase :Union[str, Any] = np.asarray(weights[7][0] ) UpperCamelCase :Union[str, Any] = np.asarray(weights[7][1] ) set_param( torch_model_reformer.encoder.layer_norm , torch.tensor(SCREAMING_SNAKE_CASE__ ) , torch.tensor(SCREAMING_SNAKE_CASE__ ) , ) # output embeddings UpperCamelCase :Optional[int] = np.asarray(weights[9][0] ) UpperCamelCase :str = np.asarray(weights[9][1] ) set_param( torch_model.lm_head.decoder , torch.tensor(SCREAMING_SNAKE_CASE__ ).transpose(0 , 1 ).contiguous() , torch.tensor(SCREAMING_SNAKE_CASE__ ) , ) def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : int ): # Initialise PyTorch model UpperCamelCase :Union[str, Any] = ReformerConfig.from_json_file(SCREAMING_SNAKE_CASE__ ) print(F'''Building PyTorch model from configuration: {config}''' ) UpperCamelCase :int = ReformerModelWithLMHead(SCREAMING_SNAKE_CASE__ ) with open(SCREAMING_SNAKE_CASE__ , '''rb''' ) as f: UpperCamelCase :Optional[int] = pickle.load(SCREAMING_SNAKE_CASE__ )['''weights'''] set_model_weights_in_torch(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , config.hidden_size ) # Save pytorch-model print(F'''Save PyTorch model to {pytorch_dump_path}''' ) torch.save(model.state_dict() , SCREAMING_SNAKE_CASE__ ) if __name__ == "__main__": __snake_case = argparse.ArgumentParser() # Required parameters parser.add_argument( """--trax_model_pkl_path""", default=None, type=str, required=True, help="""Path to the TensorFlow checkpoint path.""" ) parser.add_argument( """--config_file""", default=None, 
type=str, required=True, help=( """The config json file corresponding to the pre-trained Reformer model. \n""" """This specifies the model architecture.""" ), ) parser.add_argument( """--pytorch_dump_path""", default=None, type=str, required=True, help="""Path to the output PyTorch model.""" ) __snake_case = parser.parse_args() convert_trax_checkpoint_to_pytorch(args.trax_model_pkl_path, args.config_file, args.pytorch_dump_path)
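The converter is driven from the command line via the three required arguments defined above. A sketch of an invocation, where the script name and all file paths are placeholders rather than values taken from this file:

    python convert_reformer_trax_checkpoint_to_pytorch.py \
        --trax_model_pkl_path ./reformer_trax.pkl \
        --config_file ./reformer_config.json \
        --pytorch_dump_path ./pytorch_model.bin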
from __future__ import annotations import unittest from transformers import RoFormerConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask from ...test_pipeline_mixin import PipelineTesterMixin if is_tf_available(): import tensorflow as tf from transformers import ( TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForMultipleChoice, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerModel, ) from transformers.models.roformer.modeling_tf_roformer import ( TFRoFormerSelfAttention, TFRoFormerSinusoidalPositionalEmbedding, ) class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=13 , SCREAMING_SNAKE_CASE_=7 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=99 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=37 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=512 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=None , ) -> Dict: UpperCamelCase :Any = parent UpperCamelCase :Dict = 13 UpperCamelCase :List[Any] = 7 UpperCamelCase :List[Any] = True UpperCamelCase :Dict = True UpperCamelCase :Union[str, Any] = True UpperCamelCase :List[str] = True UpperCamelCase :Dict = 99 UpperCamelCase :Any = 32 UpperCamelCase :Tuple = 2 UpperCamelCase :Union[str, Any] = 4 UpperCamelCase :List[str] = 37 UpperCamelCase :Dict = '''gelu''' UpperCamelCase :Dict = 0.1 UpperCamelCase :Tuple = 0.1 UpperCamelCase :Dict = 512 UpperCamelCase :str = 16 UpperCamelCase :Optional[Any] = 2 UpperCamelCase :Dict = 0.02 UpperCamelCase :Optional[int] = 3 UpperCamelCase :int = 4 UpperCamelCase :Dict = None def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size, self.seq_length] , self.vocab_size ) UpperCamelCase :Optional[int] = None if self.use_input_mask: UpperCamelCase :Dict = random_attention_mask([self.batch_size, self.seq_length] ) UpperCamelCase :Dict = None if self.use_token_type_ids: UpperCamelCase :List[Any] = ids_tensor([self.batch_size, self.seq_length] , self.type_vocab_size ) UpperCamelCase :Union[str, Any] = None UpperCamelCase :Optional[int] = None UpperCamelCase :Any = None if self.use_labels: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size] , self.type_sequence_label_size ) UpperCamelCase :Optional[Any] = ids_tensor([self.batch_size, self.seq_length] , self.num_labels ) UpperCamelCase :int = ids_tensor([self.batch_size] , self.num_choices ) UpperCamelCase :Union[str, Any] = RoFormerConfig( vocab_size=self.vocab_size , hidden_size=self.hidden_size , num_hidden_layers=self.num_hidden_layers , num_attention_heads=self.num_attention_heads , intermediate_size=self.intermediate_size , hidden_act=self.hidden_act , hidden_dropout_prob=self.hidden_dropout_prob , attention_probs_dropout_prob=self.attention_probs_dropout_prob , max_position_embeddings=self.max_position_embeddings , type_vocab_size=self.type_vocab_size , initializer_range=self.initializer_range , return_dict=SCREAMING_SNAKE_CASE_ , ) return config, input_ids, token_type_ids, input_mask, sequence_labels, 
token_labels, choice_labels def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: UpperCamelCase :Optional[Any] = TFRoFormerModel(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = {'''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids} UpperCamelCase :int = [input_ids, input_mask] UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.last_hidden_state.shape , (self.batch_size, self.seq_length, self.hidden_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :List[Any] = True UpperCamelCase :Union[str, Any] = TFRoFormerForCausalLM(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Any = model(SCREAMING_SNAKE_CASE_ )['''logits'''] self.parent.assertListEqual( list(prediction_scores.numpy().shape ) , [self.batch_size, self.seq_length, self.vocab_size] ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :str = TFRoFormerForMaskedLM(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.seq_length, self.vocab_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> List[str]: UpperCamelCase :List[Any] = self.num_labels UpperCamelCase :int = TFRoFormerForSequenceClassification(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Optional[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_labels) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :List[Any] = self.num_choices UpperCamelCase :Any = TFRoFormerForMultipleChoice(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :int = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :Any = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :List[Any] = { '''input_ids''': multiple_choice_inputs_ids, '''attention_mask''': multiple_choice_input_mask, '''token_type_ids''': multiple_choice_token_type_ids, } UpperCamelCase :Dict = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_choices) ) def UpperCAmelCase ( self , 
SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :Union[str, Any] = self.num_labels UpperCamelCase :Dict = TFRoFormerForTokenClassification(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Tuple = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.seq_length, self.num_labels) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :Union[str, Any] = TFRoFormerForQuestionAnswering(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.start_logits.shape , (self.batch_size, self.seq_length) ) self.parent.assertEqual(result.end_logits.shape , (self.batch_size, self.seq_length) ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = self.prepare_config_and_inputs() ( ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ) :Union[str, Any] = config_and_inputs UpperCamelCase :Any = {'''input_ids''': input_ids, '''token_type_ids''': token_type_ids, '''attention_mask''': input_mask} return config, inputs_dict @require_tf class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : str =( ( TFRoFormerModel, TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerForMultipleChoice, ) if is_tf_available() else () ) UpperCamelCase_ : Tuple =( { 'feature-extraction': TFRoFormerModel, 'fill-mask': TFRoFormerForMaskedLM, 'question-answering': TFRoFormerForQuestionAnswering, 'text-classification': TFRoFormerForSequenceClassification, 'text-generation': TFRoFormerForCausalLM, 'token-classification': TFRoFormerForTokenClassification, 'zero-shot': TFRoFormerForSequenceClassification, } if is_tf_available() else {} ) UpperCamelCase_ : Tuple =False UpperCamelCase_ : Optional[Any] =False def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: if pipeline_test_casse_name == "TextGenerationPipelineTests": return True return False def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Any = TFRoFormerModelTester(self ) UpperCamelCase :Optional[int] = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , hidden_size=37 ) def UpperCAmelCase ( self ) -> List[str]: self.config_tester.run_common_tests() def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_masked_lm(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> str: UpperCamelCase :Union[str, Any] = 
self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_lm_head(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Optional[Any] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_multiple_choice(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_question_answering(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_sequence_classification(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_token_classification(*SCREAMING_SNAKE_CASE_ ) @slow def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = TFRoFormerModel.from_pretrained('''junnyu/roformer_chinese_base''' ) self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" @slow def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Tuple = TFRoFormerForMaskedLM.from_pretrained('''junnyu/roformer_chinese_base''' ) UpperCamelCase :Union[str, Any] = tf.constant([[0, 1, 2, 3, 4, 5]] ) UpperCamelCase :str = model(SCREAMING_SNAKE_CASE_ )[0] # TODO Replace vocab size UpperCamelCase :Tuple = 5_0000 UpperCamelCase :Optional[Any] = [1, 6, vocab_size] self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ ) print(output[:, :3, :3] ) # TODO Replace values below with what was printed above. UpperCamelCase :int = tf.constant( [ [ [-0.1205_3341, -1.026_4901, 0.2922_1946], [-1.513_3783, 0.19_7433, 0.1519_0607], [-5.013_5403, -3.90_0256, -0.8403_8764], ] ] ) tf.debugging.assert_near(output[:, :3, :3] , SCREAMING_SNAKE_CASE_ , atol=1e-4 ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Optional[int] =1E-4 def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :str = tf.constant([[4, 10]] ) UpperCamelCase :List[Any] = TFRoFormerSinusoidalPositionalEmbedding(num_positions=6 , embedding_dim=6 ) UpperCamelCase :str = emba(input_ids.shape ) UpperCamelCase :List[str] = tf.constant( [[0.0000, 0.0000, 0.0000, 1.0000, 1.0000, 1.0000], [0.8415, 0.0464, 0.0022, 0.5403, 0.9989, 1.0000]] ) tf.debugging.assert_near(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Dict = tf.constant( [ [0.0000, 0.0000, 0.0000, 0.0000, 0.0000], [0.8415, 0.8219, 0.8020, 0.7819, 0.7617], [0.9093, 0.9364, 0.9581, 0.9749, 0.9870], ] ) UpperCamelCase :Dict = TFRoFormerSinusoidalPositionalEmbedding(num_positions=512 , embedding_dim=512 ) emba([2, 16, 512] ) UpperCamelCase :Any = emba.weight[:3, :5] tf.debugging.assert_near(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : List[Any] =1E-4 def UpperCAmelCase ( self ) -> List[str]: # 2,12,16,64 UpperCamelCase :List[Any] = tf.reshape(tf.range(2 * 12 * 16 * 64 , dtype=tf.floataa ) , shape=(2, 12, 16, 64) ) / 100 UpperCamelCase :List[Any] = -tf.reshape(tf.range(2 * 12 * 16 * 64 , dtype=tf.floataa ) , shape=(2, 12, 16, 64) ) / 100 UpperCamelCase :List[Any] = TFRoFormerSinusoidalPositionalEmbedding(num_positions=32 , embedding_dim=64 ) UpperCamelCase :int = 
embed_positions([2, 16, 768] )[None, None, :, :] UpperCamelCase , UpperCamelCase :List[str] = TFRoFormerSelfAttention.apply_rotary_position_embeddings( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = tf.constant( [ [0.0000, 0.0100, 0.0200, 0.0300, 0.0400, 0.0500, 0.0600, 0.0700], [-0.2012, 0.8897, 0.0263, 0.9401, 0.2074, 0.9463, 0.3481, 0.9343], [-1.7057, 0.6271, -1.2145, 1.3897, -0.6303, 1.7647, -0.1173, 1.8985], [-2.1731, -1.6397, -2.7358, 0.2854, -2.1840, 1.7183, -1.3018, 2.4871], [0.2717, -3.6173, -2.9206, -2.1988, -3.6638, 0.3858, -2.9155, 2.2980], [3.9859, -2.1580, -0.7984, -4.4904, -4.1181, -2.0252, -4.4782, 1.1253], ] ) UpperCamelCase :Optional[int] = tf.constant( [ [0.0000, -0.0100, -0.0200, -0.0300, -0.0400, -0.0500, -0.0600, -0.0700], [0.2012, -0.8897, -0.0263, -0.9401, -0.2074, -0.9463, -0.3481, -0.9343], [1.7057, -0.6271, 1.2145, -1.3897, 0.6303, -1.7647, 0.1173, -1.8985], [2.1731, 1.6397, 2.7358, -0.2854, 2.1840, -1.7183, 1.3018, -2.4871], [-0.2717, 3.6173, 2.9206, 2.1988, 3.6638, -0.3858, 2.9155, -2.2980], [-3.9859, 2.1580, 0.7984, 4.4904, 4.1181, 2.0252, 4.4782, -1.1253], ] ) tf.debugging.assert_near(query_layer[0, 0, :6, :8] , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) tf.debugging.assert_near(key_layer[0, 0, :6, :8] , SCREAMING_SNAKE_CASE_ , atol=self.tolerance )
import unittest import numpy as np import torch from torch import nn from transformers import ( CLIPImageProcessor, CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokenizer, CLIPVisionConfig, CLIPVisionModelWithProjection, ) from diffusers import KandinskyVaaPriorPipeline, PriorTransformer, UnCLIPScheduler from diffusers.utils import torch_device from diffusers.utils.testing_utils import enable_full_determinism, skip_mps from ..test_pipelines_common import PipelineTesterMixin enable_full_determinism() class UpperCAmelCase_ ( lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : List[Any] =KandinskyVaaPriorPipeline UpperCamelCase_ : Union[str, Any] =['prompt'] UpperCamelCase_ : int =['prompt', 'negative_prompt'] UpperCamelCase_ : str =[ 'num_images_per_prompt', 'generator', 'num_inference_steps', 'latents', 'negative_prompt', 'guidance_scale', 'output_type', 'return_dict', ] UpperCamelCase_ : Union[str, Any] =False @property def UpperCAmelCase ( self ) -> Optional[int]: return 32 @property def UpperCAmelCase ( self ) -> Any: return 32 @property def UpperCAmelCase ( self ) -> Optional[int]: return self.time_input_dim @property def UpperCAmelCase ( self ) -> List[str]: return self.time_input_dim * 4 @property def UpperCAmelCase ( self ) -> Optional[int]: return 100 @property def UpperCAmelCase ( self ) -> str: UpperCamelCase :int = CLIPTokenizer.from_pretrained('''hf-internal-testing/tiny-random-clip''' ) return tokenizer @property def UpperCAmelCase ( self ) -> Optional[Any]: torch.manual_seed(0 ) UpperCamelCase :Optional[int] = CLIPTextConfig( bos_token_id=0 , eos_token_id=2 , hidden_size=self.text_embedder_hidden_size , projection_dim=self.text_embedder_hidden_size , intermediate_size=37 , layer_norm_eps=1e-05 , num_attention_heads=4 , num_hidden_layers=5 , pad_token_id=1 , vocab_size=1000 , ) return CLIPTextModelWithProjection(SCREAMING_SNAKE_CASE_ ) @property def UpperCAmelCase ( self ) -> List[str]: torch.manual_seed(0 ) UpperCamelCase :int = { '''num_attention_heads''': 2, '''attention_head_dim''': 12, '''embedding_dim''': self.text_embedder_hidden_size, '''num_layers''': 1, } UpperCamelCase :Optional[Any] = PriorTransformer(**SCREAMING_SNAKE_CASE_ ) # clip_std and clip_mean is initialized to be 0 so PriorTransformer.post_process_latents will always return 0 - set clip_std to be 1 so it won't return 0 UpperCamelCase :Optional[Any] = nn.Parameter(torch.ones(model.clip_std.shape ) ) return model @property def UpperCAmelCase ( self ) -> int: torch.manual_seed(0 ) UpperCamelCase :Any = CLIPVisionConfig( hidden_size=self.text_embedder_hidden_size , image_size=224 , projection_dim=self.text_embedder_hidden_size , intermediate_size=37 , num_attention_heads=4 , num_channels=3 , num_hidden_layers=5 , patch_size=14 , ) UpperCamelCase :List[Any] = CLIPVisionModelWithProjection(SCREAMING_SNAKE_CASE_ ) return model @property def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Dict = CLIPImageProcessor( crop_size=224 , do_center_crop=SCREAMING_SNAKE_CASE_ , do_normalize=SCREAMING_SNAKE_CASE_ , do_resize=SCREAMING_SNAKE_CASE_ , image_mean=[0.4814_5466, 0.457_8275, 0.4082_1073] , image_std=[0.2686_2954, 0.2613_0258, 0.2757_7711] , resample=3 , size=224 , ) return image_processor def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Any = self.dummy_prior UpperCamelCase :Optional[Any] = self.dummy_image_encoder UpperCamelCase :Any = self.dummy_text_encoder UpperCamelCase :int = self.dummy_tokenizer UpperCamelCase :Union[str, Any] = self.dummy_image_processor 
UpperCamelCase :List[Any] = UnCLIPScheduler( variance_type='''fixed_small_log''' , prediction_type='''sample''' , num_train_timesteps=1000 , clip_sample=SCREAMING_SNAKE_CASE_ , clip_sample_range=10.0 , ) UpperCamelCase :Union[str, Any] = { '''prior''': prior, '''image_encoder''': image_encoder, '''text_encoder''': text_encoder, '''tokenizer''': tokenizer, '''scheduler''': scheduler, '''image_processor''': image_processor, } return components def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=0 ) -> Dict: if str(SCREAMING_SNAKE_CASE_ ).startswith('''mps''' ): UpperCamelCase :Optional[int] = torch.manual_seed(SCREAMING_SNAKE_CASE_ ) else: UpperCamelCase :Dict = torch.Generator(device=SCREAMING_SNAKE_CASE_ ).manual_seed(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = { '''prompt''': '''horse''', '''generator''': generator, '''guidance_scale''': 4.0, '''num_inference_steps''': 2, '''output_type''': '''np''', } return inputs def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :int = '''cpu''' UpperCamelCase :Union[str, Any] = self.get_dummy_components() UpperCamelCase :str = self.pipeline_class(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = pipe.to(SCREAMING_SNAKE_CASE_ ) pipe.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = pipe(**self.get_dummy_inputs(SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :Dict = output.image_embeds UpperCamelCase :Optional[int] = pipe( **self.get_dummy_inputs(SCREAMING_SNAKE_CASE_ ) , return_dict=SCREAMING_SNAKE_CASE_ , )[0] UpperCamelCase :Any = image[0, -10:] UpperCamelCase :str = image_from_tuple[0, -10:] assert image.shape == (1, 32) UpperCamelCase :Optional[Any] = np.array( [-0.0532, 1.7120, 0.3656, -1.0852, -0.8946, -1.1756, 0.4348, 0.2482, 0.5146, -0.1156] ) assert np.abs(image_slice.flatten() - expected_slice ).max() < 1e-2 assert np.abs(image_from_tuple_slice.flatten() - expected_slice ).max() < 1e-2 @skip_mps def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :str = torch_device == '''cpu''' UpperCamelCase :str = True UpperCamelCase :Optional[int] = False self._test_inference_batch_single_identical( test_max_difference=SCREAMING_SNAKE_CASE_ , relax_max_difference=SCREAMING_SNAKE_CASE_ , test_mean_pixel_difference=SCREAMING_SNAKE_CASE_ , ) @skip_mps def UpperCAmelCase ( self ) -> int: UpperCamelCase :int = torch_device == '''cpu''' UpperCamelCase :Tuple = False self._test_attention_slicing_forward_pass( test_max_difference=SCREAMING_SNAKE_CASE_ , test_mean_pixel_difference=SCREAMING_SNAKE_CASE_ , )
import inspect import unittest from transformers import DPTConfig from transformers.file_utils import is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, require_vision, slow, torch_device from ...test_configuration_common import ConfigTester from ...test_modeling_common import ModelTesterMixin, _config_zero_init, floats_tensor, ids_tensor from ...test_pipeline_mixin import PipelineTesterMixin if is_torch_available(): import torch from torch import nn from transformers import MODEL_MAPPING, DPTForDepthEstimation, DPTForSemanticSegmentation, DPTModel from transformers.models.dpt.modeling_dpt import DPT_PRETRAINED_MODEL_ARCHIVE_LIST if is_vision_available(): from PIL import Image from transformers import DPTImageProcessor class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=[0, 1, 2, 3] , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=37 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=[1, 384, 24, 24] , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=None , ) -> int: UpperCamelCase :List[Any] = parent UpperCamelCase :List[str] = batch_size UpperCamelCase :Optional[Any] = image_size UpperCamelCase :Optional[Any] = patch_size UpperCamelCase :Optional[Any] = num_channels UpperCamelCase :Union[str, Any] = is_training UpperCamelCase :Dict = use_labels UpperCamelCase :List[Any] = hidden_size UpperCamelCase :Optional[int] = num_hidden_layers UpperCamelCase :Any = backbone_out_indices UpperCamelCase :int = num_attention_heads UpperCamelCase :Union[str, Any] = intermediate_size UpperCamelCase :List[str] = hidden_act UpperCamelCase :Optional[int] = hidden_dropout_prob UpperCamelCase :int = attention_probs_dropout_prob UpperCamelCase :Optional[Any] = initializer_range UpperCamelCase :List[Any] = num_labels UpperCamelCase :Any = backbone_featmap_shape UpperCamelCase :Optional[int] = scope UpperCamelCase :Optional[int] = is_hybrid # sequence length of DPT = num_patches + 1 (we add 1 for the [CLS] token) UpperCamelCase :Tuple = (image_size // patch_size) ** 2 UpperCamelCase :int = num_patches + 1 def UpperCAmelCase ( self ) -> str: UpperCamelCase :Tuple = floats_tensor([self.batch_size, self.num_channels, self.image_size, self.image_size] ) UpperCamelCase :int = None if self.use_labels: UpperCamelCase :str = ids_tensor([self.batch_size, self.image_size, self.image_size] , self.num_labels ) UpperCamelCase :Any = self.get_config() return config, pixel_values, labels def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Tuple = { '''global_padding''': '''same''', '''layer_type''': '''bottleneck''', '''depths''': [3, 4, 9], '''out_features''': ['''stage1''', '''stage2''', '''stage3'''], '''embedding_dynamic_padding''': True, '''hidden_sizes''': [96, 192, 384, 768], '''num_groups''': 2, } return DPTConfig( image_size=self.image_size , patch_size=self.patch_size , num_channels=self.num_channels , hidden_size=self.hidden_size , num_hidden_layers=self.num_hidden_layers , backbone_out_indices=self.backbone_out_indices , num_attention_heads=self.num_attention_heads , intermediate_size=self.intermediate_size , 
hidden_act=self.hidden_act , hidden_dropout_prob=self.hidden_dropout_prob , attention_probs_dropout_prob=self.attention_probs_dropout_prob , is_decoder=SCREAMING_SNAKE_CASE_ , initializer_range=self.initializer_range , is_hybrid=self.is_hybrid , backbone_config=SCREAMING_SNAKE_CASE_ , backbone_featmap_shape=self.backbone_featmap_shape , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: UpperCamelCase :Optional[int] = DPTModel(config=SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :Optional[int] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.last_hidden_state.shape , (self.batch_size, self.seq_length, self.hidden_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :Tuple = self.num_labels UpperCamelCase :Any = DPTForDepthEstimation(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :Union[str, Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.predicted_depth.shape , (self.batch_size, self.image_size, self.image_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :int = self.num_labels UpperCamelCase :str = DPTForSemanticSegmentation(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() UpperCamelCase :List[str] = model(SCREAMING_SNAKE_CASE_ , labels=SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual( result.logits.shape , (self.batch_size, self.num_labels, self.image_size, self.image_size) ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :List[Any] = self.prepare_config_and_inputs() UpperCamelCase , UpperCamelCase , UpperCamelCase :Optional[Any] = config_and_inputs UpperCamelCase :List[Any] = {'''pixel_values''': pixel_values} return config, inputs_dict @require_torch class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Tuple =(DPTModel, DPTForDepthEstimation, DPTForSemanticSegmentation) if is_torch_available() else () UpperCamelCase_ : Optional[Any] =( { 'depth-estimation': DPTForDepthEstimation, 'feature-extraction': DPTModel, 'image-segmentation': DPTForSemanticSegmentation, } if is_torch_available() else {} ) UpperCamelCase_ : List[Any] =False UpperCamelCase_ : Optional[int] =False UpperCamelCase_ : Union[str, Any] =False def UpperCAmelCase ( self ) -> int: UpperCamelCase :Optional[Any] = DPTModelTester(self ) UpperCamelCase :List[Any] = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , has_text_modality=SCREAMING_SNAKE_CASE_ , hidden_size=37 ) def UpperCAmelCase ( self ) -> Union[str, Any]: self.config_tester.run_common_tests() @unittest.skip(reason='''DPT does not use inputs_embeds''' ) def UpperCAmelCase ( self ) -> int: pass def UpperCAmelCase ( self ) -> Optional[int]: UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ ) self.assertIsInstance(model.get_input_embeddings() , (nn.Module) ) UpperCamelCase :Optional[int] = model.get_output_embeddings() self.assertTrue(x is None or isinstance(SCREAMING_SNAKE_CASE_ , nn.Linear ) ) def UpperCAmelCase ( self ) -> int: UpperCamelCase , UpperCamelCase :Union[str, Any] = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase 
:Optional[Any] = model_class(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = inspect.signature(model.forward ) # signature.parameters is an OrderedDict => so arg_names order is deterministic UpperCamelCase :Tuple = [*signature.parameters.keys()] UpperCamelCase :Any = ['''pixel_values'''] self.assertListEqual(arg_names[:1] , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Any = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_depth_estimation(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :int = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_semantic_segmentation(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Any: for model_class in self.all_model_classes: if model_class.__name__ == "DPTForDepthEstimation": continue UpperCamelCase , UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :int = True if model_class in get_values(SCREAMING_SNAKE_CASE_ ): continue UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.train() UpperCamelCase :Union[str, Any] = self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_labels=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = model(**SCREAMING_SNAKE_CASE_ ).loss loss.backward() def UpperCAmelCase ( self ) -> Optional[int]: for model_class in self.all_model_classes: if model_class.__name__ == "DPTForDepthEstimation": continue UpperCamelCase , UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Union[str, Any] = False UpperCamelCase :Dict = True if model_class in get_values(SCREAMING_SNAKE_CASE_ ) or not model_class.supports_gradient_checkpointing: continue UpperCamelCase :Tuple = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.gradient_checkpointing_enable() model.train() UpperCamelCase :List[Any] = self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , return_labels=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = model(**SCREAMING_SNAKE_CASE_ ).loss loss.backward() def UpperCAmelCase ( self ) -> Dict: UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Dict = _config_zero_init(SCREAMING_SNAKE_CASE_ ) for model_class in self.all_model_classes: UpperCamelCase :Tuple = model_class(config=SCREAMING_SNAKE_CASE_ ) # Skip the check for the backbone UpperCamelCase :List[str] = [] for name, module in model.named_modules(): if module.__class__.__name__ == "DPTViTHybridEmbeddings": UpperCamelCase :Tuple = [F'''{name}.{key}''' for key in module.state_dict().keys()] break for name, param in model.named_parameters(): if param.requires_grad: if name in backbone_params: continue self.assertIn( ((param.data.mean() * 1e9).round() / 1e9).item() , [0.0, 1.0] , msg=F'''Parameter {name} of model {model_class} seems not properly initialized''' , ) @unittest.skip('''Will be fixed soon by reducing the size of the model used for common tests.''' ) def UpperCAmelCase ( self ) -> Tuple: pass @slow def UpperCAmelCase ( self ) -> Any: for model_name in DPT_PRETRAINED_MODEL_ARCHIVE_LIST[1:]: UpperCamelCase :int = DPTModel.from_pretrained(SCREAMING_SNAKE_CASE_ ) 
self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: # We do this test only for DPTForDepthEstimation since it is the only model that uses readout_type UpperCamelCase , UpperCamelCase :int = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Optional[Any] = '''add''' with self.assertRaises(SCREAMING_SNAKE_CASE_ ): UpperCamelCase :int = DPTForDepthEstimation(SCREAMING_SNAKE_CASE_ ) def _A ( ): UpperCamelCase :List[Any] = Image.open('''./tests/fixtures/tests_samples/COCO/000000039769.png''' ) return image @require_torch @require_vision @slow class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> str: UpperCamelCase :Any = DPTImageProcessor.from_pretrained('''Intel/dpt-hybrid-midas''' ) UpperCamelCase :int = DPTForDepthEstimation.from_pretrained('''Intel/dpt-hybrid-midas''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = prepare_img() UpperCamelCase :Union[str, Any] = image_processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''pt''' ).to(SCREAMING_SNAKE_CASE_ ) # forward pass with torch.no_grad(): UpperCamelCase :Union[str, Any] = model(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = outputs.predicted_depth # verify the predicted depth UpperCamelCase :List[str] = torch.Size((1, 384, 384) ) self.assertEqual(predicted_depth.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.tensor( [[[5.6437, 5.6146, 5.6511], [5.4371, 5.5649, 5.5958], [5.5215, 5.5184, 5.5293]]] ).to(SCREAMING_SNAKE_CASE_ ) self.assertTrue(torch.allclose(outputs.predicted_depth[:3, :3, :3] / 100 , SCREAMING_SNAKE_CASE_ , atol=1e-4 ) )
259
1
def _A ( SCREAMING_SNAKE_CASE__ : list[int] , SCREAMING_SNAKE_CASE__ : list[int] ): UpperCamelCase :Tuple = len(SCREAMING_SNAKE_CASE__ ) print('''The following activities are selected:''' ) # The first activity is always selected UpperCamelCase :Dict = 0 print(SCREAMING_SNAKE_CASE__ , end=''',''' ) # Consider rest of the activities for j in range(SCREAMING_SNAKE_CASE__ ): # If this activity has start time greater than # or equal to the finish time of previously # selected activity, then select it if start[j] >= finish[i]: print(SCREAMING_SNAKE_CASE__ , end=''',''' ) UpperCamelCase :List[str] = j if __name__ == "__main__": import doctest doctest.testmod() __snake_case = [1, 3, 0, 5, 8, 5] __snake_case = [2, 4, 6, 7, 9, 9] print_max_activities(start, finish)
def triangle_number_generator():
    """Yield the triangular numbers n * (n + 1) / 2."""
    for n in range(1, 1000000):
        yield n * (n + 1) // 2


def count_divisors(n: int) -> int:
    """Count the divisors of n via its prime factorisation."""
    divisors_count = 1
    i = 2
    while i * i <= n:
        multiplicity = 0
        while n % i == 0:
            n //= i
            multiplicity += 1
        divisors_count *= multiplicity + 1
        i += 1
    if n > 1:
        divisors_count *= 2
    return divisors_count


def solution() -> int:
    """Return the first triangular number with more than 500 divisors."""
    return next(i for i in triangle_number_generator() if count_divisors(i) > 500)


if __name__ == "__main__":
    print(solution())
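# Sanity checks (values verified by hand): 28 = 2^2 * 7 has (2+1)*(1+1) = 6
# divisors, and it is the first triangular number with more than 5 divisors,
# which is the worked example in the Project Euler statement.
assert count_divisors(28) == 6
assert next(t for t in triangle_number_generator() if count_divisors(t) > 5) == 28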
def sylvester(number: int) -> int:
    """Return the nth term of Sylvester's sequence: a(n) = a(n-1)^2 - a(n-1) + 1."""
    assert isinstance(number, int), f"The input value of [n={number}] is not an integer"

    if number == 1:
        return 2
    elif number < 1:
        msg = f"The input value of [n={number}] has to be > 0"
        raise ValueError(msg)
    else:
        num = sylvester(number - 1)
        lower = num - 1
        upper = num
        return lower * upper + 1


if __name__ == "__main__":
    print(f"The 8th number in Sylvester's sequence: {sylvester(8)}")
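# The first few terms for reference: 2, 3, 7, 43, 1807, ...
assert [sylvester(i) for i in range(1, 5)] == [2, 3, 7, 43]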
def bfs(graph, source, sink, parent) -> bool:
    """Breadth-first search; returns True if an augmenting path reaches the sink."""
    visited = [False] * len(graph)
    queue = [source]
    visited[source] = True

    while queue:
        u = queue.pop(0)
        for ind in range(len(graph[u])):
            if visited[ind] is False and graph[u][ind] > 0:
                queue.append(ind)
                visited[ind] = True
                parent[ind] = u

    return visited[sink]


def ford_fulkerson(graph, source, sink) -> int:
    """Compute the maximum flow from source to sink (BFS-based, Edmonds-Karp style)."""
    # This array is filled by BFS and stores the augmenting path
    parent = [-1] * len(graph)
    max_flow = 0

    while bfs(graph, source, sink, parent):
        # Find the bottleneck capacity along the selected path
        path_flow = float("Inf")
        s = sink
        while s != source:
            path_flow = min(path_flow, graph[parent[s]][s])
            s = parent[s]

        max_flow += path_flow

        # Update residual capacities along the path
        v = sink
        while v != source:
            u = parent[v]
            graph[u][v] -= path_flow
            graph[v][u] += path_flow
            v = parent[v]

    return max_flow


graph = [
    [0, 16, 13, 0, 0, 0],
    [0, 0, 10, 12, 0, 0],
    [0, 4, 0, 0, 14, 0],
    [0, 0, 9, 0, 0, 20],
    [0, 0, 0, 7, 0, 4],
    [0, 0, 0, 0, 0, 0],
]
source, sink = 0, 5
print(ford_fulkerson(graph, source, sink))
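# The 6-node network above is the classic textbook example; its maximum flow is
# 23. A tiny single-edge network is easy to verify by hand as well:
assert ford_fulkerson([[0, 5], [0, 0]], 0, 1) == 5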
# Copyright 2023 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ..models.auto import AutoModelForVision2Seq
from ..utils import requires_backends
from .base import PipelineTool


if TYPE_CHECKING:
    from PIL import Image


class ImageCaptioningTool(PipelineTool):
    default_checkpoint = "Salesforce/blip-image-captioning-base"
    description = (
        "This is a tool that generates a description of an image. It takes an input named `image` which should be the "
        "image to caption, and returns a text that contains the description in English."
    )
    name = "image_captioner"
    model_class = AutoModelForVision2Seq

    inputs = ["image"]
    outputs = ["text"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["vision"])
        super().__init__(*args, **kwargs)

    def encode(self, image: "Image"):
        return self.pre_processor(images=image, return_tensors="pt")

    def forward(self, inputs):
        return self.model.generate(**inputs)

    def decode(self, outputs):
        return self.pre_processor.batch_decode(outputs, skip_special_tokens=True)[0].strip()
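# Usage sketch (`image` stands in for any PIL.Image):
#
#   tool = ImageCaptioningTool()
#   caption = tool(image)   # e.g. "two cats sleeping on a couch"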
from __future__ import annotations

from typing import Any


def generate_all_subsequences(sequence: list[Any]) -> None:
    create_state_space_tree(sequence, [], 0)


def create_state_space_tree(
    sequence: list[Any], current_subsequence: list[Any], index: int
) -> None:
    """Recursively enumerate subsequences by excluding, then including, each element."""
    if index == len(sequence):
        print(current_subsequence)
        return

    # Branch 1: skip the element at `index`
    create_state_space_tree(sequence, current_subsequence, index + 1)
    # Branch 2: include the element at `index`
    current_subsequence.append(sequence[index])
    create_state_space_tree(sequence, current_subsequence, index + 1)
    current_subsequence.pop()


if __name__ == "__main__":
    seq: list[Any] = [3, 1, 2, 4]
    generate_all_subsequences(seq)

    seq.clear()
    seq.extend(["A", "B", "C"])
    generate_all_subsequences(seq)
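# A collecting (non-printing) variant can be handy in tests; this helper is a
# hypothetical sketch, not part of the module above:
from itertools import combinations


def all_subsequences(sequence):
    return [c for r in range(len(sequence) + 1) for c in combinations(sequence, r)]


# Every element is either in or out, so a sequence of length 4 has 2**4 subsequences.
assert len(all_subsequences([3, 1, 2, 4])) == 2 ** 4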
from typing import List, Optional, Union import numpy as np import tensorflow as tf from .utils import logging UpperCAmelCase__ = logging.get_logger(__name__) def _a ( a :Union[tf.Tensor, np.ndarray] ) -> List[int]: if isinstance(a , np.ndarray ): return list(tensor.shape ) a = tf.shape(a ) if tensor.shape == tf.TensorShape(a ): return dynamic a = tensor.shape.as_list() return [dynamic[i] if s is None else s for i, s in enumerate(a )] def _a ( a :tf.Tensor , a :Optional[int] = None , a :Optional[str] = None ) -> tf.Tensor: return tf.nn.softmax(logits=logits + 1e-9 , axis=a , name=a ) def _a ( a :Tuple , a :str , a :List[str] , a :str=1e-5 , a :List[str]=-1 ) -> Any: # This is a very simplified functional layernorm, designed to duplicate # the functionality of PyTorch nn.functional.layer_norm when this is needed to port # models in Transformers. if weight.shape.rank != 1 or bias.shape.rank != 1 or not isinstance(a , a ): raise NotImplementedError('''Only 1D weight and bias tensors are supported for now, with only a single axis.''' ) # Get mean and variance on the axis to be normalized a , a = tf.nn.moments(a , axes=[axis] , keepdims=a ) if axis != -1: # Reshape scale and weight to have the same rank as inputs, but with 1 dimensions # on every dimension except axis a = [1] * inputs.shape.rank a = shape_list(a )[axis] a = tf.reshape(a , a ) a = tf.reshape(a , a ) # Compute layer normalization using the batch_normalization # function. a = tf.nn.batch_normalization( a , a , a , offset=a , scale=a , variance_epsilon=a , ) return outputs def _a ( a :Optional[Any] , a :Dict=0 , a :Any=-1 ) -> List[Any]: # Replicates the behavior of torch.flatten in TF # If end_dim or start_dim is negative, count them from the end if end_dim < 0: end_dim += input.shape.rank if start_dim < 0: start_dim += input.shape.rank if start_dim == end_dim: return input a = tf.shape(a ) a = tf.math.reduce_prod(in_shape[start_dim : end_dim + 1] ) a = tf.concat([in_shape[:start_dim], [flattened_dim], in_shape[end_dim + 1 :]] , axis=0 ) return tf.reshape(a , a ) def _a ( a :tf.Tensor ) -> tf.Tensor: if not isinstance(a , tf.Tensor ): a = tf.convert_to_tensor(a ) # Catches stray NumPy inputs if encoder_attention_mask.shape.rank == 3: a = encoder_attention_mask[:, None, :, :] if encoder_attention_mask.shape.rank == 2: a = encoder_attention_mask[:, None, None, :] # T5 has a mask that can compare sequence ids, we can simulate this here with this transposition # Cf. https://github.com/tensorflow/mesh/blob/8d2465e9bc93129b913b5ccc6a59aa97abd96ec6/mesh_tensorflow # /transformer/transformer_layers.py#L270 # encoder_extended_attention_mask = (encoder_extended_attention_mask == # encoder_extended_attention_mask.transpose(-1, -2)) a = ( tf.cast(1 , encoder_attention_mask.dtype ) - encoder_extended_attention_mask ) * encoder_extended_attention_mask.dtype.min return encoder_extended_attention_mask def _a ( a :tf.Tensor , a :int , a :str = "input_ids" ) -> None: tf.debugging.assert_less( a , tf.cast(a , dtype=tensor.dtype ) , message=( F"""The maximum value of {tensor_name} ({tf.math.reduce_max(a )}) must be smaller than the embedding """ F"""layer's input dimension ({embed_dim}). The likely cause is some problem at tokenization time.""" ) , ) def _a ( a :Any , a :Optional[Any] , a :List[str] ) -> str: a = 64_512 # Check that no item in `data` is larger than `HDF5_OBJECT_HEADER_LIMIT` # because in that case even chunking the array would not make the saving # possible. 
a = [x for x in data if len(a ) > HDF5_OBJECT_HEADER_LIMIT] # Expecting this to never be true. if bad_attributes: raise RuntimeError( '''The following attributes cannot be saved to HDF5 file because ''' F"""they are larger than {HDF5_OBJECT_HEADER_LIMIT} """ F"""bytes: {bad_attributes}""" ) a = np.asarray(a ) a = 1 a = np.array_split(a , a ) # This will never loop forever thanks to the test above. while any(x.nbytes > HDF5_OBJECT_HEADER_LIMIT for x in chunked_data ): num_chunks += 1 a = np.array_split(a , a ) if num_chunks > 1: for chunk_id, chunk_data in enumerate(a ): a = chunk_data else: a = data def _a ( a :Optional[Any] , a :Dict ) -> Optional[int]: if name in group.attrs: a = [n.decode('''utf8''' ) if hasattr(a , '''decode''' ) else n for n in group.attrs[name]] else: a = [] a = 0 while "%s%d" % (name, chunk_id) in group.attrs: data.extend( [n.decode('''utf8''' ) if hasattr(a , '''decode''' ) else n for n in group.attrs['''%s%d''' % (name, chunk_id)]] ) chunk_id += 1 return data def _a ( a :Any ) -> str: def _expand_single_ad_tensor(a :Optional[int] ): if isinstance(a , tf.Tensor ) and t.shape.rank == 1: return tf.expand_dims(a , axis=-1 ) return t return tf.nest.map_structure(_expand_single_ad_tensor , a )
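# A self-contained illustration of the shape_list idea used throughout this
# module: static dimensions come back as plain ints, while unknown ones fall
# back to entries of the dynamic shape tensor (assumes TensorFlow 2.x, eager).
import tensorflow as tf

t = tf.zeros((2, 3, 64))
static = t.shape.as_list()   # [2, 3, 64] -- every dimension is known here
dynamic = tf.shape(t)        # runtime shape tensor
assert [dynamic[i] if s is None else s for i, s in enumerate(static)] == [2, 3, 64]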
from typing import Dict, List, Optional, Union import numpy as np from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict from ...image_transforms import ( center_crop, get_resize_output_image_size, normalize, rescale, resize, to_channel_dimension_format, ) from ...image_utils import ( IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, ChannelDimension, ImageInput, PILImageResampling, is_batched, to_numpy_array, valid_images, ) from ...utils import TensorType, logging __snake_case = logging.get_logger(__name__) class UpperCAmelCase_ ( lowercase ): """simple docstring""" UpperCamelCase_ : List[Any] =['pixel_values'] def __init__( self , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = PILImageResampling.BICUBIC , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = 1 / 255 , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = True , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> None: super().__init__(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = size if size is not None else {'''height''': 224, '''width''': 224} UpperCamelCase :Optional[Any] = get_size_dict(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = crop_size if crop_size is not None else {'''height''': 224, '''width''': 224} UpperCamelCase :Dict = get_size_dict(SCREAMING_SNAKE_CASE_ , default_to_square=SCREAMING_SNAKE_CASE_ , param_name='''crop_size''' ) UpperCamelCase :Optional[int] = do_resize UpperCamelCase :int = do_rescale UpperCamelCase :Tuple = do_normalize UpperCamelCase :str = do_center_crop UpperCamelCase :int = crop_size UpperCamelCase :Tuple = size UpperCamelCase :List[str] = resample UpperCamelCase :Tuple = rescale_factor UpperCamelCase :Optional[Any] = image_mean if image_mean is not None else IMAGENET_DEFAULT_MEAN UpperCamelCase :Optional[int] = image_std if image_std is not None else IMAGENET_DEFAULT_STD def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = PILImageResampling.BILINEAR , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> np.ndarray: UpperCamelCase :Dict = get_size_dict(SCREAMING_SNAKE_CASE_ ) if "shortest_edge" in size: UpperCamelCase :str = get_resize_output_image_size(SCREAMING_SNAKE_CASE_ , size=size['''shortest_edge'''] , default_to_square=SCREAMING_SNAKE_CASE_ ) # size = get_resize_output_image_size(image, size["shortest_edge"], size["longest_edge"]) elif "height" in size and "width" in size: UpperCamelCase :Optional[int] = (size['''height'''], size['''width''']) else: raise ValueError(F'''Size must contain \'height\' and \'width\' keys or \'shortest_edge\' key. Got {size.keys()}''' ) return resize(SCREAMING_SNAKE_CASE_ , size=SCREAMING_SNAKE_CASE_ , resample=SCREAMING_SNAKE_CASE_ , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> np.ndarray: UpperCamelCase :Union[str, Any] = get_size_dict(SCREAMING_SNAKE_CASE_ ) if "height" not in size or "width" not in size: raise ValueError(F'''The `size` parameter must contain the keys (height, width). 
Got {size.keys()}''' ) return center_crop(SCREAMING_SNAKE_CASE_ , size=(size['''height'''], size['''width''']) , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ ) -> np.ndarray: return rescale(SCREAMING_SNAKE_CASE_ , scale=SCREAMING_SNAKE_CASE_ , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , **SCREAMING_SNAKE_CASE_ , ) -> np.ndarray: return normalize(SCREAMING_SNAKE_CASE_ , mean=SCREAMING_SNAKE_CASE_ , std=SCREAMING_SNAKE_CASE_ , data_format=SCREAMING_SNAKE_CASE_ , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = None , SCREAMING_SNAKE_CASE_ = ChannelDimension.FIRST , **SCREAMING_SNAKE_CASE_ , ) -> BatchFeature: UpperCamelCase :Union[str, Any] = do_resize if do_resize is not None else self.do_resize UpperCamelCase :Optional[int] = do_rescale if do_rescale is not None else self.do_rescale UpperCamelCase :Optional[Any] = do_normalize if do_normalize is not None else self.do_normalize UpperCamelCase :Union[str, Any] = do_center_crop if do_center_crop is not None else self.do_center_crop UpperCamelCase :Optional[int] = crop_size if crop_size is not None else self.crop_size UpperCamelCase :Dict = get_size_dict(SCREAMING_SNAKE_CASE_ , param_name='''crop_size''' , default_to_square=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = resample if resample is not None else self.resample UpperCamelCase :List[str] = rescale_factor if rescale_factor is not None else self.rescale_factor UpperCamelCase :Optional[Any] = image_mean if image_mean is not None else self.image_mean UpperCamelCase :Dict = image_std if image_std is not None else self.image_std UpperCamelCase :Dict = size if size is not None else self.size UpperCamelCase :Optional[int] = get_size_dict(SCREAMING_SNAKE_CASE_ ) if not is_batched(SCREAMING_SNAKE_CASE_ ): UpperCamelCase :str = [images] if not valid_images(SCREAMING_SNAKE_CASE_ ): raise ValueError( '''Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, ''' '''torch.Tensor, tf.Tensor or jax.ndarray.''' ) if do_resize and size is None: raise ValueError('''Size must be specified if do_resize is True.''' ) if do_center_crop and crop_size is None: raise ValueError('''Crop size must be specified if do_center_crop is True.''' ) if do_rescale and rescale_factor is None: raise ValueError('''Rescale factor must be specified if do_rescale is True.''' ) # All transformations expect numpy arrays. 
UpperCamelCase :Tuple = [to_numpy_array(SCREAMING_SNAKE_CASE_ ) for image in images] if do_resize: UpperCamelCase :List[Any] = [self.resize(image=SCREAMING_SNAKE_CASE_ , size=SCREAMING_SNAKE_CASE_ , resample=SCREAMING_SNAKE_CASE_ ) for image in images] if do_center_crop: UpperCamelCase :Tuple = [self.center_crop(image=SCREAMING_SNAKE_CASE_ , size=SCREAMING_SNAKE_CASE_ ) for image in images] if do_rescale: UpperCamelCase :Union[str, Any] = [self.rescale(image=SCREAMING_SNAKE_CASE_ , scale=SCREAMING_SNAKE_CASE_ ) for image in images] if do_normalize: UpperCamelCase :Union[str, Any] = [self.normalize(image=SCREAMING_SNAKE_CASE_ , mean=SCREAMING_SNAKE_CASE_ , std=SCREAMING_SNAKE_CASE_ ) for image in images] UpperCamelCase :List[str] = [to_channel_dimension_format(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) for image in images] UpperCamelCase :int = {'''pixel_values''': images} return BatchFeature(data=SCREAMING_SNAKE_CASE_ , tensor_type=SCREAMING_SNAKE_CASE_ )
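# Typical end-to-end call for an image processor of this shape (a sketch; the
# class name and image are placeholders, not taken from this file):
#
#   processor = SomeImageProcessor(size={"height": 224, "width": 224})
#   batch = processor(images=pil_image, return_tensors="pt")
#   batch["pixel_values"].shape   # -> torch.Size([1, 3, 224, 224])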
def equated_monthly_installments(
    principal: float, rate_per_annum: float, years_to_repay: int
) -> float:
    """Compute the fixed monthly payment (EMI) on an amortised loan."""
    if principal <= 0:
        raise Exception("Principal borrowed must be > 0")
    if rate_per_annum < 0:
        raise Exception("Rate of interest must be >= 0")
    if years_to_repay <= 0 or not isinstance(years_to_repay, int):
        raise Exception("Years to repay must be an integer > 0")

    # Yearly rate is divided by 12 to get the monthly rate
    rate_per_month = rate_per_annum / 12

    # Years to repay is multiplied by 12 to get the number of payments, as payment is monthly
    number_of_payments = years_to_repay * 12

    return (
        principal
        * rate_per_month
        * (1 + rate_per_month) ** number_of_payments
        / ((1 + rate_per_month) ** number_of_payments - 1)
    )


if __name__ == "__main__":
    import doctest

    doctest.testmod()
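# Worked example: 25,000 borrowed at 8% per annum over 15 years gives a monthly
# rate of 0.08 / 12 across 180 payments, and the formula works out to ~238.91.
assert round(equated_monthly_installments(25_000, 0.08, 15), 2) == 238.91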
import os import sys import tempfile import torch from .state import AcceleratorState from .utils import PrecisionType, PrepareForLaunch, is_mps_available, patch_environment def _A ( SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str]=() , SCREAMING_SNAKE_CASE__ : List[Any]=None , SCREAMING_SNAKE_CASE__ : List[Any]="no" , SCREAMING_SNAKE_CASE__ : Dict="29500" ): UpperCamelCase :List[Any] = False UpperCamelCase :Tuple = False if any(key.startswith('''KAGGLE''' ) for key in os.environ.keys() ): UpperCamelCase :Dict = True elif "IPython" in sys.modules: UpperCamelCase :int = '''google.colab''' in str(sys.modules['''IPython'''].get_ipython() ) try: UpperCamelCase :Any = PrecisionType(mixed_precision.lower() ) except ValueError: raise ValueError( F'''Unknown mixed_precision mode: {args.mixed_precision.lower()}. Choose between {PrecisionType.list()}.''' ) if (in_colab or in_kaggle) and (os.environ.get('''TPU_NAME''' , SCREAMING_SNAKE_CASE__ ) is not None): # TPU launch import torch_xla.distributed.xla_multiprocessing as xmp if len(AcceleratorState._shared_state ) > 0: raise ValueError( '''To train on TPU in Colab or Kaggle Kernel, the `Accelerator` should only be initialized inside ''' '''your training function. Restart your notebook and make sure no cells initializes an ''' '''`Accelerator`.''' ) if num_processes is None: UpperCamelCase :Tuple = 8 UpperCamelCase :Optional[int] = PrepareForLaunch(SCREAMING_SNAKE_CASE__ , distributed_type='''TPU''' ) print(F'''Launching a training on {num_processes} TPU cores.''' ) xmp.spawn(SCREAMING_SNAKE_CASE__ , args=SCREAMING_SNAKE_CASE__ , nprocs=SCREAMING_SNAKE_CASE__ , start_method='''fork''' ) elif in_colab: # No need for a distributed launch otherwise as it's either CPU or one GPU. if torch.cuda.is_available(): print('''Launching training on one GPU.''' ) else: print('''Launching training on one CPU.''' ) function(*SCREAMING_SNAKE_CASE__ ) else: if num_processes is None: raise ValueError( '''You have to specify the number of GPUs you would like to use, add `num_processes=...` to your call.''' ) if num_processes > 1: # Multi-GPU launch from torch.multiprocessing import start_processes from torch.multiprocessing.spawn import ProcessRaisedException if len(AcceleratorState._shared_state ) > 0: raise ValueError( '''To launch a multi-GPU training from your notebook, the `Accelerator` should only be initialized ''' '''inside your training function. Restart your notebook and make sure no cells initializes an ''' '''`Accelerator`.''' ) if torch.cuda.is_initialized(): raise ValueError( '''To launch a multi-GPU training from your notebook, you need to avoid running any instruction ''' '''using `torch.cuda` in any cell. Restart your notebook and make sure no cells use any CUDA ''' '''function.''' ) # torch.distributed will expect a few environment variable to be here. We set the ones common to each # process here (the other ones will be set be the launcher). 
with patch_environment( world_size=SCREAMING_SNAKE_CASE__ , master_addr='''127.0.01''' , master_port=SCREAMING_SNAKE_CASE__ , mixed_precision=SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = PrepareForLaunch(SCREAMING_SNAKE_CASE__ , distributed_type='''MULTI_GPU''' ) print(F'''Launching training on {num_processes} GPUs.''' ) try: start_processes(SCREAMING_SNAKE_CASE__ , args=SCREAMING_SNAKE_CASE__ , nprocs=SCREAMING_SNAKE_CASE__ , start_method='''fork''' ) except ProcessRaisedException as e: if "Cannot re-initialize CUDA in forked subprocess" in e.args[0]: raise RuntimeError( '''CUDA has been initialized before the `notebook_launcher` could create a forked subprocess. ''' '''This likely stems from an outside import causing issues once the `notebook_launcher()` is called. ''' '''Please review your imports and test them when running the `notebook_launcher()` to identify ''' '''which one is problematic.''' ) from e else: # No need for a distributed launch otherwise as it's either CPU, GPU or MPS. if is_mps_available(): UpperCamelCase :Any = '''1''' print('''Launching training on MPS.''' ) elif torch.cuda.is_available(): print('''Launching training on one GPU.''' ) else: print('''Launching training on CPU.''' ) function(*SCREAMING_SNAKE_CASE__ ) def _A ( SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Tuple=() , SCREAMING_SNAKE_CASE__ : int=2 ): from torch.multiprocessing import start_processes with tempfile.NamedTemporaryFile() as tmp_file: # torch.distributed will expect a few environment variable to be here. We set the ones common to each # process here (the other ones will be set be the launcher). with patch_environment( world_size=SCREAMING_SNAKE_CASE__ , master_addr='''127.0.01''' , master_port='''29500''' , accelerate_mixed_precision='''no''' , accelerate_debug_rdv_file=tmp_file.name , accelerate_use_cpu='''yes''' , ): UpperCamelCase :Optional[int] = PrepareForLaunch(SCREAMING_SNAKE_CASE__ , debug=SCREAMING_SNAKE_CASE__ ) start_processes(SCREAMING_SNAKE_CASE__ , args=SCREAMING_SNAKE_CASE__ , nprocs=SCREAMING_SNAKE_CASE__ , start_method='''fork''' )
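# Typical notebook use (a sketch; `training_loop` stands in for a user-defined
# function and is not part of this module):
#
#   from accelerate import notebook_launcher
#   notebook_launcher(training_loop, args=(model, dataloader), num_processes=2)
#
# On a Colab/Kaggle TPU runtime the TPU branch above is taken automatically.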
from typing import Dict

from .base import GenericTensor, Pipeline


class FeatureExtractionPipeline(Pipeline):
    def _sanitize_parameters(self, truncation=None, tokenize_kwargs=None, return_tensors=None, **kwargs):
        if tokenize_kwargs is None:
            tokenize_kwargs = {}

        if truncation is not None:
            if "truncation" in tokenize_kwargs:
                raise ValueError(
                    "truncation parameter defined twice (given as keyword argument as well as in tokenize_kwargs)"
                )
            tokenize_kwargs["truncation"] = truncation

        preprocess_params = tokenize_kwargs

        postprocess_params = {}
        if return_tensors is not None:
            postprocess_params["return_tensors"] = return_tensors

        return preprocess_params, {}, postprocess_params

    def preprocess(self, inputs, **tokenize_kwargs) -> Dict[str, GenericTensor]:
        model_inputs = self.tokenizer(inputs, return_tensors=self.framework, **tokenize_kwargs)
        return model_inputs

    def _forward(self, model_inputs):
        model_outputs = self.model(**model_inputs)
        return model_outputs

    def postprocess(self, model_outputs, return_tensors=False):
        # model_outputs[0] is the first available tensor: logits or last_hidden_state
        if return_tensors:
            return model_outputs[0]
        if self.framework == "pt":
            return model_outputs[0].tolist()
        elif self.framework == "tf":
            return model_outputs[0].numpy().tolist()

    def __call__(self, *args, **kwargs):
        return super().__call__(*args, **kwargs)
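# Typical use through the pipeline factory (a sketch; the checkpoint name is a
# placeholder):
#
#   from transformers import pipeline
#   extractor = pipeline("feature-extraction", model="bert-base-uncased")
#   feats = extractor("Hello world")   # nested lists: [batch][token][hidden]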
import sys


def matrix_chain_order(array):
    """Dynamic programming solution for the matrix-chain multiplication problem."""
    n = len(array)
    matrix = [[0 for _ in range(n)] for _ in range(n)]
    sol = [[0 for _ in range(n)] for _ in range(n)]

    for chain_length in range(2, n):
        for a in range(1, n - chain_length + 1):
            b = a + chain_length - 1
            matrix[a][b] = sys.maxsize
            for c in range(a, b):
                cost = matrix[a][c] + matrix[c + 1][b] + array[a - 1] * array[c] * array[b]
                if cost < matrix[a][b]:
                    matrix[a][b] = cost
                    sol[a][b] = c

    return matrix, sol


def print_optimal_solution(optimal_solution, i, j):
    """Recursively print the optimal parenthesisation."""
    if i == j:
        print("A" + str(i), end=" ")
    else:
        print("(", end=" ")
        print_optimal_solution(optimal_solution, i, optimal_solution[i][j])
        print_optimal_solution(optimal_solution, optimal_solution[i][j] + 1, j)
        print(")", end=" ")


def main():
    array = [30, 35, 15, 5, 10, 20, 25]
    n = len(array)
    # Sizes of the matrices created from the above array:
    # 30x35, 35x15, 15x5, 5x10, 10x20, 20x25
    matrix, optimal_solution = matrix_chain_order(array)

    print("No. of Operations required: " + str(matrix[1][n - 1]))
    print_optimal_solution(optimal_solution, 1, n - 1)


if __name__ == "__main__":
    main()
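# The CLRS textbook instance: for dimensions [30, 35, 15, 5, 10, 20, 25] the
# optimal cost is 15125 scalar multiplications.
matrix, optimal_solution = matrix_chain_order([30, 35, 15, 5, 10, 20, 25])
assert matrix[1][6] == 15125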
'''simple docstring''' from typing import Dict, List, Optional, Union import numpy as np from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict from ...image_transforms import ( center_crop, get_resize_output_image_size, normalize, rescale, resize, to_channel_dimension_format, ) from ...image_utils import ( IMAGENET_STANDARD_MEAN, IMAGENET_STANDARD_STD, ChannelDimension, ImageInput, PILImageResampling, make_list_of_images, to_numpy_array, valid_images, ) from ...utils import TensorType, is_vision_available, logging if is_vision_available(): import PIL lowercase : Optional[Any] = logging.get_logger(__name__) class A ( __snake_case ): __magic_name__ = ['''pixel_values'''] def __init__( self , SCREAMING_SNAKE_CASE = True , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = PILImageResampling.BILINEAR , SCREAMING_SNAKE_CASE = True , SCREAMING_SNAKE_CASE = 1 / 255 , SCREAMING_SNAKE_CASE = True , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , **SCREAMING_SNAKE_CASE , ) -> None: """simple docstring""" super().__init__(**SCREAMING_SNAKE_CASE ) A : str = size if size is not None else {'''shortest_edge''': 384} A : Tuple = get_size_dict(SCREAMING_SNAKE_CASE , default_to_square=SCREAMING_SNAKE_CASE ) A : str = do_resize A : List[Any] = size # Default value set here for backwards compatibility where the value in config is None A : List[Any] = crop_pct if crop_pct is not None else 224 / 256 A : Optional[int] = resample A : Union[str, Any] = do_rescale A : List[str] = rescale_factor A : Union[str, Any] = do_normalize A : Any = image_mean if image_mean is not None else IMAGENET_STANDARD_MEAN A : Optional[int] = image_std if image_std is not None else IMAGENET_STANDARD_STD def __lowerCAmelCase ( self , SCREAMING_SNAKE_CASE , SCREAMING_SNAKE_CASE , SCREAMING_SNAKE_CASE , SCREAMING_SNAKE_CASE = PILImageResampling.BICUBIC , SCREAMING_SNAKE_CASE = None , **SCREAMING_SNAKE_CASE , ) -> np.ndarray: """simple docstring""" A : str = get_size_dict(SCREAMING_SNAKE_CASE , default_to_square=SCREAMING_SNAKE_CASE ) if "shortest_edge" not in size: raise ValueError(F'Size dictionary must contain \'shortest_edge\' key. 
Got {size.keys()}' ) A : Any = size['''shortest_edge'''] if shortest_edge < 384: # maintain same ratio, resizing shortest edge to shortest_edge/crop_pct A : Dict = int(shortest_edge / crop_pct ) A : str = get_resize_output_image_size(SCREAMING_SNAKE_CASE , size=SCREAMING_SNAKE_CASE , default_to_square=SCREAMING_SNAKE_CASE ) A : int = resize(image=SCREAMING_SNAKE_CASE , size=SCREAMING_SNAKE_CASE , resample=SCREAMING_SNAKE_CASE , data_format=SCREAMING_SNAKE_CASE , **SCREAMING_SNAKE_CASE ) # then crop to (shortest_edge, shortest_edge) return center_crop(image=SCREAMING_SNAKE_CASE , size=(shortest_edge, shortest_edge) , data_format=SCREAMING_SNAKE_CASE , **SCREAMING_SNAKE_CASE ) else: # warping (no cropping) when evaluated at 384 or larger return resize( SCREAMING_SNAKE_CASE , size=(shortest_edge, shortest_edge) , resample=SCREAMING_SNAKE_CASE , data_format=SCREAMING_SNAKE_CASE , **SCREAMING_SNAKE_CASE ) def __lowerCAmelCase ( self , SCREAMING_SNAKE_CASE , SCREAMING_SNAKE_CASE , SCREAMING_SNAKE_CASE = None , **SCREAMING_SNAKE_CASE , ) -> List[str]: """simple docstring""" return rescale(SCREAMING_SNAKE_CASE , scale=SCREAMING_SNAKE_CASE , data_format=SCREAMING_SNAKE_CASE , **SCREAMING_SNAKE_CASE ) def __lowerCAmelCase ( self , SCREAMING_SNAKE_CASE , SCREAMING_SNAKE_CASE , SCREAMING_SNAKE_CASE , SCREAMING_SNAKE_CASE = None , **SCREAMING_SNAKE_CASE , ) -> np.ndarray: """simple docstring""" return normalize(SCREAMING_SNAKE_CASE , mean=SCREAMING_SNAKE_CASE , std=SCREAMING_SNAKE_CASE , data_format=SCREAMING_SNAKE_CASE , **SCREAMING_SNAKE_CASE ) def __lowerCAmelCase ( self , SCREAMING_SNAKE_CASE , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = None , SCREAMING_SNAKE_CASE = ChannelDimension.FIRST , **SCREAMING_SNAKE_CASE , ) -> PIL.Image.Image: """simple docstring""" A : int = do_resize if do_resize is not None else self.do_resize A : Tuple = crop_pct if crop_pct is not None else self.crop_pct A : Optional[Any] = resample if resample is not None else self.resample A : List[Any] = do_rescale if do_rescale is not None else self.do_rescale A : List[Any] = rescale_factor if rescale_factor is not None else self.rescale_factor A : Union[str, Any] = do_normalize if do_normalize is not None else self.do_normalize A : Union[str, Any] = image_mean if image_mean is not None else self.image_mean A : List[str] = image_std if image_std is not None else self.image_std A : Union[str, Any] = size if size is not None else self.size A : List[Any] = get_size_dict(SCREAMING_SNAKE_CASE , default_to_square=SCREAMING_SNAKE_CASE ) A : Any = make_list_of_images(SCREAMING_SNAKE_CASE ) if not valid_images(SCREAMING_SNAKE_CASE ): raise ValueError( '''Invalid image type. 
Must be of type PIL.Image.Image, numpy.ndarray, ''' '''torch.Tensor, tf.Tensor or jax.ndarray.''' ) if do_resize and size is None or resample is None: raise ValueError('''Size and resample must be specified if do_resize is True.''' ) if do_resize and size["shortest_edge"] < 384 and crop_pct is None: raise ValueError('''crop_pct must be specified if size < 384.''' ) if do_rescale and rescale_factor is None: raise ValueError('''Rescale factor must be specified if do_rescale is True.''' ) if do_normalize and (image_mean is None or image_std is None): raise ValueError('''Image mean and std must be specified if do_normalize is True.''' ) # All transformations expect numpy arrays. A : Optional[int] = [to_numpy_array(SCREAMING_SNAKE_CASE ) for image in images] if do_resize: A : Any = [self.resize(image=SCREAMING_SNAKE_CASE , size=SCREAMING_SNAKE_CASE , crop_pct=SCREAMING_SNAKE_CASE , resample=SCREAMING_SNAKE_CASE ) for image in images] if do_rescale: A : str = [self.rescale(image=SCREAMING_SNAKE_CASE , scale=SCREAMING_SNAKE_CASE ) for image in images] if do_normalize: A : Dict = [self.normalize(image=SCREAMING_SNAKE_CASE , mean=SCREAMING_SNAKE_CASE , std=SCREAMING_SNAKE_CASE ) for image in images] A : Any = [to_channel_dimension_format(SCREAMING_SNAKE_CASE , SCREAMING_SNAKE_CASE ) for image in images] A : Optional[int] = {'''pixel_values''': images} return BatchFeature(data=SCREAMING_SNAKE_CASE , tensor_type=SCREAMING_SNAKE_CASE )
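# The eval-resize rule above in brief: for a target shortest edge below 384
# (say 224 with the default crop_pct of 224/256), the image is first resized so
# its shortest edge is int(224 / (224 / 256)) = 256 and then centre-cropped to
# 224x224; at 384 and above it is warped straight to the target size.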
import argparse import json import os from pathlib import Path import requests import torch from transformers import JukeboxConfig, JukeboxModel from transformers.utils import logging logging.set_verbosity_info() __snake_case = logging.get_logger(__name__) __snake_case = """https://openaipublic.azureedge.net/jukebox/models/""" __snake_case = { """jukebox-1b-lyrics""": [ """5b/vqvae.pth.tar""", """5b/prior_level_0.pth.tar""", """5b/prior_level_1.pth.tar""", """1b_lyrics/prior_level_2.pth.tar""", ], """jukebox-5b-lyrics""": [ """5b/vqvae.pth.tar""", """5b/prior_level_0.pth.tar""", """5b/prior_level_1.pth.tar""", """5b_lyrics/prior_level_2.pth.tar""", ], } def _A ( SCREAMING_SNAKE_CASE__ : List[Any] ): if key.endswith('''.model.1.bias''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :int = key.replace('''.model.1.bias''' , '''.conv1d_1.bias''' ) elif key.endswith('''.model.1.weight''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Union[str, Any] = key.replace('''.model.1.weight''' , '''.conv1d_1.weight''' ) elif key.endswith('''.model.3.bias''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Optional[Any] = key.replace('''.model.3.bias''' , '''.conv1d_2.bias''' ) elif key.endswith('''.model.3.weight''' ) and len(key.split('''.''' ) ) > 10: UpperCamelCase :Optional[int] = key.replace('''.model.3.weight''' , '''.conv1d_2.weight''' ) if "conditioner_blocks.0." in key: UpperCamelCase :Any = key.replace('''conditioner_blocks.0''' , '''conditioner_blocks''' ) if "prime_prior" in key: UpperCamelCase :int = key.replace('''prime_prior''' , '''encoder''' ) if ".emb." in key and "total" not in key and "absolute" not in key and "relative" not in key: UpperCamelCase :Any = key.replace('''.emb.''' , '''.''' ) if key.endswith('''k''' ): # replace vqvae.X.k with vqvae.X.codebook return key.replace('''.k''' , '''.codebook''' ) if "y_emb." in key: return key.replace('''y_emb.''' , '''metadata_embedding.''' ) if "x_emb.emb." 
in key: UpperCamelCase :str = key.replace('''0.x_emb.emb''' , '''embed_tokens''' ) if "prime_state_ln" in key: return key.replace('''prime_state_ln''' , '''encoder.final_layer_norm''' ) if ".ln" in key: return key.replace('''.ln''' , '''.layer_norm''' ) if "_ln" in key: return key.replace('''_ln''' , '''_layer_norm''' ) if "prime_state_proj" in key: return key.replace('''prime_state_proj''' , '''encoder.proj_in''' ) if "prime_x_out" in key: return key.replace('''prime_x_out''' , '''encoder.lm_head''' ) if "prior.x_out" in key: return key.replace('''x_out''' , '''fc_proj_out''' ) if "x_emb" in key: return key.replace('''x_emb''' , '''embed_tokens''' ) return key def _A ( SCREAMING_SNAKE_CASE__ : List[Any] , SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : str ): UpperCamelCase :Optional[int] = {} import re UpperCamelCase :int = re.compile(R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :str = re.compile( R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :int = re.compile(R'''encoders.(\d*).level_blocks.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Tuple = re.compile(R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :int = re.compile( R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Optional[int] = re.compile(R'''decoders.(\d*).level_blocks.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Optional[Any] = re.compile(R'''conditioner_blocks.(\d*).cond.model.(\d*).(\d).(bias|weight)''' ) UpperCamelCase :int = re.compile( R'''conditioner_blocks.(\d*).cond.model.(\d*).(\d).model.(\d*).model.(\d*).(bias|weight)''' ) UpperCamelCase :Tuple = re.compile(R'''conditioner_blocks.(\d*).cond.model.(\d*).(bias|weight)''' ) for original_key, value in state_dict.items(): # rename vqvae.encoder keys if re_encoder_block_conv_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = re_encoder_block_conv_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = regex_match.groups() UpperCamelCase :List[str] = int(groups[2] ) * 2 + int(groups[3] ) UpperCamelCase :List[Any] = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.downsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :int = re_encoder_block_conv_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_encoder_block_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_encoder_block_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[Any] = regex_match.groups() UpperCamelCase :Any = int(groups[2] ) * 2 + int(groups[3] ) UpperCamelCase :Any = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :str = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.downsample_block.{block_index}.''' UpperCamelCase :List[str] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Union[str, Any] = prefix + resnet_block UpperCamelCase :str = re_encoder_block_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_encoder_block_proj_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[int] = re_encoder_block_proj_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :int = regex_match.groups() UpperCamelCase :int = F'''encoders.{groups[0]}.level_blocks.{groups[1]}.proj_out.{groups[-1]}''' UpperCamelCase :str = re_encoder_block_proj_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # rename vqvae.decoder 
keys elif re_decoder_block_conv_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_decoder_block_conv_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = regex_match.groups() UpperCamelCase :str = int(groups[2] ) * 2 + int(groups[3] ) - 2 UpperCamelCase :List[Any] = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.upsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :Union[str, Any] = re_decoder_block_conv_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_decoder_block_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_decoder_block_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = regex_match.groups() UpperCamelCase :List[str] = int(groups[2] ) * 2 + int(groups[3] ) - 2 UpperCamelCase :Optional[int] = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :Any = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.upsample_block.{block_index}.''' UpperCamelCase :Optional[int] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Any = prefix + resnet_block UpperCamelCase :Optional[int] = re_decoder_block_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_decoder_block_proj_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[int] = re_decoder_block_proj_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[Any] = regex_match.groups() UpperCamelCase :List[Any] = F'''decoders.{groups[0]}.level_blocks.{groups[1]}.proj_in.{groups[-1]}''' UpperCamelCase :Any = re_decoder_block_proj_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # rename prior cond.model to upsampler.upsample_block and resnet elif re_prior_cond_conv_out.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :Optional[Any] = re_prior_cond_conv_out.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = regex_match.groups() UpperCamelCase :str = int(groups[1] ) * 2 + int(groups[2] ) - 2 UpperCamelCase :Tuple = F'''conditioner_blocks.upsampler.upsample_block.{block_index}.{groups[-1]}''' UpperCamelCase :int = re_prior_cond_conv_out.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_prior_cond_resnet.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = re_prior_cond_resnet.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = regex_match.groups() UpperCamelCase :Optional[Any] = int(groups[1] ) * 2 + int(groups[2] ) - 2 UpperCamelCase :int = {'''1''': 1, '''3''': 2}[groups[-2]] UpperCamelCase :Tuple = F'''conditioner_blocks.upsampler.upsample_block.{block_index}.''' UpperCamelCase :List[Any] = F'''resnet_block.{groups[-3]}.conv1d_{conv_index}.{groups[-1]}''' UpperCamelCase :Any = prefix + resnet_block UpperCamelCase :Dict = re_prior_cond_resnet.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) elif re_prior_cond_proj_in.fullmatch(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :List[str] = re_prior_cond_proj_in.match(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = regex_match.groups() UpperCamelCase :Dict = F'''conditioner_blocks.upsampler.proj_in.{groups[-1]}''' UpperCamelCase :Any = re_prior_cond_proj_in.sub(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) # keep original key else: UpperCamelCase :List[str] = original_key UpperCamelCase :Any = replace_key(SCREAMING_SNAKE_CASE__ ) if F'''{key_prefix}.{key}''' not in model_state_dict or key is None: print(F'''failed converting {original_key} to {key}, does not match''' ) # handle missmatched shape elif value.shape != model_state_dict[F'''{key_prefix}.{key}'''].shape: UpperCamelCase :Union[str, Any] = 
model_state_dict[F'''{key_prefix}.{key}'''] print(F'''{original_key}-> {key} : \nshape {val.shape} and { value.shape}, do not match''' ) UpperCamelCase :List[Any] = original_key UpperCamelCase :Any = original_key UpperCamelCase :Optional[int] = value return new_dict @torch.no_grad() def _A ( SCREAMING_SNAKE_CASE__ : List[str]=None , SCREAMING_SNAKE_CASE__ : Dict=None ): for file in MODEL_MAPPING[model_name]: if not os.path.isfile(F'''{pytorch_dump_folder_path}/{file.split("/" )[-1]}''' ): UpperCamelCase :Dict = requests.get(F'''{PREFIX}{file}''' , allow_redirects=SCREAMING_SNAKE_CASE__ ) os.makedirs(F'''{pytorch_dump_folder_path}/''' , exist_ok=SCREAMING_SNAKE_CASE__ ) open(F'''{pytorch_dump_folder_path}/{file.split("/" )[-1]}''' , '''wb''' ).write(r.content ) UpperCamelCase :Optional[int] = MODEL_MAPPING[model_name.split('''/''' )[-1]] UpperCamelCase :Any = JukeboxConfig.from_pretrained(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :List[str] = JukeboxModel(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Dict = [] UpperCamelCase :List[Any] = {} for i, dict_name in enumerate(SCREAMING_SNAKE_CASE__ ): UpperCamelCase :int = torch.load(F'''{pytorch_dump_folder_path}/{dict_name.split("/" )[-1]}''' )['''model'''] UpperCamelCase :Tuple = {} for k in old_dic.keys(): if k.endswith('''.b''' ): UpperCamelCase :Optional[int] = old_dic[k] elif k.endswith('''.w''' ): UpperCamelCase :Optional[Any] = old_dic[k] elif "level_2" not in dict_name and "cond.model." in k: UpperCamelCase :Optional[Any] = old_dic[k] else: UpperCamelCase :Any = old_dic[k] UpperCamelCase :Any = '''vqvae''' if i == 0 else F'''priors.{3 - i}''' UpperCamelCase :Dict = fix_jukebox_keys(SCREAMING_SNAKE_CASE__ , model.state_dict() , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) weight_dict.append(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = weight_dict.pop(0 ) model.vqvae.load_state_dict(SCREAMING_SNAKE_CASE__ ) for i in range(len(SCREAMING_SNAKE_CASE__ ) ): model.priors[i].load_state_dict(weight_dict[2 - i] ) Path(SCREAMING_SNAKE_CASE__ ).mkdir(exist_ok=SCREAMING_SNAKE_CASE__ ) with open(F'''{pytorch_dump_folder_path}/mapping.json''' , '''w''' ) as txtfile: json.dump(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) print(F'''Saving model {model_name} to {pytorch_dump_folder_path}''' ) model.save_pretrained(SCREAMING_SNAKE_CASE__ ) return weight_dict if __name__ == "__main__": __snake_case = argparse.ArgumentParser() # Required parameters parser.add_argument( """--model_name""", default="""jukebox-5b-lyrics""", type=str, help="""Name of the model you'd like to convert.""", ) parser.add_argument( """--pytorch_dump_folder_path""", default="""jukebox-5b-lyrics-converted""", type=str, help="""Path to the output PyTorch model directory.""", ) __snake_case = parser.parse_args() convert_openai_checkpoint(args.model_name, args.pytorch_dump_folder_path)
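# Example of the key rewriting performed above, shown for the encoder conv_in
# branch (a self-contained sketch):
import re

pat = re.compile(r"encoders.(\d*).level_blocks.(\d*).model.(\d*).(\d).(bias|weight)")
groups = pat.fullmatch("encoders.0.level_blocks.1.model.2.3.weight").groups()
block_index = int(groups[2]) * 2 + int(groups[3])   # -> 7
# new key: "encoders.0.level_blocks.1.downsample_block.7.weight"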
'''simple docstring''' import logging import os from dataclasses import dataclass, field from typing import Dict, Optional import numpy as np from utils_multiple_choice import MultipleChoiceDataset, Split, processors import transformers from transformers import ( AutoConfig, AutoModelForMultipleChoice, AutoTokenizer, DataCollatorWithPadding, EvalPrediction, HfArgumentParser, Trainer, TrainingArguments, set_seed, ) from transformers.trainer_utils import is_main_process __snake_case =logging.getLogger(__name__) def a_ ( lowerCamelCase : List[Any] , lowerCamelCase : Dict ): return (preds == labels).mean() @dataclass class UpperCAmelCase_ : lowerCamelCase : str = field( metadata={'''help''': '''Path to pretrained model or model identifier from huggingface.co/models'''} ) lowerCamelCase : Optional[str] = field( default=__lowercase , metadata={'''help''': '''Pretrained config name or path if not the same as model_name'''} ) lowerCamelCase : Optional[str] = field( default=__lowercase , metadata={'''help''': '''Pretrained tokenizer name or path if not the same as model_name'''} ) lowerCamelCase : Optional[str] = field( default=__lowercase , metadata={'''help''': '''Where do you want to store the pretrained models downloaded from huggingface.co'''} , ) @dataclass class UpperCAmelCase_ : lowerCamelCase : str = field(metadata={'''help''': '''The name of the task to train on: ''' + ''', '''.join(processors.keys() )} ) lowerCamelCase : str = field(metadata={'''help''': '''Should contain the data files for the task.'''} ) lowerCamelCase : int = field( default=128 , metadata={ '''help''': ( '''The maximum total input sequence length after tokenization. Sequences longer ''' '''than this will be truncated, sequences shorter will be padded.''' ) } , ) lowerCamelCase : bool = field( default=__lowercase , metadata={'''help''': '''Overwrite the cached training and evaluation sets'''} ) def a_ ( ): # See all possible arguments in src/transformers/training_args.py # or by passing the --help flag to this script. # We now keep distinct sets of args, for a cleaner separation of concerns. lowerCAmelCase = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments) ) lowerCAmelCase , lowerCAmelCase , lowerCAmelCase = parser.parse_args_into_dataclasses() if ( os.path.exists(training_args.output_dir ) and os.listdir(training_args.output_dir ) and training_args.do_train and not training_args.overwrite_output_dir ): raise ValueError( f'''Output directory ({training_args.output_dir}) already exists and is not empty. Use''' ' --overwrite_output_dir to overcome.' 
) # Setup logging logging.basicConfig( format='%(asctime)s - %(levelname)s - %(name)s - %(message)s' , datefmt='%m/%d/%Y %H:%M:%S' , level=logging.INFO if training_args.local_rank in [-1, 0] else logging.WARN , ) logger.warning( 'Process rank: %s, device: %s, n_gpu: %s, distributed training: %s, 16-bits training: %s' , training_args.local_rank , training_args.device , training_args.n_gpu , bool(training_args.local_rank != -1 ) , training_args.fpaa , ) # Set the verbosity to info of the Transformers logger (on main process only): if is_main_process(training_args.local_rank ): transformers.utils.logging.set_verbosity_info() transformers.utils.logging.enable_default_handler() transformers.utils.logging.enable_explicit_format() logger.info('Training/evaluation parameters %s' , lowerCamelCase ) # Set seed set_seed(training_args.seed ) try: lowerCAmelCase = processors[data_args.task_name]() lowerCAmelCase = processor.get_labels() lowerCAmelCase = len(lowerCamelCase ) except KeyError: raise ValueError('Task not found: %s' % (data_args.task_name) ) # Load pretrained model and tokenizer # # Distributed training: # The .from_pretrained methods guarantee that only one local process can concurrently # download model & vocab. lowerCAmelCase = AutoConfig.from_pretrained( model_args.config_name if model_args.config_name else model_args.model_name_or_path , num_labels=lowerCamelCase , finetuning_task=data_args.task_name , cache_dir=model_args.cache_dir , ) lowerCAmelCase = AutoTokenizer.from_pretrained( model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path , cache_dir=model_args.cache_dir , ) lowerCAmelCase = AutoModelForMultipleChoice.from_pretrained( model_args.model_name_or_path , from_tf=bool('.ckpt' in model_args.model_name_or_path ) , config=lowerCamelCase , cache_dir=model_args.cache_dir , ) # Get datasets lowerCAmelCase = ( MultipleChoiceDataset( data_dir=data_args.data_dir , tokenizer=lowerCamelCase , task=data_args.task_name , max_seq_length=data_args.max_seq_length , overwrite_cache=data_args.overwrite_cache , mode=Split.train , ) if training_args.do_train else None ) lowerCAmelCase = ( MultipleChoiceDataset( data_dir=data_args.data_dir , tokenizer=lowerCamelCase , task=data_args.task_name , max_seq_length=data_args.max_seq_length , overwrite_cache=data_args.overwrite_cache , mode=Split.dev , ) if training_args.do_eval else None ) def compute_metrics(lowerCamelCase : EvalPrediction ) -> Dict: lowerCAmelCase = np.argmax(p.predictions , axis=1 ) return {"acc": simple_accuracy(lowerCamelCase , p.label_ids )} # Data collator lowerCAmelCase = DataCollatorWithPadding(lowerCamelCase , pad_to_multiple_of=8 ) if training_args.fpaa else None # Initialize our Trainer lowerCAmelCase = Trainer( model=lowerCamelCase , args=lowerCamelCase , train_dataset=lowerCamelCase , eval_dataset=lowerCamelCase , compute_metrics=lowerCamelCase , data_collator=lowerCamelCase , ) # Training if training_args.do_train: trainer.train( model_path=model_args.model_name_or_path if os.path.isdir(model_args.model_name_or_path ) else None ) trainer.save_model() # For convenience, we also re-save the tokenizer to the same directory, # so that you can share your model easily on huggingface.co/models =) if trainer.is_world_master(): tokenizer.save_pretrained(training_args.output_dir ) # Evaluation lowerCAmelCase = {} if training_args.do_eval: logger.info('*** Evaluate ***' ) lowerCAmelCase = trainer.evaluate() lowerCAmelCase = os.path.join(training_args.output_dir , 'eval_results.txt' ) if 
trainer.is_world_master(): with open(lowerCamelCase , 'w' ) as writer: logger.info('***** Eval results *****' ) for key, value in result.items(): logger.info(' %s = %s' , lowerCamelCase , lowerCamelCase ) writer.write('%s = %s\n' % (key, value) ) results.update(lowerCamelCase ) return results def a_ ( lowerCamelCase : Dict ): # For xla_spawn (TPUs) main() if __name__ == "__main__": main()
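# Typical invocation of this training script (a sketch; the script filename and
# data paths are placeholders):
#
#   python run_multiple_choice.py \
#     --task_name swag --model_name_or_path bert-base-uncased \
#     --data_dir ./swag --output_dir ./out \
#     --do_train --do_eval --max_seq_length 128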
import json import os import shutil import tempfile import unittest import numpy as np import pytest from transformers import MgpstrTokenizer from transformers.models.mgp_str.tokenization_mgp_str import VOCAB_FILES_NAMES from transformers.testing_utils import require_torch, require_vision from transformers.utils import IMAGE_PROCESSOR_NAME, is_torch_available, is_vision_available if is_torch_available(): import torch if is_vision_available(): from PIL import Image from transformers import MgpstrProcessor, ViTImageProcessor @require_torch @require_vision class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Union[str, Any] =ViTImageProcessor if is_vision_available() else None @property def UpperCAmelCase ( self ) -> Dict: return self.image_processor_tester.prepare_image_processor_dict() def UpperCAmelCase ( self ) -> int: UpperCamelCase :Union[str, Any] = (3, 32, 128) UpperCamelCase :Any = tempfile.mkdtemp() # fmt: off UpperCamelCase :int = ['''[GO]''', '''[s]''', '''0''', '''1''', '''2''', '''3''', '''4''', '''5''', '''6''', '''7''', '''8''', '''9''', '''a''', '''b''', '''c''', '''d''', '''e''', '''f''', '''g''', '''h''', '''i''', '''j''', '''k''', '''l''', '''m''', '''n''', '''o''', '''p''', '''q''', '''r''', '''s''', '''t''', '''u''', '''v''', '''w''', '''x''', '''y''', '''z'''] # fmt: on UpperCamelCase :Optional[int] = dict(zip(SCREAMING_SNAKE_CASE_ , range(len(SCREAMING_SNAKE_CASE_ ) ) ) ) UpperCamelCase :Optional[int] = os.path.join(self.tmpdirname , VOCAB_FILES_NAMES['''vocab_file'''] ) with open(self.vocab_file , '''w''' , encoding='''utf-8''' ) as fp: fp.write(json.dumps(SCREAMING_SNAKE_CASE_ ) + '''\n''' ) UpperCamelCase :Tuple = { '''do_normalize''': False, '''do_resize''': True, '''image_processor_type''': '''ViTImageProcessor''', '''resample''': 3, '''size''': {'''height''': 32, '''width''': 128}, } UpperCamelCase :str = os.path.join(self.tmpdirname , SCREAMING_SNAKE_CASE_ ) with open(self.image_processor_file , '''w''' , encoding='''utf-8''' ) as fp: json.dump(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> int: return MgpstrTokenizer.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self , **SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: return ViTImageProcessor.from_pretrained(self.tmpdirname , **SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> str: shutil.rmtree(self.tmpdirname ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = np.random.randint(255 , size=(3, 30, 400) , dtype=np.uinta ) UpperCamelCase :List[Any] = Image.fromarray(np.moveaxis(SCREAMING_SNAKE_CASE_ , 0 , -1 ) ) return image_input def UpperCAmelCase ( self ) -> str: UpperCamelCase :str = self.get_tokenizer() UpperCamelCase :Union[str, Any] = self.get_image_processor() UpperCamelCase :List[Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) processor.save_pretrained(self.tmpdirname ) UpperCamelCase :Dict = MgpstrProcessor.from_pretrained(self.tmpdirname , use_fast=SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.char_tokenizer.get_vocab() , tokenizer.get_vocab() ) self.assertIsInstance(processor.char_tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.image_processor.to_json_string() , image_processor.to_json_string() ) self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> int: UpperCamelCase :Optional[int] = self.get_tokenizer() UpperCamelCase :Dict = 
self.get_image_processor() UpperCamelCase :List[Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) processor.save_pretrained(self.tmpdirname ) UpperCamelCase :Optional[int] = self.get_tokenizer(bos_token='''(BOS)''' , eos_token='''(EOS)''' ) UpperCamelCase :Optional[Any] = self.get_image_processor(do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 ) UpperCamelCase :int = MgpstrProcessor.from_pretrained( self.tmpdirname , bos_token='''(BOS)''' , eos_token='''(EOS)''' , do_normalize=SCREAMING_SNAKE_CASE_ , padding_value=1.0 ) self.assertEqual(processor.char_tokenizer.get_vocab() , tokenizer_add_kwargs.get_vocab() ) self.assertIsInstance(processor.char_tokenizer , SCREAMING_SNAKE_CASE_ ) self.assertEqual(processor.image_processor.to_json_string() , image_processor_add_kwargs.to_json_string() ) self.assertIsInstance(processor.image_processor , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Tuple = self.get_image_processor() UpperCamelCase :List[str] = self.get_tokenizer() UpperCamelCase :str = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = self.prepare_image_inputs() UpperCamelCase :List[str] = image_processor(SCREAMING_SNAKE_CASE_ , return_tensors='''np''' ) UpperCamelCase :Optional[Any] = processor(images=SCREAMING_SNAKE_CASE_ , return_tensors='''np''' ) for key in input_image_proc.keys(): self.assertAlmostEqual(input_image_proc[key].sum() , input_processor[key].sum() , delta=1e-2 ) def UpperCAmelCase ( self ) -> Any: UpperCamelCase :Optional[Any] = self.get_image_processor() UpperCamelCase :Union[str, Any] = self.get_tokenizer() UpperCamelCase :int = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = '''test''' UpperCamelCase :Optional[int] = processor(text=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = tokenizer(SCREAMING_SNAKE_CASE_ ) for key in encoded_tok.keys(): self.assertListEqual(encoded_tok[key] , encoded_processor[key] ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :List[str] = self.get_image_processor() UpperCamelCase :Tuple = self.get_tokenizer() UpperCamelCase :Union[str, Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = '''test''' UpperCamelCase :str = self.prepare_image_inputs() UpperCamelCase :Dict = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(list(inputs.keys() ) , ['''pixel_values''', '''labels'''] ) # test if it raises when no input is passed with pytest.raises(SCREAMING_SNAKE_CASE_ ): processor() def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Optional[Any] = self.get_image_processor() UpperCamelCase :Any = self.get_tokenizer() UpperCamelCase :Union[str, Any] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = [[1, 4, 5, 8, 1, 0, 8], [3, 4, 3, 1, 1, 8, 9], [3, 4, 3, 1, 1, 8, 9]] UpperCamelCase :Union[str, Any] = processor.char_decode(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = tokenizer.batch_decode(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = [seq.replace(''' ''' , '''''' ) for seq in decoded_tok] self.assertListEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :List[Any] = self.get_image_processor() UpperCamelCase :Optional[Any] = self.get_tokenizer() UpperCamelCase 
:Any = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = None UpperCamelCase :List[Any] = self.prepare_image_inputs() UpperCamelCase :Union[str, Any] = processor(text=SCREAMING_SNAKE_CASE_ , images=SCREAMING_SNAKE_CASE_ ) self.assertListEqual(list(inputs.keys() ) , processor.model_input_names ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :str = self.get_image_processor() UpperCamelCase :Tuple = self.get_tokenizer() UpperCamelCase :Optional[int] = MgpstrProcessor(tokenizer=SCREAMING_SNAKE_CASE_ , image_processor=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.randn(1 , 27 , 38 ) UpperCamelCase :Union[str, Any] = torch.randn(1 , 27 , 5_0257 ) UpperCamelCase :Optional[Any] = torch.randn(1 , 27 , 3_0522 ) UpperCamelCase :Optional[Any] = processor.batch_decode([char_input, bpe_input, wp_input] ) self.assertListEqual(list(results.keys() ) , ['''generated_text''', '''scores''', '''char_preds''', '''bpe_preds''', '''wp_preds'''] )
import numpy as np
import torch

from imwatermark import WatermarkEncoder


# Copied from https://github.com/Stability-AI/generative-models/blob/613af104c6b85184091d42d374fef420eddb356d/scripts/demo/streamlit_helpers.py#L66
WATERMARK_MESSAGE = 0b101100111110110010010000011110111011000110011110
# bin(x)[2:] gives bits of x as str, use int to convert them to 0/1
WATERMARK_BITS = [int(bit) for bit in bin(WATERMARK_MESSAGE)[2:]]


class StableDiffusionXLWatermarker:
    def __init__(self) -> None:
        self.watermark = WATERMARK_BITS
        self.encoder = WatermarkEncoder()
        self.encoder.set_watermark("bits", self.watermark)

    def apply_watermark(self, images: torch.FloatTensor) -> torch.FloatTensor:
        # can't encode images that are smaller than 256
        if images.shape[-1] < 256:
            return images

        # [-1, 1] NCHW floats -> [0, 255] NHWC numpy, as expected by imwatermark
        images = (255 * (images / 2 + 0.5)).cpu().permute(0, 2, 3, 1).float().numpy()

        images = [self.encoder.encode(image, "dwtDct") for image in images]

        # back to [-1, 1] NCHW torch tensors
        images = torch.from_numpy(np.array(images)).permute(0, 3, 1, 2)
        images = torch.clamp(2 * (images / 255 - 0.5), min=-1.0, max=1.0)
        return images
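# --- Illustrative usage sketch (added; not part of the original file). Assumes
# the `invisible-watermark` package providing `imwatermark` is installed and
# that images are [-1, 1] floats in NCHW layout, as in the method above:
if __name__ == "__main__":
    watermarker = StableDiffusionXLWatermarker()
    sample = torch.rand(1, 3, 512, 512) * 2 - 1
    print(watermarker.apply_watermark(sample).shape)  # torch.Size([1, 3, 512, 512])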
import math


def solution(n: int = 100) -> int:
    """
    Return the difference between the square of the sum and the sum of the
    squares of the first ``n`` natural numbers (Project Euler Problem 6).
    """
    sum_of_squares = sum(i * i for i in range(1, n + 1))
    square_of_sum = int(math.pow(sum(range(1, n + 1)), 2))
    return square_of_sum - sum_of_squares


if __name__ == "__main__":
    print(f"{solution() = }")
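# --- Illustrative worked example (added; not part of the original file): for
# n = 10 the sum of squares is 385 and the square of the sum is 55**2 = 3025,
# so the difference is 2640.
if __name__ == "__main__":
    assert solution(10) == 2640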
from __future__ import annotations


def two_pointer(nums: list[int], target: int) -> list[int]:
    """
    Given a sorted list of integers, return the indices of two numbers that add
    up to ``target``, or an empty list if no such pair exists.
    """
    i = 0
    j = len(nums) - 1
    while i < j:
        if nums[i] + nums[j] == target:
            return [i, j]
        elif nums[i] + nums[j] < target:
            i = i + 1
        else:
            j = j - 1
    return []


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    print(f"{two_pointer([2, 7, 11, 15], 9) = }")
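# --- Illustrative usage sketch (added; not part of the original file). The
# two-pointer scan assumes `nums` is sorted ascending and runs in O(n) time
# with O(1) extra space:
if __name__ == "__main__":
    assert two_pointer([2, 7, 11, 15], 9) == [0, 1]
    assert two_pointer([2, 7, 11, 15], 26) == [2, 3]
    assert two_pointer([2, 7, 11, 15], 8) == []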
def abbr(a: str, b: str) -> bool:
    """
    Check whether string ``a`` can be abbreviated to string ``b`` by
    capitalizing some of its lowercase letters and deleting the remaining
    lowercase letters.
    """
    n = len(a)
    m = len(b)
    dp = [[False for _ in range(m + 1)] for _ in range(n + 1)]
    dp[0][0] = True
    for i in range(n):
        for j in range(m + 1):
            if dp[i][j]:
                if j < m and a[i].upper() == b[j]:
                    dp[i + 1][j + 1] = True
                if a[i].islower():
                    dp[i + 1][j] = True
    return dp[n][m]


if __name__ == "__main__":
    import doctest

    doctest.testmod()
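# --- Illustrative worked example (added; not part of the original file):
# "daBcd" can be abbreviated to "ABC" (capitalize "a" and "c", keep "B", delete
# the lowercase "d"s), while "dBcd" cannot, because the uppercase "B" leaves no
# letter that could become "A".
if __name__ == "__main__":
    assert abbr("daBcd", "ABC") is True
    assert abbr("dBcd", "ABC") is False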
from typing import List

from .keymap import KEYMAP, get_character


def mark(key: str):
    """
    Mark the function with the key code so it can be handled in the register.
    """

    def decorator(func):
        handle = getattr(func, "handle_key", [])
        handle += [key]
        setattr(func, "handle_key", handle)
        return func

    return decorator


def mark_multiple(*keys: List[str]):
    """
    Mark the function with the key codes so it can be handled in the register.
    """

    def decorator(func):
        handle = getattr(func, "handle_key", [])
        handle += keys
        setattr(func, "handle_key", handle)
        return func

    return decorator


class KeyHandler(type):
    """
    Metaclass that collects the marked key handlers from the class attributes
    into a ``key_handler`` registry and attaches ``handle_input``.
    """

    def __new__(cls, name, bases, attrs):
        new_cls = super().__new__(cls, name, bases, attrs)
        if not hasattr(new_cls, "key_handler"):
            setattr(new_cls, "key_handler", {})
        setattr(new_cls, "handle_input", KeyHandler.handle_input)

        for value in attrs.values():
            handled_keys = getattr(value, "handle_key", [])
            for key in handled_keys:
                new_cls.key_handler[key] = value
        return new_cls

    @staticmethod
    def handle_input(cls):
        """Finds and returns the selected character if it exists in the handler."""
        char = get_character()
        if char != KEYMAP["undefined"]:
            char = ord(char)
        handler = cls.key_handler.get(char)
        if handler:
            cls.current_selection = char
            return handler(cls)
        else:
            return None


def register(cls):
    """Adds the KeyHandler metaclass to the class."""
    return KeyHandler(cls.__name__, cls.__bases__, cls.__dict__.copy())
from math import factorial


DIGIT_FACTORIAL = {str(digit): factorial(digit) for digit in range(10)}


def digit_factorial_sum(number: int) -> int:
    """Return the sum of the factorials of the digits of ``number``."""
    if not isinstance(number, int):
        raise TypeError("Parameter number must be int")
    if number < 0:
        raise ValueError("Parameter number must be greater than or equal to 0")
    # Converts number in string to iterate on its digits and adds its factorial.
    return sum(DIGIT_FACTORIAL[digit] for digit in str(number))


def solution(chain_length: int = 60, number_limit: int = 1000000) -> int:
    """
    Count the starting numbers below ``number_limit`` whose digit-factorial-sum
    chain contains exactly ``chain_length`` non-repeating terms
    (Project Euler Problem 74).
    """
    if not isinstance(chain_length, int) or not isinstance(number_limit, int):
        raise TypeError("Parameters chain_length and number_limit must be int")
    if chain_length <= 0 or number_limit <= 0:
        raise ValueError(
            "Parameters chain_length and number_limit must be greater than 0"
        )

    # the counter for the chains with the exact desired length
    chains_counter = 0
    # the cached sizes of the previous chains
    chain_sets_lengths: dict[int, int] = {}

    for start_chain_element in range(1, number_limit):
        # The temporary set will contain the elements of the chain
        chain_set = set()
        chain_set_length = 0

        # Stop computing the chain when you find a cached size, a repeating item or the
        # length is greater then the desired one.
        chain_element = start_chain_element
        while (
            chain_element not in chain_sets_lengths
            and chain_element not in chain_set
            and chain_set_length <= chain_length
        ):
            chain_set.add(chain_element)
            chain_set_length += 1
            chain_element = digit_factorial_sum(chain_element)

        if chain_element in chain_sets_lengths:
            chain_set_length += chain_sets_lengths[chain_element]

        chain_sets_lengths[start_chain_element] = chain_set_length

        # If chain contains the exact amount of elements increase the counter
        if chain_set_length == chain_length:
            chains_counter += 1
    return chains_counter


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    print(f"{solution()}")
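# --- Illustrative worked example (added; not part of the original file):
# starting from 169 the chain is 169 -> 363601 -> 1454 -> 169 -> ..., i.e. it
# has exactly three non-repeating terms, so at least one chain of length 3
# starts below 170.
if __name__ == "__main__":
    assert digit_factorial_sum(169) == 363601
    assert solution(chain_length=3, number_limit=170) >= 1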
from typing import TYPE_CHECKING

from ..utils import _LazyModule


_import_structure = {
    "config": [
        "EXTERNAL_DATA_FORMAT_SIZE_LIMIT",
        "OnnxConfig",
        "OnnxConfigWithPast",
        "OnnxSeq2SeqConfigWithPast",
        "PatchingSpec",
    ],
    "convert": ["export", "validate_model_outputs"],
    "features": ["FeaturesManager"],
    "utils": ["ParameterFormat", "compute_serialized_parameters_size"],
}

if TYPE_CHECKING:
    from .config import (
        EXTERNAL_DATA_FORMAT_SIZE_LIMIT,
        OnnxConfig,
        OnnxConfigWithPast,
        OnnxSeq2SeqConfigWithPast,
        PatchingSpec,
    )
    from .convert import export, validate_model_outputs
    from .features import FeaturesManager
    from .utils import ParameterFormat, compute_serialized_parameters_size
else:
    import sys

    # Install the lazy module so submodules are only imported on first access.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
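# --- Illustrative sketch (added; hypothetical, not part of the package): the
# same deferred-import effect can be written by hand with a PEP 562
# module-level __getattr__, which imports a submodule only when one of its
# names is first touched. The `_LAZY_SYMBOLS` mapping below is an assumption
# for illustration, not the library's API.
import importlib

_LAZY_SYMBOLS = {"config": ["OnnxConfig"], "convert": ["export"]}


def __getattr__(name):
    for module_name, symbols in _LAZY_SYMBOLS.items():
        if name in symbols:
            module = importlib.import_module(f".{module_name}", __name__)
            return getattr(module, name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")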
import unittest import numpy as np import torch from diffusers import DDIMPipeline, DDIMScheduler, UNetaDModel from diffusers.utils.testing_utils import enable_full_determinism, require_torch_gpu, slow, torch_device from ..pipeline_params import UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS, UNCONDITIONAL_IMAGE_GENERATION_PARAMS from ..test_pipelines_common import PipelineTesterMixin enable_full_determinism() class UpperCAmelCase_ ( lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : int =DDIMPipeline UpperCamelCase_ : str =UNCONDITIONAL_IMAGE_GENERATION_PARAMS UpperCamelCase_ : str =PipelineTesterMixin.required_optional_params - { 'num_images_per_prompt', 'latents', 'callback', 'callback_steps', } UpperCamelCase_ : Optional[Any] =UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS UpperCamelCase_ : List[str] =False def UpperCAmelCase ( self ) -> Any: torch.manual_seed(0 ) UpperCamelCase :Optional[int] = UNetaDModel( block_out_channels=(32, 64) , layers_per_block=2 , sample_size=32 , in_channels=3 , out_channels=3 , down_block_types=('''DownBlock2D''', '''AttnDownBlock2D''') , up_block_types=('''AttnUpBlock2D''', '''UpBlock2D''') , ) UpperCamelCase :Dict = DDIMScheduler() UpperCamelCase :Any = {'''unet''': unet, '''scheduler''': scheduler} return components def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=0 ) -> Any: if str(SCREAMING_SNAKE_CASE_ ).startswith('''mps''' ): UpperCamelCase :List[Any] = torch.manual_seed(SCREAMING_SNAKE_CASE_ ) else: UpperCamelCase :List[Any] = torch.Generator(device=SCREAMING_SNAKE_CASE_ ).manual_seed(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = { '''batch_size''': 1, '''generator''': generator, '''num_inference_steps''': 2, '''output_type''': '''numpy''', } return inputs def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Optional[int] = '''cpu''' UpperCamelCase :Union[str, Any] = self.get_dummy_components() UpperCamelCase :Optional[Any] = self.pipeline_class(**SCREAMING_SNAKE_CASE_ ) pipe.to(SCREAMING_SNAKE_CASE_ ) pipe.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = self.get_dummy_inputs(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = pipe(**SCREAMING_SNAKE_CASE_ ).images UpperCamelCase :str = image[0, -3:, -3:, -1] self.assertEqual(image.shape , (1, 32, 32, 3) ) UpperCamelCase :Tuple = np.array( [1.000e00, 5.717e-01, 4.717e-01, 1.000e00, 0.000e00, 1.000e00, 3.000e-04, 0.000e00, 9.000e-04] ) UpperCamelCase :List[str] = np.abs(image_slice.flatten() - expected_slice ).max() self.assertLessEqual(SCREAMING_SNAKE_CASE_ , 1e-3 ) def UpperCAmelCase ( self ) -> int: super().test_dict_tuple_outputs_equivalent(expected_max_difference=3e-3 ) def UpperCAmelCase ( self ) -> Optional[int]: super().test_save_load_local(expected_max_difference=3e-3 ) def UpperCAmelCase ( self ) -> Any: super().test_save_load_optional_components(expected_max_difference=3e-3 ) def UpperCAmelCase ( self ) -> str: super().test_inference_batch_single_identical(expected_max_diff=3e-3 ) @slow @require_torch_gpu class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :int = '''google/ddpm-cifar10-32''' UpperCamelCase :Union[str, Any] = UNetaDModel.from_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = DDIMScheduler() UpperCamelCase :Tuple = DDIMPipeline(unet=SCREAMING_SNAKE_CASE_ , scheduler=SCREAMING_SNAKE_CASE_ ) ddim.to(SCREAMING_SNAKE_CASE_ ) ddim.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ ) 
UpperCamelCase :Dict = torch.manual_seed(0 ) UpperCamelCase :Optional[int] = ddim(generator=SCREAMING_SNAKE_CASE_ , eta=0.0 , output_type='''numpy''' ).images UpperCamelCase :int = image[0, -3:, -3:, -1] assert image.shape == (1, 32, 32, 3) UpperCamelCase :Tuple = np.array([0.1723, 0.1617, 0.1600, 0.1626, 0.1497, 0.1513, 0.1505, 0.1442, 0.1453] ) assert np.abs(image_slice.flatten() - expected_slice ).max() < 1e-2 def UpperCAmelCase ( self ) -> int: UpperCamelCase :Optional[Any] = '''google/ddpm-ema-bedroom-256''' UpperCamelCase :Any = UNetaDModel.from_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = DDIMScheduler.from_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = DDIMPipeline(unet=SCREAMING_SNAKE_CASE_ , scheduler=SCREAMING_SNAKE_CASE_ ) ddpm.to(SCREAMING_SNAKE_CASE_ ) ddpm.set_progress_bar_config(disable=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = torch.manual_seed(0 ) UpperCamelCase :Optional[int] = ddpm(generator=SCREAMING_SNAKE_CASE_ , output_type='''numpy''' ).images UpperCamelCase :Optional[int] = image[0, -3:, -3:, -1] assert image.shape == (1, 256, 256, 3) UpperCamelCase :Dict = np.array([0.0060, 0.0201, 0.0344, 0.0024, 0.0018, 0.0002, 0.0022, 0.0000, 0.0069] ) assert np.abs(image_slice.flatten() - expected_slice ).max() < 1e-2
from __future__ import annotations

from collections.abc import Iterable, Iterator
from dataclasses import dataclass


test_data_odd = (3, 9, -11, 0, 7, 5, 1, -1)
test_data_even = (4, 6, 2, 0, 8, 10, 3, -2)


@dataclass
class Node:
    data: int
    next_node: Node | None


class SortedLinkedList:
    def __init__(self, ints: Iterable[int]) -> None:
        self.head: Node | None = None
        for i in sorted(ints, reverse=True):
            self.head = Node(i, self.head)

    def __iter__(self) -> Iterator[int]:
        node = self.head
        while node:
            yield node.data
            node = node.next_node

    def __len__(self) -> int:
        return sum(1 for _ in self)

    def __str__(self) -> str:
        return " -> ".join([str(node) for node in self])


def merge_lists(sll_one: SortedLinkedList, sll_two: SortedLinkedList) -> SortedLinkedList:
    return SortedLinkedList(list(sll_one) + list(sll_two))


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    SSL = SortedLinkedList
    print(merge_lists(SSL(test_data_odd), SSL(test_data_even)))
from json import JSONDecodeError  # Workaround for requests.exceptions.JSONDecodeError

import requests


def get_openlibrary_data(olid: str = "isbn/0140328726") -> dict:
    """Given an 'isbn/0140328726'-style olid, return the book data from Open Library as a dict."""
    new_olid = olid.strip().strip("/")  # Remove leading/trailing whitespace & slashes
    if new_olid.count("/") != 1:
        raise ValueError(f"{olid} is not a valid Open Library olid")
    return requests.get(f"https://openlibrary.org/{new_olid}.json").json()


def summarize_book(ol_book_data: dict) -> dict:
    """Given Open Library book data, return a human-readable summary as a dict."""
    desired_keys = {
        "title": "Title",
        "publish_date": "Publish date",
        "authors": "Authors",
        "number_of_pages": "Number of pages:",
        "first_sentence": "First sentence",
        "isbn_10": "ISBN (10)",
        "isbn_13": "ISBN (13)",
    }
    data = {better_key: ol_book_data[key] for key, better_key in desired_keys.items()}
    data["Authors"] = [
        get_openlibrary_data(author["key"])["name"] for author in data["Authors"]
    ]
    data["First sentence"] = data["First sentence"]["value"]
    for key, value in data.items():
        if isinstance(value, list):
            data[key] = ", ".join(value)
    return data


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    while True:
        isbn = input("\nEnter the ISBN code to search (or 'quit' to stop): ").strip()
        if isbn.lower() in ("", "q", "quit", "exit", "stop"):
            break
        if len(isbn) not in (10, 13) or not isbn.isdigit():
            print(f"Sorry, {isbn} is not a valid ISBN. Please, input a valid ISBN.")
            continue
        print(f"\nSearching Open Library for ISBN: {isbn}...\n")
        try:
            book_summary = summarize_book(get_openlibrary_data(f"isbn/{isbn}"))
            print("\n".join(f"{key}: {value}" for key, value in book_summary.items()))
        except JSONDecodeError:  # Workaround for requests.exceptions.RequestException:
            print(f"Sorry, there are no results for ISBN: {isbn}.")
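# --- Illustrative usage sketch (added; not part of the original file). This
# performs a live HTTP request against openlibrary.org, so it needs network
# access; ISBN 0140328726 is the module's own default example.
if __name__ == "__main__":
    print(summarize_book(get_openlibrary_data("isbn/0140328726"))["Title"])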
from arguments import InitializationArguments

from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer, HfArgumentParser


# Configuration
parser = HfArgumentParser(InitializationArguments)
args = parser.parse_args()

# Load codeparrot tokenizer trained for Python code tokenization
tokenizer = AutoTokenizer.from_pretrained(args.tokenizer_name)

# Config: "scale_attn_by_layer_idx" and "reorder_and_upcast_attn" are Mistral stability tweaks
config_kwargs = {
    "vocab_size": len(tokenizer),
    "scale_attn_by_inverse_layer_idx": True,
    "reorder_and_upcast_attn": True,
}

# Load model config (GPT-2 large in this case)
config = AutoConfig.from_pretrained(args.config_name, **config_kwargs)

# Initialize new model with config
model = AutoModelForCausalLM.from_config(config)

# Save model to the hub
model.save_pretrained(args.model_name, push_to_hub=args.push_to_hub)
import inspect import tempfile import unittest from huggingface_hub import hf_hub_download from transformers import is_torch_available from transformers.testing_utils import is_flaky, require_torch, slow, torch_device from ...test_configuration_common import ConfigTester from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor from ...test_pipeline_mixin import PipelineTesterMixin __snake_case = 1E-4 if is_torch_available(): import torch from transformers import AutoformerConfig, AutoformerForPrediction, AutoformerModel from transformers.models.autoformer.modeling_autoformer import AutoformerDecoder, AutoformerEncoder @require_torch class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=13 , SCREAMING_SNAKE_CASE_=7 , SCREAMING_SNAKE_CASE_=14 , SCREAMING_SNAKE_CASE_=10 , SCREAMING_SNAKE_CASE_=19 , SCREAMING_SNAKE_CASE_=5 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=[1, 2, 3, 4, 5] , SCREAMING_SNAKE_CASE_=25 , SCREAMING_SNAKE_CASE_=5 , ) -> str: UpperCamelCase :Any = d_model UpperCamelCase :List[str] = parent UpperCamelCase :List[Any] = batch_size UpperCamelCase :str = prediction_length UpperCamelCase :str = context_length UpperCamelCase :int = cardinality UpperCamelCase :Optional[Any] = num_time_features UpperCamelCase :Optional[Any] = lags_sequence UpperCamelCase :str = embedding_dimension UpperCamelCase :str = is_training UpperCamelCase :Optional[int] = hidden_size UpperCamelCase :List[Any] = num_hidden_layers UpperCamelCase :int = num_attention_heads UpperCamelCase :Tuple = intermediate_size UpperCamelCase :List[str] = hidden_act UpperCamelCase :List[str] = hidden_dropout_prob UpperCamelCase :List[Any] = attention_probs_dropout_prob UpperCamelCase :Optional[int] = context_length UpperCamelCase :Tuple = prediction_length + label_length UpperCamelCase :Optional[Any] = label_length UpperCamelCase :Optional[int] = moving_average UpperCamelCase :Union[str, Any] = autocorrelation_factor def UpperCAmelCase ( self ) -> Optional[int]: return AutoformerConfig( d_model=self.d_model , encoder_layers=self.num_hidden_layers , decoder_layers=self.num_hidden_layers , encoder_attention_heads=self.num_attention_heads , decoder_attention_heads=self.num_attention_heads , encoder_ffn_dim=self.intermediate_size , decoder_ffn_dim=self.intermediate_size , dropout=self.hidden_dropout_prob , attention_dropout=self.attention_probs_dropout_prob , prediction_length=self.prediction_length , context_length=self.context_length , label_length=self.label_length , lags_sequence=self.lags_sequence , num_time_features=self.num_time_features , num_static_categorical_features=1 , cardinality=[self.cardinality] , embedding_dimension=[self.embedding_dimension] , moving_average=self.moving_average , ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> List[str]: UpperCamelCase :Optional[Any] = config.context_length + max(config.lags_sequence ) UpperCamelCase :Union[str, Any] = ids_tensor([self.batch_size, 1] , config.cardinality[0] ) UpperCamelCase :List[str] = floats_tensor([self.batch_size, _past_length, config.num_time_features] ) UpperCamelCase :Union[str, Any] = floats_tensor([self.batch_size, _past_length] ) UpperCamelCase :Any = floats_tensor([self.batch_size, _past_length] ) > 0.5 # 
decoder inputs UpperCamelCase :Tuple = floats_tensor([self.batch_size, config.prediction_length, config.num_time_features] ) UpperCamelCase :int = floats_tensor([self.batch_size, config.prediction_length] ) UpperCamelCase :Union[str, Any] = { '''past_values''': past_values, '''static_categorical_features''': static_categorical_features, '''past_time_features''': past_time_features, '''past_observed_mask''': past_observed_mask, '''future_time_features''': future_time_features, '''future_values''': future_values, } return inputs_dict def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :int = self.get_config() UpperCamelCase :Union[str, Any] = self.prepare_autoformer_inputs_dict(SCREAMING_SNAKE_CASE_ ) return config, inputs_dict def UpperCAmelCase ( self ) -> Any: UpperCamelCase , UpperCamelCase :Optional[int] = self.prepare_config_and_inputs() return config, inputs_dict def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[Any]: UpperCamelCase :int = AutoformerModel(config=SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ ).eval() UpperCamelCase :Any = model(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = outputs.encoder_last_hidden_state UpperCamelCase :str = outputs.last_hidden_state with tempfile.TemporaryDirectory() as tmpdirname: UpperCamelCase :Any = model.get_encoder() encoder.save_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = AutoformerEncoder.from_pretrained(SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :int = model.create_network_inputs(**SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase :Tuple = model.decomposition_layer(transformer_inputs[:, : config.context_length, ...] ) UpperCamelCase :Tuple = torch.cat( (transformer_inputs[:, : config.context_length, ...], feature[:, : config.context_length, ...]) , dim=-1 , ) UpperCamelCase :Optional[Any] = encoder(inputs_embeds=SCREAMING_SNAKE_CASE_ )[0] self.parent.assertTrue((encoder_last_hidden_state_a - encoder_last_hidden_state).abs().max().item() < 1e-3 ) UpperCamelCase :Optional[Any] = ( torch.mean(transformer_inputs[:, : config.context_length, ...] 
, dim=1 ) .unsqueeze(1 ) .repeat(1 , config.prediction_length , 1 ) ) UpperCamelCase :Union[str, Any] = torch.zeros( [transformer_inputs.shape[0], config.prediction_length, transformer_inputs.shape[2]] , device=enc_input.device , ) UpperCamelCase :Tuple = torch.cat( ( torch.cat((seasonal_input[:, -config.label_length :, ...], zeros) , dim=1 ), feature[:, config.context_length - config.label_length :, ...], ) , dim=-1 , ) UpperCamelCase :Optional[Any] = torch.cat( ( torch.cat((trend_input[:, -config.label_length :, ...], mean) , dim=1 ), feature[:, config.context_length - config.label_length :, ...], ) , dim=-1 , ) with tempfile.TemporaryDirectory() as tmpdirname: UpperCamelCase :Union[str, Any] = model.get_decoder() decoder.save_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = AutoformerDecoder.from_pretrained(SCREAMING_SNAKE_CASE_ ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = decoder( trend=SCREAMING_SNAKE_CASE_ , inputs_embeds=SCREAMING_SNAKE_CASE_ , encoder_hidden_states=SCREAMING_SNAKE_CASE_ , )[0] self.parent.assertTrue((last_hidden_state_a - last_hidden_state).abs().max().item() < 1e-3 ) @require_torch class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : List[str] =(AutoformerModel, AutoformerForPrediction) if is_torch_available() else () UpperCamelCase_ : List[str] =(AutoformerForPrediction,) if is_torch_available() else () UpperCamelCase_ : Optional[Any] ={'feature-extraction': AutoformerModel} if is_torch_available() else {} UpperCamelCase_ : Any =False UpperCamelCase_ : List[str] =False UpperCamelCase_ : Dict =False UpperCamelCase_ : Dict =False UpperCamelCase_ : int =False UpperCamelCase_ : Optional[int] =False def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :str = AutoformerModelTester(self ) UpperCamelCase :int = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , has_text_modality=SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: self.config_tester.run_common_tests() def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase , UpperCamelCase :str = self.model_tester.prepare_config_and_inputs() for model_class in self.all_model_classes: UpperCamelCase :Optional[int] = model_class(SCREAMING_SNAKE_CASE_ ) with tempfile.TemporaryDirectory() as tmpdirname: model.save_pretrained(SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase :List[str] = model_class.from_pretrained(SCREAMING_SNAKE_CASE_ , output_loading_info=SCREAMING_SNAKE_CASE_ ) self.assertEqual(info['''missing_keys'''] , [] ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_encoder_decoder_model_standalone(*SCREAMING_SNAKE_CASE_ ) @unittest.skip(reason='''Model has no tokens embeddings''' ) def UpperCAmelCase ( self ) -> int: pass def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :str = inspect.signature(getattr(SCREAMING_SNAKE_CASE_ , '''forward''' ) ) # The main input is the name of the argument after `self` UpperCamelCase :List[str] = list(model_signature.parameters.keys() )[1] self.assertEqual(AutoformerModel.main_input_name , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase , UpperCamelCase :Optional[Any] = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: UpperCamelCase :List[Any] = model_class(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = inspect.signature(model.forward ) # signature.parameters is an OrderedDict => so 
arg_names order is deterministic UpperCamelCase :Tuple = [*signature.parameters.keys()] UpperCamelCase :Optional[Any] = [ '''past_values''', '''past_time_features''', '''past_observed_mask''', '''static_categorical_features''', '''static_real_features''', '''future_values''', '''future_time_features''', ] if model.__class__.__name__ in ["AutoformerForPrediction"]: expected_arg_names.append('''future_observed_mask''' ) expected_arg_names.extend( [ '''decoder_attention_mask''', '''head_mask''', '''decoder_head_mask''', '''cross_attn_head_mask''', '''encoder_outputs''', '''past_key_values''', '''output_hidden_states''', '''output_attentions''', '''use_cache''', '''return_dict''', ] ) self.assertListEqual(arg_names[: len(SCREAMING_SNAKE_CASE_ )] , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase , UpperCamelCase :List[Any] = self.model_tester.prepare_config_and_inputs_for_common() UpperCamelCase :Dict = True UpperCamelCase :Dict = getattr(self.model_tester , '''seq_length''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = getattr(self.model_tester , '''decoder_seq_length''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = getattr(self.model_tester , '''encoder_seq_length''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = getattr(self.model_tester , '''d_model''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Tuple = getattr(self.model_tester , '''num_attention_heads''' , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = d_model // num_attention_heads for model_class in self.all_model_classes: UpperCamelCase :Tuple = True UpperCamelCase :Tuple = False UpperCamelCase :Any = True UpperCamelCase :List[Any] = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() with torch.no_grad(): UpperCamelCase :int = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :Union[str, Any] = outputs.encoder_attentions if config.is_encoder_decoder else outputs.attentions self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers ) # check that output_attentions also work using config del inputs_dict["output_attentions"] UpperCamelCase :Dict = True UpperCamelCase :Union[str, Any] = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() with torch.no_grad(): UpperCamelCase :Optional[Any] = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :List[str] = outputs.encoder_attentions self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers ) self.assertListEqual( list(attentions[0].shape[-3:] ) , [self.model_tester.num_attention_heads, encoder_seq_length, dim] , ) UpperCamelCase :List[str] = len(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[Any] = 7 if "last_hidden_state" in outputs: correct_outlen += 1 if "trend" in outputs: correct_outlen += 1 if "past_key_values" in outputs: correct_outlen += 1 # past_key_values have been returned if "loss" in outputs: correct_outlen += 1 if "params" in outputs: correct_outlen += 1 self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # decoder attentions UpperCamelCase :Union[str, Any] = outputs.decoder_attentions self.assertIsInstance(SCREAMING_SNAKE_CASE_ , (list, tuple) ) self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers ) self.assertListEqual( list(decoder_attentions[0].shape[-3:] ) , [self.model_tester.num_attention_heads, decoder_seq_length, dim] , ) # cross attentions UpperCamelCase :Union[str, Any] = 
outputs.cross_attentions self.assertIsInstance(SCREAMING_SNAKE_CASE_ , (list, tuple) ) self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers ) self.assertListEqual( list(cross_attentions[0].shape[-3:] ) , [self.model_tester.num_attention_heads, decoder_seq_length, dim] , ) # Check attention is always last and order is fine UpperCamelCase :Any = True UpperCamelCase :int = True UpperCamelCase :Any = model_class(SCREAMING_SNAKE_CASE_ ) model.to(SCREAMING_SNAKE_CASE_ ) model.eval() with torch.no_grad(): UpperCamelCase :Optional[Any] = model(**self._prepare_for_class(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ) self.assertEqual(out_len + 2 , len(SCREAMING_SNAKE_CASE_ ) ) UpperCamelCase :List[str] = outputs.encoder_attentions if config.is_encoder_decoder else outputs.attentions self.assertEqual(len(SCREAMING_SNAKE_CASE_ ) , self.model_tester.num_hidden_layers ) self.assertListEqual( list(self_attentions[0].shape[-3:] ) , [self.model_tester.num_attention_heads, encoder_seq_length, dim] , ) @is_flaky() def UpperCAmelCase ( self ) -> List[Any]: super().test_retain_grad_hidden_states_attentions() def _A ( SCREAMING_SNAKE_CASE__ : int="train-batch.pt" ): UpperCamelCase :Union[str, Any] = hf_hub_download(repo_id='''hf-internal-testing/tourism-monthly-batch''' , filename=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) UpperCamelCase :Tuple = torch.load(SCREAMING_SNAKE_CASE__ , map_location=SCREAMING_SNAKE_CASE__ ) return batch @require_torch @slow class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :int = AutoformerModel.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = prepare_batch() with torch.no_grad(): UpperCamelCase :Optional[Any] = model( past_values=batch['''past_values'''] , past_time_features=batch['''past_time_features'''] , past_observed_mask=batch['''past_observed_mask'''] , static_categorical_features=batch['''static_categorical_features'''] , future_values=batch['''future_values'''] , future_time_features=batch['''future_time_features'''] , )[0] UpperCamelCase :Union[str, Any] = torch.Size( (64, model.config.prediction_length + model.config.label_length, model.config.feature_size) ) self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Tuple = torch.tensor( [[0.3593, -1.3398, 0.6330], [0.2279, 1.5396, -0.1792], [0.0450, 1.3225, -0.2335]] , device=SCREAMING_SNAKE_CASE_ ) self.assertTrue(torch.allclose(output[0, :3, :3] , SCREAMING_SNAKE_CASE_ , atol=SCREAMING_SNAKE_CASE_ ) ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Any = AutoformerForPrediction.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = prepare_batch('''val-batch.pt''' ) with torch.no_grad(): UpperCamelCase :Dict = model( past_values=batch['''past_values'''] , past_time_features=batch['''past_time_features'''] , past_observed_mask=batch['''past_observed_mask'''] , static_categorical_features=batch['''static_categorical_features'''] , ).encoder_last_hidden_state UpperCamelCase :Union[str, Any] = torch.Size((64, model.config.context_length, model.config.d_model) ) self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = torch.tensor( [[-0.0734, -0.9036, 0.8358], [4.7186, 2.4113, 1.9581], [1.7953, 2.3558, 1.2970]] , device=SCREAMING_SNAKE_CASE_ ) self.assertTrue(torch.allclose(output[0, :3, :3] , SCREAMING_SNAKE_CASE_ , 
atol=SCREAMING_SNAKE_CASE_ ) ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = AutoformerForPrediction.from_pretrained('''huggingface/autoformer-tourism-monthly''' ).to(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = prepare_batch('''val-batch.pt''' ) with torch.no_grad(): UpperCamelCase :Tuple = model.generate( static_categorical_features=batch['''static_categorical_features'''] , past_time_features=batch['''past_time_features'''] , past_values=batch['''past_values'''] , future_time_features=batch['''future_time_features'''] , past_observed_mask=batch['''past_observed_mask'''] , ) UpperCamelCase :Optional[int] = torch.Size((64, model.config.num_parallel_samples, model.config.prediction_length) ) self.assertEqual(outputs.sequences.shape , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = torch.tensor([3130.6763, 4056.5293, 7053.0786] , device=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = outputs.sequences.mean(dim=1 ) self.assertTrue(torch.allclose(mean_prediction[0, -3:] , SCREAMING_SNAKE_CASE_ , rtol=1e-1 ) )
def solution(n: int = 2000000) -> int:
    """
    Return the sum of all primes below ``n`` (Project Euler Problem 10), using a
    sieve of Eratosthenes: 0 marks a prime candidate, 1 marks a composite.
    """
    primality_list = [0 for i in range(n + 1)]
    primality_list[0] = 1
    primality_list[1] = 1
    for i in range(2, int(n**0.5) + 1):
        if primality_list[i] == 0:
            for j in range(i * i, n + 1, i):
                primality_list[j] = 1
    sum_of_primes = 0
    for i in range(n):
        if primality_list[i] == 0:
            sum_of_primes += i
    return sum_of_primes


if __name__ == "__main__":
    print(f"{solution() = }")
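# --- Illustrative worked example (added; not part of the original file): the
# primes below 10 are 2, 3, 5 and 7, which sum to 17.
if __name__ == "__main__":
    assert solution(10) == 17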
import inspect import logging import os import random import shutil import tempfile import unittest import pytest import torch from torch import nn from torch.utils.data import DataLoader, TensorDataset from accelerate import Accelerator from accelerate.test_utils import execute_subprocess_async, require_cuda from accelerate.utils import ProjectConfiguration, set_seed __snake_case = logging.getLogger(__name__) def _A ( SCREAMING_SNAKE_CASE__ : Dict=2 , SCREAMING_SNAKE_CASE__ : Dict=3 , SCREAMING_SNAKE_CASE__ : Any=16 , SCREAMING_SNAKE_CASE__ : int = 10 , SCREAMING_SNAKE_CASE__ : int = 2 ): def get_dataset(SCREAMING_SNAKE_CASE__ : List[Any] ): UpperCamelCase :Union[str, Any] = torch.randn(batch_size * n_batches , 1 ) return TensorDataset(SCREAMING_SNAKE_CASE__ , a * x + b + 0.1 * torch.randn(batch_size * n_batches , 1 ) ) UpperCamelCase :str = get_dataset(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = get_dataset(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Any = DataLoader(SCREAMING_SNAKE_CASE__ , shuffle=SCREAMING_SNAKE_CASE__ , batch_size=SCREAMING_SNAKE_CASE__ , num_workers=4 ) UpperCamelCase :Any = DataLoader(SCREAMING_SNAKE_CASE__ , shuffle=SCREAMING_SNAKE_CASE__ , batch_size=SCREAMING_SNAKE_CASE__ , num_workers=4 ) return (train_dataloader, valid_dataloader) def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Optional[Any] , SCREAMING_SNAKE_CASE__ : str , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Any=None ): UpperCamelCase :Dict = [] for epoch in range(SCREAMING_SNAKE_CASE__ ): # Train quickly model.train() for batch in dataloader: UpperCamelCase , UpperCamelCase :Optional[Any] = batch UpperCamelCase :int = model(SCREAMING_SNAKE_CASE__ ) UpperCamelCase :Optional[int] = torch.nn.functional.mse_loss(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) accelerator.backward(SCREAMING_SNAKE_CASE__ ) optimizer.step() optimizer.zero_grad() rands.append(random.random() ) # Introduce some randomness if scheduler is not None: scheduler.step() return rands class UpperCAmelCase_ ( nn.Module ): """simple docstring""" def __init__( self ) -> str: super().__init__() UpperCamelCase :Optional[int] = nn.Parameter(torch.randn(1 ) ) UpperCamelCase :int = nn.Parameter(torch.randn(1 ) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ ) -> int: return x * self.a + self.b class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" def UpperCAmelCase ( self ) -> Dict: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :Tuple = dummy_dataloaders() UpperCamelCase :Tuple = ProjectConfiguration(total_limit=1 , project_dir=SCREAMING_SNAKE_CASE_ , automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :Dict = Accelerator(project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Union[str, Any] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() # Save second state accelerator.save_state() self.assertEqual(len(os.listdir(accelerator.project_dir ) ) , 1 ) def UpperCAmelCase ( self ) -> str: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[str] = DummyModel() UpperCamelCase :Union[str, Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase 
, UpperCamelCase :Dict = dummy_dataloaders() # Train baseline UpperCamelCase :Dict = Accelerator() UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :int = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial UpperCamelCase :int = os.path.join(SCREAMING_SNAKE_CASE_ , '''initial''' ) accelerator.save_state(SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[Any] = model.a.item(), model.b.item() UpperCamelCase :Optional[int] = optimizer.state_dict() UpperCamelCase :Optional[int] = train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Dict = model.a.item(), model.b.item() UpperCamelCase :Optional[Any] = optimizer.state_dict() # Train partially set_seed(42 ) UpperCamelCase :Any = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :List[Any] = dummy_dataloaders() UpperCamelCase :List[str] = Accelerator() UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Tuple = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) accelerator.load_state(SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Tuple = model.a.item(), model.b.item() UpperCamelCase :Tuple = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[int] = train(2 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save everything UpperCamelCase :Optional[int] = os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoint''' ) accelerator.save_state(SCREAMING_SNAKE_CASE_ ) # Load everything back in and make sure all states work accelerator.load_state(SCREAMING_SNAKE_CASE_ ) test_rands += train(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Union[str, Any] = model.a.item(), model.b.item() UpperCamelCase :Optional[Any] = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[Any] = DummyModel() UpperCamelCase :Optional[int] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :int = dummy_dataloaders() UpperCamelCase :int = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :Union[str, Any] = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :Optional[Any] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() ((UpperCamelCase) , (UpperCamelCase)) :List[str] = model.a.item(), model.b.item() UpperCamelCase :Dict = optimizer.state_dict() UpperCamelCase :Any = train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , 
SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[int] = model.a.item(), model.b.item() UpperCamelCase :Any = optimizer.state_dict() # Train partially set_seed(42 ) UpperCamelCase :Union[str, Any] = DummyModel() UpperCamelCase :List[Any] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase , UpperCamelCase :Tuple = dummy_dataloaders() UpperCamelCase :Optional[Any] = ProjectConfiguration(iteration=1 , automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Union[str, Any] = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase :List[str] = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) ((UpperCamelCase) , (UpperCamelCase)) :Dict = model.a.item(), model.b.item() UpperCamelCase :Dict = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Any = train(2 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save everything accelerator.save_state() # Load everything back in and make sure all states work accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_1''' ) ) test_rands += train(1 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) ((UpperCamelCase) , (UpperCamelCase)) :Optional[Any] = model.a.item(), model.b.item() UpperCamelCase :str = optimizer.state_dict() self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertEqual(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :List[Any] = torch.tensor([1, 2, 3] ) UpperCamelCase :Any = torch.tensor([2, 3, 4] ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :Optional[Any] = torch.optim.Adam(net.parameters() ) UpperCamelCase :Optional[Any] = Accelerator() with self.assertRaises(SCREAMING_SNAKE_CASE_ ) as ve: accelerator.register_for_checkpointing(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = str(ve.exception ) self.assertTrue('''Item at index 0''' in message ) self.assertTrue('''Item at index 1''' in message ) self.assertFalse('''Item at index 2''' in message ) self.assertFalse('''Item at index 3''' in message ) def UpperCAmelCase ( self ) -> Any: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :List[Any] = DummyModel() UpperCamelCase :List[str] = torch.optim.Adam(params=model.parameters() , lr=1e-3 ) UpperCamelCase :Any = torch.optim.lr_scheduler.StepLR(SCREAMING_SNAKE_CASE_ , step_size=1 , gamma=0.99 ) UpperCamelCase , UpperCamelCase :Any = dummy_dataloaders() UpperCamelCase :Optional[int] = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ ) # Train baseline UpperCamelCase :str = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase 
:Tuple = accelerator.prepare( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) # Save initial accelerator.save_state() UpperCamelCase :int = scheduler.state_dict() train(3 , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) self.assertNotEqual(SCREAMING_SNAKE_CASE_ , scheduler.state_dict() ) # Load everything back in and make sure all states work accelerator.load_state(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) self.assertEqual(SCREAMING_SNAKE_CASE_ , scheduler.state_dict() ) def UpperCAmelCase ( self ) -> Union[str, Any]: with tempfile.TemporaryDirectory() as tmpdir: set_seed(42 ) UpperCamelCase :Optional[Any] = DummyModel() UpperCamelCase :int = ProjectConfiguration(automatic_checkpoint_naming=SCREAMING_SNAKE_CASE_ , total_limit=2 ) # Train baseline UpperCamelCase :Tuple = Accelerator(project_dir=SCREAMING_SNAKE_CASE_ , project_config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :List[str] = accelerator.prepare(SCREAMING_SNAKE_CASE_ ) # Save 3 states: for _ in range(11 ): accelerator.save_state() self.assertTrue(not os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_0''' ) ) ) self.assertTrue(os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_9''' ) ) ) self.assertTrue(os.path.exists(os.path.join(SCREAMING_SNAKE_CASE_ , '''checkpoints''' , '''checkpoint_10''' ) ) ) @require_cuda def UpperCAmelCase ( self ) -> int: UpperCamelCase :int = ['''torchrun''', F'''--nproc_per_node={torch.cuda.device_count()}''', inspect.getfile(self.__class__ )] execute_subprocess_async(SCREAMING_SNAKE_CASE_ , env=os.environ.copy() ) if __name__ == "__main__": __snake_case = """/tmp/accelerate/state_checkpointing""" __snake_case = DummyModel() __snake_case = torch.optim.Adam(params=model.parameters(), lr=1E-3) __snake_case = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.9_9) __snake_case , __snake_case = dummy_dataloaders() __snake_case = ProjectConfiguration(automatic_checkpoint_naming=True) # Train baseline __snake_case = Accelerator(project_dir=savedir, project_config=project_config, mixed_precision="""no""") if accelerator.process_index == 0: if os.path.exists(savedir): shutil.rmtree(savedir) os.makedirs(savedir) __snake_case , __snake_case , __snake_case , __snake_case , __snake_case = accelerator.prepare( model, optimizer, train_dataloader, valid_dataloader, scheduler ) __snake_case , __snake_case = accelerator.prepare(model, optimizer) train(3, model, train_dataloader, optimizer, accelerator, scheduler) # Check that the intial optimizer is loaded on the GPU for group in optimizer.param_groups: __snake_case = group["""params"""][0].device break assert param_device.type == accelerator.device.type __snake_case = model.cpu() accelerator.wait_for_everyone() accelerator.save_state() accelerator.wait_for_everyone() # Check CPU state accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""cpu""") for group in optimizer.param_groups: __snake_case = group["""params"""][0].device break assert ( param_device.type == torch.device("""cpu""").type ), f"Loaded optimizer states did not match, expected to be loaded on the CPU but got {param_device}" # Check device state model.to(accelerator.device) accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""on_device""") for group in 
optimizer.param_groups: __snake_case = group["""params"""][0].device break assert ( param_device.type == accelerator.device.type ), f"Loaded optimizer states did not match, expected to be loaded on {accelerator.device} but got {param_device}" # Check error with pytest.raises(TypeError, match="""Unsupported optimizer map location passed"""): accelerator.load_state(os.path.join(savedir, """checkpoints""", """checkpoint_0"""), map_location="""invalid""") accelerator.wait_for_everyone() if accelerator.process_index == 0: shutil.rmtree(savedir) accelerator.wait_for_everyone()
from __future__ import annotations

from collections.abc import Iterator
from typing import Generic, TypeVar

T = TypeVar("T")


class Node(Generic[T]):
    def __init__(self, data: T):
        self.data = data
        self.next: Node[T] | None = None

    def __str__(self) -> str:
        return f"{self.data}"


class LinkedStack(Generic[T]):
    """Linked-list based stack."""

    def __init__(self) -> None:
        self.top: Node[T] | None = None

    def __iter__(self) -> Iterator[T]:
        node = self.top
        while node:
            yield node.data
            node = node.next

    def __str__(self) -> str:
        return "->".join([str(item) for item in self])

    def __len__(self) -> int:
        return len(tuple(iter(self)))

    def is_empty(self) -> bool:
        return self.top is None

    def push(self, item: T) -> None:
        node = Node(item)
        if not self.is_empty():
            node.next = self.top
        self.top = node

    def pop(self) -> T:
        if self.is_empty():
            raise IndexError("pop from empty stack")
        assert isinstance(self.top, Node)
        pop_node = self.top
        self.top = self.top.next
        return pop_node.data

    def peek(self) -> T:
        if self.is_empty():
            raise IndexError("peek from empty stack")
        assert self.top is not None
        return self.top.data

    def clear(self) -> None:
        self.top = None


if __name__ == "__main__":
    from doctest import testmod

    testmod()
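# --- Illustrative usage sketch (added; not part of the original file):
if __name__ == "__main__":
    stack = LinkedStack[int]()
    for value in (1, 2, 3):
        stack.push(value)
    assert str(stack) == "3->2->1"
    assert stack.pop() == 3
    assert stack.peek() == 2
    assert len(stack) == 2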
import numpy as np

SQUARE = [
    ["a", "b", "c", "d", "e"],
    ["f", "g", "h", "i", "k"],
    ["l", "m", "n", "o", "p"],
    ["q", "r", "s", "t", "u"],
    ["v", "w", "x", "y", "z"],
]


class PolybiusCipher:
    def __init__(self) -> None:
        self.SQUARE = np.array(SQUARE)

    def letter_to_numbers(self, letter: str) -> np.ndarray:
        """Return the pair of numbers that represents the given letter in the polybius square."""
        index1, index2 = np.where(letter == self.SQUARE)
        indexes = np.concatenate([index1 + 1, index2 + 1])
        return indexes

    def numbers_to_letter(self, index1: int, index2: int) -> str:
        """Return the letter corresponding to the pair of numbers."""
        letter = self.SQUARE[index1 - 1, index2 - 1]
        return letter

    def encode(self, message: str) -> str:
        """Return the encoded version of the message according to the polybius cipher."""
        message = message.lower()
        message = message.replace(" ", "")
        message = message.replace("j", "i")

        first_step = np.empty((2, len(message)))
        for letter_index in range(len(message)):
            numbers = self.letter_to_numbers(message[letter_index])
            first_step[0, letter_index] = numbers[0]
            first_step[1, letter_index] = numbers[1]

        second_step = first_step.reshape(2 * len(message))
        encoded_message = ""
        for numbers_index in range(len(message)):
            index1 = int(second_step[numbers_index * 2])
            index2 = int(second_step[(numbers_index * 2) + 1])
            letter = self.numbers_to_letter(index1, index2)
            encoded_message = encoded_message + letter

        return encoded_message

    def decode(self, message: str) -> str:
        """Return the decoded version of the message according to the polybius cipher."""
        message = message.lower()
        message = message.replace(" ", "")

        first_step = np.empty(2 * len(message))
        for letter_index in range(len(message)):
            numbers = self.letter_to_numbers(message[letter_index])
            first_step[letter_index * 2] = numbers[0]
            first_step[letter_index * 2 + 1] = numbers[1]

        second_step = first_step.reshape((2, len(message)))
        decoded_message = ""
        for numbers_index in range(len(message)):
            index1 = int(second_step[0, numbers_index])
            index2 = int(second_step[1, numbers_index])
            letter = self.numbers_to_letter(index1, index2)
            decoded_message = decoded_message + letter

        return decoded_message
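# --- Illustrative usage sketch (added; not part of the original file): the
# cipher round-trips any letters-only message; spaces are stripped and "j" is
# folded into "i" on encoding, so compare against the normalized form.
if __name__ == "__main__":
    cipher = PolybiusCipher()
    assert cipher.decode(cipher.encode("test message")) == "testmessage"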
import argparse import json from pathlib import Path import requests import timm import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import DeiTImageProcessor, ViTConfig, ViTForImageClassification, ViTImageProcessor, ViTModel from transformers.utils import logging logging.set_verbosity_info() lowerCAmelCase : List[Any] = logging.get_logger(__name__) def A_ ( _UpperCAmelCase , _UpperCAmelCase=False ): SCREAMING_SNAKE_CASE_: str = [] for i in range(config.num_hidden_layers ): # encoder layers: output projection, 2 feedforward neural networks and 2 layernorms rename_keys.append((f"blocks.{i}.norm1.weight", f"vit.encoder.layer.{i}.layernorm_before.weight") ) rename_keys.append((f"blocks.{i}.norm1.bias", f"vit.encoder.layer.{i}.layernorm_before.bias") ) rename_keys.append((f"blocks.{i}.attn.proj.weight", f"vit.encoder.layer.{i}.attention.output.dense.weight") ) rename_keys.append((f"blocks.{i}.attn.proj.bias", f"vit.encoder.layer.{i}.attention.output.dense.bias") ) rename_keys.append((f"blocks.{i}.norm2.weight", f"vit.encoder.layer.{i}.layernorm_after.weight") ) rename_keys.append((f"blocks.{i}.norm2.bias", f"vit.encoder.layer.{i}.layernorm_after.bias") ) rename_keys.append((f"blocks.{i}.mlp.fc1.weight", f"vit.encoder.layer.{i}.intermediate.dense.weight") ) rename_keys.append((f"blocks.{i}.mlp.fc1.bias", f"vit.encoder.layer.{i}.intermediate.dense.bias") ) rename_keys.append((f"blocks.{i}.mlp.fc2.weight", f"vit.encoder.layer.{i}.output.dense.weight") ) rename_keys.append((f"blocks.{i}.mlp.fc2.bias", f"vit.encoder.layer.{i}.output.dense.bias") ) # projection layer + position embeddings rename_keys.extend( [ ("cls_token", "vit.embeddings.cls_token"), ("patch_embed.proj.weight", "vit.embeddings.patch_embeddings.projection.weight"), ("patch_embed.proj.bias", "vit.embeddings.patch_embeddings.projection.bias"), ("pos_embed", "vit.embeddings.position_embeddings"), ] ) if base_model: # layernorm + pooler rename_keys.extend( [ ("norm.weight", "layernorm.weight"), ("norm.bias", "layernorm.bias"), ("pre_logits.fc.weight", "pooler.dense.weight"), ("pre_logits.fc.bias", "pooler.dense.bias"), ] ) # if just the base model, we should remove "vit" from all keys that start with "vit" SCREAMING_SNAKE_CASE_: Dict = [(pair[0], pair[1][4:]) if pair[1].startswith("vit" ) else pair for pair in rename_keys] else: # layernorm + classification head rename_keys.extend( [ ("norm.weight", "vit.layernorm.weight"), ("norm.bias", "vit.layernorm.bias"), ("head.weight", "classifier.weight"), ("head.bias", "classifier.bias"), ] ) return rename_keys def A_ ( _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase=False ): for i in range(config.num_hidden_layers ): if base_model: SCREAMING_SNAKE_CASE_: Dict = "" else: SCREAMING_SNAKE_CASE_: Any = "vit." 
# read in weights + bias of input projection layer (in timm, this is a single matrix + bias) SCREAMING_SNAKE_CASE_: str = state_dict.pop(f"blocks.{i}.attn.qkv.weight" ) SCREAMING_SNAKE_CASE_: Tuple = state_dict.pop(f"blocks.{i}.attn.qkv.bias" ) # next, add query, keys and values (in that order) to the state dict SCREAMING_SNAKE_CASE_: Dict = in_proj_weight[ : config.hidden_size, : ] SCREAMING_SNAKE_CASE_: List[str] = in_proj_bias[: config.hidden_size] SCREAMING_SNAKE_CASE_: Dict = in_proj_weight[ config.hidden_size : config.hidden_size * 2, : ] SCREAMING_SNAKE_CASE_: Dict = in_proj_bias[ config.hidden_size : config.hidden_size * 2 ] SCREAMING_SNAKE_CASE_: Tuple = in_proj_weight[ -config.hidden_size :, : ] SCREAMING_SNAKE_CASE_: List[Any] = in_proj_bias[-config.hidden_size :] def A_ ( _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: List[str] = ["head.weight", "head.bias"] for k in ignore_keys: state_dict.pop(_UpperCAmelCase , _UpperCAmelCase ) def A_ ( _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: List[str] = dct.pop(_UpperCAmelCase ) SCREAMING_SNAKE_CASE_: int = val def A_ ( ): SCREAMING_SNAKE_CASE_: Union[str, Any] = "http://images.cocodataset.org/val2017/000000039769.jpg" SCREAMING_SNAKE_CASE_: Dict = Image.open(requests.get(_UpperCAmelCase , stream=_UpperCAmelCase ).raw ) return im @torch.no_grad() def A_ ( _UpperCAmelCase , _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: Optional[Any] = ViTConfig() SCREAMING_SNAKE_CASE_: int = False # dataset (ImageNet-21k only or also fine-tuned on ImageNet 2012), patch_size and image_size if vit_name[-5:] == "in21k": SCREAMING_SNAKE_CASE_: str = True SCREAMING_SNAKE_CASE_: Tuple = int(vit_name[-12:-10] ) SCREAMING_SNAKE_CASE_: List[str] = int(vit_name[-9:-6] ) else: SCREAMING_SNAKE_CASE_: Optional[int] = 10_00 SCREAMING_SNAKE_CASE_: str = "huggingface/label-files" SCREAMING_SNAKE_CASE_: Optional[Any] = "imagenet-1k-id2label.json" SCREAMING_SNAKE_CASE_: Tuple = json.load(open(hf_hub_download(_UpperCAmelCase , _UpperCAmelCase , repo_type="dataset" ) , "r" ) ) SCREAMING_SNAKE_CASE_: Optional[Any] = {int(_UpperCAmelCase ): v for k, v in idalabel.items()} SCREAMING_SNAKE_CASE_: Any = idalabel SCREAMING_SNAKE_CASE_: Any = {v: k for k, v in idalabel.items()} SCREAMING_SNAKE_CASE_: Union[str, Any] = int(vit_name[-6:-4] ) SCREAMING_SNAKE_CASE_: Optional[int] = int(vit_name[-3:] ) # size of the architecture if "deit" in vit_name: if vit_name[9:].startswith("tiny" ): SCREAMING_SNAKE_CASE_: Any = 1_92 SCREAMING_SNAKE_CASE_: Any = 7_68 SCREAMING_SNAKE_CASE_: int = 12 SCREAMING_SNAKE_CASE_: List[Any] = 3 elif vit_name[9:].startswith("small" ): SCREAMING_SNAKE_CASE_: Union[str, Any] = 3_84 SCREAMING_SNAKE_CASE_: Any = 15_36 SCREAMING_SNAKE_CASE_: List[str] = 12 SCREAMING_SNAKE_CASE_: Union[str, Any] = 6 else: pass else: if vit_name[4:].startswith("small" ): SCREAMING_SNAKE_CASE_: Union[str, Any] = 7_68 SCREAMING_SNAKE_CASE_: List[Any] = 23_04 SCREAMING_SNAKE_CASE_: Union[str, Any] = 8 SCREAMING_SNAKE_CASE_: str = 8 elif vit_name[4:].startswith("base" ): pass elif vit_name[4:].startswith("large" ): SCREAMING_SNAKE_CASE_: Optional[int] = 10_24 SCREAMING_SNAKE_CASE_: List[str] = 40_96 SCREAMING_SNAKE_CASE_: List[Any] = 24 SCREAMING_SNAKE_CASE_: Optional[Any] = 16 elif vit_name[4:].startswith("huge" ): SCREAMING_SNAKE_CASE_: Optional[Any] = 12_80 SCREAMING_SNAKE_CASE_: List[Any] = 51_20 SCREAMING_SNAKE_CASE_: str = 32 SCREAMING_SNAKE_CASE_: List[str] = 16 # load original model from timm SCREAMING_SNAKE_CASE_: int = timm.create_model(_UpperCAmelCase 
, pretrained=_UpperCAmelCase ) timm_model.eval() # load state_dict of original model, remove and rename some keys SCREAMING_SNAKE_CASE_: Dict = timm_model.state_dict() if base_model: remove_classification_head_(_UpperCAmelCase ) SCREAMING_SNAKE_CASE_: Optional[Any] = create_rename_keys(_UpperCAmelCase , _UpperCAmelCase ) for src, dest in rename_keys: rename_key(_UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase ) read_in_q_k_v(_UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase ) # load HuggingFace model if vit_name[-5:] == "in21k": SCREAMING_SNAKE_CASE_: str = ViTModel(_UpperCAmelCase ).eval() else: SCREAMING_SNAKE_CASE_: Tuple = ViTForImageClassification(_UpperCAmelCase ).eval() model.load_state_dict(_UpperCAmelCase ) # Check outputs on an image, prepared by ViTImageProcessor/DeiTImageProcessor if "deit" in vit_name: SCREAMING_SNAKE_CASE_: int = DeiTImageProcessor(size=config.image_size ) else: SCREAMING_SNAKE_CASE_: Union[str, Any] = ViTImageProcessor(size=config.image_size ) SCREAMING_SNAKE_CASE_: List[Any] = image_processor(images=prepare_img() , return_tensors="pt" ) SCREAMING_SNAKE_CASE_: Dict = encoding["pixel_values"] SCREAMING_SNAKE_CASE_: int = model(_UpperCAmelCase ) if base_model: SCREAMING_SNAKE_CASE_: Tuple = timm_model.forward_features(_UpperCAmelCase ) assert timm_pooled_output.shape == outputs.pooler_output.shape assert torch.allclose(_UpperCAmelCase , outputs.pooler_output , atol=1e-3 ) else: SCREAMING_SNAKE_CASE_: Tuple = timm_model(_UpperCAmelCase ) assert timm_logits.shape == outputs.logits.shape assert torch.allclose(_UpperCAmelCase , outputs.logits , atol=1e-3 ) Path(_UpperCAmelCase ).mkdir(exist_ok=_UpperCAmelCase ) print(f"Saving model {vit_name} to {pytorch_dump_folder_path}" ) model.save_pretrained(_UpperCAmelCase ) print(f"Saving image processor to {pytorch_dump_folder_path}" ) image_processor.save_pretrained(_UpperCAmelCase ) if __name__ == "__main__": lowerCAmelCase : Optional[Any] = argparse.ArgumentParser() # Required parameters parser.add_argument( """--vit_name""", default="""vit_base_patch16_224""", type=str, help="""Name of the ViT timm model you'd like to convert.""", ) parser.add_argument( """--pytorch_dump_folder_path""", default=None, type=str, help="""Path to the output PyTorch model directory.""" ) lowerCAmelCase : Union[str, Any] = parser.parse_args() convert_vit_checkpoint(args.vit_name, args.pytorch_dump_folder_path)
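# Illustrative note (not part of the original script): timm stores query, key
# and value as one fused (3 * hidden_size, hidden_size) projection; the
# read_in_q_k_v step above slices that matrix into equal thirds:
#   query = in_proj_weight[: hidden_size, :]
#   key   = in_proj_weight[hidden_size : 2 * hidden_size, :]
#   value = in_proj_weight[-hidden_size :, :]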
import argparse
import collections

import numpy as np
import torch
from flax import traverse_util
from t5x import checkpoints

from transformers import MT5Config, UMT5EncoderModel, UMT5ForConditionalGeneration
from transformers.utils import logging


logging.set_verbosity_info()


def t5x_relpos_bias_lookup(params, i, prefix):
    """Returns the Relative Position Bias parameters of a layer. Does not transpose."""
    return params[f"{prefix}/{prefix}/relpos_bias/rel_embedding"][:, i, :]


def t5x_attention_lookup(params, i, prefix, layer_name="attention"):
    """Returns the KOQV parameters of (self-)attention. Does not transpose."""
    k_tmp = np.ascontiguousarray(params[f"{prefix}/{prefix}/{layer_name}/key/kernel"][:, i, :, :])
    k = k_tmp.reshape(k_tmp.shape[0], k_tmp.shape[1] * k_tmp.shape[2])
    o_tmp = np.ascontiguousarray(params[f"{prefix}/{prefix}/{layer_name}/out/kernel"][:, i, :, :])
    o = o_tmp.reshape(o_tmp.shape[0] * o_tmp.shape[1], o_tmp.shape[2])
    q_tmp = np.ascontiguousarray(params[f"{prefix}/{prefix}/{layer_name}/query/kernel"][:, i, :, :])
    q = q_tmp.reshape(q_tmp.shape[0], q_tmp.shape[1] * q_tmp.shape[2])
    v_tmp = np.ascontiguousarray(params[f"{prefix}/{prefix}/{layer_name}/value/kernel"][:, i, :, :])
    v = v_tmp.reshape(v_tmp.shape[0], v_tmp.shape[1] * v_tmp.shape[2])
    return k, o, q, v


def t5x_mlp_lookup(params, i, prefix, split_mlp_wi=False):
    """Returns the MLP parameters of a layer. Does not transpose."""
    if split_mlp_wi:
        wi_0 = params[f"{prefix}/{prefix}/mlp/wi_0/kernel"][:, i, :]
        wi_1 = params[f"{prefix}/{prefix}/mlp/wi_1/kernel"][:, i, :]
        wi = (wi_0, wi_1)
    else:
        wi = params[f"{prefix}/{prefix}/mlp/wi/kernel"][:, i, :]
    wo = params[f"{prefix}/{prefix}/mlp/wo/kernel"][:, i, :]
    return wi, wo


def t5x_layer_norm_lookup(params, i, prefix, layer_name):
    """Returns the layer norm param of a layer."""
    return params[f"{prefix}/{prefix}/{layer_name}/scale"][:, i]


def convert_t5x_to_pytorch(
    variables: dict, *, num_layers: int, is_encoder_only: bool, scalable_attention: bool = False
):
    """Converts the parameters from T5X-Flax to Transformers-PyTorch."""
    old = traverse_util.flatten_dict(variables["target"])
    old = {"/".join(k): v for k, v in old.items()}

    # v1.1 models have a gated GeLU with wi_0 and wi_1 instead of wi
    split_mlp_wi = "encoder/encoder/mlp/wi_0/kernel" in old
    print("Split MLP:", split_mlp_wi)

    new = collections.OrderedDict()

    # Shared embeddings.
    new["shared.weight"] = old["token_embedder/embedding"]

    # Encoder.
    for i in range(num_layers):
        # Block i, layer 0 (Self Attention).
        layer_norm = t5x_layer_norm_lookup(old, i, "encoder", "pre_attention_layer_norm")
        k, o, q, v = t5x_attention_lookup(old, i, "encoder", "attention")
        new[f"encoder.block.{i}.layer.0.layer_norm.weight"] = layer_norm
        new[f"encoder.block.{i}.layer.0.SelfAttention.k.weight"] = k.T
        new[f"encoder.block.{i}.layer.0.SelfAttention.o.weight"] = o.T
        new[f"encoder.block.{i}.layer.0.SelfAttention.q.weight"] = q.T
        new[f"encoder.block.{i}.layer.0.SelfAttention.v.weight"] = v.T

        # Block i, layer 1 (MLP).
        layer_norm = t5x_layer_norm_lookup(old, i, "encoder", "pre_mlp_layer_norm")
        wi, wo = t5x_mlp_lookup(old, i, "encoder", split_mlp_wi)
        new[f"encoder.block.{i}.layer.1.layer_norm.weight"] = layer_norm
        if split_mlp_wi:
            new[f"encoder.block.{i}.layer.1.DenseReluDense.wi_0.weight"] = wi[0].T
            new[f"encoder.block.{i}.layer.1.DenseReluDense.wi_1.weight"] = wi[1].T
        else:
            new[f"encoder.block.{i}.layer.1.DenseReluDense.wi.weight"] = wi.T
        new[f"encoder.block.{i}.layer.1.DenseReluDense.wo.weight"] = wo.T

        if scalable_attention:
            # convert the rel_embedding of each layer
            new[f"encoder.block.{i}.layer.0.SelfAttention.relative_attention_bias.weight"] = t5x_relpos_bias_lookup(
                old, i, "encoder"
            ).T

    new["encoder.final_layer_norm.weight"] = old["encoder/encoder_norm/scale"]

    if not scalable_attention:
        new["encoder.block.0.layer.0.SelfAttention.relative_attention_bias.weight"] = t5x_relpos_bias_lookup(
            old, 0, "encoder"
        ).T
        new["decoder.block.0.layer.0.SelfAttention.relative_attention_bias.weight"] = t5x_relpos_bias_lookup(
            old, 0, "decoder"
        ).T

    if not is_encoder_only:
        # Decoder.
        for i in range(num_layers):
            # Block i, layer 0 (Self Attention).
            layer_norm = t5x_layer_norm_lookup(old, i, "decoder", "pre_self_attention_layer_norm")
            k, o, q, v = t5x_attention_lookup(old, i, "decoder", "self_attention")
            new[f"decoder.block.{i}.layer.0.layer_norm.weight"] = layer_norm
            new[f"decoder.block.{i}.layer.0.SelfAttention.k.weight"] = k.T
            new[f"decoder.block.{i}.layer.0.SelfAttention.o.weight"] = o.T
            new[f"decoder.block.{i}.layer.0.SelfAttention.q.weight"] = q.T
            new[f"decoder.block.{i}.layer.0.SelfAttention.v.weight"] = v.T

            # Block i, layer 1 (Cross Attention).
            layer_norm = t5x_layer_norm_lookup(old, i, "decoder", "pre_cross_attention_layer_norm")
            k, o, q, v = t5x_attention_lookup(old, i, "decoder", "encoder_decoder_attention")
            new[f"decoder.block.{i}.layer.1.layer_norm.weight"] = layer_norm
            new[f"decoder.block.{i}.layer.1.EncDecAttention.k.weight"] = k.T
            new[f"decoder.block.{i}.layer.1.EncDecAttention.o.weight"] = o.T
            new[f"decoder.block.{i}.layer.1.EncDecAttention.q.weight"] = q.T
            new[f"decoder.block.{i}.layer.1.EncDecAttention.v.weight"] = v.T

            # Block i, layer 2 (MLP).
            layer_norm = t5x_layer_norm_lookup(old, i, "decoder", "pre_mlp_layer_norm")
            wi, wo = t5x_mlp_lookup(old, i, "decoder", split_mlp_wi)
            new[f"decoder.block.{i}.layer.2.layer_norm.weight"] = layer_norm
            if split_mlp_wi:
                new[f"decoder.block.{i}.layer.2.DenseReluDense.wi_0.weight"] = wi[0].T
                new[f"decoder.block.{i}.layer.2.DenseReluDense.wi_1.weight"] = wi[1].T
            else:
                new[f"decoder.block.{i}.layer.2.DenseReluDense.wi.weight"] = wi.T
            new[f"decoder.block.{i}.layer.2.DenseReluDense.wo.weight"] = wo.T

            if scalable_attention:
                # convert the rel_embedding of each layer
                new[f"decoder.block.{i}.layer.0.SelfAttention.relative_attention_bias.weight"] = t5x_relpos_bias_lookup(
                    old, i, "decoder"
                ).T

        new["decoder.final_layer_norm.weight"] = old["decoder/decoder_norm/scale"]

        # LM Head (only in v1.1 checkpoints, in v1.0 embeddings are used instead)
        if "decoder/logits_dense/kernel" in old:
            new["lm_head.weight"] = old["decoder/logits_dense/kernel"].T

    return new


def make_state_dict(converted_params, is_encoder_only: bool):
    """Prepares a state dict for the PyTorch model."""
    state_dict = collections.OrderedDict([(k, torch.from_numpy(v.copy())) for (k, v) in converted_params.items()])

    # Add what is missing.
    if "encoder.embed_tokens.weight" not in state_dict:
        state_dict["encoder.embed_tokens.weight"] = state_dict["shared.weight"]

    if not is_encoder_only:
        if "decoder.embed_tokens.weight" not in state_dict:
            state_dict["decoder.embed_tokens.weight"] = state_dict["shared.weight"]

        if "lm_head.weight" not in state_dict:  # For old 1.0 models.
            print("Using shared word embeddings as lm_head.")
            state_dict["lm_head.weight"] = state_dict["shared.weight"]

    return state_dict


def load_t5x_weights_in_t5(model, config, t5x_checkpoint_path, is_encoder_only, scalable_attention):
    """Replaces the params in model with the T5X converted params."""
    variables = checkpoints.load_t5x_checkpoint(t5x_checkpoint_path)
    converted = convert_t5x_to_pytorch(
        variables,
        num_layers=config.num_layers,
        is_encoder_only=is_encoder_only,
        scalable_attention=scalable_attention,
    )
    state_dict = make_state_dict(converted, is_encoder_only)
    model.load_state_dict(state_dict, strict=True)


def convert_t5x_checkpoint_to_pytorch(
    t5x_checkpoint_path,
    config_file,
    pytorch_dump_path,
    is_encoder_only: bool = False,
    scalable_attention: bool = False,
):
    """Loads the config and model, converts the T5X checkpoint, and saves a PyTorch checkpoint."""
    config = MT5Config.from_json_file(config_file)
    print(f"Building PyTorch model from configuration: {config}")
    # Non-v1.1 checkpoints could also use T5Model, but this works for all.
    # The v1.0 checkpoints will simply have an LM head that is the word embeddings.
    if is_encoder_only:
        model = UMT5EncoderModel(config)
    else:
        model = UMT5ForConditionalGeneration(config)

    # Load weights from tf checkpoint
    load_t5x_weights_in_t5(model, config, t5x_checkpoint_path, is_encoder_only, scalable_attention)

    # Save pytorch-model
    print(f"Save PyTorch model to {pytorch_dump_path}")
    model.save_pretrained(pytorch_dump_path)

    # Verify that we can load the checkpoint.
    model.from_pretrained(pytorch_dump_path)
    print("Done")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="""Converts a native T5X checkpoint into a PyTorch checkpoint.""")
    # Required parameters
    parser.add_argument(
        """--t5x_checkpoint_path""", default=None, type=str, required=True, help="""Path to the T5X checkpoint."""
    )
    parser.add_argument(
        """--config_file""",
        default=None,
        type=str,
        required=True,
        help="""The config json file corresponding to the pre-trained T5 model.\nThis specifies the model architecture.""",
    )
    parser.add_argument(
        """--pytorch_dump_path""", default=None, type=str, required=True, help="""Path to the output PyTorch model."""
    )
    parser.add_argument(
        """--is_encoder_only""", action="""store_true""", help="""Check if the model is encoder-decoder model""", default=False
    )
    parser.add_argument(
        """--scalable_attention""",
        action="""store_true""",
        help="""Whether the model uses scaled attention (umt5 model)""",
        default=False,
    )
    args = parser.parse_args()
    convert_t5x_checkpoint_to_pytorch(
        args.t5x_checkpoint_path,
        args.config_file,
        args.pytorch_dump_path,
        args.is_encoder_only,
        args.scalable_attention,
    )
from statistics import mean

import numpy as np


def calculate_turn_around_time(
    process_name: list, arrival_time: list, burst_time: list, no_of_process: int
) -> list:
    """
    Calculate the turn-around times of the processes using the
    Highest Response Ratio Next (HRRN) scheduling policy.
    """
    current_time = 0
    # Number of processes finished
    finished_process_count = 0
    # Displays the finished process.
    # If it is 0, the performance is completed if it is 1, before the performance.
    finished_process = [0] * no_of_process
    # List to include calculation results
    turn_around_time = [0] * no_of_process

    # Sort by arrival time.
    burst_time = [burst_time[i] for i in np.argsort(arrival_time)]
    process_name = [process_name[i] for i in np.argsort(arrival_time)]
    arrival_time.sort()

    while no_of_process > finished_process_count:
        # If the current time is earlier than the arrival time of the first
        # unfinished process, jump forward to that arrival time.
        i = 0
        while finished_process[i] == 1:
            i += 1
        if current_time < arrival_time[i]:
            current_time = arrival_time[i]

        response_ratio = 0
        # Index showing the location of the process being performed
        loc = 0
        # Saves the current response ratio.
        temp = 0
        for i in range(0, no_of_process):
            if finished_process[i] == 0 and arrival_time[i] <= current_time:
                temp = (burst_time[i] + (current_time - arrival_time[i])) / burst_time[i]
                if response_ratio < temp:
                    response_ratio = temp
                    loc = i

        # Calculate the turn around time
        turn_around_time[loc] = current_time + burst_time[loc] - arrival_time[loc]
        current_time += burst_time[loc]
        # Indicates that the process has been performed.
        finished_process[loc] = 1
        # Increase finished_process_count by 1
        finished_process_count += 1

    return turn_around_time


def calculate_waiting_time(
    process_name: list, turn_around_time: list, burst_time: list, no_of_process: int
) -> list:
    """Calculate the waiting times: turn-around time minus burst time."""
    waiting_time = [0] * no_of_process
    for i in range(0, no_of_process):
        waiting_time[i] = turn_around_time[i] - burst_time[i]
    return waiting_time


if __name__ == "__main__":
    no_of_process = 5
    process_name = ["A", "B", "C", "D", "E"]
    arrival_time = [1, 2, 3, 4, 5]
    burst_time = [1, 2, 3, 4, 5]

    turn_around_time = calculate_turn_around_time(
        process_name, arrival_time, burst_time, no_of_process
    )
    waiting_time = calculate_waiting_time(
        process_name, turn_around_time, burst_time, no_of_process
    )

    print("Process name \tArrival time \tBurst time \tTurn around time \tWaiting time")
    for i in range(0, no_of_process):
        print(
            f"{process_name[i]}\t\t{arrival_time[i]}\t\t{burst_time[i]}\t\t"
            f"{turn_around_time[i]}\t\t\t{waiting_time[i]}"
        )

    print(f"average waiting time : {mean(waiting_time):.5f}")
    print(f"average turn around time : {mean(turn_around_time):.5f}")
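# Illustrative check (not part of the original script): HRRN always dispatches
# the ready process with the highest response ratio,
#   (waiting_time + burst_time) / burst_time,
# which is exactly the expression computed inside the selection loop above.
def _response_ratio(arrival: int, burst: int, current: int) -> float:
    return (burst + (current - arrival)) / burst


# A process that has already waited as long as its burst time has ratio 2.0.
assert _response_ratio(arrival=0, burst=4, current=4) == 2.0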
def print_max_activities(start: list[int], finish: list[int]) -> None:
    """
    Prints a maximum-size set of activities that a single person can perform,
    one at a time, assuming the activities are sorted by finish time.
    """
    n = len(finish)
    print("The following activities are selected:")

    # The first activity is always selected
    i = 0
    print(i, end=",")

    # Consider rest of the activities
    for j in range(n):
        # If this activity has start time greater than
        # or equal to the finish time of previously
        # selected activity, then select it
        if start[j] >= finish[i]:
            print(j, end=",")
            i = j


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    start = [1, 3, 0, 5, 8, 5]
    finish = [2, 4, 6, 7, 9, 9]
    print_max_activities(start, finish)
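# Illustrative worked example (not part of the original script): for the sample
# inputs above, the greedy scan keeps activity 0, then selects each later
# activity whose start is at or after the last selected finish, printing:
#   The following activities are selected:
#   0,1,3,4,
# The greedy choice is only guaranteed optimal because `finish` is sorted.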
LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"


def main() -> None:
    message = input("Enter message: ")
    key = input("Enter key [alphanumeric]: ")
    mode = input("Encrypt/Decrypt [e/d]: ")

    if mode.lower().startswith("e"):
        mode = "encrypt"
        translated = encrypt_message(key, message)
    elif mode.lower().startswith("d"):
        mode = "decrypt"
        translated = decrypt_message(key, message)

    print(f"\n{mode.title()}ed message:")
    print(translated)


def encrypt_message(key: str, message: str) -> str:
    return translate_message(key, message, "encrypt")


def decrypt_message(key: str, message: str) -> str:
    return translate_message(key, message, "decrypt")


def translate_message(key: str, message: str, mode: str) -> str:
    translated = []
    key_index = 0
    key = key.upper()

    for symbol in message:
        num = LETTERS.find(symbol.upper())
        if num != -1:
            if mode == "encrypt":
                num += LETTERS.find(key[key_index])
            elif mode == "decrypt":
                num -= LETTERS.find(key[key_index])

            num %= len(LETTERS)

            if symbol.isupper():
                translated.append(LETTERS[num])
            elif symbol.islower():
                translated.append(LETTERS[num].lower())

            key_index += 1
            if key_index == len(key):
                key_index = 0
        else:
            translated.append(symbol)

    return "".join(translated)


if __name__ == "__main__":
    main()
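# Illustrative round-trip check (not part of the original script): the cipher
# preserves case and passes non-letters through, so encrypting and then
# decrypting with the same key recovers the message exactly:
#   ciphertext = encrypt_message("LION", "Attack at dawn!")
#   assert decrypt_message("LION", ciphertext) == "Attack at dawn!"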
"""simple docstring""" import warnings from ...utils import logging from .image_processing_poolformer import PoolFormerImageProcessor lowerCAmelCase_ = logging.get_logger(__name__) class __A ( A_ ): '''simple docstring''' def __init__( self : Any ,*_snake_case : int ,**_snake_case : str ) -> None: """simple docstring""" warnings.warn( '''The class PoolFormerFeatureExtractor is deprecated and will be removed in version 5 of Transformers.''' ''' Please use PoolFormerImageProcessor instead.''' ,_snake_case ,) super().__init__(*_snake_case ,**_snake_case )
import time from contextlib import contextmanager from pathlib import Path import pytest import requests from huggingface_hub.hf_api import HfApi, HfFolder __snake_case = """__DUMMY_TRANSFORMERS_USER__""" __snake_case = """Dummy User""" __snake_case = """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""" __snake_case = """https://hub-ci.huggingface.co""" __snake_case = CI_HUB_ENDPOINT + """/datasets/{repo_id}/resolve/{revision}/{path}""" __snake_case = CI_HUB_ENDPOINT + """/{repo_id}/resolve/{revision}/{filename}""" __snake_case = Path("""~/.huggingface/hub_ci_token""").expanduser() @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): monkeypatch.setattr( '''huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any ): monkeypatch.setattr('''datasets.config.HF_ENDPOINT''' , SCREAMING_SNAKE_CASE__ ) monkeypatch.setattr('''datasets.config.HUB_DATASETS_URL''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : List[str] ): monkeypatch.setattr('''huggingface_hub.hf_api.HfFolder.path_token''' , SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Any , SCREAMING_SNAKE_CASE__ : List[Any] ): HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield HfFolder.delete_token() @pytest.fixture(scope='''session''' ) def _A ( ): return HfApi(endpoint=SCREAMING_SNAKE_CASE__ ) @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi ): UpperCamelCase :Tuple = HfFolder.get_token() HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) yield CI_HUB_USER_TOKEN if previous_token is not None: HfFolder.save_token(SCREAMING_SNAKE_CASE__ ) @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Dict ): def _cleanup_repo(SCREAMING_SNAKE_CASE__ : Tuple ): hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) return _cleanup_repo @pytest.fixture def _A ( SCREAMING_SNAKE_CASE__ : Tuple ): @contextmanager def _temporary_repo(SCREAMING_SNAKE_CASE__ : Any ): try: yield repo_id finally: cleanup_repo(SCREAMING_SNAKE_CASE__ ) return _temporary_repo @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : Optional[Any] ): UpperCamelCase :Union[str, Any] = F'''repo_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :int = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data/text_data.txt''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Dict ): return hf_private_dataset_repo_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : List[str] , SCREAMING_SNAKE_CASE__ : Any ): UpperCamelCase :Optional[int] = F'''repo_zipped_txt_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Any = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , 
private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : Optional[int] , SCREAMING_SNAKE_CASE__ : List[str] ): return hf_private_dataset_repo_zipped_txt_data_ @pytest.fixture(scope='''session''' ) def _A ( SCREAMING_SNAKE_CASE__ : HfApi , SCREAMING_SNAKE_CASE__ : int , SCREAMING_SNAKE_CASE__ : List[str] ): UpperCamelCase :Dict = F'''repo_zipped_img_data-{int(time.time() * 1_0e3 )}''' UpperCamelCase :Dict = F'''{CI_HUB_USER}/{repo_name}''' hf_api.create_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , private=SCREAMING_SNAKE_CASE__ ) hf_api.upload_file( token=SCREAMING_SNAKE_CASE__ , path_or_fileobj=str(SCREAMING_SNAKE_CASE__ ) , path_in_repo='''data.zip''' , repo_id=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' , ) yield repo_id try: hf_api.delete_repo(SCREAMING_SNAKE_CASE__ , token=SCREAMING_SNAKE_CASE__ , repo_type='''dataset''' ) except (requests.exceptions.HTTPError, ValueError): # catch http error and token invalid error pass @pytest.fixture() def _A ( SCREAMING_SNAKE_CASE__ : Dict , SCREAMING_SNAKE_CASE__ : Union[str, Any] , SCREAMING_SNAKE_CASE__ : Tuple ): return hf_private_dataset_repo_zipped_img_data_
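# Note (illustrative summary, not part of the original fixtures): each
# session-scoped repo fixture above creates a private Hub dataset repo once,
# uploads a single file into it, yields the repo_id to the tests, and then
# deletes the repo on teardown, swallowing HTTP and invalid-token errors so
# cleanup failures never fail the test session.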
"""simple docstring""" from sklearn.metrics import mean_squared_error import datasets _a = '\\n@article{scikit-learn,\n title={Scikit-learn: Machine Learning in {P}ython},\n author={Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V.\n and Thirion, B. and Grisel, O. and Blondel, M. and Prettenhofer, P.\n and Weiss, R. and Dubourg, V. and Vanderplas, J. and Passos, A. and\n Cournapeau, D. and Brucher, M. and Perrot, M. and Duchesnay, E.},\n journal={Journal of Machine Learning Research},\n volume={12},\n pages={2825--2830},\n year={2011}\n}\n' _a = '\\nMean Squared Error(MSE) is the average of the square of difference between the predicted\nand actual values.\n' _a = '\nArgs:\n predictions: array-like of shape (n_samples,) or (n_samples, n_outputs)\n Estimated target values.\n references: array-like of shape (n_samples,) or (n_samples, n_outputs)\n Ground truth (correct) target values.\n sample_weight: array-like of shape (n_samples,), default=None\n Sample weights.\n multioutput: {"raw_values", "uniform_average"} or array-like of shape (n_outputs,), default="uniform_average"\n Defines aggregating of multiple output values. Array-like value defines weights used to average errors.\n\n "raw_values" : Returns a full set of errors in case of multioutput input.\n\n "uniform_average" : Errors of all outputs are averaged with uniform weight.\n\n squared : bool, default=True\n If True returns MSE value, if False returns RMSE (Root Mean Squared Error) value.\n\nReturns:\n mse : mean squared error.\nExamples:\n\n >>> mse_metric = datasets.load_metric("mse")\n >>> predictions = [2.5, 0.0, 2, 8]\n >>> references = [3, -0.5, 2, 7]\n >>> results = mse_metric.compute(predictions=predictions, references=references)\n >>> print(results)\n {\'mse\': 0.375}\n >>> rmse_result = mse_metric.compute(predictions=predictions, references=references, squared=False)\n >>> print(rmse_result)\n {\'mse\': 0.6123724356957945}\n\n If you\'re using multi-dimensional lists, then set the config as follows :\n\n >>> mse_metric = datasets.load_metric("mse", "multilist")\n >>> predictions = [[0.5, 1], [-1, 1], [7, -6]]\n >>> references = [[0, 2], [-1, 2], [8, -5]]\n >>> results = mse_metric.compute(predictions=predictions, references=references)\n >>> print(results)\n {\'mse\': 0.7083333333333334}\n >>> results = mse_metric.compute(predictions=predictions, references=references, multioutput=\'raw_values\')\n >>> print(results) # doctest: +NORMALIZE_WHITESPACE\n {\'mse\': array([0.41666667, 1. 
])}\n' @datasets.utils.file_utils.add_start_docstrings(_DESCRIPTION ,_KWARGS_DESCRIPTION ) class _lowerCAmelCase ( datasets.Metric ): """simple docstring""" def _lowercase ( self : Dict ): return datasets.MetricInfo( description=_DESCRIPTION, citation=_CITATION, inputs_description=_KWARGS_DESCRIPTION, features=datasets.Features(self._get_feature_types() ), reference_urls=[ "https://scikit-learn.org/stable/modules/generated/sklearn.metrics.mean_squared_error.html" ], ) def _lowercase ( self : List[Any] ): if self.config_name == "multilist": return { "predictions": datasets.Sequence(datasets.Value("float" ) ), "references": datasets.Sequence(datasets.Value("float" ) ), } else: return { "predictions": datasets.Value("float" ), "references": datasets.Value("float" ), } def _lowercase ( self : Optional[Any], UpperCAmelCase__ : List[Any], UpperCAmelCase__ : Union[str, Any], UpperCAmelCase__ : int=None, UpperCAmelCase__ : Tuple="uniform_average", UpperCAmelCase__ : List[Any]=True ): __lowercase = mean_squared_error( UpperCAmelCase__, UpperCAmelCase__, sample_weight=UpperCAmelCase__, multioutput=UpperCAmelCase__, squared=UpperCAmelCase__ ) return {"mse": mse}
from __future__ import annotations import unittest from transformers import RoFormerConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask from ...test_pipeline_mixin import PipelineTesterMixin if is_tf_available(): import tensorflow as tf from transformers import ( TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForMultipleChoice, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerModel, ) from transformers.models.roformer.modeling_tf_roformer import ( TFRoFormerSelfAttention, TFRoFormerSinusoidalPositionalEmbedding, ) class UpperCAmelCase_ : """simple docstring""" def __init__( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_=13 , SCREAMING_SNAKE_CASE_=7 , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=True , SCREAMING_SNAKE_CASE_=99 , SCREAMING_SNAKE_CASE_=32 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=37 , SCREAMING_SNAKE_CASE_="gelu" , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=0.1 , SCREAMING_SNAKE_CASE_=512 , SCREAMING_SNAKE_CASE_=16 , SCREAMING_SNAKE_CASE_=2 , SCREAMING_SNAKE_CASE_=0.02 , SCREAMING_SNAKE_CASE_=3 , SCREAMING_SNAKE_CASE_=4 , SCREAMING_SNAKE_CASE_=None , ) -> Dict: UpperCamelCase :Any = parent UpperCamelCase :Dict = 13 UpperCamelCase :List[Any] = 7 UpperCamelCase :List[Any] = True UpperCamelCase :Dict = True UpperCamelCase :Union[str, Any] = True UpperCamelCase :List[str] = True UpperCamelCase :Dict = 99 UpperCamelCase :Any = 32 UpperCamelCase :Tuple = 2 UpperCamelCase :Union[str, Any] = 4 UpperCamelCase :List[str] = 37 UpperCamelCase :Dict = '''gelu''' UpperCamelCase :Dict = 0.1 UpperCamelCase :Tuple = 0.1 UpperCamelCase :Dict = 512 UpperCamelCase :str = 16 UpperCamelCase :Optional[Any] = 2 UpperCamelCase :Dict = 0.02 UpperCamelCase :Optional[int] = 3 UpperCamelCase :int = 4 UpperCamelCase :Dict = None def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size, self.seq_length] , self.vocab_size ) UpperCamelCase :Optional[int] = None if self.use_input_mask: UpperCamelCase :Dict = random_attention_mask([self.batch_size, self.seq_length] ) UpperCamelCase :Dict = None if self.use_token_type_ids: UpperCamelCase :List[Any] = ids_tensor([self.batch_size, self.seq_length] , self.type_vocab_size ) UpperCamelCase :Union[str, Any] = None UpperCamelCase :Optional[int] = None UpperCamelCase :Any = None if self.use_labels: UpperCamelCase :Optional[int] = ids_tensor([self.batch_size] , self.type_sequence_label_size ) UpperCamelCase :Optional[Any] = ids_tensor([self.batch_size, self.seq_length] , self.num_labels ) UpperCamelCase :int = ids_tensor([self.batch_size] , self.num_choices ) UpperCamelCase :Union[str, Any] = RoFormerConfig( vocab_size=self.vocab_size , hidden_size=self.hidden_size , num_hidden_layers=self.num_hidden_layers , num_attention_heads=self.num_attention_heads , intermediate_size=self.intermediate_size , hidden_act=self.hidden_act , hidden_dropout_prob=self.hidden_dropout_prob , attention_probs_dropout_prob=self.attention_probs_dropout_prob , max_position_embeddings=self.max_position_embeddings , type_vocab_size=self.type_vocab_size , initializer_range=self.initializer_range , return_dict=SCREAMING_SNAKE_CASE_ , ) return config, input_ids, token_type_ids, input_mask, sequence_labels, 
token_labels, choice_labels def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: UpperCamelCase :Optional[Any] = TFRoFormerModel(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = {'''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids} UpperCamelCase :int = [input_ids, input_mask] UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) UpperCamelCase :int = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.last_hidden_state.shape , (self.batch_size, self.seq_length, self.hidden_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :List[Any] = True UpperCamelCase :Union[str, Any] = TFRoFormerForCausalLM(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Any = model(SCREAMING_SNAKE_CASE_ )['''logits'''] self.parent.assertListEqual( list(prediction_scores.numpy().shape ) , [self.batch_size, self.seq_length, self.vocab_size] ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :str = TFRoFormerForMaskedLM(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Optional[Any] = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.seq_length, self.vocab_size) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> List[str]: UpperCamelCase :List[Any] = self.num_labels UpperCamelCase :int = TFRoFormerForSequenceClassification(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Optional[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_labels) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> int: UpperCamelCase :List[Any] = self.num_choices UpperCamelCase :Any = TFRoFormerForMultipleChoice(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :int = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :Any = tf.tile(tf.expand_dims(SCREAMING_SNAKE_CASE_ , 1 ) , (1, self.num_choices, 1) ) UpperCamelCase :List[Any] = { '''input_ids''': multiple_choice_inputs_ids, '''attention_mask''': multiple_choice_input_mask, '''token_type_ids''': multiple_choice_token_type_ids, } UpperCamelCase :Dict = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.num_choices) ) def UpperCAmelCase ( self , 
SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Tuple: UpperCamelCase :Union[str, Any] = self.num_labels UpperCamelCase :Dict = TFRoFormerForTokenClassification(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :Tuple = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.logits.shape , (self.batch_size, self.seq_length, self.num_labels) ) def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Union[str, Any]: UpperCamelCase :Union[str, Any] = TFRoFormerForQuestionAnswering(config=SCREAMING_SNAKE_CASE_ ) UpperCamelCase :Dict = { '''input_ids''': input_ids, '''attention_mask''': input_mask, '''token_type_ids''': token_type_ids, } UpperCamelCase :List[Any] = model(SCREAMING_SNAKE_CASE_ ) self.parent.assertEqual(result.start_logits.shape , (self.batch_size, self.seq_length) ) self.parent.assertEqual(result.end_logits.shape , (self.batch_size, self.seq_length) ) def UpperCAmelCase ( self ) -> Tuple: UpperCamelCase :Optional[int] = self.prepare_config_and_inputs() ( ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ( UpperCamelCase ) , ) :Union[str, Any] = config_and_inputs UpperCamelCase :Any = {'''input_ids''': input_ids, '''token_type_ids''': token_type_ids, '''attention_mask''': input_mask} return config, inputs_dict @require_tf class UpperCAmelCase_ ( lowercase, lowercase, unittest.TestCase ): """simple docstring""" UpperCamelCase_ : str =( ( TFRoFormerModel, TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerForMultipleChoice, ) if is_tf_available() else () ) UpperCamelCase_ : Tuple =( { 'feature-extraction': TFRoFormerModel, 'fill-mask': TFRoFormerForMaskedLM, 'question-answering': TFRoFormerForQuestionAnswering, 'text-classification': TFRoFormerForSequenceClassification, 'text-generation': TFRoFormerForCausalLM, 'token-classification': TFRoFormerForTokenClassification, 'zero-shot': TFRoFormerForSequenceClassification, } if is_tf_available() else {} ) UpperCamelCase_ : Tuple =False UpperCamelCase_ : Optional[Any] =False def UpperCAmelCase ( self , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) -> Optional[int]: if pipeline_test_casse_name == "TextGenerationPipelineTests": return True return False def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Any = TFRoFormerModelTester(self ) UpperCamelCase :Optional[int] = ConfigTester(self , config_class=SCREAMING_SNAKE_CASE_ , hidden_size=37 ) def UpperCAmelCase ( self ) -> List[str]: self.config_tester.run_common_tests() def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_model(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> List[Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_masked_lm(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> str: UpperCamelCase :Union[str, Any] = 
self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_lm_head(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Optional[Any] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_multiple_choice(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Dict = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_question_answering(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Union[str, Any]: UpperCamelCase :Tuple = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_sequence_classification(*SCREAMING_SNAKE_CASE_ ) def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :List[str] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_for_token_classification(*SCREAMING_SNAKE_CASE_ ) @slow def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Dict = TFRoFormerModel.from_pretrained('''junnyu/roformer_chinese_base''' ) self.assertIsNotNone(SCREAMING_SNAKE_CASE_ ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" @slow def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :Tuple = TFRoFormerForMaskedLM.from_pretrained('''junnyu/roformer_chinese_base''' ) UpperCamelCase :Union[str, Any] = tf.constant([[0, 1, 2, 3, 4, 5]] ) UpperCamelCase :str = model(SCREAMING_SNAKE_CASE_ )[0] # TODO Replace vocab size UpperCamelCase :Tuple = 5_0000 UpperCamelCase :Optional[Any] = [1, 6, vocab_size] self.assertEqual(output.shape , SCREAMING_SNAKE_CASE_ ) print(output[:, :3, :3] ) # TODO Replace values below with what was printed above. UpperCamelCase :int = tf.constant( [ [ [-0.1205_3341, -1.026_4901, 0.2922_1946], [-1.513_3783, 0.19_7433, 0.1519_0607], [-5.013_5403, -3.90_0256, -0.8403_8764], ] ] ) tf.debugging.assert_near(output[:, :3, :3] , SCREAMING_SNAKE_CASE_ , atol=1e-4 ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : Optional[int] =1E-4 def UpperCAmelCase ( self ) -> Dict: UpperCamelCase :str = tf.constant([[4, 10]] ) UpperCamelCase :List[Any] = TFRoFormerSinusoidalPositionalEmbedding(num_positions=6 , embedding_dim=6 ) UpperCamelCase :str = emba(input_ids.shape ) UpperCamelCase :List[str] = tf.constant( [[0.0000, 0.0000, 0.0000, 1.0000, 1.0000, 1.0000], [0.8415, 0.0464, 0.0022, 0.5403, 0.9989, 1.0000]] ) tf.debugging.assert_near(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) def UpperCAmelCase ( self ) -> Optional[Any]: UpperCamelCase :Dict = tf.constant( [ [0.0000, 0.0000, 0.0000, 0.0000, 0.0000], [0.8415, 0.8219, 0.8020, 0.7819, 0.7617], [0.9093, 0.9364, 0.9581, 0.9749, 0.9870], ] ) UpperCamelCase :Dict = TFRoFormerSinusoidalPositionalEmbedding(num_positions=512 , embedding_dim=512 ) emba([2, 16, 512] ) UpperCamelCase :Any = emba.weight[:3, :5] tf.debugging.assert_near(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) @require_tf class UpperCAmelCase_ ( unittest.TestCase ): """simple docstring""" UpperCamelCase_ : List[Any] =1E-4 def UpperCAmelCase ( self ) -> List[str]: # 2,12,16,64 UpperCamelCase :List[Any] = tf.reshape(tf.range(2 * 12 * 16 * 64 , dtype=tf.floataa ) , shape=(2, 12, 16, 64) ) / 100 UpperCamelCase :List[Any] = -tf.reshape(tf.range(2 * 12 * 16 * 64 , dtype=tf.floataa ) , shape=(2, 12, 16, 64) ) / 100 UpperCamelCase :List[Any] = TFRoFormerSinusoidalPositionalEmbedding(num_positions=32 , embedding_dim=64 ) UpperCamelCase :int = 
embed_positions([2, 16, 768] )[None, None, :, :] UpperCamelCase , UpperCamelCase :List[str] = TFRoFormerSelfAttention.apply_rotary_position_embeddings( SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ) UpperCamelCase :str = tf.constant( [ [0.0000, 0.0100, 0.0200, 0.0300, 0.0400, 0.0500, 0.0600, 0.0700], [-0.2012, 0.8897, 0.0263, 0.9401, 0.2074, 0.9463, 0.3481, 0.9343], [-1.7057, 0.6271, -1.2145, 1.3897, -0.6303, 1.7647, -0.1173, 1.8985], [-2.1731, -1.6397, -2.7358, 0.2854, -2.1840, 1.7183, -1.3018, 2.4871], [0.2717, -3.6173, -2.9206, -2.1988, -3.6638, 0.3858, -2.9155, 2.2980], [3.9859, -2.1580, -0.7984, -4.4904, -4.1181, -2.0252, -4.4782, 1.1253], ] ) UpperCamelCase :Optional[int] = tf.constant( [ [0.0000, -0.0100, -0.0200, -0.0300, -0.0400, -0.0500, -0.0600, -0.0700], [0.2012, -0.8897, -0.0263, -0.9401, -0.2074, -0.9463, -0.3481, -0.9343], [1.7057, -0.6271, 1.2145, -1.3897, 0.6303, -1.7647, 0.1173, -1.8985], [2.1731, 1.6397, 2.7358, -0.2854, 2.1840, -1.7183, 1.3018, -2.4871], [-0.2717, 3.6173, 2.9206, 2.1988, 3.6638, -0.3858, 2.9155, -2.2980], [-3.9859, 2.1580, 0.7984, 4.4904, 4.1181, 2.0252, 4.4782, -1.1253], ] ) tf.debugging.assert_near(query_layer[0, 0, :6, :8] , SCREAMING_SNAKE_CASE_ , atol=self.tolerance ) tf.debugging.assert_near(key_layer[0, 0, :6, :8] , SCREAMING_SNAKE_CASE_ , atol=self.tolerance )
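# Illustrative sketch (not part of the test file): the rotary trick exercised
# by apply_rotary_position_embeddings above rotates each adjacent feature pair
# (x1, x2) by a position-dependent angle. A minimal NumPy version, assuming
# sin/cos values are repeated once per pair along the last axis:
import numpy as np


def rotate_pairs(x: np.ndarray, sin: np.ndarray, cos: np.ndarray) -> np.ndarray:
    # Build (-x2, x1, -x4, x3, ...) so that x * cos + rotated * sin yields
    # (x1*cos - x2*sin, x2*cos + x1*sin, ...) for every pair.
    rotated = np.stack([-x[..., 1::2], x[..., ::2]], axis=-1).reshape(x.shape)
    return x * cos + rotated * sin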
import contextlib import copy import random from typing import Any, Dict, Iterable, Optional, Union import numpy as np import torch from .utils import deprecate, is_transformers_available if is_transformers_available(): import transformers def _snake_case ( lowerCAmelCase : int ): """simple docstring""" random.seed(lowerCAmelCase ) np.random.seed(lowerCAmelCase ) torch.manual_seed(lowerCAmelCase ) torch.cuda.manual_seed_all(lowerCAmelCase ) # ^^ safe to call this function even if cuda is not available class a__ : def __init__( self : Tuple,_A : Iterable[torch.nn.Parameter],_A : float = 0.9999,_A : float = 0.0,_A : int = 0,_A : bool = False,_A : Union[float, int] = 1.0,_A : Union[float, int] = 2 / 3,_A : Optional[Any] = None,_A : Dict[str, Any] = None,**_A : str,): """simple docstring""" if isinstance(_A,torch.nn.Module ): SCREAMING_SNAKE_CASE_ : Union[str, Any] = ( "Passing a `torch.nn.Module` to `ExponentialMovingAverage` is deprecated. " "Please pass the parameters of the module instead." ) deprecate( "passing a `torch.nn.Module` to `ExponentialMovingAverage`","1.0.0",_A,standard_warn=_A,) SCREAMING_SNAKE_CASE_ : str = parameters.parameters() # set use_ema_warmup to True if a torch.nn.Module is passed for backwards compatibility SCREAMING_SNAKE_CASE_ : str = True if kwargs.get("max_value",_A ) is not None: SCREAMING_SNAKE_CASE_ : str = "The `max_value` argument is deprecated. Please use `decay` instead." deprecate("max_value","1.0.0",_A,standard_warn=_A ) SCREAMING_SNAKE_CASE_ : Optional[int] = kwargs["max_value"] if kwargs.get("min_value",_A ) is not None: SCREAMING_SNAKE_CASE_ : Optional[int] = "The `min_value` argument is deprecated. Please use `min_decay` instead." deprecate("min_value","1.0.0",_A,standard_warn=_A ) SCREAMING_SNAKE_CASE_ : int = kwargs["min_value"] SCREAMING_SNAKE_CASE_ : Dict = list(_A ) SCREAMING_SNAKE_CASE_ : List[str] = [p.clone().detach() for p in parameters] if kwargs.get("device",_A ) is not None: SCREAMING_SNAKE_CASE_ : str = "The `device` argument is deprecated. Please use `to` instead." deprecate("device","1.0.0",_A,standard_warn=_A ) self.to(device=kwargs["device"] ) SCREAMING_SNAKE_CASE_ : Dict = None SCREAMING_SNAKE_CASE_ : Any = decay SCREAMING_SNAKE_CASE_ : List[str] = min_decay SCREAMING_SNAKE_CASE_ : Tuple = update_after_step SCREAMING_SNAKE_CASE_ : List[str] = use_ema_warmup SCREAMING_SNAKE_CASE_ : List[Any] = inv_gamma SCREAMING_SNAKE_CASE_ : List[Any] = power SCREAMING_SNAKE_CASE_ : Union[str, Any] = 0 SCREAMING_SNAKE_CASE_ : Union[str, Any] = None # set in `step()` SCREAMING_SNAKE_CASE_ : Dict = model_cls SCREAMING_SNAKE_CASE_ : Any = model_config @classmethod def __UpperCamelCase ( cls : Dict,_A : Tuple,_A : List[Any] ): """simple docstring""" SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ : Union[str, Any] = model_cls.load_config(_A,return_unused_kwargs=_A ) SCREAMING_SNAKE_CASE_ : Tuple = model_cls.from_pretrained(_A ) SCREAMING_SNAKE_CASE_ : Optional[Any] = cls(model.parameters(),model_cls=_A,model_config=model.config ) ema_model.load_state_dict(_A ) return ema_model def __UpperCamelCase ( self : Optional[Any],_A : int ): """simple docstring""" if self.model_cls is None: raise ValueError("`save_pretrained` can only be used if `model_cls` was defined at __init__." ) if self.model_config is None: raise ValueError("`save_pretrained` can only be used if `model_config` was defined at __init__." 
) SCREAMING_SNAKE_CASE_ : str = self.model_cls.from_config(self.model_config ) SCREAMING_SNAKE_CASE_ : Dict = self.state_dict() state_dict.pop("shadow_params",_A ) model.register_to_config(**_A ) self.copy_to(model.parameters() ) model.save_pretrained(_A ) def __UpperCamelCase ( self : str,_A : int ): """simple docstring""" SCREAMING_SNAKE_CASE_ : List[Any] = max(0,optimization_step - self.update_after_step - 1 ) if step <= 0: return 0.0 if self.use_ema_warmup: SCREAMING_SNAKE_CASE_ : Optional[int] = 1 - (1 + step / self.inv_gamma) ** -self.power else: SCREAMING_SNAKE_CASE_ : List[str] = (1 + step) / (10 + step) SCREAMING_SNAKE_CASE_ : Union[str, Any] = min(_A,self.decay ) # make sure decay is not smaller than min_decay SCREAMING_SNAKE_CASE_ : Any = max(_A,self.min_decay ) return cur_decay_value @torch.no_grad() def __UpperCamelCase ( self : str,_A : Iterable[torch.nn.Parameter] ): """simple docstring""" if isinstance(_A,torch.nn.Module ): SCREAMING_SNAKE_CASE_ : Union[str, Any] = ( "Passing a `torch.nn.Module` to `ExponentialMovingAverage.step` is deprecated. " "Please pass the parameters of the module instead." ) deprecate( "passing a `torch.nn.Module` to `ExponentialMovingAverage.step`","1.0.0",_A,standard_warn=_A,) SCREAMING_SNAKE_CASE_ : Tuple = parameters.parameters() SCREAMING_SNAKE_CASE_ : int = list(_A ) self.optimization_step += 1 # Compute the decay factor for the exponential moving average. SCREAMING_SNAKE_CASE_ : Any = self.get_decay(self.optimization_step ) SCREAMING_SNAKE_CASE_ : Union[str, Any] = decay SCREAMING_SNAKE_CASE_ : Tuple = 1 - decay SCREAMING_SNAKE_CASE_ : str = contextlib.nullcontext if is_transformers_available() and transformers.deepspeed.is_deepspeed_zeroa_enabled(): import deepspeed for s_param, param in zip(self.shadow_params,_A ): if is_transformers_available() and transformers.deepspeed.is_deepspeed_zeroa_enabled(): SCREAMING_SNAKE_CASE_ : Union[str, Any] = deepspeed.zero.GatheredParameters(_A,modifier_rank=_A ) with context_manager(): if param.requires_grad: s_param.sub_(one_minus_decay * (s_param - param) ) else: s_param.copy_(_A ) def __UpperCamelCase ( self : int,_A : Iterable[torch.nn.Parameter] ): """simple docstring""" SCREAMING_SNAKE_CASE_ : Tuple = list(_A ) for s_param, param in zip(self.shadow_params,_A ): param.data.copy_(s_param.to(param.device ).data ) def __UpperCamelCase ( self : Dict,_A : Any=None,_A : Union[str, Any]=None ): """simple docstring""" SCREAMING_SNAKE_CASE_ : Union[str, Any] = [ p.to(device=_A,dtype=_A ) if p.is_floating_point() else p.to(device=_A ) for p in self.shadow_params ] def __UpperCamelCase ( self : Optional[Any] ): """simple docstring""" return { "decay": self.decay, "min_decay": self.min_decay, "optimization_step": self.optimization_step, "update_after_step": self.update_after_step, "use_ema_warmup": self.use_ema_warmup, "inv_gamma": self.inv_gamma, "power": self.power, "shadow_params": self.shadow_params, } def __UpperCamelCase ( self : Optional[Any],_A : Iterable[torch.nn.Parameter] ): """simple docstring""" SCREAMING_SNAKE_CASE_ : List[str] = [param.detach().cpu().clone() for param in parameters] def __UpperCamelCase ( self : int,_A : Iterable[torch.nn.Parameter] ): """simple docstring""" if self.temp_stored_params is None: raise RuntimeError("This ExponentialMovingAverage has no `store()`ed weights " "to `restore()`" ) for c_param, param in zip(self.temp_stored_params,_A ): param.data.copy_(c_param.data ) # Better memory-wise. 
SCREAMING_SNAKE_CASE_ : Dict = None def __UpperCamelCase ( self : Union[str, Any],_A : dict ): """simple docstring""" SCREAMING_SNAKE_CASE_ : int = copy.deepcopy(_A ) SCREAMING_SNAKE_CASE_ : str = state_dict.get("decay",self.decay ) if self.decay < 0.0 or self.decay > 1.0: raise ValueError("Decay must be between 0 and 1" ) SCREAMING_SNAKE_CASE_ : Dict = state_dict.get("min_decay",self.min_decay ) if not isinstance(self.min_decay,_A ): raise ValueError("Invalid min_decay" ) SCREAMING_SNAKE_CASE_ : Any = state_dict.get("optimization_step",self.optimization_step ) if not isinstance(self.optimization_step,_A ): raise ValueError("Invalid optimization_step" ) SCREAMING_SNAKE_CASE_ : Any = state_dict.get("update_after_step",self.update_after_step ) if not isinstance(self.update_after_step,_A ): raise ValueError("Invalid update_after_step" ) SCREAMING_SNAKE_CASE_ : Optional[Any] = state_dict.get("use_ema_warmup",self.use_ema_warmup ) if not isinstance(self.use_ema_warmup,_A ): raise ValueError("Invalid use_ema_warmup" ) SCREAMING_SNAKE_CASE_ : Optional[Any] = state_dict.get("inv_gamma",self.inv_gamma ) if not isinstance(self.inv_gamma,(float, int) ): raise ValueError("Invalid inv_gamma" ) SCREAMING_SNAKE_CASE_ : int = state_dict.get("power",self.power ) if not isinstance(self.power,(float, int) ): raise ValueError("Invalid power" ) SCREAMING_SNAKE_CASE_ : str = state_dict.get("shadow_params",_A ) if shadow_params is not None: SCREAMING_SNAKE_CASE_ : int = shadow_params if not isinstance(self.shadow_params,_A ): raise ValueError("shadow_params must be a list" ) if not all(isinstance(_A,torch.Tensor ) for p in self.shadow_params ): raise ValueError("shadow_params must all be Tensors" )
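# Illustrative sketch (not part of the original file): the decay schedule
# implemented by get_decay above, for a step that has already been offset by
# update_after_step + 1 and clamped at zero.
def ema_decay(
    step: int,
    *,
    decay: float = 0.9999,
    min_decay: float = 0.0,
    use_ema_warmup: bool = False,
    inv_gamma: float = 1.0,
    power: float = 2 / 3,
) -> float:
    if step <= 0:
        return 0.0
    if use_ema_warmup:
        value = 1 - (1 + step / inv_gamma) ** -power
    else:
        value = (1 + step) / (10 + step)
    # Clamp into [min_decay, decay].
    return max(min(value, decay), min_decay)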
import inspect
import unittest

from transformers import DPTConfig
from transformers.file_utils import is_torch_available, is_vision_available
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, require_vision, slow, torch_device

from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, _config_zero_init, floats_tensor, ids_tensor
from ...test_pipeline_mixin import PipelineTesterMixin


if is_torch_available():
    import torch
    from torch import nn

    from transformers import MODEL_MAPPING, DPTForDepthEstimation, DPTForSemanticSegmentation, DPTModel
    from transformers.models.dpt.modeling_dpt import DPT_PRETRAINED_MODEL_ARCHIVE_LIST

if is_vision_available():
    from PIL import Image

    from transformers import DPTImageProcessor


class DPTModelTester:
    def __init__(
        self,
        parent,
        batch_size=2,
        image_size=32,
        patch_size=16,
        num_channels=3,
        is_training=True,
        use_labels=True,
        hidden_size=32,
        num_hidden_layers=4,
        backbone_out_indices=[0, 1, 2, 3],
        num_attention_heads=4,
        intermediate_size=37,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        initializer_range=0.02,
        num_labels=3,
        backbone_featmap_shape=[1, 384, 24, 24],
        is_hybrid=True,
        scope=None,
    ):
        self.parent = parent
        self.batch_size = batch_size
        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.is_training = is_training
        self.use_labels = use_labels
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.backbone_out_indices = backbone_out_indices
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.initializer_range = initializer_range
        self.num_labels = num_labels
        self.backbone_featmap_shape = backbone_featmap_shape
        self.scope = scope
        self.is_hybrid = is_hybrid
        # sequence length of DPT = num_patches + 1 (we add 1 for the [CLS] token)
        num_patches = (image_size // patch_size) ** 2
        self.seq_length = num_patches + 1

    def prepare_config_and_inputs(self):
        pixel_values = floats_tensor([self.batch_size, self.num_channels, self.image_size, self.image_size])

        labels = None
        if self.use_labels:
            labels = ids_tensor([self.batch_size, self.image_size, self.image_size], self.num_labels)

        config = self.get_config()

        return config, pixel_values, labels

    def get_config(self):
        backbone_config = {
            "global_padding": "same",
            "layer_type": "bottleneck",
            "depths": [3, 4, 9],
            "out_features": ["stage1", "stage2", "stage3"],
            "embedding_dynamic_padding": True,
            "hidden_sizes": [96, 192, 384, 768],
            "num_groups": 2,
        }
        return DPTConfig(
            image_size=self.image_size,
            patch_size=self.patch_size,
            num_channels=self.num_channels,
            hidden_size=self.hidden_size,
            num_hidden_layers=self.num_hidden_layers,
            backbone_out_indices=self.backbone_out_indices,
            num_attention_heads=self.num_attention_heads,
            intermediate_size=self.intermediate_size,
            hidden_act=self.hidden_act,
            hidden_dropout_prob=self.hidden_dropout_prob,
            attention_probs_dropout_prob=self.attention_probs_dropout_prob,
            is_decoder=False,
            initializer_range=self.initializer_range,
            is_hybrid=self.is_hybrid,
            backbone_config=backbone_config,
            backbone_featmap_shape=self.backbone_featmap_shape,
        )

    def create_and_check_model(self, config, pixel_values, labels):
        model = DPTModel(config=config)
        model.to(torch_device)
        model.eval()
        result = model(pixel_values)
        self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))

    def create_and_check_for_depth_estimation(self, config, pixel_values, labels):
        config.num_labels = self.num_labels
        model = DPTForDepthEstimation(config)
        model.to(torch_device)
        model.eval()
        result = model(pixel_values)
        self.parent.assertEqual(result.predicted_depth.shape, (self.batch_size, self.image_size, self.image_size))

    def create_and_check_for_semantic_segmentation(self, config, pixel_values, labels):
        config.num_labels = self.num_labels
        model = DPTForSemanticSegmentation(config)
        model.to(torch_device)
        model.eval()
        result = model(pixel_values, labels=labels)
        self.parent.assertEqual(
            result.logits.shape, (self.batch_size, self.num_labels, self.image_size, self.image_size)
        )

    def prepare_config_and_inputs_for_common(self):
        config_and_inputs = self.prepare_config_and_inputs()
        config, pixel_values, labels = config_and_inputs
        inputs_dict = {"pixel_values": pixel_values}
        return config, inputs_dict


@require_torch
class DPTModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
    all_model_classes = (DPTModel, DPTForDepthEstimation, DPTForSemanticSegmentation) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
            "depth-estimation": DPTForDepthEstimation,
            "feature-extraction": DPTModel,
            "image-segmentation": DPTForSemanticSegmentation,
        }
        if is_torch_available()
        else {}
    )

    test_pruning = False
    test_resize_embeddings = False
    test_head_masking = False

    def setUp(self):
        self.model_tester = DPTModelTester(self)
        self.config_tester = ConfigTester(self, config_class=DPTConfig, has_text_modality=False, hidden_size=37)

    def test_config(self):
        self.config_tester.run_common_tests()

    @unittest.skip(reason="DPT does not use inputs_embeds")
    def test_inputs_embeds(self):
        pass

    def test_model_common_attributes(self):
        config, _ = self.model_tester.prepare_config_and_inputs_for_common()

        for model_class in self.all_model_classes:
            model = model_class(config)
            self.assertIsInstance(model.get_input_embeddings(), (nn.Module))
            x = model.get_output_embeddings()
            self.assertTrue(x is None or isinstance(x, nn.Linear))

    def test_forward_signature(self):
        config, _ = self.model_tester.prepare_config_and_inputs_for_common()

        for model_class in self.all_model_classes:
            model = model_class(config)
            signature = inspect.signature(model.forward)
            # signature.parameters is an OrderedDict => so arg_names order is deterministic
            arg_names = [*signature.parameters.keys()]

            expected_arg_names = ["pixel_values"]
            self.assertListEqual(arg_names[:1], expected_arg_names)

    def test_model(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_model(*config_and_inputs)

    def test_for_depth_estimation(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_for_depth_estimation(*config_and_inputs)

    def test_for_semantic_segmentation(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_for_semantic_segmentation(*config_and_inputs)

    def test_training(self):
        for model_class in self.all_model_classes:
            if model_class.__name__ == "DPTForDepthEstimation":
                continue

            config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
            config.return_dict = True

            if model_class in get_values(MODEL_MAPPING):
                continue

            model = model_class(config)
            model.to(torch_device)
            model.train()
            inputs = self._prepare_for_class(inputs_dict, model_class, return_labels=True)
            loss = model(**inputs).loss
            loss.backward()

    def test_training_gradient_checkpointing(self):
        for model_class in self.all_model_classes:
            if model_class.__name__ == "DPTForDepthEstimation":
                continue

            config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
            config.use_cache = False
            config.return_dict = True

            if model_class in get_values(MODEL_MAPPING) or not model_class.supports_gradient_checkpointing:
                continue

            model = model_class(config)
            model.to(torch_device)
            model.gradient_checkpointing_enable()
            model.train()
            inputs = self._prepare_for_class(inputs_dict, model_class, return_labels=True)
            loss = model(**inputs).loss
            loss.backward()

    def test_initialization(self):
        config, _ = self.model_tester.prepare_config_and_inputs_for_common()

        configs_no_init = _config_zero_init(config)
        for model_class in self.all_model_classes:
            model = model_class(config=configs_no_init)
            # Skip the check for the backbone
            backbone_params = []
            for name, module in model.named_modules():
                if module.__class__.__name__ == "DPTViTHybridEmbeddings":
                    backbone_params = [f"{name}.{key}" for key in module.state_dict().keys()]
                    break

            for name, param in model.named_parameters():
                if param.requires_grad:
                    if name in backbone_params:
                        continue
                    self.assertIn(
                        ((param.data.mean() * 1e9).round() / 1e9).item(),
                        [0.0, 1.0],
                        msg=f"Parameter {name} of model {model_class} seems not properly initialized",
                    )

    @unittest.skip("Will be fixed soon by reducing the size of the model used for common tests.")
    def test_model_is_small(self):
        pass

    @slow
    def test_model_from_pretrained(self):
        for model_name in DPT_PRETRAINED_MODEL_ARCHIVE_LIST[1:]:
            model = DPTModel.from_pretrained(model_name)
            self.assertIsNotNone(model)

    def test_raise_readout_type(self):
        # We do this test only for DPTForDepthEstimation since it is the only model that uses readout_type
        config, _ = self.model_tester.prepare_config_and_inputs_for_common()
        config.readout_type = "add"
        with self.assertRaises(ValueError):
            _ = DPTForDepthEstimation(config)


def prepare_img():
    image = Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png")
    return image


@require_torch
@require_vision
@slow
class DPTModelIntegrationTest(unittest.TestCase):
    def test_inference_depth_estimation(self):
        image_processor = DPTImageProcessor.from_pretrained("Intel/dpt-hybrid-midas")
        model = DPTForDepthEstimation.from_pretrained("Intel/dpt-hybrid-midas").to(torch_device)

        image = prepare_img()
        inputs = image_processor(images=image, return_tensors="pt").to(torch_device)

        # forward pass
        with torch.no_grad():
            outputs = model(**inputs)
            predicted_depth = outputs.predicted_depth

        # verify the predicted depth
        expected_shape = torch.Size((1, 384, 384))
        self.assertEqual(predicted_depth.shape, expected_shape)

        expected_slice = torch.tensor(
            [[[5.6437, 5.6146, 5.6511], [5.4371, 5.5649, 5.5958], [5.5215, 5.5184, 5.5293]]]
        ).to(torch_device)

        self.assertTrue(torch.allclose(outputs.predicted_depth[:3, :3, :3] / 100, expected_slice, atol=1e-4))
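# Standalone inference sketch mirroring the integration test above. It assumes
# network access to the Intel/dpt-hybrid-midas checkpoint and uses "cats.png"
# as a placeholder path for any RGB image.
import torch
from PIL import Image

from transformers import DPTForDepthEstimation, DPTImageProcessor

processor = DPTImageProcessor.from_pretrained("Intel/dpt-hybrid-midas")
model = DPTForDepthEstimation.from_pretrained("Intel/dpt-hybrid-midas")
model.eval()

image = Image.open("cats.png")
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    predicted_depth = model(**inputs).predicted_depth  # shape: (1, 384, 384)

print(predicted_depth.shape)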
DIGITS_FIFTH_POWER = {str(digit): digit**5 for digit in range(10)}


def digits_fifth_powers_sum(number: int) -> int:
    # Sum of the fifth powers of the digits of `number`.
    return sum(DIGITS_FIFTH_POWER[digit] for digit in str(number))


def solution() -> int:
    # Project Euler 30: sum of all numbers equal to the sum of the fifth
    # powers of their digits (single-digit numbers are excluded).
    return sum(
        number for number in range(1000, 1000000) if number == digits_fifth_powers_sum(number)
    )


if __name__ == "__main__":
    print(solution())
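# Spot-check digits_fifth_powers_sum on a known member of the sum:
# 4150 = 4**5 + 1**5 + 5**5 + 0**5 = 1024 + 1 + 3125 + 0.
assert digits_fifth_powers_sum(4150) == 4150
assert digits_fifth_powers_sum(9999) == 4 * 9**5  # 236196, far above 9999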
def triangle_number_generator():
    # Yields the triangular numbers n * (n + 1) / 2.
    for n in range(1, 1000000):
        yield n * (n + 1) // 2


def count_divisors(n: int) -> int:
    # Counts divisors via prime factorization: the divisor count is the
    # product of (multiplicity + 1) over all prime factors.
    divisors_count = 1
    i = 2
    while i * i <= n:
        multiplicity = 0
        while n % i == 0:
            n //= i
            multiplicity += 1
        divisors_count *= multiplicity + 1
        i += 1
    if n > 1:
        divisors_count *= 2
    return divisors_count


def solution() -> int:
    # Project Euler 12: the first triangular number with over 500 divisors.
    return next(i for i in triangle_number_generator() if count_divisors(i) > 500)


if __name__ == "__main__":
    print(solution())
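# From the Project Euler statement: 28 is the first triangular number with
# more than five divisors (1, 2, 4, 7, 14, 28).
assert count_divisors(28) == 6
assert next(t for t in triangle_number_generator() if count_divisors(t) > 5) == 28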
def least_divisible_repunit(divisor: int) -> int:
    # Returns the smallest k such that the repunit R(k) = 111...1 (k ones)
    # is divisible by `divisor`, or 0 when gcd(divisor, 10) != 1.
    if divisor % 5 == 0 or divisor % 2 == 0:
        return 0
    repunit = 1
    repunit_index = 1
    while repunit:
        repunit = (10 * repunit + 1) % divisor
        repunit_index += 1
    return repunit_index


def solution(limit: int = 1_000_000) -> int:
    # Project Euler 129: the least n coprime to 10 for which A(n) > limit.
    # Since A(n) <= n, the answer must itself exceed `limit`, so start there
    # and step through odd candidates only.
    divisor = limit - 1
    if divisor % 2 == 0:
        divisor += 1
    while least_divisible_repunit(divisor) <= limit:
        divisor += 2
    return divisor


if __name__ == "__main__":
    print(f"{solution() = }")
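# Known values from the Project Euler statement: A(7) = 6 because
# 111111 = 7 * 15873 is the first repunit divisible by 7, and
# A(41) = 5 because 11111 = 41 * 271.
assert least_divisible_repunit(7) == 6
assert least_divisible_repunit(41) == 5
assert least_divisible_repunit(10) == 0  # shares a factor with 10, no repunit qualifies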
def bfs(graph, source, sink, parent):
    # Returns True if an augmenting path from source to sink exists in the
    # residual graph, recording the path via the `parent` array.
    visited = [False] * len(graph)
    queue = []
    queue.append(source)
    visited[source] = True

    while queue:
        u = queue.pop(0)
        for ind in range(len(graph[u])):
            if visited[ind] is False and graph[u][ind] > 0:
                queue.append(ind)
                visited[ind] = True
                parent[ind] = u

    return visited[sink]


def ford_fulkerson(graph, source, sink):
    # Edmonds-Karp variant: repeatedly find a shortest augmenting path with
    # BFS and push the bottleneck capacity along it.
    parent = [-1] * len(graph)
    max_flow = 0
    while bfs(graph, source, sink, parent):
        path_flow = float("Inf")
        s = sink
        while s != source:
            # Find the minimum residual capacity along the selected path
            path_flow = min(path_flow, graph[parent[s]][s])
            s = parent[s]

        max_flow += path_flow

        # Update residual capacities of the edges and their reverse edges
        v = sink
        while v != source:
            u = parent[v]
            graph[u][v] -= path_flow
            graph[v][u] += path_flow
            v = parent[v]

    return max_flow


graph = [
    [0, 16, 13, 0, 0, 0],
    [0, 0, 10, 12, 0, 0],
    [0, 4, 0, 0, 14, 0],
    [0, 0, 9, 0, 0, 20],
    [0, 0, 0, 7, 0, 4],
    [0, 0, 0, 0, 0, 0],
]

source, sink = 0, 5
print(ford_fulkerson(graph, source, sink))
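# The capacity matrix above is the classic CLRS example network, whose
# maximum flow is 23. Note that ford_fulkerson rewrites its input into the
# residual graph, so pass a deep copy to keep the original capacities intact.
import copy

capacities = [
    [0, 16, 13, 0, 0, 0],
    [0, 0, 10, 12, 0, 0],
    [0, 4, 0, 0, 14, 0],
    [0, 0, 9, 0, 0, 20],
    [0, 0, 0, 7, 0, 4],
    [0, 0, 0, 0, 0, 0],
]
assert ford_fulkerson(copy.deepcopy(capacities), 0, 5) == 23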