| | """ Configuration base class and utilities.""" |
| |
|
| | from __future__ import (absolute_import, division, print_function, |
| | unicode_literals) |
| |
|
| | import copy |
| | import json |
| | import logging |
| | import os |
| | from io import open |
| |
|
| | from .file_utils import cached_path, CONFIG_NAME |
| |
|
| | logger = logging.getLogger(__name__) |
| |
|
class PretrainedConfig(object):
    r""" Base class for all configuration classes.
        Handles a few parameters common to all models' configurations as well as methods for loading/downloading/saving configurations.

        Note:
            A configuration file can be loaded and saved to disk. Loading the configuration file and using this file to initialize a model does **not** load the model weights.
            It only affects the model's configuration.

        Class attributes (overridden by derived classes):
            - ``pretrained_config_archive_map``: a python ``dict`` with `short-cut-names` (string) as keys and `url` (string) of associated pretrained model configurations as values.

        Parameters:
            ``finetuning_task``: string, default `None`. Name of the task used to fine-tune the model. This can be used when converting from an original (TensorFlow or PyTorch) checkpoint.
            ``num_labels``: integer, default `2`. Number of classes to use when the model is a classification model (sequences/tokens).
            ``output_attentions``: boolean, default `False`. Should the model return attention weights.
            ``output_hidden_states``: boolean, default `False`. Should the model return all hidden states.
            ``torchscript``: boolean, default `False`. Whether the model is used with TorchScript.
            ``pruned_heads``: dict, default `{}`. Heads pruned from the model, as a dictionary mapping a layer index to the set of head indices pruned in that layer.
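
        Example (a minimal sketch using the derived :class:`BertConfig` class; the attribute values shown are illustrative)::

            config = BertConfig.from_pretrained('bert-base-uncased', num_labels=3)
            assert config.num_labels == 3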
| | """ |
| | pretrained_config_archive_map = {} |
| |
|
    def __init__(self, **kwargs):
        # Attributes common to all model configurations. Keyword arguments
        # not consumed here are simply ignored by this base class.
        self.finetuning_task = kwargs.pop('finetuning_task', None)
        self.num_labels = kwargs.pop('num_labels', 2)
        self.output_attentions = kwargs.pop('output_attentions', False)
        self.output_hidden_states = kwargs.pop('output_hidden_states', False)
        self.torchscript = kwargs.pop('torchscript', False)
        self.pruned_heads = kwargs.pop('pruned_heads', {})

    def save_pretrained(self, save_directory):
        """ Save a configuration object to the directory `save_directory`, so that it
        can be re-loaded using the :func:`~pytorch_transformers.PretrainedConfig.from_pretrained` class method.
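
        Example (a sketch; the directory path is illustrative and must already exist)::

            config = BertConfig.from_pretrained('bert-base-uncased')
            config.save_pretrained('./my_model_directory/')  # writes ./my_model_directory/config.json
            reloaded = BertConfig.from_pretrained('./my_model_directory/')
            assert config == reloaded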
| | """ |
| | assert os.path.isdir(save_directory), "Saving path should be a directory where the model and configuration can be saved" |
| |
|
| | |
| | output_config_file = os.path.join(save_directory, CONFIG_NAME) |
| |
|
| | self.to_json_file(output_config_file) |
| |
|
    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path, **kwargs):
        r""" Instantiate a :class:`~pytorch_transformers.PretrainedConfig` (or a derived class) from a pre-trained model configuration.

        Parameters:
            pretrained_model_name_or_path: either:

                - a string with the `shortcut name` of a pre-trained model configuration to load from cache or download, e.g.: ``bert-base-uncased``.
                - a path to a `directory` containing a configuration file saved using the :func:`~pytorch_transformers.PretrainedConfig.save_pretrained` method, e.g.: ``./my_model_directory/``.
                - a path or url to a saved configuration JSON `file`, e.g.: ``./my_model_directory/configuration.json``.

            cache_dir: (`optional`) string:
                Path to a directory in which a downloaded pre-trained model
                configuration should be cached if the standard cache should not be used.

            kwargs: (`optional`) dict: key/value pairs with which to update the configuration object after loading.

                - The values in kwargs of any keys which are configuration attributes will be used to override the loaded values.
                - Behavior concerning key/value pairs whose keys are *not* configuration attributes is controlled by the `return_unused_kwargs` keyword parameter.

            force_download: (`optional`) boolean, default False:
                Force a (re-)download of the model weights and configuration files, overriding any cached versions if they exist.

            proxies: (`optional`) dict, default None:
                A dictionary of proxy servers to use by protocol or endpoint, e.g.: ``{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}``.
                The proxies are used on each request.

            return_unused_kwargs: (`optional`) bool:

                - If False, then this function returns just the final configuration object.
                - If True, then this function returns a tuple `(config, unused_kwargs)` where `unused_kwargs` is a dictionary consisting of the key/value pairs whose keys are not configuration attributes: i.e. the part of kwargs which has not been used to update `config` and is otherwise ignored.

        Examples::

            # We can't instantiate directly the base class `PretrainedConfig` so let's show the examples on a
            # derived class: BertConfig
            config = BertConfig.from_pretrained('bert-base-uncased')    # Download configuration from S3 and cache.
            config = BertConfig.from_pretrained('./test/saved_model/')  # E.g. config (or model) was saved using `save_pretrained('./test/saved_model/')`
            config = BertConfig.from_pretrained('./test/saved_model/my_configuration.json')
            config = BertConfig.from_pretrained('bert-base-uncased', output_attentions=True, foo=False)
            assert config.output_attentions == True
            config, unused_kwargs = BertConfig.from_pretrained('bert-base-uncased', output_attentions=True,
                                                               foo=False, return_unused_kwargs=True)
            assert config.output_attentions == True
            assert unused_kwargs == {'foo': False}

        """
        cache_dir = kwargs.pop('cache_dir', None)
        force_download = kwargs.pop('force_download', False)
        proxies = kwargs.pop('proxies', None)
        return_unused_kwargs = kwargs.pop('return_unused_kwargs', False)

        # Resolve the configuration file: a known shortcut name, a directory
        # containing CONFIG_NAME, or a direct path/url to a JSON file.
        if pretrained_model_name_or_path in cls.pretrained_config_archive_map:
            config_file = cls.pretrained_config_archive_map[pretrained_model_name_or_path]
        elif os.path.isdir(pretrained_model_name_or_path):
            config_file = os.path.join(pretrained_model_name_or_path, CONFIG_NAME)
        else:
            config_file = pretrained_model_name_or_path

        try:
            resolved_config_file = cached_path(config_file, cache_dir=cache_dir, force_download=force_download, proxies=proxies)
        except EnvironmentError as e:
            if pretrained_model_name_or_path in cls.pretrained_config_archive_map:
                logger.error(
                    "Couldn't reach server at '{}' to download pretrained model configuration file.".format(
                        config_file))
            else:
                logger.error(
                    "Model name '{}' was not found in model name list ({}). "
                    "We assumed '{}' was a path or url but couldn't find any file "
                    "associated to this path or url.".format(
                        pretrained_model_name_or_path,
                        ', '.join(cls.pretrained_config_archive_map.keys()),
                        config_file))
            raise e
        if resolved_config_file == config_file:
            logger.info("loading configuration file {}".format(config_file))
        else:
            logger.info("loading configuration file {} from cache at {}".format(
                config_file, resolved_config_file))

        # Load the configuration from the resolved JSON file.
        config = cls.from_json_file(resolved_config_file)

        if hasattr(config, 'pruned_heads'):
            # JSON stores dict keys as strings and sets as lists; convert the
            # pruned heads back to int layer indices and sets of head indices.
            config.pruned_heads = dict((int(key), set(value)) for key, value in config.pruned_heads.items())

        # Update config with kwargs if needed, removing the consumed keys.
        to_remove = []
        for key, value in kwargs.items():
            if hasattr(config, key):
                setattr(config, key, value)
                to_remove.append(key)
        for key in to_remove:
            kwargs.pop(key, None)

        logger.info("Model config %s", config)
        if return_unused_kwargs:
            return config, kwargs
        else:
            return config

    @classmethod
    def from_dict(cls, json_object):
        """Constructs a `PretrainedConfig` from a Python dictionary of parameters."""
        # `vocab_size_or_config_json_file=-1` is a dummy value: derived-class
        # constructors expect it, and every real value comes from `json_object`.
        config = cls(vocab_size_or_config_json_file=-1)
        for key, value in json_object.items():
            config.__dict__[key] = value
        return config
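
    # A minimal sketch of a `to_dict` / `from_dict` round trip (the overridden
    # key is illustrative):
    #
    #     d = BertConfig.from_pretrained('bert-base-uncased').to_dict()
    #     d['num_labels'] = 3
    #     config = BertConfig.from_dict(d)
    #     assert config.num_labels == 3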

    @classmethod
    def from_json_file(cls, json_file):
        """Constructs a `PretrainedConfig` from a json file of parameters."""
        with open(json_file, "r", encoding='utf-8') as reader:
            text = reader.read()
        return cls.from_dict(json.loads(text))

    def __eq__(self, other):
        return self.__dict__ == other.__dict__

    def __repr__(self):
        return str(self.to_json_string())

    def to_dict(self):
        """Serializes this instance to a Python dictionary."""
        output = copy.deepcopy(self.__dict__)
        return output

    def to_json_string(self):
        """Serializes this instance to a JSON string."""
        return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n"

    def to_json_file(self, json_file_path):
        """ Save this instance to a json file."""
        with open(json_file_path, "w", encoding='utf-8') as writer:
            writer.write(self.to_json_string())
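
    # A minimal sketch of a JSON file round trip (the path is illustrative):
    #
    #     config = BertConfig.from_pretrained('bert-base-uncased')
    #     config.to_json_file('/tmp/my_configuration.json')
    #     assert config == BertConfig.from_json_file('/tmp/my_configuration.json')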