import importlib
import os
import sys
from importlib.util import find_spec
from itertools import chain
from types import ModuleType
from typing import Any

# `packaging` gives semantic version comparison; comparing raw version strings
# lexicographically mis-orders releases such as "0.9.0" vs "0.20.0".
from packaging.version import parse as parse_version

# `importlib.metadata` is only available from Python 3.8 onwards; older
# interpreters fall back to `pkg_resources` below.
_is_python_greater_3_8 = sys.version_info >= (3, 8)


def is_peft_available() -> bool:
    return find_spec("peft") is not None


def is_unsloth_available() -> bool:
    return find_spec("unsloth") is not None


def is_accelerate_greater_20_0() -> bool:
    if _is_python_greater_3_8:
        from importlib.metadata import version

        accelerate_version = version("accelerate")
    else:
        import pkg_resources

        accelerate_version = pkg_resources.get_distribution("accelerate").version
    # Parse before comparing so that e.g. "0.9.0" does not sort above "0.20.0".
    return parse_version(accelerate_version) >= parse_version("0.20.0")


def is_transformers_greater_than(current_version: str) -> bool:
    if _is_python_greater_3_8:
        from importlib.metadata import version

        _transformers_version = version("transformers")
    else:
        import pkg_resources

        _transformers_version = pkg_resources.get_distribution("transformers").version
    return parse_version(_transformers_version) > parse_version(current_version)


def is_torch_greater_2_0() -> bool:
    if _is_python_greater_3_8:
        from importlib.metadata import version

        torch_version = version("torch")
    else:
        import pkg_resources

        torch_version = pkg_resources.get_distribution("torch").version
    return parse_version(torch_version) >= parse_version("2.0")
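

# A quick illustration (not used at runtime) of why the helpers above parse
# versions instead of comparing raw strings:
#
#     >>> "0.9.0" >= "0.20.0"                                # lexicographic
#     True
#     >>> parse_version("0.9.0") >= parse_version("0.20.0")  # semantic
#     False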


def is_diffusers_available() -> bool:
    return find_spec("diffusers") is not None


def is_pil_available() -> bool:
    return find_spec("PIL") is not None


def is_bitsandbytes_available() -> bool:
    import torch

    # This helper treats bitsandbytes as usable only when a CUDA device is present.
    return find_spec("bitsandbytes") is not None and torch.cuda.is_available()


def is_torchvision_available() -> bool:
    return find_spec("torchvision") is not None


def is_rich_available() -> bool:
    return find_spec("rich") is not None


def is_wandb_available() -> bool:
    return find_spec("wandb") is not None


def is_sklearn_available() -> bool:
    return find_spec("sklearn") is not None


def is_xpu_available() -> bool:
    if is_accelerate_greater_20_0():
        # accelerate >= 0.20.0 ships its own XPU detection.
        import accelerate

        return accelerate.utils.is_xpu_available()
    else:
        if find_spec("intel_extension_for_pytorch") is None:
            return False
        try:
            import torch

            return hasattr(torch, "xpu") and torch.xpu.is_available()
        except RuntimeError:
            return False


def is_npu_available() -> bool:
    """Check whether `torch_npu` is installed and whether an NPU device is available in the environment."""
    if find_spec("torch") is None or find_spec("torch_npu") is None:
        return False

    import torch
    import torch_npu  # noqa: F401 -- importing torch_npu registers the `torch.npu` namespace

    return hasattr(torch, "npu") and torch.npu.is_available()


class _LazyModule(ModuleType):
    """
    Module class that surfaces all objects but only performs associated imports when the objects are requested.
    """

    def __init__(self, name, module_file, import_structure, module_spec=None, extra_objects=None):
        super().__init__(name)
        self._modules = set(import_structure.keys())
        self._class_to_module = {}
        for key, values in import_structure.items():
            for value in values:
                self._class_to_module[value] = key
        # Needed for autocompletion in an IDE
        self.__all__ = list(import_structure.keys()) + list(chain(*import_structure.values()))
        self.__file__ = module_file
        self.__spec__ = module_spec
        self.__path__ = [os.path.dirname(module_file)]
        self._objects = {} if extra_objects is None else extra_objects
        self._name = name
        self._import_structure = import_structure

    def __dir__(self):
        result = super().__dir__()
        # Entries in self.__all__ that have not been accessed yet may be
        # missing from the default dir(), so append them.
        for attr in self.__all__:
            if attr not in result:
                result.append(attr)
        return result

    def __getattr__(self, name: str) -> Any:
        if name in self._objects:
            return self._objects[name]
        if name in self._modules:
            value = self._get_module(name)
        elif name in self._class_to_module.keys():
            module = self._get_module(self._class_to_module[name])
            value = getattr(module, name)
        else:
            raise AttributeError(f"module {self.__name__} has no attribute {name}")

        # Cache the resolved attribute so later lookups bypass __getattr__.
        setattr(self, name, value)
        return value

    def _get_module(self, module_name: str):
        try:
            return importlib.import_module("." + module_name, self.__name__)
        except Exception as e:
            raise RuntimeError(
                f"Failed to import {self.__name__}.{module_name} because of the following error (look up to see its"
                f" traceback):\n{e}"
            ) from e

    def __reduce__(self):
        return (self.__class__, (self._name, self.__file__, self._import_structure))
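

# Illustrative sketch (hypothetical names) of how a package's `__init__.py`
# would typically wire up _LazyModule; this mirrors the common lazy-import
# pattern rather than code defined in this file:
#
#     import sys
#     from .import_utils import _LazyModule
#
#     _import_structure = {"trainer": ["SomeTrainer"], "models": ["SomeModel"]}
#     sys.modules[__name__] = _LazyModule(
#         __name__, globals()["__file__"], _import_structure, module_spec=__spec__
#     )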


class OptionalDependencyNotAvailable(BaseException):
    """Internally used error class for signalling that an optional dependency was not found."""