danieldk (HF Staff) committed
Commit: 8d88d0e
Parent: babdbd0
build/torch-cpu/__init__.py CHANGED
@@ -1,5 +1,5 @@
 def version() -> str:
-    return "2"
+    return "0.2.0"
 
 
 from . import layers
build/torch-cpu/_ops.py CHANGED
@@ -1,8 +1,38 @@
 import torch
-ops = torch.ops._versions_2da0f64
 
-def add_op_namespace_prefix(op_name: str):
+def get_backend() -> str:
+    """Detect the backend by inspecting torch."""
+    import torch
+
+    if hasattr(torch, "neuron"):
+        # Needs to be sorted before specific Torch builds, since Neuron
+        # extension can be loaded into e.g. CUDA Torch builds.
+        return "neuron"
+    elif torch.version.cuda is not None:
+        return "cuda"
+    elif torch.version.hip is not None:
+        return "rocm"
+    elif torch.backends.mps.is_available():
+        return "metal"
+    elif hasattr(torch.version, "xpu") and torch.version.xpu is not None:
+        return "xpu"
+    else:
+        return "cpu"
+
+
+def _find_ops_name() -> str:
+    kernel_name = "versions"
+    unique_id = "babdbd0"
+    backend = get_backend()
+    return f"_{kernel_name}_{backend}_{unique_id}"
+
+
+_OPS_NAME = _find_ops_name()
+
+ops = getattr(torch.ops, _OPS_NAME)
+
+def add_op_namespace_prefix(op_name: str) -> str:
     """
     Prefix op by namespace.
     """
-    return f"_versions_2da0f64::{op_name}"
+    return f"{_OPS_NAME}::{op_name}"
build/torch-cpu/metadata.json CHANGED
@@ -1,4 +1,9 @@
 {
+  "id": "_versions_cpu_babdbd0",
   "version": 2,
-  "python-depends": []
+  "license": "apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cpu"
+  }
 }
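The new `id` field matches the `_find_ops_name()` scheme from `_ops.py`, and `backend.type` tags the build variant. A sketch of how a loader could use these fields to pick a variant; the selection helper below is illustrative, not the actual loader code:

    import json
    from pathlib import Path

    def matches_backend(variant_dir: Path, backend: str) -> bool:
        # Reads the fields added in this commit: "backend" with a "type" key.
        meta = json.loads((variant_dir / "metadata.json").read_text())
        return meta.get("backend", {}).get("type") == backend

    # matches_backend(Path("build/torch-cpu"), "cpu") -> True for this variant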
build/torch-cpu/versions/__init__.py CHANGED
@@ -1,10 +1,10 @@
 import ctypes
+import importlib.util
 import sys
-
-import importlib
 from pathlib import Path
 from types import ModuleType
 
+
 def _import_from_path(file_path: Path) -> ModuleType:
     # We cannot use the module name as-is, after adding it to `sys.modules`,
     # it would also be used for other imports. So, we make a module name that
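The import fix matters because `import importlib` alone does not guarantee that the `importlib.util` submodule is available. For reference, a self-contained sketch of the spec-based pattern a helper like `_import_from_path` builds on (the exact name-mangling scheme below is an assumption; the file is truncated in this diff):

    import importlib.util
    import sys
    from pathlib import Path
    from types import ModuleType

    def import_from_path(file_path: Path) -> ModuleType:
        # Mangle the module name so the sys.modules entry cannot shadow
        # other imports that share the file's basename.
        module_name = f"_dyn_{file_path.stem}"
        spec = importlib.util.spec_from_file_location(module_name, file_path)
        module = importlib.util.module_from_spec(spec)
        sys.modules[module_name] = module
        spec.loader.exec_module(module)
        return module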
build/torch-cuda/__init__.py CHANGED
@@ -1,5 +1,5 @@
 def version() -> str:
-    return "2"
+    return "0.2.0"
 
 
 from . import layers
build/torch-cuda/_ops.py CHANGED
@@ -1,8 +1,38 @@
 import torch
-ops = torch.ops._versions_2da0f64
 
-def add_op_namespace_prefix(op_name: str):
+def get_backend() -> str:
+    """Detect the backend by inspecting torch."""
+    import torch
+
+    if hasattr(torch, "neuron"):
+        # Needs to be sorted before specific Torch builds, since Neuron
+        # extension can be loaded into e.g. CUDA Torch builds.
+        return "neuron"
+    elif torch.version.cuda is not None:
+        return "cuda"
+    elif torch.version.hip is not None:
+        return "rocm"
+    elif torch.backends.mps.is_available():
+        return "metal"
+    elif hasattr(torch.version, "xpu") and torch.version.xpu is not None:
+        return "xpu"
+    else:
+        return "cpu"
+
+
+def _find_ops_name() -> str:
+    kernel_name = "versions"
+    unique_id = "babdbd0"
+    backend = get_backend()
+    return f"_{kernel_name}_{backend}_{unique_id}"
+
+
+_OPS_NAME = _find_ops_name()
+
+ops = getattr(torch.ops, _OPS_NAME)
+
+def add_op_namespace_prefix(op_name: str) -> str:
     """
     Prefix op by namespace.
     """
-    return f"_versions_2da0f64::{op_name}"
+    return f"{_OPS_NAME}::{op_name}"
build/torch-cuda/metadata.json CHANGED
@@ -1,4 +1,9 @@
 {
+  "id": "_versions_cuda_babdbd0",
   "version": 2,
-  "python-depends": []
+  "license": "apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda"
+  }
 }
build/torch-cuda/versions/__init__.py CHANGED
@@ -1,10 +1,10 @@
 import ctypes
+import importlib.util
 import sys
-
-import importlib
 from pathlib import Path
 from types import ModuleType
 
+
 def _import_from_path(file_path: Path) -> ModuleType:
     # We cannot use the module name as-is, after adding it to `sys.modules`,
     # it would also be used for other imports. So, we make a module name that
build/torch-rocm/__init__.py CHANGED
@@ -1,5 +1,5 @@
 def version() -> str:
-    return "2"
+    return "0.2.0"
 
 
 from . import layers
build/torch-rocm/_ops.py CHANGED
@@ -1,8 +1,38 @@
 import torch
-ops = torch.ops._versions_2da0f64
 
-def add_op_namespace_prefix(op_name: str):
+def get_backend() -> str:
+    """Detect the backend by inspecting torch."""
+    import torch
+
+    if hasattr(torch, "neuron"):
+        # Needs to be sorted before specific Torch builds, since Neuron
+        # extension can be loaded into e.g. CUDA Torch builds.
+        return "neuron"
+    elif torch.version.cuda is not None:
+        return "cuda"
+    elif torch.version.hip is not None:
+        return "rocm"
+    elif torch.backends.mps.is_available():
+        return "metal"
+    elif hasattr(torch.version, "xpu") and torch.version.xpu is not None:
+        return "xpu"
+    else:
+        return "cpu"
+
+
+def _find_ops_name() -> str:
+    kernel_name = "versions"
+    unique_id = "babdbd0"
+    backend = get_backend()
+    return f"_{kernel_name}_{backend}_{unique_id}"
+
+
+_OPS_NAME = _find_ops_name()
+
+ops = getattr(torch.ops, _OPS_NAME)
+
+def add_op_namespace_prefix(op_name: str) -> str:
     """
     Prefix op by namespace.
     """
-    return f"_versions_2da0f64::{op_name}"
+    return f"{_OPS_NAME}::{op_name}"
build/torch-rocm/metadata.json CHANGED
@@ -1,4 +1,9 @@
 {
+  "id": "_versions_rocm_babdbd0",
   "version": 2,
-  "python-depends": []
+  "license": "apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "rocm"
+  }
 }
build/torch-rocm/versions/__init__.py CHANGED
@@ -1,10 +1,10 @@
 import ctypes
+import importlib.util
 import sys
-
-import importlib
 from pathlib import Path
 from types import ModuleType
 
+
 def _import_from_path(file_path: Path) -> ModuleType:
     # We cannot use the module name as-is, after adding it to `sys.modules`,
     # it would also be used for other imports. So, we make a module name that
build/torch-universal/versions/__init__.py DELETED
@@ -1,7 +0,0 @@
1
- def version() -> str:
2
- return "2"
3
-
4
-
5
- from . import layers
6
-
7
- __all__ = ["layers", "version"]
 
 
 
 
 
 
 
 
build/torch-universal/versions/__pycache__/__init__.cpython-312.pyc DELETED
Binary file (383 Bytes)
 
build/torch-universal/versions/__pycache__/layers.cpython-312.pyc DELETED
Binary file (614 Bytes)
 
build/torch-universal/versions/_ops.py DELETED
@@ -1,8 +0,0 @@
-import torch
-ops = torch.ops._versions_dc142fd_dirty
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_versions_dc142fd_dirty::{op_name}"
build/torch-universal/versions/layers.py DELETED
@@ -1,8 +0,0 @@
-import torch.nn as nn
-
-from . import version
-
-
-class Version(nn.Module):
-    def forward(self) -> str:
-        return version()
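Although the torch-universal variant is removed, every remaining variant still imports a `layers` module (see the `__init__.py` diffs above) with the same shape as this deleted file: functionality exposed as an `nn.Module`. A standalone sketch, with `version()` stubbed in:

    import torch.nn as nn

    def version() -> str:  # stand-in for the package-level version()
        return "0.2.0"

    class Version(nn.Module):
        def forward(self) -> str:
            return version()

    print(Version()())  # prints "0.2.0"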
build/torch-xpu/__init__.py CHANGED
@@ -1,5 +1,5 @@
 def version() -> str:
-    return "2"
+    return "0.2.0"
 
 
 from . import layers
build/torch-xpu/_ops.py CHANGED
@@ -1,8 +1,38 @@
 import torch
-ops = torch.ops._versions_2da0f64
 
-def add_op_namespace_prefix(op_name: str):
+def get_backend() -> str:
+    """Detect the backend by inspecting torch."""
+    import torch
+
+    if hasattr(torch, "neuron"):
+        # Needs to be sorted before specific Torch builds, since Neuron
+        # extension can be loaded into e.g. CUDA Torch builds.
+        return "neuron"
+    elif torch.version.cuda is not None:
+        return "cuda"
+    elif torch.version.hip is not None:
+        return "rocm"
+    elif torch.backends.mps.is_available():
+        return "metal"
+    elif hasattr(torch.version, "xpu") and torch.version.xpu is not None:
+        return "xpu"
+    else:
+        return "cpu"
+
+
+def _find_ops_name() -> str:
+    kernel_name = "versions"
+    unique_id = "babdbd0"
+    backend = get_backend()
+    return f"_{kernel_name}_{backend}_{unique_id}"
+
+
+_OPS_NAME = _find_ops_name()
+
+ops = getattr(torch.ops, _OPS_NAME)
+
+def add_op_namespace_prefix(op_name: str) -> str:
     """
     Prefix op by namespace.
     """
-    return f"_versions_2da0f64::{op_name}"
+    return f"{_OPS_NAME}::{op_name}"
build/torch-xpu/metadata.json CHANGED
@@ -1,4 +1,9 @@
 {
+  "id": "_versions_xpu_babdbd0",
   "version": 2,
-  "python-depends": []
+  "license": "apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "xpu"
+  }
 }
build/torch-xpu/versions/__init__.py CHANGED
@@ -1,10 +1,10 @@
 import ctypes
+import importlib.util
 import sys
-
-import importlib
 from pathlib import Path
 from types import ModuleType
 
+
 def _import_from_path(file_path: Path) -> ModuleType:
     # We cannot use the module name as-is, after adding it to `sys.modules`,
     # it would also be used for other imports. So, we make a module name that