diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000000000000000000000000000000000..5b1e568ed03e202e7ef6292e4b285451935a3706
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,442 @@
+*.7z filter=lfs diff=lfs merge=lfs -text
+*.arrow filter=lfs diff=lfs merge=lfs -text
+*.bin filter=lfs diff=lfs merge=lfs -text
+*.bz2 filter=lfs diff=lfs merge=lfs -text
+*.ckpt filter=lfs diff=lfs merge=lfs -text
+*.ftz filter=lfs diff=lfs merge=lfs -text
+*.gz filter=lfs diff=lfs merge=lfs -text
+*.h5 filter=lfs diff=lfs merge=lfs -text
+*.joblib filter=lfs diff=lfs merge=lfs -text
+*.lfs.* filter=lfs diff=lfs merge=lfs -text
+*.mlmodel filter=lfs diff=lfs merge=lfs -text
+*.model filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+*.onnx filter=lfs diff=lfs merge=lfs -text
+*.ot filter=lfs diff=lfs merge=lfs -text
+*.parquet filter=lfs diff=lfs merge=lfs -text
+*.pb filter=lfs diff=lfs merge=lfs -text
+*.pickle filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
+*.pt filter=lfs diff=lfs merge=lfs -text
+*.pth filter=lfs diff=lfs merge=lfs -text
+*.rar filter=lfs diff=lfs merge=lfs -text
+*.safetensors filter=lfs diff=lfs merge=lfs -text
+saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+*.tar.* filter=lfs diff=lfs merge=lfs -text
+*.tar filter=lfs diff=lfs merge=lfs -text
+*.tflite filter=lfs diff=lfs merge=lfs -text
+*.tgz filter=lfs diff=lfs merge=lfs -text
+*.wasm filter=lfs diff=lfs merge=lfs -text
+*.xz filter=lfs diff=lfs merge=lfs -text
+*.zip filter=lfs diff=lfs merge=lfs -text
+*.zst filter=lfs diff=lfs merge=lfs -text
+*tfevents* filter=lfs diff=lfs merge=lfs -text
+build/torch27-cxx11-cu118-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch27-cxx11-cu126-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch27-cxx11-cu128-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch27-cxx11-rocm63-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/relu/_relu_4f16829.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/relu/_relu_c984dd4_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/relu/_relu_c984dd4_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/relu/_relu_c984dd4_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/relu/_relu_c984dd4_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/relu/_relu_c984dd4_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/relu/_relu_c984dd4_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/relu/_relu_c984dd4_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/relu/_relu_c984dd4_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/relu/_relu_c984dd4_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/relu/_relu_c984dd4_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-xpu20251-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/relu/_relu_8d21eda.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/relu/_relu_e52ac39.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/relu/_relu_46e4283.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-xpu20251-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_9f1f069.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-metal-aarch64-darwin/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-xpu20251-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_6c79fff.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-xpu20251-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_e9d16fc_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-xpu20251-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_6c79fff_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-xpu20251-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_73d0c43_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-metal-aarch64-darwin/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-xpu20251-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_e0653a2.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-metal-aarch64-darwin/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-xpu20251-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_a5a5906.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-metal-aarch64-darwin/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-xpu20251-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_d308d74.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-metal-aarch64-darwin/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu126-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu128-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-cu129-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm63-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-rocm64-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch28-cxx11-xpu20251-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_dc7c154.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_3587a8c_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-metal-aarch64-darwin/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_53eed8c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-metal-aarch64-darwin/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cu128-x86_64-windows/relu/_relu_d7b860a.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_ff7d195.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-xpu20253-x86_64-windows/relu/_relu_d7b860a.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-metal-aarch64-darwin/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cpu-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_2b7ea88.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cu128-x86_64-windows/relu/_relu_5629432.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-xpu20253-x86_64-windows/relu/_relu_5629432.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-metal-aarch64-darwin/_relu_metal_jcelz7hn6aaa6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-x86_64-linux/_relu_cpu_smcctie2ikqu4.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_cuda_byb5bdzb6z5zm.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_cuda_ohbj3nuuoamci.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_cuda_oheoc6gkfjkwm.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_rocm_g773e4bf7332k.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_rocm_ijhrndbhfiwtg.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_xpu_gnle2ptjfkifg.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cpu-x86_64-linux/_relu_cpu_liz2pcyxsjqzq.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_cuda_udp2pzwonis42.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_cuda_ijqh67vz4tgr4.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_cuda_7mqpgn6tykjbc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_rocm_vox4kgutt6m6i.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_rocm_dd3ars5bdez2c.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_xpu_wcav6tli7hepe.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cu128-x86_64-windows/_relu_cuda_9ba4f37.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-xpu20253-x86_64-windows/_relu_xpu_9ba4f37.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-cu128-x86_64-windows/_relu_cuda_bd0179a.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-xpu20253-x86_64-windows/_relu_xpu_bd0179a.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-cu128-x86_64-windows/_relu_cuda_d91a431.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-xpu20253-x86_64-windows/_relu_xpu_d91a431.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-metal-aarch64-darwin/_relu_metal_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-aarch64-linux/_relu_cpu_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-aarch64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-aarch64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-aarch64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cpu-aarch64-linux/_relu_cpu_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-aarch64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-aarch64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-aarch64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cu128-x86_64-windows/_relu_cuda_657300a.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-x86_64-linux/_relu_cpu_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_rocm_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_rocm_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_xpu_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cpu-x86_64-linux/_relu_cpu_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-x86_64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-x86_64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-x86_64-linux/_relu_cuda_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_rocm_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_rocm_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_xpu_6261c06.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-xpu20253-x86_64-windows/_relu_xpu_657300a.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-aarch64-linux/_relu_cpu_0a30093.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-aarch64-linux/_relu_cuda_0a30093.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-aarch64-linux/_relu_cuda_0a30093.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-aarch64-linux/_relu_cuda_0a30093.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cpu-aarch64-linux/_relu_cpu_0a30093.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu126-aarch64-linux/_relu_cuda_0a30093.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu128-aarch64-linux/_relu_cuda_0a30093.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu130-aarch64-linux/_relu_cuda_0a30093.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cu128-x86_64-windows/_relu_cuda_fd389c8.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-xpu20253-x86_64-windows/_relu_xpu_fd389c8.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-aarch64-linux/_relu_cpu_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-aarch64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-aarch64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-aarch64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cpu-aarch64-linux/_relu_cpu_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu126-aarch64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu128-aarch64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu130-aarch64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu129-aarch64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-x86_64-linux/_relu_cpu_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_rocm_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_rocm_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_xpu_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cpu-x86_64-linux/_relu_cpu_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu126-x86_64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu128-x86_64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu130-x86_64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu129-x86_64-linux/_relu_cuda_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_rocm_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_rocm_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_xpu_86608d6.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-x86_64-linux/_relu_cpu_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_cuda_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_cuda_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_cuda_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_rocm_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_rocm_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_xpu_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cpu-x86_64-linux/_relu_cpu_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu126-x86_64-linux/_relu_cuda_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu128-x86_64-linux/_relu_cuda_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu130-x86_64-linux/_relu_cuda_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu129-x86_64-linux/_relu_cuda_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm63-x86_64-linux/_relu_rocm_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-rocm64-x86_64-linux/_relu_rocm_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-xpu20252-x86_64-linux/_relu_xpu_525b056_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-x86_64-linux/_relu_cpu_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_rocm_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_rocm_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_xpu_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cpu-x86_64-linux/_relu_cpu_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu126-x86_64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu128-x86_64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu130-x86_64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-rocm71-x86_64-linux/_relu_rocm_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-rocm72-x86_64-linux/_relu_rocm_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-xpu20253-x86_64-linux/_relu_xpu_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu129-x86_64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-aarch64-linux/_relu_cpu_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-aarch64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-aarch64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-aarch64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cpu-aarch64-linux/_relu_cpu_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu126-aarch64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu128-aarch64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu130-aarch64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu129-aarch64-linux/_relu_cuda_a1c8862.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-x86_64-linux/_relu_cpu_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-x86_64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-x86_64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-x86_64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm70-x86_64-linux/_relu_rocm_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-rocm71-x86_64-linux/_relu_rocm_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-xpu20253-x86_64-linux/_relu_xpu_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cpu-x86_64-linux/_relu_cpu_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu126-x86_64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu128-x86_64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu130-x86_64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-rocm71-x86_64-linux/_relu_rocm_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-rocm72-x86_64-linux/_relu_rocm_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-xpu20253-x86_64-linux/_relu_xpu_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu129-x86_64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cu128-x86_64-windows/_relu_cuda_e08ad6f.pyd filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cpu-aarch64-linux/_relu_cpu_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu126-aarch64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu128-aarch64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-cxx11-cu130-aarch64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cpu-aarch64-linux/_relu_cpu_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu126-aarch64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu128-aarch64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch211-cxx11-cu130-aarch64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch29-cxx11-cu129-aarch64-linux/_relu_cuda_918d6dc.abi3.so filter=lfs diff=lfs merge=lfs -text
+build/torch210-xpu20253-x86_64-windows/_relu_xpu_e08ad6f.pyd filter=lfs diff=lfs merge=lfs -text
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..148de6b63fb9b03823a48d844c09d63b2ad11cd6
--- /dev/null
+++ b/README.md
@@ -0,0 +1,4 @@
+---
+tags:
+- kernels
+---
\ No newline at end of file
diff --git a/benchmarks/benchmark.py b/benchmarks/benchmark.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c520aee6e9382a15eb176b2dab73ec5ba43cddf
--- /dev/null
+++ b/benchmarks/benchmark.py
@@ -0,0 +1,28 @@
+import torch
+import torch.nn.functional as F
+
+from kernels.benchmark import Benchmark
+
+
+class ReluBenchmark(Benchmark):
+    seed: int = 42
+
+    def setup(self):
+        self.x = torch.randn(1024, 1024, device=self.device, dtype=torch.float32)
+        self.out = torch.empty_like(self.x)
+
+    def benchmark_base(self):
+        self.out = self.kernel.relu(self.x)
+
+    def verify_base(self) -> torch.Tensor:
+        return F.relu(self.x)
+
+    def setup_large(self):
+        self.x = torch.randn(4096, 4096, device=self.device, dtype=torch.float32)
+        self.out = torch.empty_like(self.x)
+
+    def benchmark_large(self):
+        self.out = self.kernel.relu(self.x)
+
+    def verify_large(self) -> torch.Tensor:
+        return F.relu(self.x)
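Note on the benchmark above: each `benchmark_*` method has a matching `verify_*` reference, so the harness can presumably check the kernel's output against `F.relu` before timing it. For a rough standalone equivalent that does not depend on the `kernels.benchmark` harness, a minimal sketch with `torch.utils.benchmark`; the `kernel_relu` callable here is a placeholder for whichever build gets loaded, not part of this diff:

```python
import torch
import torch.nn.functional as F
from torch.utils import benchmark

device = "cuda" if torch.cuda.is_available() else "cpu"
x = torch.randn(1024, 1024, device=device, dtype=torch.float32)
kernel_relu = torch.relu  # placeholder: swap in the loaded kernel's relu

# Correctness first, mirroring verify_base...
torch.testing.assert_close(kernel_relu(x), F.relu(x))

# ...then timing, mirroring benchmark_base.
timer = benchmark.Timer(stmt="kernel_relu(x)", globals={"kernel_relu": kernel_relu, "x": x})
print(timer.timeit(100))
```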
diff --git a/build/torch-ext/relu/__init__.py b/build/torch-ext/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574
--- /dev/null
+++ b/build/torch-ext/relu/__init__.py
@@ -0,0 +1,12 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
\ No newline at end of file
diff --git a/build/torch-ext/torch_binding.cpp b/build/torch-ext/torch_binding.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..8b50483a79595f824737ec18acb7e55e284e3f3b
--- /dev/null
+++ b/build/torch-ext/torch_binding.cpp
@@ -0,0 +1,17 @@
+#include <torch/library.h>
+
+#include "registration.h"
+#include "torch_binding.h"
+
+TORCH_LIBRARY_EXPAND(TORCH_EXTENSION_NAME, ops) {
+  ops.def("relu(Tensor! out, Tensor input) -> ()");
+#if defined(CUDA_KERNEL) || defined(ROCM_KERNEL)
+  ops.impl("relu", torch::kCUDA, &relu);
+#elif defined(METAL_KERNEL)
+  ops.impl("relu", torch::kMPS, relu);
+#elif defined(XPU_KERNEL)
+  ops.impl("relu", torch::kXPU, &relu);
+#endif
+}
+
+REGISTER_EXTENSION(TORCH_EXTENSION_NAME)
\ No newline at end of file
diff --git a/build/torch-ext/torch_binding.h b/build/torch-ext/torch_binding.h
new file mode 100644
index 0000000000000000000000000000000000000000..3bcf290482ad4adcac67efa739b2ee100baa7a10
--- /dev/null
+++ b/build/torch-ext/torch_binding.h
@@ -0,0 +1,5 @@
+#pragma once
+
+#include <torch/torch.h>
+
+void relu(torch::Tensor &out, torch::Tensor const &input);
\ No newline at end of file
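The schema registered above, `relu(Tensor! out, Tensor input) -> ()`, marks `out` as mutated in place and returns nothing, which is why every Python wrapper in this diff allocates `out` itself and returns it. A minimal sketch of calling the raw op through `torch.ops`; the `_relu_cpu_6261c06` namespace is taken from the darwin `_ops.py` below, and the import path is an assumption (any of the build directories, made importable as a package named `relu`, would load the shared object and register the op):

```python
import torch

# Assumption: build/torch210-cpu-aarch64-darwin is importable as "relu";
# importing it loads the extension and registers the op with the dispatcher.
import relu  # noqa: F401

x = torch.randn(4, 4)
out = torch.empty_like(x)
torch.ops._relu_cpu_6261c06.relu(out, x)  # relu(Tensor! out, Tensor input) -> ()
assert torch.equal(out, torch.relu(x))
```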
diff --git a/build/torch210-cpu-aarch64-darwin/__init__.py b/build/torch210-cpu-aarch64-darwin/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cpu-aarch64-darwin/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cpu-aarch64-darwin/_ops.py b/build/torch210-cpu-aarch64-darwin/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..ff8b9666569cab35ae0a56dc9d3363e5970dfe59
--- /dev/null
+++ b/build/torch210-cpu-aarch64-darwin/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cpu_6261c06
+ops = torch.ops._relu_cpu_6261c06
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cpu_6261c06::{op_name}"
diff --git a/build/torch210-cpu-aarch64-darwin/_relu_cpu_6261c06.abi3.so b/build/torch210-cpu-aarch64-darwin/_relu_cpu_6261c06.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..7a77a1f9b90354acafd256e9700299204581961c
Binary files /dev/null and b/build/torch210-cpu-aarch64-darwin/_relu_cpu_6261c06.abi3.so differ
diff --git a/build/torch210-cpu-aarch64-darwin/layers/__init__.py b/build/torch210-cpu-aarch64-darwin/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cpu-aarch64-darwin/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cpu-aarch64-darwin/metadata.json b/build/torch210-cpu-aarch64-darwin/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..a5381dd80836f863378b9f33a559815688de9287
--- /dev/null
+++ b/build/torch210-cpu-aarch64-darwin/metadata.json
@@ -0,0 +1,5 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": []
+}
\ No newline at end of file
diff --git a/build/torch210-cpu-aarch64-darwin/relu/__init__.py b/build/torch210-cpu-aarch64-darwin/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309
--- /dev/null
+++ b/build/torch210-cpu-aarch64-darwin/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import sys
+
+import importlib
+from pathlib import Path
+from types import ModuleType
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
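The `relu/__init__.py` shim above lets the same top-level `__init__.py` be loaded from several build directories in one process: the `sys.modules` key is derived from the file's absolute path, so two variants never collide. The naming trick in isolation (the paths below are hypothetical):

```python
import ctypes
from pathlib import Path


def module_key(p: Path) -> str:
    # Same scheme as _import_from_path: hex-encoded unsigned hash of the path.
    return "{:x}".format(ctypes.c_size_t(hash(p.absolute())).value)


a = module_key(Path("build/torch29-cxx11-cpu-x86_64-linux/__init__.py"))
b = module_key(Path("build/torch210-cxx11-cpu-x86_64-linux/__init__.py"))
assert a != b  # each build directory gets its own sys.modules entry
```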
+ """ + return f"_relu_cuda_fd389c8::{op_name}" diff --git a/build/torch210-cu128-x86_64-windows/_relu_cuda_fd389c8.pyd b/build/torch210-cu128-x86_64-windows/_relu_cuda_fd389c8.pyd new file mode 100644 index 0000000000000000000000000000000000000000..ab56f954ba3cb44903ba4b416298c3aafd733c2e --- /dev/null +++ b/build/torch210-cu128-x86_64-windows/_relu_cuda_fd389c8.pyd @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:af9e521d3ea13ea0f2fb410c1e458f0753ea7b277e0ccfdf6102ace1f8e2696b +size 203264 diff --git a/build/torch210-cu128-x86_64-windows/layers/__init__.py b/build/torch210-cu128-x86_64-windows/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7fa87bf1eaaaafd7dbbc0f013dc6001d0534a40d --- /dev/null +++ b/build/torch210-cu128-x86_64-windows/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch210-cu128-x86_64-windows/metadata.json b/build/torch210-cu128-x86_64-windows/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..7ddf5ff75a35f315c1398fff49390f17fd4e0ee9 --- /dev/null +++ b/build/torch210-cu128-x86_64-windows/metadata.json @@ -0,0 +1,21 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "10.0", + "10.1", + "12.0+PTX", + "7.0", + "7.2", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0" + ] + } +} diff --git a/build/torch210-cu128-x86_64-windows/relu/__init__.py b/build/torch210-cu128-x86_64-windows/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bc434ef44e63409acb52a8f3fff54a4adc46ed6a --- /dev/null +++ b/build/torch210-cu128-x86_64-windows/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch210-cxx11-cpu-aarch64-linux/__init__.py b/build/torch210-cxx11-cpu-aarch64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch210-cxx11-cpu-aarch64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . 
diff --git a/build/torch210-cu128-x86_64-windows/metadata.json b/build/torch210-cu128-x86_64-windows/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..7ddf5ff75a35f315c1398fff49390f17fd4e0ee9
--- /dev/null
+++ b/build/torch210-cu128-x86_64-windows/metadata.json
@@ -0,0 +1,21 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda",
+    "archs": [
+      "10.0",
+      "10.1",
+      "12.0+PTX",
+      "7.0",
+      "7.2",
+      "7.5",
+      "8.0",
+      "8.6",
+      "8.7",
+      "8.9",
+      "9.0"
+    ]
+  }
+}
diff --git a/build/torch210-cu128-x86_64-windows/relu/__init__.py b/build/torch210-cu128-x86_64-windows/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc434ef44e63409acb52a8f3fff54a4adc46ed6a
--- /dev/null
+++ b/build/torch210-cu128-x86_64-windows/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import sys
+
+import importlib
+from pathlib import Path
+from types import ModuleType
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-cxx11-cpu-aarch64-linux/__init__.py b/build/torch210-cxx11-cpu-aarch64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cxx11-cpu-aarch64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cxx11-cpu-aarch64-linux/_ops.py b/build/torch210-cxx11-cpu-aarch64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a6c5de46c914d40bf499c1cdae64f5e4c9a9b1f
--- /dev/null
+++ b/build/torch210-cxx11-cpu-aarch64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cpu_918d6dc
+ops = torch.ops._relu_cpu_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cpu_918d6dc::{op_name}"
diff --git a/build/torch210-cxx11-cpu-aarch64-linux/_relu_cpu_918d6dc.abi3.so b/build/torch210-cxx11-cpu-aarch64-linux/_relu_cpu_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..814b4ae717a24f7a6f45f1a569fc61cde1f1848b
--- /dev/null
+++ b/build/torch210-cxx11-cpu-aarch64-linux/_relu_cpu_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:910c8d2d40ffb2bbf7b4bade6dae4b7e6160668cec32379d6a33feea7b2a76ab
+size 2025352
diff --git a/build/torch210-cxx11-cpu-aarch64-linux/layers/__init__.py b/build/torch210-cxx11-cpu-aarch64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cxx11-cpu-aarch64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cxx11-cpu-aarch64-linux/metadata.json b/build/torch210-cxx11-cpu-aarch64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..eb22148b3f551be150f7824a5684c19bbc40ae0e
--- /dev/null
+++ b/build/torch210-cxx11-cpu-aarch64-linux/metadata.json
@@ -0,0 +1,8 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cpu"
+  }
+}
\ No newline at end of file
diff --git a/build/torch210-cxx11-cpu-aarch64-linux/relu/__init__.py b/build/torch210-cxx11-cpu-aarch64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch210-cxx11-cpu-aarch64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch210-cxx11-cpu-x86_64-linux/__init__.py b/build/torch210-cxx11-cpu-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch210-cxx11-cpu-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch210-cxx11-cpu-x86_64-linux/_ops.py b/build/torch210-cxx11-cpu-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..9a6c5de46c914d40bf499c1cdae64f5e4c9a9b1f --- /dev/null +++ b/build/torch210-cxx11-cpu-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cpu_918d6dc +ops = torch.ops._relu_cpu_918d6dc + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_cpu_918d6dc::{op_name}" diff --git a/build/torch210-cxx11-cpu-x86_64-linux/_relu_cpu_918d6dc.abi3.so b/build/torch210-cxx11-cpu-x86_64-linux/_relu_cpu_918d6dc.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..ea7f51a03467cf1cee72a31e4e79a081f467c540 --- /dev/null +++ b/build/torch210-cxx11-cpu-x86_64-linux/_relu_cpu_918d6dc.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c99aa1351f9dee4fb48914cd4f1ef7af3ca33be603253ba798f02e78152f7614 +size 1778080 diff --git a/build/torch210-cxx11-cpu-x86_64-linux/layers/__init__.py b/build/torch210-cxx11-cpu-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch210-cxx11-cpu-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch210-cxx11-cpu-x86_64-linux/metadata.json b/build/torch210-cxx11-cpu-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..eb22148b3f551be150f7824a5684c19bbc40ae0e --- /dev/null +++ b/build/torch210-cxx11-cpu-x86_64-linux/metadata.json @@ -0,0 +1,8 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cpu" + } +} \ No newline at end of file diff --git a/build/torch210-cxx11-cpu-x86_64-linux/relu/__init__.py b/build/torch210-cxx11-cpu-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23 --- /dev/null +++ b/build/torch210-cxx11-cpu-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import importlib.util 
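Aside, before the next variant: every build directory in this patch exposes the same Python surface. A minimal usage sketch, assuming the kernel package is importable as `relu` (the import name is an assumption, not something this patch pins down):

import torch
import relu  # assumed import name for one of the build variants above

x = torch.randn(16)
y = relu.relu(x)            # allocates the output via torch.empty_like
out = torch.empty_like(x)
relu.relu(x, out=out)       # writes into a caller-provided buffer instead
layer = relu.layers.ReLU()  # nn.Module wrapper over the same compiled op
assert torch.equal(layer(x), y)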
diff --git a/build/torch210-cxx11-cpu-x86_64-linux/relu/__init__.py b/build/torch210-cxx11-cpu-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch210-cxx11-cpu-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-cxx11-cu126-aarch64-linux/__init__.py b/build/torch210-cxx11-cu126-aarch64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cxx11-cu126-aarch64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cxx11-cu126-aarch64-linux/_ops.py b/build/torch210-cxx11-cu126-aarch64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42
--- /dev/null
+++ b/build/torch210-cxx11-cu126-aarch64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_918d6dc
+ops = torch.ops._relu_cuda_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cuda_918d6dc::{op_name}"
diff --git a/build/torch210-cxx11-cu126-aarch64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch210-cxx11-cu126-aarch64-linux/_relu_cuda_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..e72bd4675c1b1bda0424af5c34ecaffb75f9f376
--- /dev/null
+++ b/build/torch210-cxx11-cu126-aarch64-linux/_relu_cuda_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:72fb0ad68cd5fc3d5a911caf26118921fe87457938a01c174bca796036fca14d
+size 2103912
diff --git a/build/torch210-cxx11-cu126-aarch64-linux/layers/__init__.py b/build/torch210-cxx11-cu126-aarch64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cxx11-cu126-aarch64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cxx11-cu126-aarch64-linux/metadata.json b/build/torch210-cxx11-cu126-aarch64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5902b55ab0b2b561c0cf97567c9806c60839c7f
--- /dev/null
+++ b/build/torch210-cxx11-cu126-aarch64-linux/metadata.json
@@ -0,0 +1,18 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda",
+    "archs": [
+      "7.0",
+      "7.2",
+      "7.5",
+      "8.0",
+      "8.6",
+      "8.7",
+      "8.9",
+      "9.0+PTX"
+    ]
+  }
+}
diff --git a/build/torch210-cxx11-cu126-aarch64-linux/relu/__init__.py b/build/torch210-cxx11-cu126-aarch64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch210-cxx11-cu126-aarch64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-cxx11-cu126-x86_64-linux/__init__.py b/build/torch210-cxx11-cu126-x86_64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cxx11-cu126-x86_64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cxx11-cu126-x86_64-linux/_ops.py b/build/torch210-cxx11-cu126-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42
--- /dev/null
+++ b/build/torch210-cxx11-cu126-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_918d6dc
+ops = torch.ops._relu_cuda_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cuda_918d6dc::{op_name}"
diff --git a/build/torch210-cxx11-cu126-x86_64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch210-cxx11-cu126-x86_64-linux/_relu_cuda_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..e2257771200c26f524163d8edbc014e9c8905b58
--- /dev/null
+++ b/build/torch210-cxx11-cu126-x86_64-linux/_relu_cuda_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9edb80faf33bee9de68a7b74077c8650286a0774914127d7115d1a90092e6803
+size 2019880
diff --git a/build/torch210-cxx11-cu126-x86_64-linux/layers/__init__.py b/build/torch210-cxx11-cu126-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cxx11-cu126-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cxx11-cu126-x86_64-linux/metadata.json b/build/torch210-cxx11-cu126-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5902b55ab0b2b561c0cf97567c9806c60839c7f
--- /dev/null
+++ b/build/torch210-cxx11-cu126-x86_64-linux/metadata.json
@@ -0,0 +1,18 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda",
+    "archs": [
+      "7.0",
+      "7.2",
+      "7.5",
+      "8.0",
+      "8.6",
+      "8.7",
+      "8.9",
+      "9.0+PTX"
+    ]
+  }
+}
diff --git a/build/torch210-cxx11-cu126-x86_64-linux/relu/__init__.py b/build/torch210-cxx11-cu126-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch210-cxx11-cu126-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-cxx11-cu128-aarch64-linux/__init__.py b/build/torch210-cxx11-cu128-aarch64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cxx11-cu128-aarch64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cxx11-cu128-aarch64-linux/_ops.py b/build/torch210-cxx11-cu128-aarch64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42
--- /dev/null
+++ b/build/torch210-cxx11-cu128-aarch64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_918d6dc
+ops = torch.ops._relu_cuda_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cuda_918d6dc::{op_name}"
diff --git a/build/torch210-cxx11-cu128-aarch64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch210-cxx11-cu128-aarch64-linux/_relu_cuda_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..af88910069e0217e241a90887e35cb458e82e66b
--- /dev/null
+++ b/build/torch210-cxx11-cu128-aarch64-linux/_relu_cuda_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ad7917e6994838799623905d6a42f6090ee6b827d4370673e7fdebb84caf8667
+size 2235240
diff --git a/build/torch210-cxx11-cu128-aarch64-linux/layers/__init__.py b/build/torch210-cxx11-cu128-aarch64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cxx11-cu128-aarch64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cxx11-cu128-aarch64-linux/metadata.json b/build/torch210-cxx11-cu128-aarch64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..8b796af185fbbd8594fcd846949aa5fadc0ccdda
--- /dev/null
+++ b/build/torch210-cxx11-cu128-aarch64-linux/metadata.json
@@ -0,0 +1,21 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda",
+    "archs": [
+      "10.0",
+      "10.1",
+      "12.0+PTX",
+      "7.0",
+      "7.2",
+      "7.5",
+      "8.0",
+      "8.6",
+      "8.7",
+      "8.9",
+      "9.0"
+    ]
+  }
+}
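Aside: the .so/.pyd entries in this patch are Git LFS pointer files rather than the binaries themselves. A small sketch of the three-line pointer format shown above; the parser is illustrative, not part of the repo:

def parse_lfs_pointer(text: str) -> dict:
    # Keys are exactly the ones visible above:
    # "version" <spec URL>, "oid" sha256:<digest>, "size" <bytes>.
    return dict(line.split(" ", 1) for line in text.strip().splitlines())

pointer = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:ad7917e6994838799623905d6a42f6090ee6b827d4370673e7fdebb84caf8667\n"
    "size 2235240\n"
)
assert pointer["size"] == "2235240"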
diff --git a/build/torch210-cxx11-cu128-aarch64-linux/relu/__init__.py b/build/torch210-cxx11-cu128-aarch64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch210-cxx11-cu128-aarch64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-cxx11-cu128-x86_64-linux/__init__.py b/build/torch210-cxx11-cu128-x86_64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cxx11-cu128-x86_64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cxx11-cu128-x86_64-linux/_ops.py b/build/torch210-cxx11-cu128-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42
--- /dev/null
+++ b/build/torch210-cxx11-cu128-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_918d6dc
+ops = torch.ops._relu_cuda_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cuda_918d6dc::{op_name}"
diff --git a/build/torch210-cxx11-cu128-x86_64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch210-cxx11-cu128-x86_64-linux/_relu_cuda_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..ab9b8d43420a252fea39d9e20ae7805776198f81
--- /dev/null
+++ b/build/torch210-cxx11-cu128-x86_64-linux/_relu_cuda_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8e62a7f7de6098b0a27097ca7999ef0d5c40a36a72cf7b124756ad7ce6df106c
+size 2111840
diff --git a/build/torch210-cxx11-cu128-x86_64-linux/layers/__init__.py b/build/torch210-cxx11-cu128-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cxx11-cu128-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cxx11-cu128-x86_64-linux/metadata.json b/build/torch210-cxx11-cu128-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..8b796af185fbbd8594fcd846949aa5fadc0ccdda
--- /dev/null
+++ b/build/torch210-cxx11-cu128-x86_64-linux/metadata.json
@@ -0,0 +1,21 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda",
+    "archs": [
+      "10.0",
+      "10.1",
+      "12.0+PTX",
+      "7.0",
+      "7.2",
+      "7.5",
+      "8.0",
+      "8.6",
+      "8.7",
+      "8.9",
+      "9.0"
+    ]
+  }
+}
diff --git a/build/torch210-cxx11-cu128-x86_64-linux/relu/__init__.py b/build/torch210-cxx11-cu128-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch210-cxx11-cu128-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-cxx11-cu130-aarch64-linux/__init__.py b/build/torch210-cxx11-cu130-aarch64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cxx11-cu130-aarch64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cxx11-cu130-aarch64-linux/_ops.py b/build/torch210-cxx11-cu130-aarch64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42
--- /dev/null
+++ b/build/torch210-cxx11-cu130-aarch64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_918d6dc
+ops = torch.ops._relu_cuda_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cuda_918d6dc::{op_name}"
diff --git a/build/torch210-cxx11-cu130-aarch64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch210-cxx11-cu130-aarch64-linux/_relu_cuda_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..39472d72b9eac58b0de9e0f4267560729dec8e9d
--- /dev/null
+++ b/build/torch210-cxx11-cu130-aarch64-linux/_relu_cuda_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e48fc9ad9674b95f45cd172fbd21f2b2616d480fd8ad73d5ae39ff5085642a7f
+size 2236952
diff --git a/build/torch210-cxx11-cu130-aarch64-linux/layers/__init__.py b/build/torch210-cxx11-cu130-aarch64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cxx11-cu130-aarch64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cxx11-cu130-aarch64-linux/metadata.json b/build/torch210-cxx11-cu130-aarch64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..66651b7d3f95ac9e5ce5fc2a641b6f0f50788f87
--- /dev/null
+++ b/build/torch210-cxx11-cu130-aarch64-linux/metadata.json
@@ -0,0 +1,19 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda",
+    "archs": [
+      "10.0",
+      "11.0",
+      "12.0+PTX",
+      "7.5",
+      "8.0",
+      "8.6",
+      "8.7",
+      "8.9",
+      "9.0"
+    ]
+  }
+}
diff --git a/build/torch210-cxx11-cu130-aarch64-linux/relu/__init__.py b/build/torch210-cxx11-cu130-aarch64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch210-cxx11-cu130-aarch64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-cxx11-cu130-x86_64-linux/__init__.py b/build/torch210-cxx11-cu130-x86_64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cxx11-cu130-x86_64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cxx11-cu130-x86_64-linux/_ops.py b/build/torch210-cxx11-cu130-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42
--- /dev/null
+++ b/build/torch210-cxx11-cu130-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_918d6dc
+ops = torch.ops._relu_cuda_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cuda_918d6dc::{op_name}"
diff --git a/build/torch210-cxx11-cu130-x86_64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch210-cxx11-cu130-x86_64-linux/_relu_cuda_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..ac57c230fe76b8d3fa5edf0479c0300172f8cde4
--- /dev/null
+++ b/build/torch210-cxx11-cu130-x86_64-linux/_relu_cuda_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e0d85a0c1e87da8a34349d22f09413cf2d2bb99e466c88084a4ade01bd1c408b
+size 2129656
diff --git a/build/torch210-cxx11-cu130-x86_64-linux/layers/__init__.py b/build/torch210-cxx11-cu130-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cxx11-cu130-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cxx11-cu130-x86_64-linux/metadata.json b/build/torch210-cxx11-cu130-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..66651b7d3f95ac9e5ce5fc2a641b6f0f50788f87
--- /dev/null
+++ b/build/torch210-cxx11-cu130-x86_64-linux/metadata.json
@@ -0,0 +1,19 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda",
+    "archs": [
+      "10.0",
+      "11.0",
+      "12.0+PTX",
+      "7.5",
+      "8.0",
+      "8.6",
+      "8.7",
+      "8.9",
+      "9.0"
+    ]
+  }
+}
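Aside: the "archs" arrays in the CUDA metadata.json files list compute capabilities, and a "+PTX" suffix marks the entry that also embeds PTX for forward compatibility. A sketch of how a build script conventionally turns such a list into nvcc flags; this mapping is an assumption about consumers of the field, not something the patch defines:

archs = ["7.5", "8.0", "8.6", "8.7", "8.9", "9.0", "10.0", "11.0", "12.0+PTX"]
flags = []
for arch in archs:
    has_ptx = arch.endswith("+PTX")
    sm = arch.removesuffix("+PTX").replace(".", "")
    flags.append(f"-gencode=arch=compute_{sm},code=sm_{sm}")
    if has_ptx:
        # Also embed PTX so newer GPUs can JIT-compile the kernel.
        flags.append(f"-gencode=arch=compute_{sm},code=compute_{sm}")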
diff --git a/build/torch210-cxx11-cu130-x86_64-linux/relu/__init__.py b/build/torch210-cxx11-cu130-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch210-cxx11-cu130-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-cxx11-rocm70-x86_64-linux/__init__.py b/build/torch210-cxx11-rocm70-x86_64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cxx11-rocm70-x86_64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cxx11-rocm70-x86_64-linux/_ops.py b/build/torch210-cxx11-rocm70-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..a906fe2a048baf08667f38cf4194efa2c79ee074
--- /dev/null
+++ b/build/torch210-cxx11-rocm70-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_rocm_918d6dc
+ops = torch.ops._relu_rocm_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_rocm_918d6dc::{op_name}"
diff --git a/build/torch210-cxx11-rocm70-x86_64-linux/_relu_rocm_918d6dc.abi3.so b/build/torch210-cxx11-rocm70-x86_64-linux/_relu_rocm_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..253a54c6c297f5266863823821c8a7d16e1cb067
--- /dev/null
+++ b/build/torch210-cxx11-rocm70-x86_64-linux/_relu_rocm_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d3660541a47032a299999775125ba5b9cd9c35f73df5530dcb9a1a6c62710bf1
+size 1990368
diff --git a/build/torch210-cxx11-rocm70-x86_64-linux/layers/__init__.py b/build/torch210-cxx11-rocm70-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cxx11-rocm70-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cxx11-rocm70-x86_64-linux/metadata.json b/build/torch210-cxx11-rocm70-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..3e8d811f1dc42febd33121b2627f809447622baf
--- /dev/null
+++ b/build/torch210-cxx11-rocm70-x86_64-linux/metadata.json
@@ -0,0 +1,17 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "rocm",
+    "archs": [
+      "gfx1030",
+      "gfx1100",
+      "gfx1101",
+      "gfx906",
+      "gfx908",
+      "gfx90a",
+      "gfx942"
+    ]
+  }
+}
diff --git a/build/torch210-cxx11-rocm70-x86_64-linux/relu/__init__.py b/build/torch210-cxx11-rocm70-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch210-cxx11-rocm70-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-cxx11-rocm71-x86_64-linux/__init__.py b/build/torch210-cxx11-rocm71-x86_64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cxx11-rocm71-x86_64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cxx11-rocm71-x86_64-linux/_ops.py b/build/torch210-cxx11-rocm71-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..a906fe2a048baf08667f38cf4194efa2c79ee074
--- /dev/null
+++ b/build/torch210-cxx11-rocm71-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_rocm_918d6dc
+ops = torch.ops._relu_rocm_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_rocm_918d6dc::{op_name}"
diff --git a/build/torch210-cxx11-rocm71-x86_64-linux/_relu_rocm_918d6dc.abi3.so b/build/torch210-cxx11-rocm71-x86_64-linux/_relu_rocm_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..4c2b0579589f9d24155a4d479aaa0ede013d9319
--- /dev/null
+++ b/build/torch210-cxx11-rocm71-x86_64-linux/_relu_rocm_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3c4b96ff05266bb1a99dfeb3379d5a141cd291b623d86861c8cfa4e154af76cf
+size 1990272
diff --git a/build/torch210-cxx11-rocm71-x86_64-linux/layers/__init__.py b/build/torch210-cxx11-rocm71-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cxx11-rocm71-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cxx11-rocm71-x86_64-linux/metadata.json b/build/torch210-cxx11-rocm71-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..3e8d811f1dc42febd33121b2627f809447622baf
--- /dev/null
+++ b/build/torch210-cxx11-rocm71-x86_64-linux/metadata.json
@@ -0,0 +1,17 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "rocm",
+    "archs": [
+      "gfx1030",
+      "gfx1100",
+      "gfx1101",
+      "gfx906",
+      "gfx908",
+      "gfx90a",
+      "gfx942"
+    ]
+  }
+}
diff --git a/build/torch210-cxx11-rocm71-x86_64-linux/relu/__init__.py b/build/torch210-cxx11-rocm71-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch210-cxx11-rocm71-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-cxx11-xpu20253-x86_64-linux/__init__.py b/build/torch210-cxx11-xpu20253-x86_64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-cxx11-xpu20253-x86_64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-cxx11-xpu20253-x86_64-linux/_ops.py b/build/torch210-cxx11-xpu20253-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d305ef2df02748a1690210564c66551b53b68eb
--- /dev/null
+++ b/build/torch210-cxx11-xpu20253-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_xpu_918d6dc
+ops = torch.ops._relu_xpu_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_xpu_918d6dc::{op_name}"
diff --git a/build/torch210-cxx11-xpu20253-x86_64-linux/_relu_xpu_918d6dc.abi3.so b/build/torch210-cxx11-xpu20253-x86_64-linux/_relu_xpu_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..63b421c3543ef99654fed2bb51df2960b68c502d
--- /dev/null
+++ b/build/torch210-cxx11-xpu20253-x86_64-linux/_relu_xpu_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be4a9c18503d5c52cebab4fdcc1707cacfb2d1e820d935d3afe33fec2401c175
+size 2050448
diff --git a/build/torch210-cxx11-xpu20253-x86_64-linux/layers/__init__.py b/build/torch210-cxx11-xpu20253-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-cxx11-xpu20253-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-cxx11-xpu20253-x86_64-linux/metadata.json b/build/torch210-cxx11-xpu20253-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..b911d0a2549a35a1c65ab7e77d32e5aac23cd6ac
--- /dev/null
+++ b/build/torch210-cxx11-xpu20253-x86_64-linux/metadata.json
@@ -0,0 +1,8 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "xpu"
+  }
+}
\ No newline at end of file
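Aside: the relu/__init__.py shim repeated in every variant exists so that several build directories can be imported side by side. `_import_from_path` keys sys.modules on a hash of the absolute file path, and loading an __init__.py via spec_from_file_location yields a package whose relative `._ops` import still resolves. A sketch using `_import_from_path` as defined above; the paths are illustrative:

from pathlib import Path

cpu_pkg = _import_from_path(Path("build/torch210-cxx11-cpu-x86_64-linux/__init__.py"))
xpu_pkg = _import_from_path(Path("build/torch210-cxx11-xpu20253-x86_64-linux/__init__.py"))
# Each lands in sys.modules under its own hash-derived name, so loading
# the second variant does not displace the first.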
+++ b/build/torch210-cxx11-xpu20253-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-metal-aarch64-darwin/__init__.py b/build/torch210-metal-aarch64-darwin/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch210-metal-aarch64-darwin/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-metal-aarch64-darwin/_ops.py b/build/torch210-metal-aarch64-darwin/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..7bd33b28b0ade8900ea62cf12a7e08d624681ece
--- /dev/null
+++ b/build/torch210-metal-aarch64-darwin/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_metal_6261c06
+ops = torch.ops._relu_metal_6261c06
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_metal_6261c06::{op_name}"
diff --git a/build/torch210-metal-aarch64-darwin/_relu_metal_6261c06.abi3.so b/build/torch210-metal-aarch64-darwin/_relu_metal_6261c06.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..158b35e2dd02e4455692e5461df8d34358bde529
--- /dev/null
+++ b/build/torch210-metal-aarch64-darwin/_relu_metal_6261c06.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8dfb5099b945261311ae3e457b78b0acd63463c84f73246c2492d19698cfaaae
+size 100680
diff --git a/build/torch210-metal-aarch64-darwin/layers/__init__.py b/build/torch210-metal-aarch64-darwin/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch210-metal-aarch64-darwin/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-metal-aarch64-darwin/metadata.json b/build/torch210-metal-aarch64-darwin/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..a5381dd80836f863378b9f33a559815688de9287
--- /dev/null
+++ b/build/torch210-metal-aarch64-darwin/metadata.json
@@ -0,0 +1,5 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": []
+}
\ No newline at end of file
diff --git a/build/torch210-metal-aarch64-darwin/relu/__init__.py b/build/torch210-metal-aarch64-darwin/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309
--- /dev/null
+++ b/build/torch210-metal-aarch64-darwin/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch210-xpu20253-x86_64-windows/__init__.py b/build/torch210-xpu20253-x86_64-windows/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a658ca5bffa2a7dcb3b3656cbde59ed6d80005d1
--- /dev/null
+++ b/build/torch210-xpu20253-x86_64-windows/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch210-xpu20253-x86_64-windows/_ops.py b/build/torch210-xpu20253-x86_64-windows/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..c38aa236fd4dc0e7e8e33bd3e47a2fc7ea04276d
--- /dev/null
+++ b/build/torch210-xpu20253-x86_64-windows/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_xpu_e08ad6f
+ops = torch.ops._relu_xpu_e08ad6f
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_xpu_e08ad6f::{op_name}"
diff --git a/build/torch210-xpu20253-x86_64-windows/_relu_xpu_e08ad6f.pyd b/build/torch210-xpu20253-x86_64-windows/_relu_xpu_e08ad6f.pyd
new file mode 100644
index 0000000000000000000000000000000000000000..30eac4dd043171c92362a0ac2eb2f1955adf5361
--- /dev/null
+++ b/build/torch210-xpu20253-x86_64-windows/_relu_xpu_e08ad6f.pyd
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fd6858db3b3c85d92eb43c10b7d23eb9e4873970abac9e6cf2f6591cd47db53e
+size 166400
diff --git a/build/torch210-xpu20253-x86_64-windows/layers/__init__.py b/build/torch210-xpu20253-x86_64-windows/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fa87bf1eaaaafd7dbbc0f013dc6001d0534a40d
--- /dev/null
+++ b/build/torch210-xpu20253-x86_64-windows/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch210-xpu20253-x86_64-windows/metadata.json b/build/torch210-xpu20253-x86_64-windows/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..b911d0a2549a35a1c65ab7e77d32e5aac23cd6ac
--- /dev/null
+++ b/build/torch210-xpu20253-x86_64-windows/metadata.json
@@ -0,0 +1,8 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "xpu"
+  }
+}
\ No newline at end of file
diff --git a/build/torch210-xpu20253-x86_64-windows/relu/__init__.py b/build/torch210-xpu20253-x86_64-windows/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d259f9535943d5617b4906d784ecd93a9d6dff82
--- /dev/null
+++ b/build/torch210-xpu20253-x86_64-windows/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch211-cu128-x86_64-windows/__init__.py b/build/torch211-cu128-x86_64-windows/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a658ca5bffa2a7dcb3b3656cbde59ed6d80005d1
--- /dev/null
+++ b/build/torch211-cu128-x86_64-windows/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch211-cu128-x86_64-windows/_ops.py b/build/torch211-cu128-x86_64-windows/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..68aefa41226d72d11ab68b9736c04cabcbf24c7d
--- /dev/null
+++ b/build/torch211-cu128-x86_64-windows/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_e08ad6f
+ops = torch.ops._relu_cuda_e08ad6f
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cuda_e08ad6f::{op_name}"
diff --git a/build/torch211-cu128-x86_64-windows/_relu_cuda_e08ad6f.pyd b/build/torch211-cu128-x86_64-windows/_relu_cuda_e08ad6f.pyd
new file mode 100644
index 0000000000000000000000000000000000000000..625fe61b325b492880128f3cbe0b1db1cab85049
--- /dev/null
+++ b/build/torch211-cu128-x86_64-windows/_relu_cuda_e08ad6f.pyd
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:79a4f2c69a3ebcab6adfd2a05356c74cde20c1201982bd12f76ca5346f09fc33
+size 203776
diff --git a/build/torch211-cu128-x86_64-windows/layers/__init__.py b/build/torch211-cu128-x86_64-windows/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fa87bf1eaaaafd7dbbc0f013dc6001d0534a40d
--- /dev/null
+++ b/build/torch211-cu128-x86_64-windows/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch211-cu128-x86_64-windows/metadata.json b/build/torch211-cu128-x86_64-windows/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..7ddf5ff75a35f315c1398fff49390f17fd4e0ee9
--- /dev/null
+++ b/build/torch211-cu128-x86_64-windows/metadata.json
@@ -0,0 +1,21 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda",
+    "archs": [
+      "10.0",
+      "10.1",
+      "12.0+PTX",
+      "7.0",
+      "7.2",
+      "7.5",
+      "8.0",
+      "8.6",
+      "8.7",
+      "8.9",
+      "9.0"
+    ]
+  }
+}
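Aside: the build directory names encode torch version, C++ ABI, backend, and platform (e.g. torch211-cu128-x86_64-windows above, with no cxx11 segment on Windows). A hypothetical helper showing how a loader might derive the directory for the running interpreter; the naming scheme is inferred from the paths in this patch, and the helper is not part of it:

import platform
import torch

def variant_dir() -> str:
    major, minor = torch.__version__.split(".")[:2]
    if torch.version.cuda is not None:
        backend = "cu" + torch.version.cuda.replace(".", "")  # "12.8" -> "cu128"
    elif getattr(torch.version, "hip", None):
        backend = "rocm" + "".join(torch.version.hip.split(".")[:2])  # "7.1..." -> "rocm71"
    else:
        backend = "cpu"
    os_name = platform.system().lower()  # "linux", "windows", "darwin"
    abi = "cxx11-" if os_name == "linux" else ""
    return f"torch{major}{minor}-{abi}{backend}-{platform.machine()}-{os_name}"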
-0,0 +1,26 @@ +import ctypes +import importlib.util +import sys +from pathlib import Path +from types import ModuleType + + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch211-cxx11-cpu-aarch64-linux/__init__.py b/build/torch211-cxx11-cpu-aarch64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch211-cxx11-cpu-aarch64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch211-cxx11-cpu-aarch64-linux/_ops.py b/build/torch211-cxx11-cpu-aarch64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..9a6c5de46c914d40bf499c1cdae64f5e4c9a9b1f --- /dev/null +++ b/build/torch211-cxx11-cpu-aarch64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cpu_918d6dc +ops = torch.ops._relu_cpu_918d6dc + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. 
+ """ + return f"_relu_cpu_918d6dc::{op_name}" diff --git a/build/torch211-cxx11-cpu-aarch64-linux/_relu_cpu_918d6dc.abi3.so b/build/torch211-cxx11-cpu-aarch64-linux/_relu_cpu_918d6dc.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..ad1f9c121e6159a1089dc8ddab2584345cb1cad5 --- /dev/null +++ b/build/torch211-cxx11-cpu-aarch64-linux/_relu_cpu_918d6dc.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:829a739dc9c8277d60587858338178c9a23795b69c461a98fff2e16cdb9a6d65 +size 2025352 diff --git a/build/torch211-cxx11-cpu-aarch64-linux/layers/__init__.py b/build/torch211-cxx11-cpu-aarch64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch211-cxx11-cpu-aarch64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch211-cxx11-cpu-aarch64-linux/metadata.json b/build/torch211-cxx11-cpu-aarch64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..eb22148b3f551be150f7824a5684c19bbc40ae0e --- /dev/null +++ b/build/torch211-cxx11-cpu-aarch64-linux/metadata.json @@ -0,0 +1,8 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cpu" + } +} \ No newline at end of file diff --git a/build/torch211-cxx11-cpu-aarch64-linux/relu/__init__.py b/build/torch211-cxx11-cpu-aarch64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23 --- /dev/null +++ b/build/torch211-cxx11-cpu-aarch64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import importlib.util +import sys +from pathlib import Path +from types import ModuleType + + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch211-cxx11-cpu-x86_64-linux/__init__.py b/build/torch211-cxx11-cpu-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch211-cxx11-cpu-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . 
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch211-cxx11-cpu-x86_64-linux/_ops.py b/build/torch211-cxx11-cpu-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a6c5de46c914d40bf499c1cdae64f5e4c9a9b1f
--- /dev/null
+++ b/build/torch211-cxx11-cpu-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cpu_918d6dc
+ops = torch.ops._relu_cpu_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cpu_918d6dc::{op_name}"
diff --git a/build/torch211-cxx11-cpu-x86_64-linux/_relu_cpu_918d6dc.abi3.so b/build/torch211-cxx11-cpu-x86_64-linux/_relu_cpu_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..75ddd43c010a7568aea1fbfd3c649b2f11c2bd49
--- /dev/null
+++ b/build/torch211-cxx11-cpu-x86_64-linux/_relu_cpu_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3504d928444d6fd3adb55acd3095a022b8d8cbcf7d126653248225b622c149b3
+size 1778080
diff --git a/build/torch211-cxx11-cpu-x86_64-linux/layers/__init__.py b/build/torch211-cxx11-cpu-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch211-cxx11-cpu-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch211-cxx11-cpu-x86_64-linux/metadata.json b/build/torch211-cxx11-cpu-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..eb22148b3f551be150f7824a5684c19bbc40ae0e
--- /dev/null
+++ b/build/torch211-cxx11-cpu-x86_64-linux/metadata.json
@@ -0,0 +1,8 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cpu"
+  }
+}
\ No newline at end of file
diff --git a/build/torch211-cxx11-cpu-x86_64-linux/relu/__init__.py b/build/torch211-cxx11-cpu-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch211-cxx11-cpu-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch211-cxx11-cu126-aarch64-linux/__init__.py b/build/torch211-cxx11-cu126-aarch64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch211-cxx11-cu126-aarch64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch211-cxx11-cu126-aarch64-linux/_ops.py b/build/torch211-cxx11-cu126-aarch64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42 --- /dev/null +++ b/build/torch211-cxx11-cu126-aarch64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cuda_918d6dc +ops = torch.ops._relu_cuda_918d6dc + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_cuda_918d6dc::{op_name}" diff --git a/build/torch211-cxx11-cu126-aarch64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch211-cxx11-cu126-aarch64-linux/_relu_cuda_918d6dc.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..bb6db1467406adf609fab58ceeb7c6821543a99c --- /dev/null +++ b/build/torch211-cxx11-cu126-aarch64-linux/_relu_cuda_918d6dc.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f321f7acc5669f9f94e71f05ac9dffaee64a6a30b4647ac5d931f1856989295d +size 2100128 diff --git a/build/torch211-cxx11-cu126-aarch64-linux/layers/__init__.py b/build/torch211-cxx11-cu126-aarch64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch211-cxx11-cu126-aarch64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch211-cxx11-cu126-aarch64-linux/metadata.json b/build/torch211-cxx11-cu126-aarch64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..f5902b55ab0b2b561c0cf97567c9806c60839c7f --- /dev/null +++ b/build/torch211-cxx11-cu126-aarch64-linux/metadata.json @@ -0,0 +1,18 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "7.0", + "7.2", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0+PTX" + ] + } +} diff --git a/build/torch211-cxx11-cu126-aarch64-linux/relu/__init__.py b/build/torch211-cxx11-cu126-aarch64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23 --- 
--- /dev/null
+++ b/build/torch211-cxx11-cu126-aarch64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch211-cxx11-cu126-x86_64-linux/__init__.py b/build/torch211-cxx11-cu126-x86_64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch211-cxx11-cu126-x86_64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch211-cxx11-cu126-x86_64-linux/_ops.py b/build/torch211-cxx11-cu126-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42
--- /dev/null
+++ b/build/torch211-cxx11-cu126-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_918d6dc
+ops = torch.ops._relu_cuda_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+ """ + return f"_relu_cuda_918d6dc::{op_name}" diff --git a/build/torch211-cxx11-cu126-x86_64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch211-cxx11-cu126-x86_64-linux/_relu_cuda_918d6dc.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..079ae164ee418565674764b7f17f8eefd48bd49c --- /dev/null +++ b/build/torch211-cxx11-cu126-x86_64-linux/_relu_cuda_918d6dc.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9170e70a58bf73d78a82e8879403a512eb12c4ca6e3563bfcf714436788ca79e +size 2012824 diff --git a/build/torch211-cxx11-cu126-x86_64-linux/layers/__init__.py b/build/torch211-cxx11-cu126-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch211-cxx11-cu126-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch211-cxx11-cu126-x86_64-linux/metadata.json b/build/torch211-cxx11-cu126-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..f5902b55ab0b2b561c0cf97567c9806c60839c7f --- /dev/null +++ b/build/torch211-cxx11-cu126-x86_64-linux/metadata.json @@ -0,0 +1,18 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "7.0", + "7.2", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0+PTX" + ] + } +} diff --git a/build/torch211-cxx11-cu126-x86_64-linux/relu/__init__.py b/build/torch211-cxx11-cu126-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23 --- /dev/null +++ b/build/torch211-cxx11-cu126-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import importlib.util +import sys +from pathlib import Path +from types import ModuleType + + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch211-cxx11-cu128-aarch64-linux/__init__.py b/build/torch211-cxx11-cu128-aarch64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch211-cxx11-cu128-aarch64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . 
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch211-cxx11-cu128-aarch64-linux/_ops.py b/build/torch211-cxx11-cu128-aarch64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42
--- /dev/null
+++ b/build/torch211-cxx11-cu128-aarch64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_918d6dc
+ops = torch.ops._relu_cuda_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cuda_918d6dc::{op_name}"
diff --git a/build/torch211-cxx11-cu128-aarch64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch211-cxx11-cu128-aarch64-linux/_relu_cuda_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..f479597726c88233840b01c87e2dcb54366ac6a1
--- /dev/null
+++ b/build/torch211-cxx11-cu128-aarch64-linux/_relu_cuda_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ba3463b89ba27273ac3efc7aca652e5081b0fb0bedba1717ab4e767ffa123601
+size 2231464
diff --git a/build/torch211-cxx11-cu128-aarch64-linux/layers/__init__.py b/build/torch211-cxx11-cu128-aarch64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch211-cxx11-cu128-aarch64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch211-cxx11-cu128-aarch64-linux/metadata.json b/build/torch211-cxx11-cu128-aarch64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..8b796af185fbbd8594fcd846949aa5fadc0ccdda
--- /dev/null
+++ b/build/torch211-cxx11-cu128-aarch64-linux/metadata.json
@@ -0,0 +1,21 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda",
+    "archs": [
+      "10.0",
+      "10.1",
+      "12.0+PTX",
+      "7.0",
+      "7.2",
+      "7.5",
+      "8.0",
+      "8.6",
+      "8.7",
+      "8.9",
+      "9.0"
+    ]
+  }
+}
diff --git a/build/torch211-cxx11-cu128-aarch64-linux/relu/__init__.py b/build/torch211-cxx11-cu128-aarch64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch211-cxx11-cu128-aarch64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch211-cxx11-cu128-x86_64-linux/__init__.py b/build/torch211-cxx11-cu128-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch211-cxx11-cu128-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch211-cxx11-cu128-x86_64-linux/_ops.py b/build/torch211-cxx11-cu128-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42 --- /dev/null +++ b/build/torch211-cxx11-cu128-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cuda_918d6dc +ops = torch.ops._relu_cuda_918d6dc + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_cuda_918d6dc::{op_name}" diff --git a/build/torch211-cxx11-cu128-x86_64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch211-cxx11-cu128-x86_64-linux/_relu_cuda_918d6dc.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..6af852a78ab67493ab83845db47ffb4a88324221 --- /dev/null +++ b/build/torch211-cxx11-cu128-x86_64-linux/_relu_cuda_918d6dc.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3e6c77795f1e04ec05f59820d52b292365a426f24822b872b8831e68df5f7015 +size 2100680 diff --git a/build/torch211-cxx11-cu128-x86_64-linux/layers/__init__.py b/build/torch211-cxx11-cu128-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch211-cxx11-cu128-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch211-cxx11-cu128-x86_64-linux/metadata.json b/build/torch211-cxx11-cu128-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..8b796af185fbbd8594fcd846949aa5fadc0ccdda --- /dev/null +++ b/build/torch211-cxx11-cu128-x86_64-linux/metadata.json @@ -0,0 +1,21 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "10.0", + "10.1", + "12.0+PTX", + "7.0", + "7.2", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0" + ] + } +} diff --git a/build/torch211-cxx11-cu128-x86_64-linux/relu/__init__.py b/build/torch211-cxx11-cu128-x86_64-linux/relu/__init__.py new file mode 100644 index 
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch211-cxx11-cu128-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch211-cxx11-cu130-aarch64-linux/__init__.py b/build/torch211-cxx11-cu130-aarch64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch211-cxx11-cu130-aarch64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch211-cxx11-cu130-aarch64-linux/_ops.py b/build/torch211-cxx11-cu130-aarch64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42
--- /dev/null
+++ b/build/torch211-cxx11-cu130-aarch64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_918d6dc
+ops = torch.ops._relu_cuda_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+ """ + return f"_relu_cuda_918d6dc::{op_name}" diff --git a/build/torch211-cxx11-cu130-aarch64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch211-cxx11-cu130-aarch64-linux/_relu_cuda_918d6dc.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..a8038f997e89abeea11d17f64120a4b0437ce92c --- /dev/null +++ b/build/torch211-cxx11-cu130-aarch64-linux/_relu_cuda_918d6dc.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e4bfd04bd2d3fc2a23f3d2cf72c91c3cccf45cb1eabca9ea482dfd746c5f8e1 +size 2233176 diff --git a/build/torch211-cxx11-cu130-aarch64-linux/layers/__init__.py b/build/torch211-cxx11-cu130-aarch64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch211-cxx11-cu130-aarch64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch211-cxx11-cu130-aarch64-linux/metadata.json b/build/torch211-cxx11-cu130-aarch64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..66651b7d3f95ac9e5ce5fc2a641b6f0f50788f87 --- /dev/null +++ b/build/torch211-cxx11-cu130-aarch64-linux/metadata.json @@ -0,0 +1,19 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "10.0", + "11.0", + "12.0+PTX", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0" + ] + } +} diff --git a/build/torch211-cxx11-cu130-aarch64-linux/relu/__init__.py b/build/torch211-cxx11-cu130-aarch64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23 --- /dev/null +++ b/build/torch211-cxx11-cu130-aarch64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import importlib.util +import sys +from pathlib import Path +from types import ModuleType + + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch211-cxx11-cu130-x86_64-linux/__init__.py b/build/torch211-cxx11-cu130-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch211-cxx11-cu130-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . 
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch211-cxx11-cu130-x86_64-linux/_ops.py b/build/torch211-cxx11-cu130-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42
--- /dev/null
+++ b/build/torch211-cxx11-cu130-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_cuda_918d6dc
+ops = torch.ops._relu_cuda_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_cuda_918d6dc::{op_name}"
diff --git a/build/torch211-cxx11-cu130-x86_64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch211-cxx11-cu130-x86_64-linux/_relu_cuda_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..577e981527a75c8da7807a54b708515879e236fa
--- /dev/null
+++ b/build/torch211-cxx11-cu130-x86_64-linux/_relu_cuda_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4efbc291e3d41be3ab822175f5abec492c46fd273448c5157939d2c32c0c9c40
+size 2118496
diff --git a/build/torch211-cxx11-cu130-x86_64-linux/layers/__init__.py b/build/torch211-cxx11-cu130-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch211-cxx11-cu130-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch211-cxx11-cu130-x86_64-linux/metadata.json b/build/torch211-cxx11-cu130-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..66651b7d3f95ac9e5ce5fc2a641b6f0f50788f87
--- /dev/null
+++ b/build/torch211-cxx11-cu130-x86_64-linux/metadata.json
@@ -0,0 +1,19 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "cuda",
+    "archs": [
+      "10.0",
+      "11.0",
+      "12.0+PTX",
+      "7.5",
+      "8.0",
+      "8.6",
+      "8.7",
+      "8.9",
+      "9.0"
+    ]
+  }
+}
diff --git a/build/torch211-cxx11-cu130-x86_64-linux/relu/__init__.py b/build/torch211-cxx11-cu130-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch211-cxx11-cu130-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch211-cxx11-rocm71-x86_64-linux/__init__.py b/build/torch211-cxx11-rocm71-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch211-cxx11-rocm71-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch211-cxx11-rocm71-x86_64-linux/_ops.py b/build/torch211-cxx11-rocm71-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..a906fe2a048baf08667f38cf4194efa2c79ee074 --- /dev/null +++ b/build/torch211-cxx11-rocm71-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_rocm_918d6dc +ops = torch.ops._relu_rocm_918d6dc + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_rocm_918d6dc::{op_name}" diff --git a/build/torch211-cxx11-rocm71-x86_64-linux/_relu_rocm_918d6dc.abi3.so b/build/torch211-cxx11-rocm71-x86_64-linux/_relu_rocm_918d6dc.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..6a23ec787caa2512eb01da56fc9644c77d9027cc --- /dev/null +++ b/build/torch211-cxx11-rocm71-x86_64-linux/_relu_rocm_918d6dc.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:33363228ddcacbffb4e3f8f80af732a61d052ad438d24efc321e52047c4eb6b6 +size 1978400 diff --git a/build/torch211-cxx11-rocm71-x86_64-linux/layers/__init__.py b/build/torch211-cxx11-rocm71-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch211-cxx11-rocm71-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch211-cxx11-rocm71-x86_64-linux/metadata.json b/build/torch211-cxx11-rocm71-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..3e8d811f1dc42febd33121b2627f809447622baf --- /dev/null +++ b/build/torch211-cxx11-rocm71-x86_64-linux/metadata.json @@ -0,0 +1,17 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "rocm", + "archs": [ + "gfx1030", + "gfx1100", + "gfx1101", + "gfx906", + "gfx908", + "gfx90a", + "gfx942" + ] + } +} diff --git a/build/torch211-cxx11-rocm71-x86_64-linux/relu/__init__.py b/build/torch211-cxx11-rocm71-x86_64-linux/relu/__init__.py new file mode 100644 index 
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch211-cxx11-rocm71-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch211-cxx11-rocm72-x86_64-linux/__init__.py b/build/torch211-cxx11-rocm72-x86_64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch211-cxx11-rocm72-x86_64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch211-cxx11-rocm72-x86_64-linux/_ops.py b/build/torch211-cxx11-rocm72-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..a906fe2a048baf08667f38cf4194efa2c79ee074
--- /dev/null
+++ b/build/torch211-cxx11-rocm72-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_rocm_918d6dc
+ops = torch.ops._relu_rocm_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+ """ + return f"_relu_rocm_918d6dc::{op_name}" diff --git a/build/torch211-cxx11-rocm72-x86_64-linux/_relu_rocm_918d6dc.abi3.so b/build/torch211-cxx11-rocm72-x86_64-linux/_relu_rocm_918d6dc.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..565fb9fd96b2e647e5a33db8347c5b0222defbcb --- /dev/null +++ b/build/torch211-cxx11-rocm72-x86_64-linux/_relu_rocm_918d6dc.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d83be9cf3c131e2851fec525c47bb7bbf49456f0b9797e69307e892d8ecc1395 +size 1977568 diff --git a/build/torch211-cxx11-rocm72-x86_64-linux/layers/__init__.py b/build/torch211-cxx11-rocm72-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch211-cxx11-rocm72-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch211-cxx11-rocm72-x86_64-linux/metadata.json b/build/torch211-cxx11-rocm72-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..3e8d811f1dc42febd33121b2627f809447622baf --- /dev/null +++ b/build/torch211-cxx11-rocm72-x86_64-linux/metadata.json @@ -0,0 +1,17 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "rocm", + "archs": [ + "gfx1030", + "gfx1100", + "gfx1101", + "gfx906", + "gfx908", + "gfx90a", + "gfx942" + ] + } +} diff --git a/build/torch211-cxx11-rocm72-x86_64-linux/relu/__init__.py b/build/torch211-cxx11-rocm72-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23 --- /dev/null +++ b/build/torch211-cxx11-rocm72-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import importlib.util +import sys +from pathlib import Path +from types import ModuleType + + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch211-cxx11-xpu20253-x86_64-linux/__init__.py b/build/torch211-cxx11-xpu20253-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch211-cxx11-xpu20253-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . 
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch211-cxx11-xpu20253-x86_64-linux/_ops.py b/build/torch211-cxx11-xpu20253-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d305ef2df02748a1690210564c66551b53b68eb
--- /dev/null
+++ b/build/torch211-cxx11-xpu20253-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_xpu_918d6dc
+ops = torch.ops._relu_xpu_918d6dc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_xpu_918d6dc::{op_name}"
diff --git a/build/torch211-cxx11-xpu20253-x86_64-linux/_relu_xpu_918d6dc.abi3.so b/build/torch211-cxx11-xpu20253-x86_64-linux/_relu_xpu_918d6dc.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..7d2279402c916bcf2c74e4aaf58622a8de2bbc51
--- /dev/null
+++ b/build/torch211-cxx11-xpu20253-x86_64-linux/_relu_xpu_918d6dc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bcfef05ee9261af274e18fc45a0f5f1a61ee80a4a8f87c6a90f8cbddf5f4bb6d
+size 2050448
diff --git a/build/torch211-cxx11-xpu20253-x86_64-linux/layers/__init__.py b/build/torch211-cxx11-xpu20253-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch211-cxx11-xpu20253-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch211-cxx11-xpu20253-x86_64-linux/metadata.json b/build/torch211-cxx11-xpu20253-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..b911d0a2549a35a1c65ab7e77d32e5aac23cd6ac
--- /dev/null
+++ b/build/torch211-cxx11-xpu20253-x86_64-linux/metadata.json
@@ -0,0 +1,8 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "xpu"
+  }
+}
\ No newline at end of file
diff --git a/build/torch211-cxx11-xpu20253-x86_64-linux/relu/__init__.py b/build/torch211-cxx11-xpu20253-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch211-cxx11-xpu20253-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is, after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique using the hex-encoded hash of
+    # the path.
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch27-cxx11-cu118-x86_64-linux/relu/__init__.py b/build/torch27-cxx11-cu118-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch27-cxx11-cu118-x86_64-linux/relu/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch27-cxx11-cu118-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc b/build/torch27-cxx11-cu118-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..69834c376a7c05779e46883394c101607a40fe94 Binary files /dev/null and b/build/torch27-cxx11-cu118-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc differ diff --git a/build/torch27-cxx11-cu118-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc b/build/torch27-cxx11-cu118-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d7a801c97b587c9f24177b7cbbcf43e394bab89 Binary files /dev/null and b/build/torch27-cxx11-cu118-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc differ diff --git a/build/torch27-cxx11-cu118-x86_64-linux/relu/_ops.py b/build/torch27-cxx11-cu118-x86_64-linux/relu/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..952b6ed483d26adac23cc6a9672439262186e6a2 --- /dev/null +++ b/build/torch27-cxx11-cu118-x86_64-linux/relu/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_4f16829 +ops = torch.ops._relu_4f16829 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. 
+ """ + return f"_relu_4f16829::{op_name}" \ No newline at end of file diff --git a/build/torch27-cxx11-cu118-x86_64-linux/relu/_relu_4f16829.abi3.so b/build/torch27-cxx11-cu118-x86_64-linux/relu/_relu_4f16829.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..a9d8c3d5f13430f2ab0e075294ba60382d900b78 --- /dev/null +++ b/build/torch27-cxx11-cu118-x86_64-linux/relu/_relu_4f16829.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8d029813f6d7daf48c2e8a280332e73efd4a027a8d82a54b95d2ad919adfcb92 +size 1946768 diff --git a/build/torch27-cxx11-cu126-x86_64-linux/relu/__init__.py b/build/torch27-cxx11-cu126-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch27-cxx11-cu126-x86_64-linux/relu/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch27-cxx11-cu126-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc b/build/torch27-cxx11-cu126-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..37a1df96b6adf66c6858f03b19d3b3dc1684c26c Binary files /dev/null and b/build/torch27-cxx11-cu126-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc differ diff --git a/build/torch27-cxx11-cu126-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc b/build/torch27-cxx11-cu126-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2c1069e4b8fd8af8a99f33289ef6c6d696670532 Binary files /dev/null and b/build/torch27-cxx11-cu126-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc differ diff --git a/build/torch27-cxx11-cu126-x86_64-linux/relu/_ops.py b/build/torch27-cxx11-cu126-x86_64-linux/relu/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..952b6ed483d26adac23cc6a9672439262186e6a2 --- /dev/null +++ b/build/torch27-cxx11-cu126-x86_64-linux/relu/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_4f16829 +ops = torch.ops._relu_4f16829 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. 
+ """ + return f"_relu_4f16829::{op_name}" \ No newline at end of file diff --git a/build/torch27-cxx11-cu126-x86_64-linux/relu/_relu_4f16829.abi3.so b/build/torch27-cxx11-cu126-x86_64-linux/relu/_relu_4f16829.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..41f4e99098e70d7df622e3371622d4e058a46652 --- /dev/null +++ b/build/torch27-cxx11-cu126-x86_64-linux/relu/_relu_4f16829.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a98a60c0944f02270df28b8efb6924d599ed419da7e633bd58e850259a2d601 +size 1996384 diff --git a/build/torch27-cxx11-cu128-x86_64-linux/relu/__init__.py b/build/torch27-cxx11-cu128-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch27-cxx11-cu128-x86_64-linux/relu/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch27-cxx11-cu128-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc b/build/torch27-cxx11-cu128-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68dc6f81a9a629069f7a91c5fbf69858fcc2ae2a Binary files /dev/null and b/build/torch27-cxx11-cu128-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc differ diff --git a/build/torch27-cxx11-cu128-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc b/build/torch27-cxx11-cu128-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e86c1d0925a0f52af6300741377dd309785de6c5 Binary files /dev/null and b/build/torch27-cxx11-cu128-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc differ diff --git a/build/torch27-cxx11-cu128-x86_64-linux/relu/_ops.py b/build/torch27-cxx11-cu128-x86_64-linux/relu/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..952b6ed483d26adac23cc6a9672439262186e6a2 --- /dev/null +++ b/build/torch27-cxx11-cu128-x86_64-linux/relu/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_4f16829 +ops = torch.ops._relu_4f16829 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. 
+ """ + return f"_relu_4f16829::{op_name}" \ No newline at end of file diff --git a/build/torch27-cxx11-cu128-x86_64-linux/relu/_relu_4f16829.abi3.so b/build/torch27-cxx11-cu128-x86_64-linux/relu/_relu_4f16829.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..39d00bf70bc5d1d9523a86bc714c59b0205077e3 --- /dev/null +++ b/build/torch27-cxx11-cu128-x86_64-linux/relu/_relu_4f16829.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:af772cd97bcdd41f2e763f5331e9a32e518e0317e70816b06e6d05588d6be6bf +size 2084096 diff --git a/build/torch27-cxx11-rocm63-x86_64-linux/relu/__init__.py b/build/torch27-cxx11-rocm63-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch27-cxx11-rocm63-x86_64-linux/relu/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch27-cxx11-rocm63-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc b/build/torch27-cxx11-rocm63-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..11b41f221f32e9000fdab2fceb3d3eb235610b52 Binary files /dev/null and b/build/torch27-cxx11-rocm63-x86_64-linux/relu/__pycache__/__init__.cpython-313.pyc differ diff --git a/build/torch27-cxx11-rocm63-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc b/build/torch27-cxx11-rocm63-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9c9ac4385fae7a687eff98f14335aaa38712271d Binary files /dev/null and b/build/torch27-cxx11-rocm63-x86_64-linux/relu/__pycache__/_ops.cpython-313.pyc differ diff --git a/build/torch27-cxx11-rocm63-x86_64-linux/relu/_ops.py b/build/torch27-cxx11-rocm63-x86_64-linux/relu/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..952b6ed483d26adac23cc6a9672439262186e6a2 --- /dev/null +++ b/build/torch27-cxx11-rocm63-x86_64-linux/relu/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_4f16829 +ops = torch.ops._relu_4f16829 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. 
+ """ + return f"_relu_4f16829::{op_name}" \ No newline at end of file diff --git a/build/torch27-cxx11-rocm63-x86_64-linux/relu/_relu_4f16829.abi3.so b/build/torch27-cxx11-rocm63-x86_64-linux/relu/_relu_4f16829.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..965c5593ad19b1f60ac4c6661cbb8a5635d315eb --- /dev/null +++ b/build/torch27-cxx11-rocm63-x86_64-linux/relu/_relu_4f16829.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4c3bb2e2c28b3a3e65787d9b70cdfebef014479af07c045898a61829dfdb61c7 +size 1956296 diff --git a/build/torch28-cxx11-cu126-x86_64-linux/__init__.py b/build/torch28-cxx11-cu126-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch28-cxx11-cu126-x86_64-linux/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch28-cxx11-cu126-x86_64-linux/_ops.py b/build/torch28-cxx11-cu126-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..bddb1796a2bafbb0f5db892c43e50dffac1b8bf9 --- /dev/null +++ b/build/torch28-cxx11-cu126-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_dc7c154 +ops = torch.ops._relu_dc7c154 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_dc7c154::{op_name}" \ No newline at end of file diff --git a/build/torch28-cxx11-cu126-x86_64-linux/_relu_dc7c154.abi3.so b/build/torch28-cxx11-cu126-x86_64-linux/_relu_dc7c154.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..debc90058646bc993f1a2bc27a15000f0b36be0a --- /dev/null +++ b/build/torch28-cxx11-cu126-x86_64-linux/_relu_dc7c154.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5425b3e4a87d30933b83a7a27fa01099ddfb3e90a8496b69e7eadda079532268 +size 2013928 diff --git a/build/torch28-cxx11-cu126-x86_64-linux/metadata.json b/build/torch28-cxx11-cu126-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..9cf5deed9898dce769f4cc73913d3530b92a0bd8 --- /dev/null +++ b/build/torch28-cxx11-cu126-x86_64-linux/metadata.json @@ -0,0 +1,4 @@ +{ + "version": 1, + "python-depends": [] +} \ No newline at end of file diff --git a/build/torch28-cxx11-cu126-x86_64-linux/relu/__init__.py b/build/torch28-cxx11-cu126-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch28-cxx11-cu126-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. 
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch28-cxx11-cu128-x86_64-linux/__init__.py b/build/torch28-cxx11-cu128-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch28-cxx11-cu128-x86_64-linux/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch28-cxx11-cu128-x86_64-linux/_ops.py b/build/torch28-cxx11-cu128-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..bddb1796a2bafbb0f5db892c43e50dffac1b8bf9 --- /dev/null +++ b/build/torch28-cxx11-cu128-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_dc7c154 +ops = torch.ops._relu_dc7c154 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_dc7c154::{op_name}" \ No newline at end of file diff --git a/build/torch28-cxx11-cu128-x86_64-linux/_relu_dc7c154.abi3.so b/build/torch28-cxx11-cu128-x86_64-linux/_relu_dc7c154.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..b25cafa72ba4efc162c659a35eb82bf3d012e65a --- /dev/null +++ b/build/torch28-cxx11-cu128-x86_64-linux/_relu_dc7c154.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9576a400cc659833be02dc67b13555402d033addc30fda586c14fb10e64955b9 +size 2101656 diff --git a/build/torch28-cxx11-cu128-x86_64-linux/metadata.json b/build/torch28-cxx11-cu128-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..9cf5deed9898dce769f4cc73913d3530b92a0bd8 --- /dev/null +++ b/build/torch28-cxx11-cu128-x86_64-linux/metadata.json @@ -0,0 +1,4 @@ +{ + "version": 1, + "python-depends": [] +} \ No newline at end of file diff --git a/build/torch28-cxx11-cu128-x86_64-linux/relu/__init__.py b/build/torch28-cxx11-cu128-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch28-cxx11-cu128-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. 
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch28-cxx11-cu129-x86_64-linux/__init__.py b/build/torch28-cxx11-cu129-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch28-cxx11-cu129-x86_64-linux/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch28-cxx11-cu129-x86_64-linux/_ops.py b/build/torch28-cxx11-cu129-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..bddb1796a2bafbb0f5db892c43e50dffac1b8bf9 --- /dev/null +++ b/build/torch28-cxx11-cu129-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_dc7c154 +ops = torch.ops._relu_dc7c154 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_dc7c154::{op_name}" \ No newline at end of file diff --git a/build/torch28-cxx11-cu129-x86_64-linux/_relu_dc7c154.abi3.so b/build/torch28-cxx11-cu129-x86_64-linux/_relu_dc7c154.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..9c4b184dc2c0c715bf22ff3b2af276fcae840e33 --- /dev/null +++ b/build/torch28-cxx11-cu129-x86_64-linux/_relu_dc7c154.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:841b4489d47ba527f306201ca6a3aa097a40381d171834d64a1a04ef772de478 +size 2135440 diff --git a/build/torch28-cxx11-cu129-x86_64-linux/metadata.json b/build/torch28-cxx11-cu129-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..9cf5deed9898dce769f4cc73913d3530b92a0bd8 --- /dev/null +++ b/build/torch28-cxx11-cu129-x86_64-linux/metadata.json @@ -0,0 +1,4 @@ +{ + "version": 1, + "python-depends": [] +} \ No newline at end of file diff --git a/build/torch28-cxx11-cu129-x86_64-linux/relu/__init__.py b/build/torch28-cxx11-cu129-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch28-cxx11-cu129-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. 
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch28-cxx11-rocm63-x86_64-linux/__init__.py b/build/torch28-cxx11-rocm63-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch28-cxx11-rocm63-x86_64-linux/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch28-cxx11-rocm63-x86_64-linux/_ops.py b/build/torch28-cxx11-rocm63-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..bddb1796a2bafbb0f5db892c43e50dffac1b8bf9 --- /dev/null +++ b/build/torch28-cxx11-rocm63-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_dc7c154 +ops = torch.ops._relu_dc7c154 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_dc7c154::{op_name}" \ No newline at end of file diff --git a/build/torch28-cxx11-rocm63-x86_64-linux/_relu_dc7c154.abi3.so b/build/torch28-cxx11-rocm63-x86_64-linux/_relu_dc7c154.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..798363fde80f1d49891ad992c41d1904532dbbdd --- /dev/null +++ b/build/torch28-cxx11-rocm63-x86_64-linux/_relu_dc7c154.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7517ece432e91eebab977b1784cd290fabcaa6f357b2cd07b44ac7e7113dc155 +size 1970264 diff --git a/build/torch28-cxx11-rocm63-x86_64-linux/metadata.json b/build/torch28-cxx11-rocm63-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..9cf5deed9898dce769f4cc73913d3530b92a0bd8 --- /dev/null +++ b/build/torch28-cxx11-rocm63-x86_64-linux/metadata.json @@ -0,0 +1,4 @@ +{ + "version": 1, + "python-depends": [] +} \ No newline at end of file diff --git a/build/torch28-cxx11-rocm63-x86_64-linux/relu/__init__.py b/build/torch28-cxx11-rocm63-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch28-cxx11-rocm63-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. 
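`_ops.py` is the binding shim: importing the `_relu_dc7c154` extension module registers its operators with PyTorch, after which they resolve under `torch.ops._relu_dc7c154`, and `add_op_namespace_prefix` produces the qualified `namespace::op` form that `torch.library` utilities expect. A sketch of the naming side alone, runnable without the extension:

NAMESPACE = "_relu_dc7c154"  # build-specific namespace embedding the kernel hash

def add_op_namespace_prefix(op_name: str) -> str:
    return f"{NAMESPACE}::{op_name}"

qualname = add_op_namespace_prefix("relu")
assert qualname == "_relu_dc7c154::relu"
# The hash suffix keeps different builds of the same kernel from colliding
# in torch.ops; the qualified name is what e.g. torch.library.opcheck or
# torch.library.register_fake would take (not invoked here).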
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch28-cxx11-rocm64-x86_64-linux/__init__.py b/build/torch28-cxx11-rocm64-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch28-cxx11-rocm64-x86_64-linux/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch28-cxx11-rocm64-x86_64-linux/_ops.py b/build/torch28-cxx11-rocm64-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..bddb1796a2bafbb0f5db892c43e50dffac1b8bf9 --- /dev/null +++ b/build/torch28-cxx11-rocm64-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_dc7c154 +ops = torch.ops._relu_dc7c154 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_dc7c154::{op_name}" \ No newline at end of file diff --git a/build/torch28-cxx11-rocm64-x86_64-linux/_relu_dc7c154.abi3.so b/build/torch28-cxx11-rocm64-x86_64-linux/_relu_dc7c154.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..2d5ba40e55a71640c28b5c78102c44eb5ecac2dc --- /dev/null +++ b/build/torch28-cxx11-rocm64-x86_64-linux/_relu_dc7c154.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1847c192cad970faa8b2c29f17d8766c37b9dc06f8e31e871043e1e08c0043f6 +size 1984936 diff --git a/build/torch28-cxx11-rocm64-x86_64-linux/metadata.json b/build/torch28-cxx11-rocm64-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..9cf5deed9898dce769f4cc73913d3530b92a0bd8 --- /dev/null +++ b/build/torch28-cxx11-rocm64-x86_64-linux/metadata.json @@ -0,0 +1,4 @@ +{ + "version": 1, + "python-depends": [] +} \ No newline at end of file diff --git a/build/torch28-cxx11-rocm64-x86_64-linux/relu/__init__.py b/build/torch28-cxx11-rocm64-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch28-cxx11-rocm64-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. 
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch28-cxx11-xpu20251-x86_64-linux/__init__.py b/build/torch28-cxx11-xpu20251-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch28-cxx11-xpu20251-x86_64-linux/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch28-cxx11-xpu20251-x86_64-linux/_ops.py b/build/torch28-cxx11-xpu20251-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..bddb1796a2bafbb0f5db892c43e50dffac1b8bf9 --- /dev/null +++ b/build/torch28-cxx11-xpu20251-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_dc7c154 +ops = torch.ops._relu_dc7c154 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_dc7c154::{op_name}" \ No newline at end of file diff --git a/build/torch28-cxx11-xpu20251-x86_64-linux/_relu_dc7c154.abi3.so b/build/torch28-cxx11-xpu20251-x86_64-linux/_relu_dc7c154.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..53af2af983763181391aebd6537f61f46ec282eb --- /dev/null +++ b/build/torch28-cxx11-xpu20251-x86_64-linux/_relu_dc7c154.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cfd31ca45c1e897c8ec3f7c5f81b33972cd32532347aba419e6ae253c1b1c4cf +size 2049544 diff --git a/build/torch28-cxx11-xpu20251-x86_64-linux/metadata.json b/build/torch28-cxx11-xpu20251-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..9cf5deed9898dce769f4cc73913d3530b92a0bd8 --- /dev/null +++ b/build/torch28-cxx11-xpu20251-x86_64-linux/metadata.json @@ -0,0 +1,4 @@ +{ + "version": 1, + "python-depends": [] +} \ No newline at end of file diff --git a/build/torch28-cxx11-xpu20251-x86_64-linux/relu/__init__.py b/build/torch28-cxx11-xpu20251-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch28-cxx11-xpu20251-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. 
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch28-metal-aarch64-darwin/__init__.py b/build/torch28-metal-aarch64-darwin/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8050dfd765d1a50720e99d17870c2c854d9e2574 --- /dev/null +++ b/build/torch28-metal-aarch64-darwin/__init__.py @@ -0,0 +1,12 @@ +from typing import Optional + +import torch + +from ._ops import ops + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out \ No newline at end of file diff --git a/build/torch28-metal-aarch64-darwin/_ops.py b/build/torch28-metal-aarch64-darwin/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..bddb1796a2bafbb0f5db892c43e50dffac1b8bf9 --- /dev/null +++ b/build/torch28-metal-aarch64-darwin/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_dc7c154 +ops = torch.ops._relu_dc7c154 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_dc7c154::{op_name}" \ No newline at end of file diff --git a/build/torch28-metal-aarch64-darwin/_relu_dc7c154.abi3.so b/build/torch28-metal-aarch64-darwin/_relu_dc7c154.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..b93f09d5dc5b36033884502dd6a4e2e88cffd523 Binary files /dev/null and b/build/torch28-metal-aarch64-darwin/_relu_dc7c154.abi3.so differ diff --git a/build/torch28-metal-aarch64-darwin/metadata.json b/build/torch28-metal-aarch64-darwin/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..9cf5deed9898dce769f4cc73913d3530b92a0bd8 --- /dev/null +++ b/build/torch28-metal-aarch64-darwin/metadata.json @@ -0,0 +1,4 @@ +{ + "version": 1, + "python-depends": [] +} \ No newline at end of file diff --git a/build/torch28-metal-aarch64-darwin/relu/__init__.py b/build/torch28-metal-aarch64-darwin/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch28-metal-aarch64-darwin/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. 
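The `relu/__init__.py` files are deliberately thin mirrors: after loading the variant's parent `__init__.py` under the hashed name, `globals().update(vars(module))` re-exports its entire namespace, so the `relu` subpackage exposes the same symbols as the parent. The same re-export trick in isolation, on a toy module rather than anything from this repo:

import types

source = types.ModuleType("source")
exec("def greet():\n    return 'hi'", vars(source))

globals().update(vars(source))  # copies everything, dunders included,
                                # just as the mirror files above do
assert greet() == "hi"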
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cpu-aarch64-darwin/__init__.py b/build/torch29-cpu-aarch64-darwin/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cpu-aarch64-darwin/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cpu-aarch64-darwin/_ops.py b/build/torch29-cpu-aarch64-darwin/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..ff8b9666569cab35ae0a56dc9d3363e5970dfe59 --- /dev/null +++ b/build/torch29-cpu-aarch64-darwin/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cpu_6261c06 +ops = torch.ops._relu_cpu_6261c06 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_cpu_6261c06::{op_name}" diff --git a/build/torch29-cpu-aarch64-darwin/_relu_cpu_6261c06.abi3.so b/build/torch29-cpu-aarch64-darwin/_relu_cpu_6261c06.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..cee78779ce406fdd381f0a702bacbae901bba9c4 Binary files /dev/null and b/build/torch29-cpu-aarch64-darwin/_relu_cpu_6261c06.abi3.so differ diff --git a/build/torch29-cpu-aarch64-darwin/layers/__init__.py b/build/torch29-cpu-aarch64-darwin/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cpu-aarch64-darwin/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cpu-aarch64-darwin/metadata.json b/build/torch29-cpu-aarch64-darwin/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..a5381dd80836f863378b9f33a559815688de9287 --- /dev/null +++ b/build/torch29-cpu-aarch64-darwin/metadata.json @@ -0,0 +1,5 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [] +} \ No newline at end of file diff --git a/build/torch29-cpu-aarch64-darwin/relu/__init__.py b/build/torch29-cpu-aarch64-darwin/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch29-cpu-aarch64-darwin/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. 
So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-cpu-aarch64-linux/__init__.py b/build/torch29-cxx11-cpu-aarch64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-cpu-aarch64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cxx11-cpu-aarch64-linux/_ops.py b/build/torch29-cxx11-cpu-aarch64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..4a7c9d9737212971f138b60a95291f0405d2070a --- /dev/null +++ b/build/torch29-cxx11-cpu-aarch64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cpu_0a30093 +ops = torch.ops._relu_cpu_0a30093 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. 
+ """ + return f"_relu_cpu_0a30093::{op_name}" diff --git a/build/torch29-cxx11-cpu-aarch64-linux/_relu_cpu_0a30093.abi3.so b/build/torch29-cxx11-cpu-aarch64-linux/_relu_cpu_0a30093.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..a8da7e28d55ae06abc38df09556fac6437e02ff1 --- /dev/null +++ b/build/torch29-cxx11-cpu-aarch64-linux/_relu_cpu_0a30093.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bc64e81b9e4027f939f57f392865171a4ca57c315b1cece0fd91f0c184ab6c99 +size 2024136 diff --git a/build/torch29-cxx11-cpu-aarch64-linux/layers/__init__.py b/build/torch29-cxx11-cpu-aarch64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-cpu-aarch64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-cpu-aarch64-linux/metadata.json b/build/torch29-cxx11-cpu-aarch64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..a5381dd80836f863378b9f33a559815688de9287 --- /dev/null +++ b/build/torch29-cxx11-cpu-aarch64-linux/metadata.json @@ -0,0 +1,5 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [] +} \ No newline at end of file diff --git a/build/torch29-cxx11-cpu-aarch64-linux/relu/__init__.py b/build/torch29-cxx11-cpu-aarch64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch29-cxx11-cpu-aarch64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-cpu-x86_64-linux/__init__.py b/build/torch29-cxx11-cpu-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-cpu-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . 
import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cxx11-cpu-x86_64-linux/_ops.py b/build/torch29-cxx11-cpu-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..ff8b9666569cab35ae0a56dc9d3363e5970dfe59 --- /dev/null +++ b/build/torch29-cxx11-cpu-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cpu_6261c06 +ops = torch.ops._relu_cpu_6261c06 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_cpu_6261c06::{op_name}" diff --git a/build/torch29-cxx11-cpu-x86_64-linux/_relu_cpu_6261c06.abi3.so b/build/torch29-cxx11-cpu-x86_64-linux/_relu_cpu_6261c06.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..b416f4fa6480e576708770ea40f9042a309cb974 --- /dev/null +++ b/build/torch29-cxx11-cpu-x86_64-linux/_relu_cpu_6261c06.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e5f3b27de92bba3fdbb9bef157f6e15bcb9119581699420e1dca69f0c7cacce +size 311616 diff --git a/build/torch29-cxx11-cpu-x86_64-linux/layers/__init__.py b/build/torch29-cxx11-cpu-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-cpu-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-cpu-x86_64-linux/metadata.json b/build/torch29-cxx11-cpu-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..a5381dd80836f863378b9f33a559815688de9287 --- /dev/null +++ b/build/torch29-cxx11-cpu-x86_64-linux/metadata.json @@ -0,0 +1,5 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [] +} \ No newline at end of file diff --git a/build/torch29-cxx11-cpu-x86_64-linux/relu/__init__.py b/build/torch29-cxx11-cpu-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch29-cxx11-cpu-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. 
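Starting with the torch29 builds, each variant also ships a `layers` submodule whose `ReLU` wraps the same op as an `nn.Module`, so the kernel can sit inside module graphs rather than being called functionally. Note that unlike `nn.ReLU(inplace=True)`, this wrapper always allocates a fresh output via `torch.empty_like`. A usage sketch (the `relu` import name is hypothetical):

import torch
import torch.nn as nn
from relu import layers  # hypothetical: the built package on sys.path

model = nn.Sequential(nn.Linear(16, 16), layers.ReLU())
x = torch.randn(4, 16)
assert (model(x) >= 0).all()  # ReLU output is non-negative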
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-cu126-aarch64-linux/__init__.py b/build/torch29-cxx11-cu126-aarch64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-cu126-aarch64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cxx11-cu126-aarch64-linux/_ops.py b/build/torch29-cxx11-cu126-aarch64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..ee42ab413224d7f6eab808ebbcf75e616ef3dba2 --- /dev/null +++ b/build/torch29-cxx11-cu126-aarch64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cuda_0a30093 +ops = torch.ops._relu_cuda_0a30093 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_cuda_0a30093::{op_name}" diff --git a/build/torch29-cxx11-cu126-aarch64-linux/_relu_cuda_0a30093.abi3.so b/build/torch29-cxx11-cu126-aarch64-linux/_relu_cuda_0a30093.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..451d2281d001d86f5169fab3700298302aa2bfb8 --- /dev/null +++ b/build/torch29-cxx11-cu126-aarch64-linux/_relu_cuda_0a30093.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1c923f22d1b4d3e589bb6ce43a1c37b6bee20a4042b9085d75a5dee62744ed8e +size 2101784 diff --git a/build/torch29-cxx11-cu126-aarch64-linux/layers/__init__.py b/build/torch29-cxx11-cu126-aarch64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-cu126-aarch64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-cu126-aarch64-linux/metadata.json b/build/torch29-cxx11-cu126-aarch64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..f5902b55ab0b2b561c0cf97567c9806c60839c7f --- /dev/null +++ b/build/torch29-cxx11-cu126-aarch64-linux/metadata.json @@ -0,0 +1,18 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "7.0", + "7.2", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0+PTX" + ] + } +} diff --git a/build/torch29-cxx11-cu126-aarch64-linux/relu/__init__.py b/build/torch29-cxx11-cu126-aarch64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ 
b/build/torch29-cxx11-cu126-aarch64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-cu126-x86_64-linux/__init__.py b/build/torch29-cxx11-cu126-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-cu126-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cxx11-cu126-x86_64-linux/_ops.py b/build/torch29-cxx11-cu126-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..54798b7a318e36d9aea831cec37d64d7c39f74a8 --- /dev/null +++ b/build/torch29-cxx11-cu126-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cuda_6261c06 +ops = torch.ops._relu_cuda_6261c06 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. 
+ """ + return f"_relu_cuda_6261c06::{op_name}" diff --git a/build/torch29-cxx11-cu126-x86_64-linux/_relu_cuda_6261c06.abi3.so b/build/torch29-cxx11-cu126-x86_64-linux/_relu_cuda_6261c06.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..d964226d994f4b599766b25a7d96ae1a42ad402b --- /dev/null +++ b/build/torch29-cxx11-cu126-x86_64-linux/_relu_cuda_6261c06.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6ccb1d2b06fd0f7636eaa740b43937e557c1f07b47479b412b095089f49bc3d2 +size 2014200 diff --git a/build/torch29-cxx11-cu126-x86_64-linux/layers/__init__.py b/build/torch29-cxx11-cu126-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-cu126-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-cu126-x86_64-linux/metadata.json b/build/torch29-cxx11-cu126-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..f5902b55ab0b2b561c0cf97567c9806c60839c7f --- /dev/null +++ b/build/torch29-cxx11-cu126-x86_64-linux/metadata.json @@ -0,0 +1,18 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "7.0", + "7.2", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0+PTX" + ] + } +} diff --git a/build/torch29-cxx11-cu126-x86_64-linux/relu/__init__.py b/build/torch29-cxx11-cu126-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch29-cxx11-cu126-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-cu128-aarch64-linux/__init__.py b/build/torch29-cxx11-cu128-aarch64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-cu128-aarch64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . 
import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cxx11-cu128-aarch64-linux/_ops.py b/build/torch29-cxx11-cu128-aarch64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..ee42ab413224d7f6eab808ebbcf75e616ef3dba2 --- /dev/null +++ b/build/torch29-cxx11-cu128-aarch64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cuda_0a30093 +ops = torch.ops._relu_cuda_0a30093 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_cuda_0a30093::{op_name}" diff --git a/build/torch29-cxx11-cu128-aarch64-linux/_relu_cuda_0a30093.abi3.so b/build/torch29-cxx11-cu128-aarch64-linux/_relu_cuda_0a30093.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..84da865e89735e33c01dc8f46e1180b76bad8da5 --- /dev/null +++ b/build/torch29-cxx11-cu128-aarch64-linux/_relu_cuda_0a30093.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a26a031722386f71a16a7eb8498dc29659b9425d6acf6c47219d733288fddb3 +size 2232968 diff --git a/build/torch29-cxx11-cu128-aarch64-linux/layers/__init__.py b/build/torch29-cxx11-cu128-aarch64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-cu128-aarch64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-cu128-aarch64-linux/metadata.json b/build/torch29-cxx11-cu128-aarch64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..8b796af185fbbd8594fcd846949aa5fadc0ccdda --- /dev/null +++ b/build/torch29-cxx11-cu128-aarch64-linux/metadata.json @@ -0,0 +1,21 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "10.0", + "10.1", + "12.0+PTX", + "7.0", + "7.2", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0" + ] + } +} diff --git a/build/torch29-cxx11-cu128-aarch64-linux/relu/__init__.py b/build/torch29-cxx11-cu128-aarch64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch29-cxx11-cu128-aarch64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. 
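The CUDA variants' `metadata.json` now records the compute capabilities the kernel was compiled for; a `+PTX` suffix means PTX was embedded, so newer GPUs can JIT-compile it forward. A sketch of the compatibility check a loader could derive from this file (path illustrative, CUDA machine assumed):

import json
import torch

with open("build/torch29-cxx11-cu128-x86_64-linux/metadata.json") as f:
    archs = json.load(f)["backend"]["archs"]

major, minor = torch.cuda.get_device_capability()
cap = float(f"{major}.{minor}")
exact = any(float(a.removesuffix("+PTX")) == cap for a in archs)
via_ptx = any(a.endswith("+PTX") and float(a[:-4]) <= cap for a in archs)
print(exact or via_ptx)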
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-cu128-x86_64-linux/__init__.py b/build/torch29-cxx11-cu128-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-cu128-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cxx11-cu128-x86_64-linux/_ops.py b/build/torch29-cxx11-cu128-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..54798b7a318e36d9aea831cec37d64d7c39f74a8 --- /dev/null +++ b/build/torch29-cxx11-cu128-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cuda_6261c06 +ops = torch.ops._relu_cuda_6261c06 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_cuda_6261c06::{op_name}" diff --git a/build/torch29-cxx11-cu128-x86_64-linux/_relu_cuda_6261c06.abi3.so b/build/torch29-cxx11-cu128-x86_64-linux/_relu_cuda_6261c06.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..94e3d5947233ec1b2a2e0ac8a71e0e06e9e65475 --- /dev/null +++ b/build/torch29-cxx11-cu128-x86_64-linux/_relu_cuda_6261c06.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82802833c8c59b5e4136deaaf4b783aa0cd13288aedbccbcf278fb4e40d673f4 +size 2101928 diff --git a/build/torch29-cxx11-cu128-x86_64-linux/layers/__init__.py b/build/torch29-cxx11-cu128-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-cu128-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-cu128-x86_64-linux/metadata.json b/build/torch29-cxx11-cu128-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..8b796af185fbbd8594fcd846949aa5fadc0ccdda --- /dev/null +++ b/build/torch29-cxx11-cu128-x86_64-linux/metadata.json @@ -0,0 +1,21 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "10.0", + "10.1", + "12.0+PTX", + "7.0", + "7.2", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0" + ] + } +} diff --git a/build/torch29-cxx11-cu128-x86_64-linux/relu/__init__.py b/build/torch29-cxx11-cu128-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- 
/dev/null +++ b/build/torch29-cxx11-cu128-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-cu129-aarch64-linux/__init__.py b/build/torch29-cxx11-cu129-aarch64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-cu129-aarch64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cxx11-cu129-aarch64-linux/_ops.py b/build/torch29-cxx11-cu129-aarch64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42 --- /dev/null +++ b/build/torch29-cxx11-cu129-aarch64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cuda_918d6dc +ops = torch.ops._relu_cuda_918d6dc + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. 
+ """ + return f"_relu_cuda_918d6dc::{op_name}" diff --git a/build/torch29-cxx11-cu129-aarch64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch29-cxx11-cu129-aarch64-linux/_relu_cuda_918d6dc.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..564f77f8f3f929e8884537a46ea3f74252ad40f0 --- /dev/null +++ b/build/torch29-cxx11-cu129-aarch64-linux/_relu_cuda_918d6dc.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:70a89be0e1ec9ab5102d38e869153031f27a587d8cf31b73c03fb88502697660 +size 2234392 diff --git a/build/torch29-cxx11-cu129-aarch64-linux/layers/__init__.py b/build/torch29-cxx11-cu129-aarch64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-cu129-aarch64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-cu129-aarch64-linux/metadata.json b/build/torch29-cxx11-cu129-aarch64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..8b796af185fbbd8594fcd846949aa5fadc0ccdda --- /dev/null +++ b/build/torch29-cxx11-cu129-aarch64-linux/metadata.json @@ -0,0 +1,21 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "10.0", + "10.1", + "12.0+PTX", + "7.0", + "7.2", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0" + ] + } +} diff --git a/build/torch29-cxx11-cu129-aarch64-linux/relu/__init__.py b/build/torch29-cxx11-cu129-aarch64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23 --- /dev/null +++ b/build/torch29-cxx11-cu129-aarch64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import importlib.util +import sys +from pathlib import Path +from types import ModuleType + + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-cu129-x86_64-linux/__init__.py b/build/torch29-cxx11-cu129-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-cu129-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . 
import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cxx11-cu129-x86_64-linux/_ops.py b/build/torch29-cxx11-cu129-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..429199da2a1613e92ea169fe550a047d15425e42 --- /dev/null +++ b/build/torch29-cxx11-cu129-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cuda_918d6dc +ops = torch.ops._relu_cuda_918d6dc + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_cuda_918d6dc::{op_name}" diff --git a/build/torch29-cxx11-cu129-x86_64-linux/_relu_cuda_918d6dc.abi3.so b/build/torch29-cxx11-cu129-x86_64-linux/_relu_cuda_918d6dc.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..1d67d1de810d1b76af05423cfae00e32c07d0ab3 --- /dev/null +++ b/build/torch29-cxx11-cu129-x86_64-linux/_relu_cuda_918d6dc.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:552ce7588e28efb1cf7a95aa6079e3d3f397911dd54e08b1d325e6d20fbbc5ba +size 2135712 diff --git a/build/torch29-cxx11-cu129-x86_64-linux/layers/__init__.py b/build/torch29-cxx11-cu129-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-cu129-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-cu129-x86_64-linux/metadata.json b/build/torch29-cxx11-cu129-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..8b796af185fbbd8594fcd846949aa5fadc0ccdda --- /dev/null +++ b/build/torch29-cxx11-cu129-x86_64-linux/metadata.json @@ -0,0 +1,21 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "10.0", + "10.1", + "12.0+PTX", + "7.0", + "7.2", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0" + ] + } +} diff --git a/build/torch29-cxx11-cu129-x86_64-linux/relu/__init__.py b/build/torch29-cxx11-cu129-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23 --- /dev/null +++ b/build/torch29-cxx11-cu129-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import importlib.util +import sys +from pathlib import Path +from types import ModuleType + + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. 
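The build directory names encode the full compatibility tuple a loader must match: torch minor version, C++ ABI, accelerator toolkit, CPU architecture, and OS. A sketch of deriving the tag for the current environment; the scheme is inferred from the Linux directory names above and does not cover the darwin, metal, or xpu spellings:

import platform
import torch

torch_tag = "torch" + "".join(torch.__version__.split("+")[0].split(".")[:2])
if torch.version.cuda:
    accel = "cu" + torch.version.cuda.replace(".", "")
elif getattr(torch.version, "hip", None):
    accel = "rocm" + "".join(torch.version.hip.split(".")[:2])
else:
    accel = "cpu"
variant = f"{torch_tag}-cxx11-{accel}-{platform.machine()}-{platform.system().lower()}"
print(variant)  # e.g. torch29-cxx11-cu128-x86_64-linux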
+ path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-cu130-aarch64-linux/__init__.py b/build/torch29-cxx11-cu130-aarch64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-cu130-aarch64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cxx11-cu130-aarch64-linux/_ops.py b/build/torch29-cxx11-cu130-aarch64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..ee42ab413224d7f6eab808ebbcf75e616ef3dba2 --- /dev/null +++ b/build/torch29-cxx11-cu130-aarch64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cuda_0a30093 +ops = torch.ops._relu_cuda_0a30093 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. + """ + return f"_relu_cuda_0a30093::{op_name}" diff --git a/build/torch29-cxx11-cu130-aarch64-linux/_relu_cuda_0a30093.abi3.so b/build/torch29-cxx11-cu130-aarch64-linux/_relu_cuda_0a30093.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..5997462796c0c0fae2d65122ae93293faf980c50 --- /dev/null +++ b/build/torch29-cxx11-cu130-aarch64-linux/_relu_cuda_0a30093.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aded10b971f8ad4dec30e40c70e4305a6a439aa34d6ec441488c8ca4c01e544a +size 2234680 diff --git a/build/torch29-cxx11-cu130-aarch64-linux/layers/__init__.py b/build/torch29-cxx11-cu130-aarch64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-cu130-aarch64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-cu130-aarch64-linux/metadata.json b/build/torch29-cxx11-cu130-aarch64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..66651b7d3f95ac9e5ce5fc2a641b6f0f50788f87 --- /dev/null +++ b/build/torch29-cxx11-cu130-aarch64-linux/metadata.json @@ -0,0 +1,19 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "10.0", + "11.0", + "12.0+PTX", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0" + ] + } +} diff --git a/build/torch29-cxx11-cu130-aarch64-linux/relu/__init__.py b/build/torch29-cxx11-cu130-aarch64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- 
/dev/null +++ b/build/torch29-cxx11-cu130-aarch64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-cu130-x86_64-linux/__init__.py b/build/torch29-cxx11-cu130-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-cu130-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . import layers + + +def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor: + if out is None: + out = torch.empty_like(x) + ops.relu(out, x) + return out + +__all__ = ["relu", "layers"] diff --git a/build/torch29-cxx11-cu130-x86_64-linux/_ops.py b/build/torch29-cxx11-cu130-x86_64-linux/_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..54798b7a318e36d9aea831cec37d64d7c39f74a8 --- /dev/null +++ b/build/torch29-cxx11-cu130-x86_64-linux/_ops.py @@ -0,0 +1,9 @@ +import torch +from . import _relu_cuda_6261c06 +ops = torch.ops._relu_cuda_6261c06 + +def add_op_namespace_prefix(op_name: str): + """ + Prefix op by namespace. 
+ """ + return f"_relu_cuda_6261c06::{op_name}" diff --git a/build/torch29-cxx11-cu130-x86_64-linux/_relu_cuda_6261c06.abi3.so b/build/torch29-cxx11-cu130-x86_64-linux/_relu_cuda_6261c06.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..995a6052cc1ed17481c96b3ee07d0ab4a4ff8c87 --- /dev/null +++ b/build/torch29-cxx11-cu130-x86_64-linux/_relu_cuda_6261c06.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fcbd057483865c6527156408ecd11a1b7dca95e70ae0b2f88b22857633624349 +size 2127936 diff --git a/build/torch29-cxx11-cu130-x86_64-linux/layers/__init__.py b/build/torch29-cxx11-cu130-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-cu130-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-cu130-x86_64-linux/metadata.json b/build/torch29-cxx11-cu130-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..66651b7d3f95ac9e5ce5fc2a641b6f0f50788f87 --- /dev/null +++ b/build/torch29-cxx11-cu130-x86_64-linux/metadata.json @@ -0,0 +1,19 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "cuda", + "archs": [ + "10.0", + "11.0", + "12.0+PTX", + "7.5", + "8.0", + "8.6", + "8.7", + "8.9", + "9.0" + ] + } +} diff --git a/build/torch29-cxx11-cu130-x86_64-linux/relu/__init__.py b/build/torch29-cxx11-cu130-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309 --- /dev/null +++ b/build/torch29-cxx11-cu130-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import sys + +import importlib +from pathlib import Path +from types import ModuleType + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-cxx11-rocm63-x86_64-linux/__init__.py b/build/torch29-cxx11-rocm63-x86_64-linux/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-cxx11-rocm63-x86_64-linux/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . 
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch29-cxx11-rocm63-x86_64-linux/_ops.py b/build/torch29-cxx11-rocm63-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e7b0f2089a9f2116f110cb966637b43e1e26a82
--- /dev/null
+++ b/build/torch29-cxx11-rocm63-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_rocm_525b056_dirty
+ops = torch.ops._relu_rocm_525b056_dirty
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_rocm_525b056_dirty::{op_name}"
diff --git a/build/torch29-cxx11-rocm63-x86_64-linux/_relu_rocm_525b056_dirty.abi3.so b/build/torch29-cxx11-rocm63-x86_64-linux/_relu_rocm_525b056_dirty.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..aaab455d26649036d420759a99d502f484bd972a
--- /dev/null
+++ b/build/torch29-cxx11-rocm63-x86_64-linux/_relu_rocm_525b056_dirty.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c6e4b03449e64df0b8289df19017805d6e95b566723505725ac6e1ebca4698e4
+size 1970504
diff --git a/build/torch29-cxx11-rocm63-x86_64-linux/layers/__init__.py b/build/torch29-cxx11-rocm63-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch29-cxx11-rocm63-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch29-cxx11-rocm63-x86_64-linux/metadata.json b/build/torch29-cxx11-rocm63-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..3e8d811f1dc42febd33121b2627f809447622baf
--- /dev/null
+++ b/build/torch29-cxx11-rocm63-x86_64-linux/metadata.json
@@ -0,0 +1,17 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "rocm",
+    "archs": [
+      "gfx1030",
+      "gfx1100",
+      "gfx1101",
+      "gfx906",
+      "gfx908",
+      "gfx90a",
+      "gfx942"
+    ]
+  }
+}
diff --git a/build/torch29-cxx11-rocm63-x86_64-linux/relu/__init__.py b/build/torch29-cxx11-rocm63-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch29-cxx11-rocm63-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is: after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique, using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch29-cxx11-rocm64-x86_64-linux/__init__.py b/build/torch29-cxx11-rocm64-x86_64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch29-cxx11-rocm64-x86_64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch29-cxx11-rocm64-x86_64-linux/_ops.py b/build/torch29-cxx11-rocm64-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e7b0f2089a9f2116f110cb966637b43e1e26a82
--- /dev/null
+++ b/build/torch29-cxx11-rocm64-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_rocm_525b056_dirty
+ops = torch.ops._relu_rocm_525b056_dirty
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_rocm_525b056_dirty::{op_name}"
diff --git a/build/torch29-cxx11-rocm64-x86_64-linux/_relu_rocm_525b056_dirty.abi3.so b/build/torch29-cxx11-rocm64-x86_64-linux/_relu_rocm_525b056_dirty.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..73160397626c1032b02ad42b5d0f9778d3e3b150
--- /dev/null
+++ b/build/torch29-cxx11-rocm64-x86_64-linux/_relu_rocm_525b056_dirty.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fef37945f525c2e66ca50abc9e3cfd8cacc186a8874e63d036c1ee3ead2998a7
+size 1989280
diff --git a/build/torch29-cxx11-rocm64-x86_64-linux/layers/__init__.py b/build/torch29-cxx11-rocm64-x86_64-linux/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch29-cxx11-rocm64-x86_64-linux/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch29-cxx11-rocm64-x86_64-linux/metadata.json b/build/torch29-cxx11-rocm64-x86_64-linux/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..3e8d811f1dc42febd33121b2627f809447622baf
--- /dev/null
+++ b/build/torch29-cxx11-rocm64-x86_64-linux/metadata.json
@@ -0,0 +1,17 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": [],
+  "backend": {
+    "type": "rocm",
+    "archs": [
+      "gfx1030",
+      "gfx1100",
+      "gfx1101",
+      "gfx906",
+      "gfx908",
+      "gfx90a",
+      "gfx942"
+    ]
+  }
+}
diff --git a/build/torch29-cxx11-rocm64-x86_64-linux/relu/__init__.py b/build/torch29-cxx11-rocm64-x86_64-linux/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23
--- /dev/null
+++ b/build/torch29-cxx11-rocm64-x86_64-linux/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import importlib.util
+import sys
+from pathlib import Path
+from types import ModuleType
+
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is: after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique, using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
diff --git a/build/torch29-cxx11-xpu20252-x86_64-linux/__init__.py b/build/torch29-cxx11-xpu20252-x86_64-linux/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514
--- /dev/null
+++ b/build/torch29-cxx11-xpu20252-x86_64-linux/__init__.py
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch29-cxx11-xpu20252-x86_64-linux/_ops.py b/build/torch29-cxx11-xpu20252-x86_64-linux/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..2ead74c44be8208224df0fe2e9a0f72b08aa2b1d
--- /dev/null
+++ b/build/torch29-cxx11-xpu20252-x86_64-linux/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_xpu_525b056_dirty
+ops = torch.ops._relu_xpu_525b056_dirty
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+ """ + return f"_relu_xpu_525b056_dirty::{op_name}" diff --git a/build/torch29-cxx11-xpu20252-x86_64-linux/_relu_xpu_525b056_dirty.abi3.so b/build/torch29-cxx11-xpu20252-x86_64-linux/_relu_xpu_525b056_dirty.abi3.so new file mode 100644 index 0000000000000000000000000000000000000000..1898a0cf3c4731e8c24b5b4f9336c16f7d7e2385 --- /dev/null +++ b/build/torch29-cxx11-xpu20252-x86_64-linux/_relu_xpu_525b056_dirty.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:33b5d8fc5902b8531a1c7b18cc67ca1ca8e5d7f9005d0b33ff40f8bd5be6b7fa +size 2048352 diff --git a/build/torch29-cxx11-xpu20252-x86_64-linux/layers/__init__.py b/build/torch29-cxx11-xpu20252-x86_64-linux/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157 --- /dev/null +++ b/build/torch29-cxx11-xpu20252-x86_64-linux/layers/__init__.py @@ -0,0 +1,11 @@ +import torch +import torch.nn as nn + +from .._ops import ops + + +class ReLU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.relu(out, x) + return out diff --git a/build/torch29-cxx11-xpu20252-x86_64-linux/metadata.json b/build/torch29-cxx11-xpu20252-x86_64-linux/metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..b911d0a2549a35a1c65ab7e77d32e5aac23cd6ac --- /dev/null +++ b/build/torch29-cxx11-xpu20252-x86_64-linux/metadata.json @@ -0,0 +1,8 @@ +{ + "version": 1, + "license": "Apache-2.0", + "python-depends": [], + "backend": { + "type": "xpu" + } +} \ No newline at end of file diff --git a/build/torch29-cxx11-xpu20252-x86_64-linux/relu/__init__.py b/build/torch29-cxx11-xpu20252-x86_64-linux/relu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b2672c1cd85b74c1b3ded0fc0b2100e1aeac23 --- /dev/null +++ b/build/torch29-cxx11-xpu20252-x86_64-linux/relu/__init__.py @@ -0,0 +1,26 @@ +import ctypes +import importlib.util +import sys +from pathlib import Path +from types import ModuleType + + +def _import_from_path(file_path: Path) -> ModuleType: + # We cannot use the module name as-is, after adding it to `sys.modules`, + # it would also be used for other imports. So, we make a module name that + # depends on the path for it to be unique using the hex-encoded hash of + # the path. + path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value) + module_name = path_hash + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None: + raise ImportError(f"Cannot load spec for {module_name} from {file_path}") + module = importlib.util.module_from_spec(spec) + if module is None: + raise ImportError(f"Cannot load module {module_name} from spec") + sys.modules[module_name] = module + spec.loader.exec_module(module) # type: ignore + return module + + +globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py"))) diff --git a/build/torch29-metal-aarch64-darwin/__init__.py b/build/torch29-metal-aarch64-darwin/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1034928e8aa6d9cf8c85742c404d42d77ab38514 --- /dev/null +++ b/build/torch29-metal-aarch64-darwin/__init__.py @@ -0,0 +1,16 @@ +from typing import Optional + +import torch + +from ._ops import ops + +from . 
+
+
+def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+    if out is None:
+        out = torch.empty_like(x)
+    ops.relu(out, x)
+    return out
+
+__all__ = ["relu", "layers"]
diff --git a/build/torch29-metal-aarch64-darwin/_ops.py b/build/torch29-metal-aarch64-darwin/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..7bd33b28b0ade8900ea62cf12a7e08d624681ece
--- /dev/null
+++ b/build/torch29-metal-aarch64-darwin/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _relu_metal_6261c06
+ops = torch.ops._relu_metal_6261c06
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_relu_metal_6261c06::{op_name}"
diff --git a/build/torch29-metal-aarch64-darwin/_relu_metal_6261c06.abi3.so b/build/torch29-metal-aarch64-darwin/_relu_metal_6261c06.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..dc4c52088ebf4e86593c61676aeccfdefdc2275f
Binary files /dev/null and b/build/torch29-metal-aarch64-darwin/_relu_metal_6261c06.abi3.so differ
diff --git a/build/torch29-metal-aarch64-darwin/layers/__init__.py b/build/torch29-metal-aarch64-darwin/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6105a19101b9f01a5ed9f061f94cc92f3c3ab157
--- /dev/null
+++ b/build/torch29-metal-aarch64-darwin/layers/__init__.py
@@ -0,0 +1,11 @@
+import torch
+import torch.nn as nn
+
+from .._ops import ops
+
+
+class ReLU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.relu(out, x)
+        return out
diff --git a/build/torch29-metal-aarch64-darwin/metadata.json b/build/torch29-metal-aarch64-darwin/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..a5381dd80836f863378b9f33a559815688de9287
--- /dev/null
+++ b/build/torch29-metal-aarch64-darwin/metadata.json
@@ -0,0 +1,5 @@
+{
+  "version": 1,
+  "license": "Apache-2.0",
+  "python-depends": []
+}
\ No newline at end of file
diff --git a/build/torch29-metal-aarch64-darwin/relu/__init__.py b/build/torch29-metal-aarch64-darwin/relu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..03dbc1afe1cf156661a2b1b22003cd5f599a0309
--- /dev/null
+++ b/build/torch29-metal-aarch64-darwin/relu/__init__.py
@@ -0,0 +1,26 @@
+import ctypes
+import sys
+
+import importlib.util
+from pathlib import Path
+from types import ModuleType
+
+def _import_from_path(file_path: Path) -> ModuleType:
+    # We cannot use the module name as-is: after adding it to `sys.modules`,
+    # it would also be used for other imports. So, we make a module name that
+    # depends on the path for it to be unique, using the hex-encoded hash of
+    # the path.
+    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
+    module_name = path_hash
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    if spec is None:
+        raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
+    module = importlib.util.module_from_spec(spec)
+    if module is None:
+        raise ImportError(f"Cannot load module {module_name} from spec")
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
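
Usage note (not part of the diff above): every build variant ships the same public surface, a functional `relu` with an optional `out=` buffer and a `layers.ReLU` module wrapper, both backed by the same `ops.relu(out, x)` call. The sketch below is a minimal illustration under two assumptions that are not in the diff: that one build variant is importable as `relu`, and that a CUDA device is available for the `device="cuda"` tensors.

# Minimal usage sketch; `import relu` assumes a build variant on sys.path.
import torch

import relu

x = torch.randn(16, device="cuda")

# Functional form: allocates the output when `out=` is omitted.
y = relu.relu(x)

# Out-variant: writes into a caller-provided buffer and returns it.
buf = torch.empty_like(x)
relu.relu(x, out=buf)

# Module form, mirroring torch.nn.ReLU.
act = relu.layers.ReLU()
z = act(x)

assert torch.equal(y, torch.relu(x))
assert torch.equal(buf, y) and torch.equal(z, y)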
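
The `relu/__init__.py` shim repeated in each variant exists so that several builds of the same extension can be loaded side by side: keying `sys.modules` by a hash of the file's absolute path keeps one variant's `__init__.py` from shadowing another's. The following self-contained sketch demonstrates the effect; the `variant_a`/`variant_b` names and `TAG` attribute are throwaway illustrations, not taken from the diff.

import ctypes
import importlib.util
import sys
import tempfile
from pathlib import Path
from types import ModuleType


def _import_from_path(file_path: Path) -> ModuleType:
    # Same technique as the shims above: a path-derived module name means
    # two files both called `__init__.py` get distinct sys.modules entries.
    module_name = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
    spec = importlib.util.spec_from_file_location(module_name, file_path)
    module = importlib.util.module_from_spec(spec)
    sys.modules[module_name] = module
    spec.loader.exec_module(module)
    return module


with tempfile.TemporaryDirectory() as tmp:
    # Two distinct "variants" whose entry points share the same file name.
    for tag in ("a", "b"):
        pkg = Path(tmp, f"variant_{tag}")
        pkg.mkdir()
        (pkg / "__init__.py").write_text(f"TAG = {tag!r}")
    mod_a = _import_from_path(Path(tmp, "variant_a", "__init__.py"))
    mod_b = _import_from_path(Path(tmp, "variant_b", "__init__.py"))
    assert (mod_a.TAG, mod_b.TAG) == ("a", "b")  # neither load clobbered the other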