| operator name | used in model | args |
|---|---|---|
aten._softmax.default | HuggingFace/GPTNeoForCausalLM | ((T([1, 16, 128, 128], f32), -1, False), {}) |
aten._softmax.default | HuggingFace/GPTNeoForSequenceClassification | ((T([1, 16, 128, 128], f32), -1, False), {}) |
aten._softmax.default | HuggingFace/DebertaV2ForMaskedLM | ((T([1, 24, 512, 512], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/DebertaV2ForQuestionAnswering | ((T([1, 24, 512, 512], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/ElectraForCausalLM | ((T([1, 4, 512, 512], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/YituTechConvBert | ((T([1, 6, 512, 512], f16), -1, False), {}) |
aten._softmax.default | TIMM/eca_halonext26ts | ((T([1024, 1, 64, 144], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/BlenderbotSmallForCausalLM | ((T([1024, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/BlenderbotSmallForConditionalGeneration | ((T([1024, 128, 128], f16), -1, False), {}) |
aten._softmax.default | TIMM/eca_halonext26ts | ((T([1024, 4, 16, 144], f16), -1, False), {}) |
aten._softmax.default | TIMM/eca_halonext26ts | ((T([1024, 4, 64, 144], f16), -1, False), {}) |
aten._softmax.default | TIMM/swin_base_patch4_window7_224 | ((T([1024, 8, 49, 49], f16), -1, False), {}) |
aten._softmax.default | TIMM/tnt_s_patch16_224 | ((T([12544, 4, 16, 16], f16), -1, False), {}) |
aten._softmax.default | TIMM/levit_128 | ((T([128, 12, 16, 16], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/MBartForConditionalGeneration | ((T([128, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/PegasusForCausalLM | ((T([128, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/TrOCRForCausalLM | ((T([128, 128, 128], f16), -1, False), {}) |
aten._softmax.default | TIMM/levit_128 | ((T([128, 16, 16, 49], f16), -1, False), {}) |
aten._softmax.default | TIMM/levit_128 | ((T([128, 4, 196, 196], f16), -1, False), {}) |
aten._softmax.default | TIMM/visformer_small | ((T([128, 6, 196, 196], f16), -1, False), {}) |
aten._softmax.default | TIMM/visformer_small | ((T([128, 6, 49, 49], f16), -1, False), {}) |
aten._softmax.default | TIMM/coat_lite_mini | ((T([128, 8, 197, 40], f16, stride=(189120, 40, 960, 1)), 2, False), {}) |
aten._softmax.default | TIMM/coat_lite_mini | ((T([128, 8, 3137, 8], f16, stride=(602304, 8, 192, 1)), 2, False), {}) |
aten._softmax.default | TIMM/levit_128 | ((T([128, 8, 49, 196], f16), -1, False), {}) |
aten._softmax.default | TIMM/levit_128 | ((T([128, 8, 49, 49], f16), -1, False), {}) |
aten._softmax.default | TIMM/coat_lite_mini | ((T([128, 8, 50, 64], f16, stride=(76800, 64, 1536, 1)), 2, False), {}) |
aten._softmax.default | TIMM/coat_lite_mini | ((T([128, 8, 785, 16], f16, stride=(301440, 16, 384, 1)), 2, False), {}) |
aten._softmax.default | HuggingFace/DistilBertForMaskedLM | ((T([16, 12, 128, 128], f16), -1, False), {}) |
aten._softmax.default | TorchBench/BERT_pytorch | ((T([16, 12, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/LayoutLMForMaskedLM | ((T([16, 12, 512, 512], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/LayoutLMForSequenceClassification | ((T([16, 12, 512, 512], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/MobileBertForMaskedLM | ((T([16, 4, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/PLBartForCausalLM | ((T([192, 128, 128], f16), -1, False), {}) |
aten._softmax.default | TorchBench/hf_Longformer | ((T([2, 1024, 12, 513], f16, stride=(6303744, 513, 525312, 1)), -1, True), {}) |
aten._softmax.default | TorchBench/hf_BigBird | ((T([2, 12, 12, 64, 512], f16), -1, False), {}) |
aten._softmax.default | TorchBench/hf_BigBird | ((T([2, 12, 64, 1024], f16), -1, False), {}) |
aten._softmax.default | TorchBench/hf_BigBird | ((T([2, 12, 64, 448], f16), -1, False), {}) |
aten._softmax.default | TIMM/cait_m36_384 | ((T([2, 16, 1, 577], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/MegatronBertForCausalLM | ((T([2, 16, 128, 128], f16), -1, False), {}) |
aten._softmax.default | TIMM/cait_m36_384 | ((T([2, 16, 576, 576], f16, stride=(5308416, 1, 9216, 16)), -1, False), {}) |
aten._softmax.default | HuggingFace/AlbertForMaskedLM | ((T([2, 64, 512, 512], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/AlbertForQuestionAnswering | ((T([2, 64, 512, 512], f16), -1, False), {}) |
aten._softmax.default | TIMM/sebotnet33ts_256 | ((T([256, 1024, 1024], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/MBartForCausalLM | ((T([256, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/Speech2Text2ForCausalLM | ((T([256, 128, 128], f16), -1, False), {}) |
aten._softmax.default | TIMM/swin_base_patch4_window7_224 | ((T([256, 16, 49, 49], f16), -1, False), {}) |
aten._softmax.default | TIMM/sebotnet33ts_256 | ((T([256, 256, 256], f16), -1, False), {}) |
aten._softmax.default | TIMM/mobilevit_s | ((T([256, 4, 16, 16], f16), -1, False), {}) |
aten._softmax.default | TIMM/mobilevit_s | ((T([256, 4, 256, 256], f16), -1, False), {}) |
aten._softmax.default | TIMM/mobilevit_s | ((T([256, 4, 64, 64], f16), -1, False), {}) |
aten._softmax.default | TIMM/sebotnet33ts_256 | ((T([256, 64, 64], f16), -1, False), {}) |
aten._softmax.default | TorchBench/attention_is_all_you_need_pytorch | ((T([256, 8, 31, 31], f16), -1, False), {}) |
aten._softmax.default | TorchBench/attention_is_all_you_need_pytorch | ((T([256, 8, 31, 33], f16), -1, False), {}) |
aten._softmax.default | TorchBench/attention_is_all_you_need_pytorch | ((T([256, 8, 33, 33], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/YituTechConvBert | ((T([3072, 9, 1], f16), 1, False), {}) |
aten._softmax.default | TIMM/twins_pcpvt_base | ((T([32, 1, 3136, 49], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/BartForConditionalGeneration | ((T([32, 1024, 1024], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/DistilBertForQuestionAnswering | ((T([32, 12, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/M2M100ForConditionalGeneration | ((T([32, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/XGLMForCausalLM | ((T([32, 128, 128], f16), -1, False), {}) |
aten._softmax.default | TIMM/resnest101e | ((T([32, 2, 1, 128], f16), 1, False), {}) |
aten._softmax.default | TorchBench/timm_resnest | ((T([32, 2, 1, 128], f16), 1, False), {}) |
aten._softmax.default | TIMM/resnest101e | ((T([32, 2, 1, 256], f16), 1, False), {}) |
aten._softmax.default | TorchBench/timm_resnest | ((T([32, 2, 1, 256], f16), 1, False), {}) |
aten._softmax.default | TIMM/resnest101e | ((T([32, 2, 1, 512], f16), 1, False), {}) |
aten._softmax.default | TorchBench/timm_resnest | ((T([32, 2, 1, 512], f16), 1, False), {}) |
aten._softmax.default | TIMM/resnest101e | ((T([32, 2, 1, 64], f16), 1, False), {}) |
aten._softmax.default | TorchBench/timm_resnest | ((T([32, 2, 1, 64], f16), 1, False), {}) |
aten._softmax.default | TIMM/twins_pcpvt_base | ((T([32, 2, 784, 49], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/MobileBertForQuestionAnswering | ((T([32, 4, 128, 128], f16), -1, False), {}) |
aten._softmax.default | TIMM/twins_pcpvt_base | ((T([32, 5, 196, 49], f16), -1, False), {}) |
aten._softmax.default | TIMM/twins_pcpvt_base | ((T([32, 8, 49, 49], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/GPT2ForSequenceClassification | ((T([4, 12, 1024, 1024], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/RobertaForCausalLM | ((T([4, 12, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/DebertaForMaskedLM | ((T([4, 12, 512, 512], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/DebertaForQuestionAnswering | ((T([4, 12, 512, 512], f16), -1, False), {}) |
aten._softmax.default | TorchBench/hf_Bert | ((T([4, 12, 512, 512], f16), -1, False), {}) |
aten._softmax.default | TorchBench/hf_GPT2 | ((T([4, 12, 512, 512], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/XLNetLMHeadModel | ((T([4, 16, 512, 512], f16), 3, False), {}) |
aten._softmax.default | TIMM/swin_base_patch4_window7_224 | ((T([4096, 4, 49, 49], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/OPTForCausalLM | ((T([48, 128, 128], f16), -1, True), {}) |
aten._softmax.default | TorchBench/hf_Bart | ((T([48, 512, 512], f16), -1, False), {}) |
aten._softmax.default | TIMM/botnet26t_256 | ((T([512, 256, 256], f16), -1, False), {}) |
aten._softmax.default | TIMM/eca_botnext26ts_256 | ((T([512, 256, 256], f16), -1, False), {}) |
aten._softmax.default | TIMM/botnet26t_256 | ((T([512, 64, 64], f16), -1, False), {}) |
aten._softmax.default | TIMM/eca_botnext26ts_256 | ((T([512, 64, 64], f16), -1, False), {}) |
aten._softmax.default | TorchBench/fastNLP_Bert | ((T([6, 12, 476, 476], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/BartForCausalLM | ((T([64, 1024, 1024], f16), -1, False), {}) |
aten._softmax.default | TIMM/volo_d1_224 | ((T([64, 12, 1, 197], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/BertForMaskedLM | ((T([64, 12, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/BertForQuestionAnswering | ((T([64, 12, 128, 128], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/RobertaForQuestionAnswering | ((T([64, 12, 128, 128], f16), -1, False), {}) |
aten._softmax.default | TIMM/volo_d1_224 | ((T([64, 12, 196, 196], f16), -1, False), {}) |
aten._softmax.default | TIMM/beit_base_patch16_224 | ((T([64, 12, 197, 197], f16), -1, False), {}) |
aten._softmax.default | TIMM/vit_base_patch16_224 | ((T([64, 12, 197, 197], f16), -1, False), {}) |
aten._softmax.default | TIMM/deit_base_distilled_patch16_224 | ((T([64, 12, 198, 198], f16), -1, False), {}) |
aten._softmax.default | HuggingFace/PegasusForConditionalGeneration | ((T([64, 128, 128], f16), -1, False), {}) |
aten._softmax.default | TIMM/jx_nest_base | ((T([64, 16, 1, 196, 196], f16), -1, False), {}) |
aten._softmax.default | TIMM/pit_b_224 | ((T([64, 16, 65, 65], f16), -1, False), {}) |
aten._softmax.default | TIMM/swin_base_patch4_window7_224 | ((T([64, 32, 49, 49], f16), -1, False), {}) |
<!-- Trailing dataset-viewer UI text removed (page footer: "Subsets and Splits / No community queries yet / The top public SQL queries from the community will appear here once available.") — it was scraping residue, not table data. -->