From 824119ad8b6fcd095366a06934e40630d886fe88 Mon Sep 17 00:00:00 2001
From: dan
Date: Thu, 25 Jan 2024 18:19:42 +0000
Subject: [PATCH] update fx_importer for symbolic changes upstream

torch.ops.aten.sym_size -> torch.ops.aten.sym_size.int
---
 python/torch_mlir/extras/fx_importer.py | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/python/torch_mlir/extras/fx_importer.py b/python/torch_mlir/extras/fx_importer.py
index 9ec90e766..fa1d34736 100644
--- a/python/torch_mlir/extras/fx_importer.py
+++ b/python/torch_mlir/extras/fx_importer.py
@@ -187,17 +187,18 @@ PY_BUILTIN_TO_TORCH_OP = {
 }
 
 SYMBOLIC_TORCH_OPS = {
-    torch.ops.aten.sym_size,
-    torch.ops.aten.sym_stride,
-    torch.ops.aten.sym_numel,
+    torch.ops.aten.sym_size.int,
+    torch.ops.aten.sym_stride.int,
+    torch.ops.aten.sym_numel.default,
 }
 
+# PyTorch now emits the .int/.default overloads instead of the bare sym_* ops.
 SYMBOLIC_OP_TO_TORCH_OP = {
-    (torch.ops.aten.sym_size, 1): torch.ops.aten.size.default,
-    (torch.ops.aten.sym_size, 2): torch.ops.aten.size.int,
-    (torch.ops.aten.sym_stride, 1): torch.ops.aten.stride.default,
-    (torch.ops.aten.sym_stride, 2): torch.ops.aten.stride.int,
-    (torch.ops.aten.sym_numel, 1): torch.ops.aten.numel.default,
+    torch.ops.aten.sym_size.default: torch.ops.aten.size.default,
+    torch.ops.aten.sym_size.int: torch.ops.aten.size.int,
+    torch.ops.aten.sym_stride.default: torch.ops.aten.stride.default,
+    torch.ops.aten.sym_stride.int: torch.ops.aten.stride.int,
+    torch.ops.aten.sym_numel.default: torch.ops.aten.numel.default,
 }
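
Background for reviewers: in recent PyTorch releases, torch.export/FX graphs carry
concrete OpOverload objects (e.g. torch.ops.aten.sym_size.int) as node targets rather
than the bare OpOverloadPacket (torch.ops.aten.sym_size), so set membership tests and
dict lookups keyed on the packets silently stop matching. The sketch below is not part
of the patch; the variable names and the rough version cutoff in the comments are
illustrative assumptions, not anything the commit itself states.

# Minimal sketch, not part of the patch. Assumes a recent PyTorch
# (roughly >= 2.1) where traced graphs put OpOverload objects in
# node.target; variable names here are illustrative only.
import torch

packet = torch.ops.aten.sym_size        # torch._ops.OpOverloadPacket
overload = torch.ops.aten.sym_size.int  # torch._ops.OpOverload (one concrete signature)

print(type(packet).__name__)    # OpOverloadPacket
print(type(overload).__name__)  # OpOverload

# A set keyed on packets (the old SYMBOLIC_TORCH_OPS style) no longer
# matches the overloads that newer tracing emits:
old_symbolic_ops = {torch.ops.aten.sym_size}
new_symbolic_ops = {torch.ops.aten.sym_size.int}

node_target = torch.ops.aten.sym_size.int  # what a traced graph now carries
print(node_target in old_symbolic_ops)  # False -- lookups silently miss
print(node_target in new_symbolic_ops)  # True  -- matches after this patch

This is also why SYMBOLIC_OP_TO_TORCH_OP no longer needs (op, arity) tuple keys: the
overload suffix (.default vs .int) already encodes which signature was called, so the
overload object alone is a sufficient key.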