update fx_importer for symbolic changes upstream

torch.ops.aten.sym_size -> torch.ops.aten.sym_size.int
update_fx_import_for_sym_changes
dan 2024-01-25 18:19:42 +00:00
parent e581b33f96
commit 824119ad8b
1 changed file with 9 additions and 8 deletions

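Note: with recent PyTorch releases, FX graphs produced by torch.export / Dynamo target the overload-qualified OpOverload objects (torch.ops.aten.sym_size.int, .sym_stride.int, .sym_numel.default) rather than the bare OpOverloadPacket torch.ops.aten.sym_size, which is what the table changes below track. A rough repro sketch, assuming a torch version that ships the torch.export API; the Flatten module and the "batch" Dim name are illustrative, not part of this patch:

# Hypothetical repro, not part of this change: show that exported graphs now
# target the overload-qualified symbolic ops.
import torch
from torch.export import Dim, export

class Flatten(torch.nn.Module):
    def forward(self, x):
        # x.shape[0] on a dynamic dim is recorded as an aten.sym_size.int node
        return x.reshape(x.shape[0], -1)

ep = export(Flatten(), (torch.randn(4, 3, 2),),
            dynamic_shapes={"x": {0: Dim("batch")}})
targets = {n.target for n in ep.graph.nodes if n.op == "call_function"}
print(torch.ops.aten.sym_size.int in targets)  # expected: True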

@@ -187,17 +187,18 @@ PY_BUILTIN_TO_TORCH_OP = {
 }
 SYMBOLIC_TORCH_OPS = {
-    torch.ops.aten.sym_size,
-    torch.ops.aten.sym_stride,
-    torch.ops.aten.sym_numel,
+    torch.ops.aten.sym_size.int,
+    torch.ops.aten.sym_stride.int,
+    torch.ops.aten.sym_numel.default,
 }
+#pytorch now emits the .int, .default instead of just sym_*
 SYMBOLIC_OP_TO_TORCH_OP = {
-    (torch.ops.aten.sym_size, 1): torch.ops.aten.size.default,
-    (torch.ops.aten.sym_size, 2): torch.ops.aten.size.int,
-    (torch.ops.aten.sym_stride, 1): torch.ops.aten.stride.default,
-    (torch.ops.aten.sym_stride, 2): torch.ops.aten.stride.int,
-    (torch.ops.aten.sym_numel, 1): torch.ops.aten.numel.default,
+    torch.ops.aten.sym_size.default: torch.ops.aten.size.default,
+    torch.ops.aten.sym_size.int: torch.ops.aten.size.int,
+    torch.ops.aten.sym_stride.default: torch.ops.aten.stride.default,
+    torch.ops.aten.sym_stride.int: torch.ops.aten.stride.int,
+    torch.ops.aten.sym_numel.default: torch.ops.aten.numel.default,
 }