build: manually update PyTorch version

Set PyTorch and TorchVision version to nightly release 2023-08-30.

Signed-off-by: Vivek Khandelwal <vivek@nod-labs.com>
pull/2428/head snapshot-20230831.947
Vivek Khandelwal 2023-08-25 06:07:30 +00:00
parent 6b02e9a926
commit aa15f0d4ca
5 changed files with 19 additions and 12 deletions

View File

@@ -286,8 +286,17 @@ TORCHDYNAMO_XFAIL_SET = {
# 'linalg.depthwise_conv_2d_nchw_chw' op inferred input/output operand #1 has shape's dimension #0 to be 4, but found 8
"Conv2dWithPaddingDilationStrideStaticModule_depthwise_multiplier",
# Exception: Unsupported: node.meta['val'] is not a FakeTensor or list of FakeTensor's: _scaled_dot_product_flash_attention;
"ScaledDotProductAttentionSameModule_basic",
"ScaledDotProductAttentionDifferentModule_basic",
}
if torch_version_for_comparison() < version.parse("2.1.0.dev"):
TORCHDYNAMO_XFAIL_SET -= {
"ScaledDotProductAttentionSameModule_basic",
"ScaledDotProductAttentionDifferentModule_basic",
}
TORCHDYNAMO_CRASHING_SET = {
# No upstream decompositions.
# %6:4 = torch.operator "aten._embedding_bag_forward_only"(%1, %3, %5, %false, %int0, %false, %none, %false, %int-1) : (!torch.tensor<*,f32>, !torch.tensor<*,si64>, !torch.tensor<*,si64>, !torch.bool, !torch.int, !torch.bool, !torch.none, !torch.bool, !torch.int) -> (!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor)
@@ -1234,13 +1243,6 @@ MAKE_FX_TOSA_PASS_SET = (TOSA_PASS_SET | {
}) - {
### Test failing in make_fx_tosa but not in tosa
# failed to lower torch.aten.empty.memory_format
"BatchNorm1DModule_basic",
"BatchNorm1DWith2DInputModule_basic",
"BatchNorm2DModule_basic",
"BatchNorm3DModule_basic",
"BatchNorm1DStaticShapeModule_basic",
# Dynamic shape, has extra unsupported broadcast ops
"Matmul_3d",
@@ -1261,6 +1263,13 @@ if torch_version_for_comparison() < version.parse("2.1.0.dev"):
MAKE_FX_TOSA_PASS_SET -= {
# 'tensor.expand_shape' op expected rank expansion, but found source rank 1 >= result rank 1
"ReshapeCollapseModule_basic",
# failed to lower torch.aten.empty.memory_format
"BatchNorm1DModule_basic",
"BatchNorm1DWith2DInputModule_basic",
"BatchNorm2DModule_basic",
"BatchNorm3DModule_basic",
"BatchNorm1DStaticShapeModule_basic",
}
LTC_CRASHING_SET = {

View File

@@ -274,7 +274,6 @@ def _lower_mlir_module(verbose, output_type, module):
print("Torch Backend IR")
print(module)
# module.dump()
if output_type == OutputType.TORCH:
return module
@@ -293,7 +292,6 @@ def _lower_mlir_module(verbose, output_type, module):
module,
"builtin.module(torch-backend-to-linalg-on-tensors-backend-pipeline)",
"Lowering Torch Backend IR -> Linalg-on-Tensors Backend IR")
# module.dump()
if verbose:
print("\n====================")
print("LINALG Backend IR")

View File

@@ -1 +1 @@
4ce227bfb953d1f64c4d86cc913144ee2a210e57
b3874abf04e0579fdc0025195593d90b97178f0e

View File

@@ -1,3 +1,3 @@
-f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html
--pre
torch==2.1.0.dev20230820
torch==2.1.0.dev20230830

View File

@@ -1,3 +1,3 @@
-f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html
--pre
torchvision==0.16.0.dev20230820
torchvision==0.16.0.dev20230830