Add f8 types to fx importer (#3434)

Missing types for tracing float8 types.
pull/3437/head
Rob Suderman 2024-06-07 13:58:18 -07:00 committed by GitHub
parent f794582b18
commit 7f188eb824
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed file with 16 additions and 0 deletions

View File

@@ -99,6 +99,10 @@ from ..ir import (
    FloatAttr,
    BF16Type,
    ComplexType,
    Float8E5M2Type,
    Float8E4M3FNType,
    Float8E5M2FNUZType,
    Float8E4M3FNUZType,
    F16Type,
    F32Type,
    F64Type,
@@ -147,6 +151,10 @@ TORCH_DTYPE_TO_MLIR_TYPE_ASM = {
    torch.complex32: "complex<f16>",
    torch.complex64: "complex<f32>",
    torch.complex128: "complex<f64>",
    torch.float8_e5m2: "f8E5M2",
    torch.float8_e4m3fn: "f8E4M3FN",
    torch.float8_e5m2fnuz: "f8E5M2FNUZ",
    torch.float8_e4m3fnuz: "f8E4M3FNUZ",
}
TORCH_DTYPE_TO_MLIR_TYPE: Dict[torch.dtype, Callable[[], IrType]] = {
@@ -165,6 +173,10 @@ TORCH_DTYPE_TO_MLIR_TYPE: Dict[torch.dtype, Callable[[], IrType]] = {
    torch.complex32: lambda: ComplexType.get(F16Type.get()),
    torch.complex64: lambda: ComplexType.get(F32Type.get()),
    torch.complex128: lambda: ComplexType.get(F64Type.get()),
    torch.float8_e5m2: lambda: Float8E5M2Type.get(),
    torch.float8_e5m2fnuz: lambda: Float8E5M2FNUZType.get(),
    torch.float8_e4m3fn: lambda: Float8E4M3FNType.get(),
    torch.float8_e4m3fnuz: lambda: Float8E4M3FNUZType.get(),
}
TORCH_DTYPE_TO_NPY_TYPE = {
@@ -203,6 +215,10 @@ TORCH_DTYPE_TO_INT = {
    # torch.quint8: 13,
    # torch.qint32: 14,
    torch.bfloat16: 15,
    torch.float8_e5m2: 23,
    torch.float8_e4m3fn: 24,
    torch.float8_e5m2fnuz: 25,
    torch.float8_e4m3fnuz: 26,
}
TORCH_MEMORY_FORMAT_TO_INT = {