From ec611c1e6f44eb5b49c658fd98740000935a1058 Mon Sep 17 00:00:00 2001
From: Stella Laurenzo
Date: Tue, 27 Jul 2021 17:48:47 -0700
Subject: [PATCH] Misc fixes for MacOS. (#255)

* Change aligned_alloc -> malloc. aligned_alloc can fail (and does on
  MacOS), and it is a bit of an over-aggressive optimization for a
  reference backend.
* Fixed a fragile test that prints -0.0 on MacOS.
* Fail the test (not the framework) on failure to trace (Torch on MacOS
  is missing features).
* Fix .so -> .dylib for compiler runtime.
---
 .../torchscript/e2e_test/framework.py         |  2 +-
 lib/RefBackend/Runtime/Runtime.cpp            |  3 +-
 .../npcomp/compiler/generic/backend/refjit.py |  7 +-
 python/npcomp/types.py                        | 64 +++++++++----------
 4 files changed, 41 insertions(+), 35 deletions(-)

diff --git a/frontends/pytorch/python/torch_mlir/torchscript/e2e_test/framework.py b/frontends/pytorch/python/torch_mlir/torchscript/e2e_test/framework.py
index be0b31f46..a7f19c643 100644
--- a/frontends/pytorch/python/torch_mlir/torchscript/e2e_test/framework.py
+++ b/frontends/pytorch/python/torch_mlir/torchscript/e2e_test/framework.py
@@ -227,9 +227,9 @@ def run_tests(tests: List[Test], config: TestConfig) -> List[TestResult]:
     """Invoke the given `Test`'s with the provided `TestConfig`."""
     results = []
     for test in tests:
-        golden_trace = _generate_golden_trace(test)
         # TODO: Precompile everything in parallel.
         try:
+            golden_trace = _generate_golden_trace(test)
             compiled = config.compile(test.program_factory())
         except Exception as e:
             results.append(
diff --git a/lib/RefBackend/Runtime/Runtime.cpp b/lib/RefBackend/Runtime/Runtime.cpp
index f2b04df89..cc8836b57 100644
--- a/lib/RefBackend/Runtime/Runtime.cpp
+++ b/lib/RefBackend/Runtime/Runtime.cpp
@@ -488,7 +488,8 @@ RtValue refbackrt::createRtValueFromOutputArgInfo(const OutputArgInfo &info) {
   switch (info.elementType) {
   case ElementType::F32: {
     auto byteSize = numel * sizeof(float);
-    data = static_cast<void *>(aligned_alloc(32, byteSize));
+    data = static_cast<void *>(malloc(byteSize));
+    assert(data && "could not allocate tensor");
     memset(data, 0, byteSize);
     return RtValue(Tensor::create(shape, ElementType::F32, data));
     break;
diff --git a/python/npcomp/compiler/generic/backend/refjit.py b/python/npcomp/compiler/generic/backend/refjit.py
index 5bba4609a..27d63e8bd 100644
--- a/python/npcomp/compiler/generic/backend/refjit.py
+++ b/python/npcomp/compiler/generic/backend/refjit.py
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

 import os
+import platform

 _refjit = None

@@ -40,7 +41,11 @@ def is_enabled() -> bool:
 def get_runtime_libs():
   # The _refjit_resources directory is at the npcomp.compiler level.
   resources_dir = os.path.join(os.path.dirname(__file__))
-  return [os.path.join(resources_dir, "libNPCOMPCompilerRuntimeShlib.so")]
+  suffix = ".so"
+  if platform.system() == "Darwin":
+    suffix = ".dylib"
+  shlib_name = f"libNPCOMPCompilerRuntimeShlib{suffix}"
+  return [os.path.join(resources_dir, shlib_name)]


 class JitModuleInvoker:
diff --git a/python/npcomp/types.py b/python/npcomp/types.py
index 569525272..84ac3d8f9 100644
--- a/python/npcomp/types.py
+++ b/python/npcomp/types.py
@@ -65,7 +65,7 @@ class _LiterateEnum(Enum):
   Traceback (most recent call last):
   ...
   ValueError: Cannot parse SampleEnum 1.0
-  
+
   """

   @classmethod
@@ -111,11 +111,11 @@ class TypeClass(_LiterateEnum):

 class ValueType:
   """The type a value can take in the npcomp language.
-  
+
   Types of values in npcomp are always being refined and are therefore
   mutable. Instances represent the type derived for a single value, not a
   concept of "typeness" generally.
-  
+
   >>> ValueType()
   Any
   >>> ValueType('NdArray')
@@ -166,7 +166,7 @@ class ValueType:

 class ValueTypeList:
   """Models a list of ValueTypes.
-  
+
   >>> v3 = ValueTypeList(3)
   >>> v3
   (Any, Any, Any)
   >>> v3[1] = 'NdArray'
   >>> v3
   (Any, NdArray, Any)
   >>> v3[2] += Rank(2)
   >>> v3
   (Any, Any, NdArray[Rank(2)])
-  
+
   With names:
   >>> v3 = ValueTypeList(3, [None, "b", None])
   >>> v3[1] = 'NdArray'
@@ -221,11 +221,11 @@

 class Signature:
   """A function signature.
-  
+
   This currently only models a linear list of positional arguments and
   assumes that multiple results will be represented by some form of tuple
   type.
-  
+
   >>> Signature()
   () -> Any
   >>> Signature(2)
@@ -279,7 +279,7 @@

 class ArrayParams:
   """Represents parameters defining how to construct an array.
-  
+
   >>> ArrayParams()
   ArrayParams(dtype=Unspec)
   >>> ArrayParams(np.float32)
@@ -309,26 +309,26 @@
   @classmethod
   def from_constraints(cls, constraints):
     """Constructs params for a TypeConstraints list.
-    
+
     Unconstrained:
     >>> ArrayParams.from_constraints(TypeConstraints())
     ArrayParams(dtype=Unspec)
-    
+
     DType constrained:
     >>> ArrayParams.from_constraints(TypeConstraints(DType(np.float32)))
     ArrayParams(dtype=float32)

     Rank constrained:
     >>> ArrayParams.from_constraints(TypeConstraints(Rank(2)))
     ArrayParams(dtype=Unspec, shape=(-1, -1))
-    
+
     Shape constrained:
     >>> ArrayParams.from_constraints(TypeConstraints(Shape(1, 2, 3)))
     ArrayParams(dtype=Unspec, shape=(1, 2, 3))
     >>> ArrayParams.from_constraints(TypeConstraints(
     ... Rank(3), Shape(1, 2, 3)))
     ArrayParams(dtype=Unspec, shape=(1, 2, 3))
-    
+
     Shape constrained with dynamic dim constraint:
     >>> ArrayParams.from_constraints(TypeConstraints(
     ... Shape(1, 2, 3), DynamicDim(1)))
     ArrayParams(dtype=Unspec, shape=(1, -1, 3))
     >>> ArrayParams.from_constraints(TypeConstraints(
     ... Shape(1, 2, 3), DynamicDim((0, 2))))
     ArrayParams(dtype=Unspec, shape=(-1, 2, -1))
-    
+
     Errors:
     >>> ArrayParams.from_constraints(TypeConstraints(
     ... Rank(4), Shape(1, 2, 3)))
     Traceback (most recent call last):
     ...
     >>> ArrayParams.from_constraints(TypeConstraints(
     ... Shape(1, 2, 3), DynamicDim((0, 5))))
     Traceback (most recent call last):
-    ... 
+    ...
     ValueError: Out of range DimFlag(Dynamic, (0, 5)) for shape [-1, 2, 3]
     """
     # TODO: Should have a 'canonicalize' method on TypeConstraints which
@@ -395,7 +395,7 @@
   @property
   def is_concrete(self):
     """Returns true if the parameters are sufficient to construct an ndarray.
-    
+
     >>> ArrayParams().is_concrete
     False
     >>> ArrayParams(dtype=np.float32).is_concrete
@@ -417,26 +417,26 @@
   def mlir_tensor_type_asm(self):
     """Get a corresponding MLIR tensor type.

     Fully Unspecified:
     >>> ArrayParams().mlir_tensor_type_asm
     'tensor<*x!numpy.any_dtype>'
-    
+
     Unranked:
     >>> ArrayParams(dtype=np.float32).mlir_tensor_type_asm
     'tensor<*xf32>'
-    
+
     Ranked:
     >>> ArrayParams(dtype=np.float32, rank=3).mlir_tensor_type_asm
     'tensor<?x?x?xf32>'
     >>> ArrayParams(dtype=np.float32, shape=(-1, -1)).mlir_tensor_type_asm
     'tensor<?x?xf32>'
-    
+
     Scalar:
     >>> ArrayParams(dtype=np.float32, rank=0).mlir_tensor_type_asm
     'tensor<f32>'
     >>> ArrayParams(dtype=np.float32, shape=()).mlir_tensor_type_asm
     'tensor<f32>'
-    
+
     Shaped:
     >>> ArrayParams(dtype=np.float32, shape=(2, 3)).mlir_tensor_type_asm
     'tensor<2x3xf32>'

   def new_ndarray(self):
     """Creates a new ndarray from these params.
-    
+
     >>> ArrayParams().new_ndarray()
     Traceback (most recent call last):
     ...
     ValueError: ArrayParams(dtype=Unspec) is not concrete
-    >>> ArrayParams(np.float32, (1, 2)).new_ndarray() * 0.0
+    >>> (ArrayParams(np.float32, (1, 2)).new_ndarray() * 0.0 + 1.0) * 0.0
     array([[0., 0.]], dtype=float32)
     """
     if not self.is_concrete:
@@ -480,7 +480,7 @@ class TypeConstraint:

 class TypeConstraints(list):
   """Collection of type constraints.
-  
+
   >>> TypeConstraints([DynamicDim()])
   TypeConstraints(DimFlag(Dynamic, Unspec))
   >>> TypeConstraints([DynamicDim(), Rank(4)])
@@ -554,9 +554,9 @@ class ArrayConstraint(TypeConstraint):

 class DType(ArrayConstraint):
   """A constraint on a dtype.
-  
+
   DType constraints are exclusive with only one permitted in a set.
-  
+
   >>> DType(np.float32)
   DType(float32)
   >>> DType("foobar")
@@ -597,7 +597,7 @@ class Rank(ArrayConstraint):
   Traceback (most recent call last):
   ...
   AssertionError
-  
+
   """

   __slots__ = ["_rank"]
@@ -619,9 +619,9 @@ class Rank(ArrayConstraint):

 class Shape(ArrayConstraint):
   """Establishes a static shape for an array.
-  
+
   All dimensions must be a non-negative integer or Unspec.
-  
+
   >>> Shape(1, 2, 3)
   Shape(1, 2, 3)
   >>> Shape(Unspec, 1)
@@ -665,9 +665,9 @@ class DimFlagEnum(_LiterateEnum):

 class DimFlag(ArrayConstraint):
   """Generic flag applying to one or more dimensions.
-  
+
   If dims is Unspec, the flag applies to all dims.
-  
+
   >>> DimFlag("Dynamic")
   DimFlag(Dynamic, Unspec)
   >>> DimFlag("Dynamic", 1)
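
Note on the -0.0 doctest fix above: here is a minimal standalone sketch of the
behavior (illustrative only, not part of the patch; it assumes nothing beyond
numpy). The np.ndarray constructor returns an uninitialized buffer, which may
contain negative values; under IEEE 754 a negative value times 0.0 is -0.0,
and numpy's repr prints the sign, so a doctest expecting plain zeros can fail
platform-dependently. Adding 1.0 before the final multiply collapses any -0.0
into an ordinary 0.0, which is what the rewritten doctest relies on:

    import numpy as np

    # A negative float stands in for whatever an uninitialized buffer
    # might happen to contain.
    x = np.array([-3.5], dtype=np.float32)

    print(x * 0.0)                # sign bit survives the multiply: [-0.]
    print((x * 0.0 + 1.0) * 0.0)  # -0.0 + 1.0 == 1.0; 1.0 * 0.0 == 0.0: [0.]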