Misc fixes for MacOS. (#255)

* Change aligned_alloc -> malloc. aligned_alloc can fail (and does on macOS) and is a bit of an over-aggressive optimization for a reference backend.
* Fixed a fragile test that prints -0.0 on macOS.
* Fail the test (not the framework) on a failure to trace (Torch on macOS is missing features).
* Fix .so -> .dylib for the compiler runtime shared library.
pull/256/head
Stella Laurenzo 2021-07-27 17:48:47 -07:00 committed by GitHub
parent 2dbab50444
commit ec611c1e6f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 41 additions and 35 deletions

View File

@ -227,9 +227,9 @@ def run_tests(tests: List[Test], config: TestConfig) -> List[TestResult]:
"""Invoke the given `Test`'s with the provided `TestConfig`.""" """Invoke the given `Test`'s with the provided `TestConfig`."""
results = [] results = []
for test in tests: for test in tests:
golden_trace = _generate_golden_trace(test)
# TODO: Precompile everything in parallel. # TODO: Precompile everything in parallel.
try: try:
golden_trace = _generate_golden_trace(test)
compiled = config.compile(test.program_factory()) compiled = config.compile(test.program_factory())
except Exception as e: except Exception as e:
results.append( results.append(

View File

@ -488,7 +488,8 @@ RtValue refbackrt::createRtValueFromOutputArgInfo(const OutputArgInfo &info) {
switch (info.elementType) { switch (info.elementType) {
case ElementType::F32: { case ElementType::F32: {
auto byteSize = numel * sizeof(float); auto byteSize = numel * sizeof(float);
data = static_cast<void *>(aligned_alloc(32, byteSize)); data = static_cast<void *>(malloc(byteSize));
assert(data && "could not allocate tensor");
memset(data, 0, byteSize); memset(data, 0, byteSize);
return RtValue(Tensor::create(shape, ElementType::F32, data)); return RtValue(Tensor::create(shape, ElementType::F32, data));
break; break;

View File

@ -3,6 +3,7 @@
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception # SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
import os import os
import platform
_refjit = None _refjit = None
@ -40,7 +41,11 @@ def is_enabled() -> bool:
def get_runtime_libs(): def get_runtime_libs():
# The _refjit_resources directory is at the npcomp.compiler level. # The _refjit_resources directory is at the npcomp.compiler level.
resources_dir = os.path.join(os.path.dirname(__file__)) resources_dir = os.path.join(os.path.dirname(__file__))
return [os.path.join(resources_dir, "libNPCOMPCompilerRuntimeShlib.so")] suffix = ".so"
if platform.system() == "Darwin":
suffix = ".dylib"
shlib_name = f"libNPCOMPCompilerRuntimeShlib{suffix}"
return [os.path.join(resources_dir, shlib_name)]
class JitModuleInvoker: class JitModuleInvoker:

View File

@ -65,7 +65,7 @@ class _LiterateEnum(Enum):
Traceback (most recent call last): Traceback (most recent call last):
... ...
ValueError: Cannot parse SampleEnum 1.0 ValueError: Cannot parse SampleEnum 1.0
""" """
@classmethod @classmethod
@ -111,11 +111,11 @@ class TypeClass(_LiterateEnum):
class ValueType: class ValueType:
"""The type a value can take in the npcomp language. """The type a value can take in the npcomp language.
Types of values in npcomp are always being refined and are therefore Types of values in npcomp are always being refined and are therefore
mutable. Instances represent the type derived for a single value, not a mutable. Instances represent the type derived for a single value, not a
concept of "typeness" generally. concept of "typeness" generally.
>>> ValueType() >>> ValueType()
Any Any
>>> ValueType('NdArray') >>> ValueType('NdArray')
@ -166,7 +166,7 @@ class ValueType:
class ValueTypeList: class ValueTypeList:
"""Models a list of ValueTypes. """Models a list of ValueTypes.
>>> v3 = ValueTypeList(3) >>> v3 = ValueTypeList(3)
>>> v3 >>> v3
(Any, Any, Any) (Any, Any, Any)
@ -178,7 +178,7 @@ class ValueTypeList:
>>> v3[2] += Rank(2) >>> v3[2] += Rank(2)
>>> v3 >>> v3
(Any, Any, NdArray[Rank(2)]) (Any, Any, NdArray[Rank(2)])
With names: With names:
>>> v3 = ValueTypeList(3, [None, "b", None]) >>> v3 = ValueTypeList(3, [None, "b", None])
>>> v3[1] = 'NdArray' >>> v3[1] = 'NdArray'
@ -221,11 +221,11 @@ class ValueTypeList:
class Signature: class Signature:
"""A function signature. """A function signature.
This currently only models a linear list of positional arguments and This currently only models a linear list of positional arguments and
assumes that multiple results will be represented by some form of tuple assumes that multiple results will be represented by some form of tuple
type. type.
>>> Signature() >>> Signature()
() -> Any () -> Any
>>> Signature(2) >>> Signature(2)
@ -279,7 +279,7 @@ class Signature:
class ArrayParams: class ArrayParams:
"""Represents parameters defining how to construct an array. """Represents parameters defining how to construct an array.
>>> ArrayParams() >>> ArrayParams()
ArrayParams(dtype=Unspec) ArrayParams(dtype=Unspec)
>>> ArrayParams(np.float32) >>> ArrayParams(np.float32)
@ -309,26 +309,26 @@ class ArrayParams:
@classmethod @classmethod
def from_constraints(cls, constraints): def from_constraints(cls, constraints):
"""Constructs params for a TypeConstraints list. """Constructs params for a TypeConstraints list.
Unconstrained: Unconstrained:
>>> ArrayParams.from_constraints(TypeConstraints()) >>> ArrayParams.from_constraints(TypeConstraints())
ArrayParams(dtype=Unspec) ArrayParams(dtype=Unspec)
DType constrained: DType constrained:
>>> ArrayParams.from_constraints(TypeConstraints(DType(np.float32))) >>> ArrayParams.from_constraints(TypeConstraints(DType(np.float32)))
ArrayParams(dtype=float32) ArrayParams(dtype=float32)
Rank constrained: Rank constrained:
>>> ArrayParams.from_constraints(TypeConstraints(Rank(2))) >>> ArrayParams.from_constraints(TypeConstraints(Rank(2)))
ArrayParams(dtype=Unspec, shape=(-1, -1)) ArrayParams(dtype=Unspec, shape=(-1, -1))
Shape constrained: Shape constrained:
>>> ArrayParams.from_constraints(TypeConstraints(Shape(1, 2, 3))) >>> ArrayParams.from_constraints(TypeConstraints(Shape(1, 2, 3)))
ArrayParams(dtype=Unspec, shape=(1, 2, 3)) ArrayParams(dtype=Unspec, shape=(1, 2, 3))
>>> ArrayParams.from_constraints(TypeConstraints( >>> ArrayParams.from_constraints(TypeConstraints(
... Rank(3), Shape(1, 2, 3))) ... Rank(3), Shape(1, 2, 3)))
ArrayParams(dtype=Unspec, shape=(1, 2, 3)) ArrayParams(dtype=Unspec, shape=(1, 2, 3))
Shape constrained with dynamic dim constraint: Shape constrained with dynamic dim constraint:
>>> ArrayParams.from_constraints(TypeConstraints( >>> ArrayParams.from_constraints(TypeConstraints(
... Shape(1, 2, 3), DynamicDim(1))) ... Shape(1, 2, 3), DynamicDim(1)))
@ -336,7 +336,7 @@ class ArrayParams:
>>> ArrayParams.from_constraints(TypeConstraints( >>> ArrayParams.from_constraints(TypeConstraints(
... Shape(1, 2, 3), DynamicDim((0, 2)))) ... Shape(1, 2, 3), DynamicDim((0, 2))))
ArrayParams(dtype=Unspec, shape=(-1, 2, -1)) ArrayParams(dtype=Unspec, shape=(-1, 2, -1))
Errors: Errors:
>>> ArrayParams.from_constraints(TypeConstraints( >>> ArrayParams.from_constraints(TypeConstraints(
... Rank(4), Shape(1, 2, 3))) ... Rank(4), Shape(1, 2, 3)))
@ -346,7 +346,7 @@ class ArrayParams:
>>> ArrayParams.from_constraints(TypeConstraints( >>> ArrayParams.from_constraints(TypeConstraints(
... Shape(1, 2, 3), DynamicDim((0, 5)))) ... Shape(1, 2, 3), DynamicDim((0, 5))))
Traceback (most recent call last): Traceback (most recent call last):
... ...
ValueError: Out of range DimFlag(Dynamic, (0, 5)) for shape [-1, 2, 3] ValueError: Out of range DimFlag(Dynamic, (0, 5)) for shape [-1, 2, 3]
""" """
# TODO: Should have a 'canonicalize' method on TypeConstraints which # TODO: Should have a 'canonicalize' method on TypeConstraints which
@ -395,7 +395,7 @@ class ArrayParams:
@property @property
def is_concrete(self): def is_concrete(self):
"""Returns true if the parameters are sufficient to construct an ndarray. """Returns true if the parameters are sufficient to construct an ndarray.
>>> ArrayParams().is_concrete >>> ArrayParams().is_concrete
False False
>>> ArrayParams(dtype=np.float32).is_concrete >>> ArrayParams(dtype=np.float32).is_concrete
@ -417,26 +417,26 @@ class ArrayParams:
def mlir_tensor_type_asm(self): def mlir_tensor_type_asm(self):
"""Get a corresponding MLIR tensor type. """Get a corresponding MLIR tensor type.
Fully Unspecified: Fully Unspecified:
>>> ArrayParams().mlir_tensor_type_asm >>> ArrayParams().mlir_tensor_type_asm
'tensor<*x!numpy.any_dtype>' 'tensor<*x!numpy.any_dtype>'
Unranked: Unranked:
>>> ArrayParams(dtype=np.float32).mlir_tensor_type_asm >>> ArrayParams(dtype=np.float32).mlir_tensor_type_asm
'tensor<*xf32>' 'tensor<*xf32>'
Ranked: Ranked:
>>> ArrayParams(dtype=np.float32, rank=3).mlir_tensor_type_asm >>> ArrayParams(dtype=np.float32, rank=3).mlir_tensor_type_asm
'tensor<?x?x?xf32>' 'tensor<?x?x?xf32>'
>>> ArrayParams(dtype=np.float32, shape=(-1, -1)).mlir_tensor_type_asm >>> ArrayParams(dtype=np.float32, shape=(-1, -1)).mlir_tensor_type_asm
'tensor<?x?xf32>' 'tensor<?x?xf32>'
Scalar: Scalar:
>>> ArrayParams(dtype=np.float32, rank=0).mlir_tensor_type_asm >>> ArrayParams(dtype=np.float32, rank=0).mlir_tensor_type_asm
'tensor<f32>' 'tensor<f32>'
>>> ArrayParams(dtype=np.float32, shape=()).mlir_tensor_type_asm >>> ArrayParams(dtype=np.float32, shape=()).mlir_tensor_type_asm
'tensor<f32>' 'tensor<f32>'
Shaped: Shaped:
>>> ArrayParams(dtype=np.float32, shape=(2, 3)).mlir_tensor_type_asm >>> ArrayParams(dtype=np.float32, shape=(2, 3)).mlir_tensor_type_asm
'tensor<2x3xf32>' 'tensor<2x3xf32>'
@ -460,12 +460,12 @@ class ArrayParams:
def new_ndarray(self): def new_ndarray(self):
"""Creates a new ndarray from these params. """Creates a new ndarray from these params.
>>> ArrayParams().new_ndarray() >>> ArrayParams().new_ndarray()
Traceback (most recent call last): Traceback (most recent call last):
... ...
ValueError: ArrayParams(dtype=Unspec) is not concrete ValueError: ArrayParams(dtype=Unspec) is not concrete
>>> ArrayParams(np.float32, (1, 2)).new_ndarray() * 0.0 >>> (ArrayParams(np.float32, (1, 2)).new_ndarray() * 0.0 + 1.0) * 0.0
array([[0., 0.]], dtype=float32) array([[0., 0.]], dtype=float32)
""" """
if not self.is_concrete: if not self.is_concrete:
@ -480,7 +480,7 @@ class TypeConstraint:
class TypeConstraints(list): class TypeConstraints(list):
"""Collection of type constraints. """Collection of type constraints.
>>> TypeConstraints([DynamicDim()]) >>> TypeConstraints([DynamicDim()])
TypeConstraints(DimFlag(Dynamic, Unspec)) TypeConstraints(DimFlag(Dynamic, Unspec))
>>> TypeConstraints([DynamicDim(), Rank(4)]) >>> TypeConstraints([DynamicDim(), Rank(4)])
@ -554,9 +554,9 @@ class ArrayConstraint(TypeConstraint):
class DType(ArrayConstraint): class DType(ArrayConstraint):
"""A constraint on a dtype. """A constraint on a dtype.
DType constraints are exclusive with only one permitted in a set. DType constraints are exclusive with only one permitted in a set.
>>> DType(np.float32) >>> DType(np.float32)
DType(float32) DType(float32)
>>> DType("foobar") >>> DType("foobar")
@ -597,7 +597,7 @@ class Rank(ArrayConstraint):
Traceback (most recent call last): Traceback (most recent call last):
... ...
AssertionError AssertionError
""" """
__slots__ = ["_rank"] __slots__ = ["_rank"]
@ -619,9 +619,9 @@ class Rank(ArrayConstraint):
class Shape(ArrayConstraint): class Shape(ArrayConstraint):
"""Establishes a static shape for an array. """Establishes a static shape for an array.
All dimensions must be a non-negative integer or Unspec. All dimensions must be a non-negative integer or Unspec.
>>> Shape(1, 2, 3) >>> Shape(1, 2, 3)
Shape(1, 2, 3) Shape(1, 2, 3)
>>> Shape(Unspec, 1) >>> Shape(Unspec, 1)
@ -665,9 +665,9 @@ class DimFlagEnum(_LiterateEnum):
class DimFlag(ArrayConstraint): class DimFlag(ArrayConstraint):
"""Generic flag applying to one or more dimensions. """Generic flag applying to one or more dimensions.
If dims is Unspec, the flag applies to all dims. If dims is Unspec, the flag applies to all dims.
>>> DimFlag("Dynamic") >>> DimFlag("Dynamic")
DimFlag(Dynamic, Unspec) DimFlag(Dynamic, Unspec)
>>> DimFlag("Dynamic", 1) >>> DimFlag("Dynamic", 1)