mirror of https://github.com/llvm/torch-mlir
Remove last mentions of IREE.
parent 9fc059e948
commit 5917f1dc47
@@ -93,7 +93,7 @@ def ConvertTorchToLinalg : Pass<"convert-torch-to-linalg", "FuncOp"> {
     4. All this code operates on ranked tensors, for which using individual
        SSA values for sizes (rather than a "shape type") seems to
        work really well at this level of abstraction based on prior experience
-       in IREE. (unranked code tends to benefit from having a discrete
+       in other projects. (unranked code tends to benefit from having a discrete
        "shape type" to model shapes).
 
     We will see if we end up needing something like `shape.assuming`, but for
@@ -88,7 +88,7 @@ LOWERING_PIPELINE = ",".join([
 
 
 class RefBackendLinalgOnTensorsBackend(LinalgOnTensorsBackend):
-    """Main entry-point for the backend."""
+    """Main entry-point for the reference backend."""
     def __init__(self):
         super().__init__()
 
@@ -102,10 +102,8 @@ class RefBackendLinalgOnTensorsBackend(LinalgOnTensorsBackend):
           imported_module: The MLIR module consisting of funcs in the torch
             dialect.
         Returns:
-          An opaque, backend specific module object that can be passed to load.
-          The object may actually be something more specific to the backend (i.e.
-          for IREE, it is a serialized VM flatbuffer) but the contract is that
-          it is operated on by methods on this class.
+          An opaque, backend specific compiled artifact object that can be
+          passed to `load`.
         """
         with imported_module.context:
             pm = PassManager.parse(LOWERING_PIPELINE)
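For context, the second hunk describes the backend's compile/load contract: compile() returns an opaque compiled artifact that is only meaningful when handed back to load(). Below is a minimal usage sketch of that contract, not part of the commit; the import path and the invoked function name "forward" are assumptions for illustration, and only compile() and load() are named by the code in this diff.

# Hedged usage sketch of the compile/load contract described above.
# Assumptions (not from this diff): the import path and the function
# name "forward" on the loaded artifact.
import numpy as np

from torch_mlir_e2e_test.linalg_on_tensors_backends.refbackend import (
    RefBackendLinalgOnTensorsBackend,
)


def run_with_refbackend(imported_module, *inputs):
    """Compile an already-imported MLIR module with the reference backend
    and invoke its (assumed) `forward` function with numpy inputs."""
    backend = RefBackendLinalgOnTensorsBackend()
    compiled = backend.compile(imported_module)  # opaque compiled artifact
    loaded = backend.load(compiled)              # exposes callable functions
    return loaded.forward(*inputs)


# Example call, assuming `module` holds an MLIR module of the form the
# docstring above expects:
# result = run_with_refbackend(module, np.ones((2, 3), dtype=np.float32))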