mirror of https://github.com/llvm/torch-mlir

Bump llvm-project to e31c77b1827fa4dd3511f21af11cfab18ecf6d38

Signed-off-by: Bairen Yi <yibairen.byron@bytedance.com>

Ref: pull/185/head
Parent: 06373dcbbb
Commit: 53b01cb9ba

@@ -1 +1 @@
-Subproject commit c68d2895a1f4019b387c69d1e5eec31b0eb5e7b0
+Subproject commit e31c77b1827fa4dd3511f21af11cfab18ecf6d38

@@ -21,9 +21,7 @@ namespace py = pybind11;
 using namespace torch_mlir;
 
 static py::object getMlirIrClass(const char *className) {
-  // Note that the "mlir" module may be a loader which internally sets up
-  // the child modules, so it must be resolved incrementally (vs "mlir.ir").
-  return py::module::import("mlir").attr("ir").attr(className);
+  return py::module::import("mlir.ir").attr(className);
 }
 
 static py::object createPythonContextIfNone(py::object contextObj) {

@@ -221,8 +221,8 @@ MlirAttribute torch_mlir::converTensorToMlirElementsAttr(at::Tensor tensor,
     elementType = typeMapper.mapFromTorchScalarType(tensor.scalar_type());
   }
   std::vector<int64_t> shape(tensor.sizes().begin(), tensor.sizes().end());
-  MlirType shapedType = mlirRankedTensorTypeGetChecked(
-      shape.size(), shape.data(), elementType, loc);
+  MlirType shapedType = mlirRankedTensorTypeGetChecked(loc, shape.size(),
+                                                       shape.data(), elementType);
   if (mlirTypeIsNull(shapedType)) {
     throwUnsupportedTensorError();
   }

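At the pinned LLVM revision, the checked builders in the MLIR C API take the diagnostic location as their first argument instead of their last, which is what the hunk above adapts to. Below is a minimal sketch of the new call order; the header names, the f32 element type, and the surrounding setup are assumptions for illustration and are not part of this commit:

    #include <stdint.h>
    #include <stdio.h>

    #include "mlir-c/BuiltinTypes.h"
    #include "mlir-c/IR.h"

    int main(void) {
      MlirContext ctx = mlirContextCreate();
      MlirLocation loc = mlirLocationUnknownGet(ctx);
      MlirType f32 = mlirF32TypeGet(ctx);

      int64_t shape[2] = {2, 3};
      // Old order: mlirRankedTensorTypeGetChecked(rank, shape, elementType, loc)
      // New order: the location used for diagnostics comes first.
      MlirType tensorType = mlirRankedTensorTypeGetChecked(loc, 2, shape, f32);

      if (mlirTypeIsNull(tensorType))
        fprintf(stderr, "could not build the ranked tensor type\n");

      mlirContextDestroy(ctx);
      return 0;
    }
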
@@ -356,7 +356,7 @@ def Basicpy_ExecOp : Basicpy_Op<"exec", [
 
   let skipDefaultBuilders = 1;
   let builders = [
-    OpBuilderDAG<(ins)>,
+    OpBuilder<(ins)>,
   ];
   let extraClassDeclaration = [{
     OpBuilder getBodyBuilder() {

@@ -404,7 +404,7 @@ def Basicpy_FuncTemplateCallOp : Basicpy_Op<"func_template_call", []> {
   let assemblyFormat = "$callee `(` $args `)` `kw` $arg_names attr-dict `:` functional-type($args, results)";
   let skipDefaultBuilders = 1;
   let builders = [
-    OpBuilderDAG<(ins)>,
+    OpBuilder<(ins)>,
   ];
 }
 

@@ -475,7 +475,7 @@ def Basicpy_FuncTemplateOp : Basicpy_Op<"func_template", [
 
   let skipDefaultBuilders = 1;
   let builders = [
-    OpBuilderDAG<(ins)>,
+    OpBuilder<(ins)>,
   ];
   let extraClassDeclaration = [{
     OpBuilder getBodyBuilder() {

@@ -291,7 +291,7 @@ def Torch_GlobalSlotInitOp : Torch_Op<"global_slot.init", [
   // ensureTerminator in the default builders for SingleBlockImplicitTerminator
   // on the parent torch.global_slot op.
   // TODO: Have a SingleBlockExplicitTerminator trait.
-  let builders = [OpBuilderDAG<(ins), [{ /*nothing to do */ }]>];
+  let builders = [OpBuilder<(ins), [{ /*nothing to do */ }]>];
 
   let assemblyFormat = "$initialValue attr-dict `:` type($initialValue)";
 }

@@ -14,7 +14,7 @@ add_library(NPCOMPBackendRefJITPythonModule
 target_link_libraries(NPCOMPBackendRefJITPythonModule
   pybind11::module
   MLIRExecutionEngine
-  MLIRTargetLLVMIR
+  MLIRTargetLLVMIRExport
 
   NPCOMPRefBackendJITHelpers
   )

@@ -12,7 +12,7 @@ add_npcomp_library(NPCOMPCAPI
   LINK_LIBS PUBLIC
   MLIRExecutionEngine
   MLIRLLVMIR
-  MLIRTargetLLVMIR
+  MLIRTargetLLVMIRExport
   NPCOMPInitAll
   NPCOMPBasicpyDialect
   NPCOMPNumpyDialect

@@ -76,7 +76,7 @@ static MemRefType getShapeErasedMemRefType(MemRefType type) {
     shape[i] = -1;
   }
   return MemRefType::get(shape, type.getElementType(), type.getAffineMaps(),
-                         type.getMemorySpace());
+                         type.getMemorySpaceAsInt());
 }
 
 /// Create a type cast to memref

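The hunk above appears to track the upstream MLIR change in which MemRefType::getMemorySpace() returns an Attribute rather than an integer, with getMemorySpaceAsInt() preserving the old integer view. A small C++ sketch of the two accessors, assuming the usual builtin-type headers and the API as of this revision (nothing here is code from this commit):

    #include "mlir/IR/BuiltinTypes.h"
    #include "mlir/IR/MLIRContext.h"

    using namespace mlir;

    int main() {
      MLIRContext context;
      auto f32 = FloatType::getF32(&context);
      // Default memory space: no attribute attached.
      auto memref = MemRefType::get({2, 4}, f32);

      Attribute spaceAttr = memref.getMemorySpace();     // now an Attribute (null here)
      unsigned spaceInt = memref.getMemorySpaceAsInt();  // legacy integer view (0 here)

      return (!spaceAttr && spaceInt == 0) ? 0 : 1;
    }
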
@@ -192,7 +192,7 @@ ObjectGraphGlobalizer::recursivelyTraverseClassType(ClassTypeOp classType) {
     auto linkageName = llvm::join(nameStack, ".");
     auto globalSlot = globalBuilder.create<GlobalSlotOp>(
         attr->getLoc(), linkageName, /*sym_visibility=*/nullptr,
-        TypeAttr::get(attr.type()));
+        attr.type());
     if (attr.isPrivate())
       globalSlot.setVisibility(SymbolTable::Visibility::Private);
     AttrOfClass attrOfClass = {classType, attr.name()};

@@ -9,7 +9,7 @@
 #include "npcomp/RefBackend/JITHelpers/JITModule.h"
 #include "mlir/ExecutionEngine/CRunnerUtils.h"
 #include "mlir/ExecutionEngine/OptUtils.h"
-#include "mlir/Target/LLVMIR.h"
+#include "mlir/Target/LLVMIR/Dialect/LLVMIR/LLVMToLLVMIRTranslation.h"
 #include "npcomp/RefBackend/RefBackend.h"
 
 using namespace refback;

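The include swap above reflects the upstream split of the monolithic LLVM IR target into per-dialect translation libraries: consumers now include only the translation they need and register it before JIT compilation. A hedged sketch of what such a registration typically looks like with the new header; the surrounding setup is illustrative and not taken from this commit:

    #include "mlir/IR/MLIRContext.h"
    #include "mlir/Target/LLVMIR/Dialect/LLVMIR/LLVMToLLVMIRTranslation.h"

    int main() {
      mlir::MLIRContext context;
      // Make the LLVM-dialect-to-LLVM-IR translation available so later
      // stages (e.g. an ExecutionEngine) can lower modules to LLVM IR.
      mlir::registerLLVMDialectTranslation(context);
      return 0;
    }
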
@@ -11,8 +11,8 @@ def _load_extension():
   import _npcomp
   sys.setdlopenflags(flags)
 
-  import mlir
-  mlir._cext.globals.append_dialect_search_prefix("npcomp.dialects")
+  from mlir._cext_loader import _cext
+  _cext.globals.append_dialect_search_prefix("npcomp.dialects")
   return _npcomp
 
 

@@ -4,4 +4,4 @@
 
 # Generated tablegen dialects expect to be able to find some symbols from
 # the mlir.dialects package.
-from mlir.dialects import _cext, segmented_accessor, equally_sized_accessor, extend_opview_class, get_default_loc_context
+from mlir.dialects._ods_common import _cext, segmented_accessor, equally_sized_accessor, extend_opview_class, get_default_loc_context