mirror of https://github.com/llvm/torch-mlir
Delete RestrictedCanonicalizer
It doesn't work properly with the new dialect registration framework. This was latent and was only exposed when running through npcomp-opt. Not worth investing the brainpower to fix now. (branch: pull/294/head)
parent
d7320f3bda
commit
29e1b2fe89
|
@ -37,33 +37,4 @@ def LowerToLLVM : Pass<"refback-lower-to-llvm", "ModuleOp"> {
|
|||
let constructor = "mlir::NPCOMP::createLowerToLLVMPass();";
|
||||
}
|
||||
|
||||
// TODO: Move this pass to upstream.
// TODO: This pass will still do "folding" on all ops.
// The applyPatternsAndFoldGreedily driver will need to be changed to restrict
// folding to the specified dialects as well.
// Perhaps a better design is having a pass that uses the conversion framework.
// The pass constructor would take a set of op names, and it would
// set up a conversion target that makes all those ops illegal, and uses
// the canonicalization patterns from those ops to legalize them.
//
// Declarative spec for a canonicalizer restricted to a caller-chosen set of
// dialects. No op class is given to `Pass<...>`, so the pass can be
// scheduled on any operation.
def RestrictedCanonicalizer : Pass<"restricted-canonicalize"> {
  let summary = "Canonicalize operations";
  let description = [{
    This pass is the same as the regular `canonicalize` pass, but it only
    applies a restricted set of patterns.

    This is useful when a particular canonicalization is actually needed for
    correctness of a lowering flow. For such cases, running a restricted set of
    canonicalizations makes it clearer which passes are needed for correctness
    and which passes are "just optimizations". This helps when debugging
    miscompiles and other situations where the compiler is not behaving as
    expected.
  }];
  // C++ factory; the implementation lives next to the pass in Passes.cpp.
  let constructor = "mlir::NPCOMP::createRestrictedCanonicalizerPass()";
  // Comma-separated dialect namespaces whose canonicalization patterns are
  // applied, e.g. -restricted-canonicalize=included-dialects=std,shape.
  let options = [
    ListOption<"includedDialects", "included-dialects", "std::string",
               "Which dialects should be canonicalized",
               "llvm::cl::MiscFlags::CommaSeparated">
  ];
}
|
||||
|
||||
#endif // NPCOMP_REFBACKEND_PASSES
|
||||
|
|
|
@ -116,57 +116,6 @@ mlir::NPCOMP::createLowerAllocMemRefOpsPass() {
|
|||
return std::make_unique<LowerAllocMemRefOps>();
|
||||
}
|
||||
|
||||
//===----------------------------------------------------------------------===//
|
||||
// RestrictedCanonicalizer
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
||||
namespace {
|
||||
struct RestrictedCanonicalizer
|
||||
: public RestrictedCanonicalizerBase<RestrictedCanonicalizer> {
|
||||
void runOnOperation() override {
|
||||
auto *context = &getContext();
|
||||
|
||||
// Find the dialects from their names.
|
||||
DenseSet<StringRef> neededDialects;
|
||||
for (const std::string &dialectName : includedDialects)
|
||||
neededDialects.insert(dialectName);
|
||||
DenseSet<Dialect *> dialectsToCanonicalize;
|
||||
for (Dialect *dialect : context->getLoadedDialects()) {
|
||||
if (neededDialects.count(dialect->getNamespace())) {
|
||||
dialectsToCanonicalize.insert(dialect);
|
||||
// Erase the dialect so that we can report an error below for any
|
||||
// dialect names that are not loaded.
|
||||
neededDialects.erase(dialect->getNamespace());
|
||||
}
|
||||
}
|
||||
|
||||
// Report a helpful error if a dialect is not found.
|
||||
auto missingDialects = llvm::to_vector<6>(neededDialects);
|
||||
if (!missingDialects.empty()) {
|
||||
llvm::sort(missingDialects);
|
||||
std::string buf;
|
||||
llvm::raw_string_ostream os(buf);
|
||||
llvm::interleaveComma(missingDialects, os);
|
||||
llvm::report_fatal_error("restricted-canonicalize: unknown dialects: " +
|
||||
os.str());
|
||||
}
|
||||
|
||||
// Collect all canonicalization patterns from ops in the included dialects.
|
||||
RewritePatternSet patterns(context);
|
||||
for (AbstractOperation *op : context->getRegisteredOperations())
|
||||
if (dialectsToCanonicalize.count(&op->dialect))
|
||||
op->getCanonicalizationPatterns(patterns, context);
|
||||
|
||||
Operation *op = getOperation();
|
||||
(void)applyPatternsAndFoldGreedily(op->getRegions(), std::move(patterns));
|
||||
}
|
||||
};
|
||||
} // end anonymous namespace
|
||||
|
||||
std::unique_ptr<Pass> mlir::NPCOMP::createRestrictedCanonicalizerPass() {
|
||||
return std::make_unique<RestrictedCanonicalizer>();
|
||||
}
|
||||
|
||||
//===----------------------------------------------------------------------===//
|
||||
// createRefBackendLoweringPipeline
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
@ -194,15 +143,8 @@ void mlir::NPCOMP::createRefBackendLoweringPipeline(
|
|||
pm.addNestedPass<FuncOp>(createConvertShapeConstraintsPass());
|
||||
// Run shape canonicalizations. In particular, this erases shape.assuming,
|
||||
// now that we have converted shape constraints.
|
||||
// TODO: This is kind of ugly. Either we use pass options or a constructor
|
||||
// that takes C++ data structures. The former makes the pass usable on the
|
||||
// command line (including reproducers), the latter makes the pass more
|
||||
// convenient.
|
||||
std::unique_ptr<Pass> shapeCanonicalizer =
|
||||
createRestrictedCanonicalizerPass();
|
||||
if (failed(shapeCanonicalizer->initializeOptions("included-dialects=shape")))
|
||||
llvm::report_fatal_error("couldn't initialize restricted-canonicalize");
|
||||
pm.addPass(std::move(shapeCanonicalizer));
|
||||
// TODO: Don't canonicalize everything.
|
||||
pm.addNestedPass<FuncOp>(createCanonicalizerPass());
|
||||
|
||||
// Lower shape ops to std.
|
||||
pm.addPass(createConvertShapeToStandardPass());
|
||||
|
|
|
@ -1,34 +0,0 @@
|
|||
// RUN: npcomp-opt -restricted-canonicalize=included-dialects=std <%s -split-input-file \
// RUN:   | FileCheck %s --check-prefix=STDONLY --dump-input=fail
// RUN: npcomp-opt -restricted-canonicalize=included-dialects=shape <%s -split-input-file \
// RUN:   | FileCheck %s --check-prefix=SHAPEONLY --dump-input=fail
// RUN: npcomp-opt -restricted-canonicalize=included-dialects=std,shape <%s -split-input-file \
// RUN:   | FileCheck %s --check-prefix=STDANDSHAPE --dump-input=fail
// RUN: not --crash npcomp-opt -restricted-canonicalize=included-dialects=notreal2,notreal1 <%s -split-input-file 2>&1 \
// RUN:   | FileCheck %s --check-prefix=ERROR --dump-input=fail

// ERROR: restricted-canonicalize: unknown dialects: notreal1, notreal2

// STDONLY-LABEL: func @mixed_dialects
// SHAPEONLY-LABEL: func @mixed_dialects
// STDANDSHAPE-LABEL: func @mixed_dialects
func @mixed_dialects(%arg0: i32) -> i32 {

  // Do we canonicalize away the shape.assuming?
  // (Fixed check prefix: was misspelled "SHAPEOONLY", so the shape-only run
  // never actually verified that shape.assuming was erased.)
  // STDONLY: shape.assuming
  // SHAPEONLY-NOT: shape.assuming
  // STDANDSHAPE-NOT: shape.assuming
  %w = shape.const_witness true
  %0 = shape.assuming %w -> (i32) {
    %c0 = constant 0 : i32
    shape.assuming_yield %c0 : i32
  }

  // Do we canonicalize away the std.br?
  // (Fixed check prefix: was misspelled "SHAPEOONLY" here as well.)
  // STDONLY-NOT: br
  // SHAPEONLY: br
  // STDANDSHAPE-NOT: br
  br ^bb1
^bb1:
  return %0 : i32
}
Loading…
Reference in New Issue