//===-------------------------------------------------------*- tablegen -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#ifndef TCP_OPS
#define TCP_OPS
include "npcomp/Dialect/TCP/IR/TCPBase.td"
include "mlir/Dialect/Shape/IR/ShapeBase.td"
include "mlir/Interfaces/SideEffectInterfaces.td"
include "mlir/Interfaces/InferTypeOpInterface.td"
include "mlir/IR/SymbolInterfaces.td"
class TCP_Op<string mnemonic, list<OpTrait> traits = []>
: Op<TCP_Dialect, mnemonic, traits> {
}
// TODO: clarify allowed tensor element types.
// TODO: Is HasParent too restrictive? It would disallow an island containing a
// loop.for with further ops inside it.
def TCP_AddOp
: TCP_Op<"add", []> {
let summary = "Adds two tensors.";
let description = [{
Adds two tensors.
}];
let arguments = (ins AnyRankedTensor:$lhs, AnyRankedTensor:$rhs);
let results = (outs AnyRankedTensor:$result);
}
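
// Illustrative only (the tensor types below are assumptions, not taken from
// the op definition): with no custom assembly format declared, tcp.add
// round-trips in MLIR's generic operation form, e.g.:
//
//   %sum = "tcp.add"(%lhs, %rhs)
//       : (tensor<?xf32>, tensor<?xf32>) -> tensor<?xf32>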
def TCP_BroadcastToOp : TCP_Op<"broadcast_to"> {
let summary = "Broadcasts an operand to a given shape.";
let description = [{
Broadcasts `operand` to the shape `shape`.
It is undefined behavior if such a broadcast is not legal.
}];
let arguments = (ins AnyRankedTensor:$operand, Shape_ExtentTensorType:$shape);
let results = (outs AnyRankedTensor:$result);
}
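
// Illustrative only (the element types are assumptions): Shape_ExtentTensorType
// is a 1-D tensor of index extents, so a generic-form use looks like:
//
//   %bcast = "tcp.broadcast_to"(%operand, %shape)
//       : (tensor<?xf32>, tensor<?xindex>) -> tensor<?x?xf32>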
//===----------------------------------------------------------------------===//
// Ops that need to be factored to a proper home.
//===----------------------------------------------------------------------===//
// TODO: Find a home for these.
// TODO: This probably doesn't belong in the tcp dialect.
def TCP_AllocMemRefOp : TCP_Op<"alloc_memref", []> {
let summary = "Allocates a memref of the given shape.";
let description = [{
Allocates a memref of the given shape.
}];
let arguments = (ins Shape_ExtentTensorType:$shape);
let results = (outs AnyMemRef:$memref);
let assemblyFormat = "$shape attr-dict `:` type($memref)";
}
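
// Illustrative only (the memref type is an assumption): per the declared
// assemblyFormat "$shape attr-dict `:` type($memref)", a use looks like:
//
//   %buf = tcp.alloc_memref %shape : memref<?x?xf32>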
def TCP_GlobalOp : TCP_Op<"global", [Symbol]> {
let summary = "Represents a global variable";
let description = [{
Represents a global variable.
Currently, only constant tensors are supported, and they are not
considered to be exported.
}];
let arguments = (ins StrAttr:$sym_name, ElementsAttr:$value);
let results = (outs);
let printer = [{ return ::print$cppClass(p, *this); }];
let parser = [{ return ::parse$cppClass(parser, result); }];
}
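
// Illustrative only (symbol name and value are hypothetical): the pretty
// syntax is defined by the custom parser/printer above, but the op always
// round-trips in MLIR's generic form, e.g.:
//
//   "tcp.global"() {sym_name = "foo", value = dense<1.0> : tensor<2xf32>} : () -> ()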
def TCP_GetGlobalMemrefOp : TCP_Op<"get_global_memref"> {
let summary = "Obtain a memref pointing at the given global";
let description = [{
Obtain a memref pointing at the given global.
}];
let arguments = (ins FlatSymbolRefAttr:$global);
let results = (outs AnyMemRef:$memref);
let assemblyFormat = "$global attr-dict `:` type($memref)";
let verifier = "return ::verify$cppClass(*this);";
}
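
// Illustrative only (symbol name and memref type are assumptions): per the
// declared assemblyFormat, referencing a tcp.global looks like:
//
//   %g = tcp.get_global_memref @foo : memref<2xf32>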
// TODO: Change to a more principled error handling mechanism.
// This op probably doesn't need to exist eventually.
// This op is also not modeled correctly right now, since it does not itself
// produce the error in practice. Ops like shape.broadcast, when lowered,
// immediately produce the errors themselves.
// TODO: This should eventually be moved to a shape dialect.
def TCP_ShapeObserveErrorOp : TCP_Op<"shape_observe_error", []> {
let summary = "Observes the fact that a shape might be an error.";
let description = [{
This op is a structural placeholder that captures a shape such that it
is not erased. This will keep around shape computations that are later
lowered into eager error handling code.
    The interaction of this op, especially with control flow and
    side-effecting ops, is not very well defined and needs further
    work/redesign.
}];
let arguments = (ins Shape_ShapeOrExtentTensorType:$shape);
// TODO: ODS seems to create redeclared class members if we remove this,
// resulting in C++ compilation errors.
let results = (outs NoneType:$dummy);
}
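
// Illustrative only: with no custom assembly format, a generic-form use that
// observes a !shape.shape value (Shape_ShapeOrExtentTensorType also allows an
// extent tensor) looks like:
//
//   %dummy = "tcp.shape_observe_error"(%shape) : (!shape.shape) -> none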
#endif // TCP_OPS