// torch-mlir/include/npcomp/Dialect/Torch/IR/GeneratedPrimOps.td

//===-------------------------------------------------------*- tablegen -*-===//
//
// This file is licensed under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
// Operation summaries and descriptions were systematically derived from public
// API docstrings and are licensed accordingly:
//   https://github.com/pytorch/pytorch/blob/master/LICENSE
//===----------------------------------------------------------------------===//
//
// This file is automatically generated. Please do not edit.
// Generated via:
//   python -m torch_mlir_utils.codegen.torch_ods_gen
//
//===----------------------------------------------------------------------===//
def Torch_PrimLayoutOp : Torch_Op<"prim.layout", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::layout : (Tensor) -> (int)`";
  let arguments = (ins
    AnyTorchTensorType:$a
  );
  let results = (outs
    Torch_IntType:$result
  );
  let assemblyFormat = "$a attr-dict `:` type($a) `->` type($result)";
}
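
// Illustrative textual form implied by the assemblyFormat above, assuming the
// dialect's `torch.` op prefix. A sketch only: the `!torch.tensor` and
// `!torch.int` type spellings are assumptions, not defined in this file.
//   %0 = torch.prim.layout %t : !torch.tensor -> !torch.int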

def Torch_PrimTupleIndexOp : Torch_Op<"prim.TupleIndex", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::TupleIndex : (Any, int) -> (Any)`";
  let arguments = (ins
    AnyTorchType:$tup,
    Torch_IntType:$i
  );
  let results = (outs
    AnyTorchType:$result
  );
  let assemblyFormat = "$tup `,` $i attr-dict `:` type($tup) `,` type($i) `->` type($result)";
}
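
// Sketch of the two-operand form above; the tuple type spelling is an
// assumption for illustration:
//   %0 = torch.prim.TupleIndex %tup, %i : !torch.tuple<!torch.int, !torch.int>, !torch.int -> !torch.int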

def Torch_PrimDeviceOp : Torch_Op<"prim.device", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::device : (Tensor) -> (Device)`";
  let arguments = (ins
    AnyTorchTensorType:$a
  );
  let results = (outs
    Torch_DeviceType:$result
  );
  let assemblyFormat = "$a attr-dict `:` type($a) `->` type($result)";
}

def Torch_PrimDtypeOp : Torch_Op<"prim.dtype", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::dtype : (Tensor) -> (int)`";
  let arguments = (ins
    AnyTorchTensorType:$a
  );
  let results = (outs
    Torch_IntType:$result
  );
  let assemblyFormat = "$a attr-dict `:` type($a) `->` type($result)";
}

def Torch_PrimTupleUnpackOp : Torch_Op<"prim.TupleUnpack", [
    AllowsTypeRefinement
  ]> {
  let summary = "Generated op for `prim::TupleUnpack : (Any) -> (...)`";
  let arguments = (ins
    AnyTorchType:$tup
  );
  let results = (outs
    Variadic<AnyTorchType>:$results
  );
  let assemblyFormat = "$tup attr-dict `:` type($tup) `->` type($results)";
}
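
// Sketch of the variadic-result form above: `type($results)` prints one type
// per unpacked element. Element and tuple type spellings are assumed here:
//   %0:2 = torch.prim.TupleUnpack %tup : !torch.tuple<!torch.int, !torch.int> -> !torch.int, !torch.int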

def Torch_PrimNumToTensorScalarOp : Torch_Op<"prim.NumToTensor.Scalar", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::NumToTensor.Scalar : (Scalar) -> (Tensor)`";
  let arguments = (ins
    AnyTorchScalarType:$a
  );
  let results = (outs
    AnyTorchTensorType:$result
  );
  let assemblyFormat = "$a attr-dict `:` type($a) `->` type($result)";
}
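
// Sketch of a scalar-to-tensor use, with assumed type spellings:
//   %0 = torch.prim.NumToTensor.Scalar %n : !torch.int -> !torch.tensor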

def Torch_PrimMinSelfIntOp : Torch_Op<"prim.min.self_int", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::min.self_int : (int[]) -> (int)`";
  let arguments = (ins
    AnyTorchIntListType:$self
  );
  let results = (outs
    Torch_IntType:$result
  );
  let assemblyFormat = "$self attr-dict `:` type($self) `->` type($result)";
}
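
// Sketch of the int-list form above (and of max.self_int below); the list
// type spelling is assumed for illustration:
//   %0 = torch.prim.min.self_int %sizes : !torch.list<!torch.int> -> !torch.int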

def Torch_PrimMinIntOp : Torch_Op<"prim.min.int", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::min.int : (int, int) -> (int)`";
  let arguments = (ins
    Torch_IntType:$a,
    Torch_IntType:$b
  );
  let results = (outs
    Torch_IntType:$result
  );
  let assemblyFormat = "$a `,` $b attr-dict `:` type($a) `,` type($b) `->` type($result)";
}

def Torch_PrimMaxSelfIntOp : Torch_Op<"prim.max.self_int", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::max.self_int : (int[]) -> (int)`";
  let arguments = (ins
    AnyTorchIntListType:$self
  );
  let results = (outs
    Torch_IntType:$result
  );
  let assemblyFormat = "$self attr-dict `:` type($self) `->` type($result)";
}

def Torch_PrimMaxIntOp : Torch_Op<"prim.max.int", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::max.int : (int, int) -> (int)`";
  let arguments = (ins
    Torch_IntType:$a,
    Torch_IntType:$b
  );
  let results = (outs
    Torch_IntType:$result
  );
  let assemblyFormat = "$a `,` $b attr-dict `:` type($a) `,` type($b) `->` type($result)";
}

def Torch_PrimRaiseExceptionOp : Torch_Op<"prim.RaiseException", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::RaiseException : (str) -> ()`";
  let arguments = (ins
    Torch_StringType:$msg
  );
  let results = (outs
  );
  let assemblyFormat = "$msg attr-dict `:` type($msg)";
}
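
// Sketch of the zero-result form above (no `->` clause in the format), assuming
// Torch_StringType prints as `!torch.str`:
//   torch.prim.RaiseException %msg : !torch.str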

def Torch_PrimUninitializedOp : Torch_Op<"prim.Uninitialized", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::Uninitialized : () -> (Any)`";
  let arguments = (ins
  );
  let results = (outs
    AnyTorchType:$result
  );
  let assemblyFormat = " attr-dict `:` type($result)";
}
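
// Sketch of the zero-operand form above; only the result type is printed, and
// the `!torch.int` choice here is arbitrary:
//   %0 = torch.prim.Uninitialized : !torch.int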

def Torch_PrimUncheckedCastOp : Torch_Op<"prim.unchecked_cast", [
    AllowsTypeRefinement,
    HasValueSemantics
  ]> {
  let summary = "Generated op for `prim::unchecked_cast : (t) -> (t)`";
  let arguments = (ins
    AnyTorchType:$x
  );
  let results = (outs
    AnyTorchType:$result
  );
  let assemblyFormat = "$x attr-dict `:` type($x) `->` type($result)";
}
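
// Sketch of a typical refinement use of the op above, with an assumed
// optional type spelling:
//   %0 = torch.prim.unchecked_cast %x : !torch.optional<!torch.int> -> !torch.int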

def Torch_PrimPrintOp : Torch_Op<"prim.Print", [
    AllowsTypeRefinement
  ]> {
  let summary = "Generated op for `prim::Print : (...) -> ()`";
  let arguments = (ins
    Variadic<AnyTorchType>:$operands
  );
  let results = (outs
  );
  let assemblyFormat = "`(` $operands `)` attr-dict `:` type($operands)";
}
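
// Sketch of the parenthesized variadic-operand form above, with assumed
// type spellings and exact spacing subject to MLIR's literal-printing rules:
//   torch.prim.Print(%msg, %x) : !torch.str, !torch.int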