Added support for embedding_dense_backward (#889)

pull/890/head
Henry Tu 2022-06-03 13:33:43 -04:00 committed by GitHub
parent a29903dfc8
commit bfe8ff4b42
2 changed files with 28 additions and 0 deletions


@@ -7487,6 +7487,33 @@ def Torch_AtenNativeLayerNormBackwardOp : Torch_Op<"aten.native_layer_norm_backw
  }];
}

def Torch_AtenEmbeddingDenseBackwardOp : Torch_Op<"aten.embedding_dense_backward", [
    AllowsTypeRefinement,
    HasValueSemantics,
    ReadOnly
  ]> {
  let summary = "Generated op for `aten::embedding_dense_backward : (Tensor, Tensor, int, int, bool) -> (Tensor)`";
  let arguments = (ins
    AnyTorchTensorType:$grad_output,
    AnyTorchTensorType:$indices,
    Torch_IntType:$num_weights,
    Torch_IntType:$padding_idx,
    Torch_BoolType:$scale_grad_by_freq
  );
  let results = (outs
    AnyTorchTensorType:$result
  );
  let hasCustomAssemblyFormat = 1;
  let extraClassDefinition = [{
    ParseResult AtenEmbeddingDenseBackwardOp::parse(OpAsmParser &parser, OperationState &result) {
      return parseDefaultTorchOp(parser, result, 5, 1);
    }
    void AtenEmbeddingDenseBackwardOp::print(OpAsmPrinter &printer) {
      printDefaultTorchOp(printer, *this, 5, 1);
    }
  }];
}

def Torch_PrimLayoutOp : Torch_Op<"prim.layout", [
    AllowsTypeRefinement,
    HasValueSemantics,

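For context, the schema registered above is the standard PyTorch one: aten::embedding_dense_backward scatter-adds the rows of grad_output into a (num_weights, embedding_dim) gradient for the embedding weight, skipping any index equal to padding_idx. A minimal eager-mode sketch of the call (the values below are illustrative and not part of this commit):

import torch

grad_output = torch.randn(4, 3)        # upstream gradient, one row per looked-up index
indices = torch.tensor([0, 2, 2, 5])   # indices used in the forward embedding lookup
num_weights, padding_idx = 10, -1      # rows in the weight matrix; -1 disables the padding row
scale_grad_by_freq = False

grad_weight = torch.ops.aten.embedding_dense_backward(
    grad_output, indices, num_weights, padding_idx, scale_grad_by_freq)
print(grad_weight.shape)  # torch.Size([10, 3])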

@@ -537,6 +537,7 @@ def emit_ops(emitter_td: TextEmitter, registry: Registry):
    emit("aten::gelu_backward : (Tensor, Tensor, str) -> (Tensor)")
    emit("aten::_log_softmax_backward_data : (Tensor, Tensor, int, int) -> (Tensor)")
    emit("aten::native_layer_norm_backward : (Tensor, Tensor, int[], Tensor, Tensor, Tensor?, Tensor?, bool[]) -> (Tensor, Tensor, Tensor)")
    emit("aten::embedding_dense_backward : (Tensor, Tensor, int, int, bool) -> (Tensor)")

    # ==========================================================================
    # `prim::` namespace.
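Because the emitted signature must stay in sync with the registered JIT schema, a quick consistency check (illustrative only, not part of this commit) is to compare the op against the gradient autograd computes for a plain dense embedding lookup; embedding's dense backward uses padding_idx = -1 and scale_grad_by_freq = False by default:

import torch

weight = torch.randn(10, 3, requires_grad=True)
indices = torch.tensor([0, 2, 2, 5])
out = torch.nn.functional.embedding(indices, weight)
out.backward(torch.ones_like(out))

manual = torch.ops.aten.embedding_dense_backward(
    torch.ones_like(out), indices, weight.shape[0], -1, False)
print(torch.allclose(weight.grad, manual))  # expected: True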