// RUN: npcomp-opt -bypass-shapes <%s | FileCheck %s --dump-input=fail

// Check that -bypass-shapes computes each op's result shape and wraps the op
// in a refback.shaped_results op that carries that shape.

// CHECK-LABEL: func @tcp_broadcast_to
func @tcp_broadcast_to(%arg0: tensor<?xf32>, %arg1: tensor<?xindex>) {
  // CHECK: %0 = refback.shaped_results %arg1
  %0 = tcp.broadcast_to %arg0, %arg1 : (tensor<?xf32>, tensor<?xindex>) -> tensor<?x?xf32>
  return
}

// CHECK-LABEL:   func @tcp_add(
// CHECK-SAME:                  %[[LHS:.*]]: tensor<?xf32>,
// CHECK-SAME:                  %[[RHS:.*]]: tensor<?xf32>) -> tensor<?xf32> {
// CHECK:           %[[LHSSHAPE:.*]] = shape.shape_of %[[LHS]]
// CHECK:           %[[RET:.*]] = refback.shaped_results %[[LHSSHAPE]]
// CHECK:           return %[[RET:.*]] : tensor<?xf32>
// CHECK:         }
func @tcp_add(%arg0: tensor<?xf32>, %arg1: tensor<?xf32>) -> tensor<?xf32> {
  %0 = tcp.add %arg0, %arg1 : (tensor<?xf32>, tensor<?xf32>) -> tensor<?xf32>
  return %0 : tensor<?xf32>
}

// TODO: Don't create too many duplicate tests for binary elementwise ops.
// CHECK-LABEL:   func @tcp_max(
// CHECK-SAME:                  %[[LHS:.*]]: tensor<?xf32>,
// CHECK-SAME:                  %[[RHS:.*]]: tensor<?xf32>) -> tensor<?xf32> {
// CHECK:           %[[LHSSHAPE:.*]] = shape.shape_of %[[LHS]]
// CHECK:           %[[RET:.*]] = refback.shaped_results %[[LHSSHAPE]]
// CHECK:           return %[[RET:.*]] : tensor<?xf32>
// CHECK:         }
func @tcp_max(%arg0: tensor<?xf32>, %arg1: tensor<?xf32>) -> tensor<?xf32> {
  %0 = tcp.max %arg0, %arg1 : (tensor<?xf32>, tensor<?xf32>) -> tensor<?xf32>
  return %0 : tensor<?xf32>
}

// CHECK-LABEL:   func @tcp_matmul(
// CHECK-SAME:                     %[[LHS:.*]]: tensor<?x?xf32>,
// CHECK-SAME:                     %[[RHS:.*]]: tensor<?x?xf32>) -> tensor<?x?xf32> {
// CHECK:           %[[C0:.*]] = constant 0 : index
// CHECK:           %[[LHSROWS:.*]] = dim %[[LHS]], %[[C0]]
// CHECK:           %[[C1:.*]] = constant 1 : index
// CHECK:           %[[RHSCOLS:.*]] = dim %[[RHS]], %[[C1]]
// CHECK:           %[[RESULTSHAPE:.*]] = tensor_from_elements %[[LHSROWS]], %[[RHSCOLS]]
// CHECK:           %[[RET:.*]] = refback.shaped_results %[[RESULTSHAPE]] {
// CHECK:           return %[[RET:.*]] : tensor<?x?xf32>
// CHECK:         }
func @tcp_matmul(%arg0: tensor<?x?xf32>, %arg1: tensor<?x?xf32>) -> tensor<?x?xf32> {
  %0 = tcp.matmul %arg0, %arg1 : (tensor<?x?xf32>, tensor<?x?xf32>) -> tensor<?x?xf32>
  return %0 : tensor<?x?xf32>
}