Fixing implicit double->float truncation warnings. (#2733)

Floating-point literals should use the correct type suffix: an unsuffixed literal is a double, so passing one where a float is expected triggers an implicit-truncation warning.
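For context, a minimal, self-contained C++ sketch of the class of warning this commit silences; the function name is illustrative and not part of the patch:

#include <cstdio>

// Stand-in for any API that takes a float parameter.
static void setMomentum(float m) { std::printf("momentum = %f\n", m); }

int main() {
  setMomentum(0.9);   // warns: implicit conversion from 'double' to 'float'
  setMomentum(0.9f);  // clean: the literal already has type float
  return 0;
}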
Ben Vanik 2024-01-08 14:26:38 -08:00 committed by GitHub
parent 985e7796a4
commit 4dd17f0b71
2 changed files with 7 additions and 7 deletions


@@ -184,8 +184,8 @@ void mlir::torch::onnx_c::populateDefaultDomainAtoF(
             binder.tensorOperandAtIndex(bias, 2) ||
             binder.tensorOperandAtIndex(runningMean, 3) ||
             binder.tensorOperandAtIndex(runningVar, 4) ||
-            binder.f32FloatAttr(momentum, "momentum", 0.9) ||
-            binder.f32FloatAttr(eps, "epsilon", 1e-05) ||
+            binder.f32FloatAttr(momentum, "momentum", 0.9f) ||
+            binder.f32FloatAttr(eps, "epsilon", 1e-05f) ||
             binder.tensorResultType(resultType))
           return failure();

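The change is mechanical, but the surrounding idiom is worth spelling out: each binder accessor returns true on failure, so the || chain short-circuits and the pattern bails out at the first missing operand or attribute. The sketch below is a simplified stand-in, assuming a signature along the lines of f32FloatAttr(float &out, name, float default); it is not torch-mlir's real OpBinder API:

#include <map>
#include <string>

struct FakeBinder {
  std::map<std::string, float> attrs;

  // Because defaultValue is a float, an unsuffixed default such as 0.9
  // (a double) would be truncated at the call site -- the warning fixed
  // by this commit.
  bool f32FloatAttr(float &out, const std::string &name, float defaultValue) {
    auto it = attrs.find(name);
    out = (it == attrs.end()) ? defaultValue : it->second;
    return false;  // this simplified lookup cannot fail
  }
};

bool matchBatchNormLike(FakeBinder &binder) {
  float momentum = 0.0f, eps = 0.0f;
  if (binder.f32FloatAttr(momentum, "momentum", 0.9f) ||
      binder.f32FloatAttr(eps, "epsilon", 1e-05f))
    return false;  // stands in for `return failure();` in the real pattern
  return true;
}

int main() {
  FakeBinder binder;
  return matchBatchNormLike(binder) ? 0 : 1;
}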

@@ -34,8 +34,8 @@ void mlir::torch::onnx_c::populateDefaultDomainGtoP(
         Value tensorOperand;
         float alpha, beta;
         if (binder.tensorOperand(tensorOperand) ||
-            binder.f32FloatAttr(alpha, "alpha", 0.2) ||
-            binder.f32FloatAttr(beta, "beta", 0.5) ||
+            binder.f32FloatAttr(alpha, "alpha", 0.2f) ||
+            binder.f32FloatAttr(beta, "beta", 0.5f) ||
             binder.tensorResultType(resultType))
           return failure();
@@ -276,8 +276,8 @@ void mlir::torch::onnx_c::populateDefaultDomainGtoP(
             binder.tensorOperandAtIndex(c, 2) ||
             binder.s64IntegerAttr(transA, "transA", 0) ||
             binder.s64IntegerAttr(transB, "transB", 0) ||
-            binder.f32FloatAttr(alpha, "alpha", 1.0) ||
-            binder.f32FloatAttr(beta, "beta", 1.0) ||
+            binder.f32FloatAttr(alpha, "alpha", 1.0f) ||
+            binder.f32FloatAttr(beta, "beta", 1.0f) ||
             binder.tensorResultType(resultType))
           return failure();
@@ -417,7 +417,7 @@ void mlir::torch::onnx_c::populateDefaultDomainGtoP(
         float alpha;
         if (binder.tensorOperand(operand) ||
             binder.tensorResultType(resultType) ||
-            binder.f32FloatAttr(alpha, "alpha", 0.01))
+            binder.f32FloatAttr(alpha, "alpha", 0.01f))
           return failure();
         Value constAlpha = rewriter.create<Torch::ConstantFloatOp>(
             binder.getLoc(), rewriter.getType<Torch::FloatType>(),
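For reference, implicit double-to-float truncation is the kind of diagnostic reported by Clang and GCC under -Wfloat-conversion (enabled by -Wconversion) and by MSVC as warning C4305; building with one of those enabled is presumably how these call sites were flagged.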