Commit e427954b authored by Trey Wenger, committed by Ricardo Vieira

prevent log10 L_op from upcasting

Parent commit: 61c15af3
......@@ -3030,7 +3030,7 @@ class Log10(UnaryScalarOp):
else:
return [x.zeros_like()]
return (gz / (x * np.log(10.0)),)
return (gz / (x * np.asarray(math.log(10.0)).astype(x.dtype)),)
def c_code(self, node, name, inputs, outputs, sub):
(x,) = inputs
......
......@@ -544,3 +544,14 @@ def test_shape():
assert b.shape.type.ndim == 1
assert b.shape.type.shape == (0,)
assert b.shape.type.dtype == "int64"
def test_grad_log10():
    # Regression test: the gradient of log10 must keep the input's dtype
    # and not upcast float32 to float64.
    # See https://github.com/pymc-devs/pytensor/issues/667
    x = float32("log10_a")
    out = log10(x)
    out_grad = pytensor.gradient.grad(out, x)
    assert out.dtype == "float32"
    assert out_grad.dtype == "float32"
Markdown format
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment