Unverified commit 51c2b4c1, authored by David M Bridgeland, committed by GitHub

remove sigmoid optimization for extreme values (#32)

* Allow single dimension slices in mgrid and ogrid
* Use requirements.txt and pin black version
* Update black formatting
* Use sphinx section names
* Generalize the type interface to tensor.zeros and tensor.ones
* Add axis argument to squeeze
* Remove optimization from ScalarSigmoid.c_code()

Remove the optimization from ScalarSigmoid.c_code(), as it was incorrect and did not optimize much. Add a test case that passes without that optimization and failed with it.

Co-authored-by: Brandon T. Willard <brandonwillard@users.noreply.github.com>
Co-authored-by: Oriol (ZBook) <oriol.abril.pla@gmail.com>
Parent commit: 23f5877b
......@@ -88,7 +88,8 @@ TestSoftplusBroadcast = makeBroadcastTester(
),
good=dict(
copymod(
_good_broadcast_unary_normal_no_complex, without=["uint8", "uint16"]
_good_broadcast_unary_normal_no_complex,
without=["uint8", "uint16", "big_scalar"],
), # numpy function overflows with uint16.
uint8=[
np.arange(0, 89, dtype="uint8")
......
......@@ -1290,6 +1290,7 @@ _good_broadcast_unary_normal_no_complex = dict(
uint16=[np.arange(0, 89, dtype="uint16")],
corner_case=[corner_case],
empty=[np.asarray([], dtype=config.floatX)],
big_scalar=[np.arange(17.0, 29.0, 0.5, dtype=floatX)],
)
_grad_broadcast_unary_normal_no_complex = dict(
......
......@@ -80,15 +80,9 @@ class ScalarSigmoid(scalar.UnaryScalarOp):
node.inputs[0].type == scalar.float32
or node.inputs[0].type == scalar.float16
):
return (
"""%(z)s = %(x)s < -88.0f ? 0.0 : %(x)s > 15.0f ? 1.0f : 1.0f /(1.0f + exp(-%(x)s));"""
% locals()
)
return """%(z)s = 1.0f / (1.0f + exp(-%(x)s));""" % locals()
elif node.inputs[0].type == scalar.float64:
return (
"""%(z)s = %(x)s < -709.0 ? 0.0 : %(x)s > 19.0 ? 1.0 : 1.0 /(1.0+exp(-%(x)s));"""
% locals()
)
return """%(z)s = 1.0 / (1.0 + exp(-%(x)s));""" % locals()
else:
raise NotImplementedError("only floatingpoint is implemented")
......
Markdown is supported
0%
You are attaching 0 files to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment