提交 bb6b88b8 authored 作者: Frederic Bastien's avatar Frederic Bastien

Revert a change and comment why it was done like this.

上级 247769b6
......@@ -736,8 +736,9 @@ class LogSoftmax(gof.Op):
logsoftmax_op = LogSoftmax()
@opt.register_stabilize('fast_compile')
@opt.register_specialize('fast_compile')
# This is not registered in stabilize, as it causes some crossentropy
# optimizations to not be inserted.
@opt.register_specialize('stabilize', 'fast_compile')
@gof.local_optimizer([tensor.Elemwise])
def local_logsoftmax(node):
"""
......@@ -757,8 +758,9 @@ def local_logsoftmax(node):
return [ret]
@opt.register_stabilize('fast_compile')
@opt.register_specialize('fast_compile')
# This is not registered in stabilize, as it causes some crossentropy
# optimizations to not be inserted.
@opt.register_specialize('stabilize', 'fast_compile')
@gof.local_optimizer([SoftmaxGrad])
def local_logsoftmax_grad(node):
"""
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论