提交 0041ce67 作者: Arnaud Bergeron

Remove the try..except construct since it masked some errors in the optimization.

上级 994c8d35
...@@ -744,17 +744,14 @@ def local_logsoftmax(node): ...@@ -744,17 +744,14 @@ def local_logsoftmax(node):
Note: only forward pass is affected Note: only forward pass is affected
""" """
try:
if (isinstance(node.op, tensor.Elemwise) and if (isinstance(node.op, tensor.Elemwise) and
isinstance(node.op.scalar_op, scalar.basic.Log) and isinstance(node.op.scalar_op, scalar.basic.Log) and
len(node.inputs) == 1 and len(node.inputs) == 1 and
node.inputs[0].owner and node.inputs[0].owner is not None and
isinstance(node.inputs[0].owner.op, Softmax)): isinstance(node.inputs[0].owner.op, Softmax)):
inVars = node.inputs[0].owner.inputs[0] inVars = node.inputs[0].owner.inputs[0]
new_op = LogSoftmax() new_op = LogSoftmax()
return [new_op(inVars)] return [new_op(inVars)]
except AttributeError:
pass
@opt.register_specialize('stabilize', 'fast_compile') @opt.register_specialize('stabilize', 'fast_compile')
...@@ -765,16 +762,19 @@ def local_logsoftmax_grad(node): ...@@ -765,16 +762,19 @@ def local_logsoftmax_grad(node):
Note: only grad is affected Note: only grad is affected
""" """
try:
if (isinstance(node.op, SoftmaxGrad) and if (isinstance(node.op, SoftmaxGrad) and
len(node.inputs) == 2 and len(node.inputs) == 2 and
node.inputs[0].owner is not None and
isinstance(node.inputs[0].owner.op, tensor.Elemwise) and isinstance(node.inputs[0].owner.op, tensor.Elemwise) and
len(node.inputs[0].owner.inputs) >= 2 and
node.inputs[0].owner.inputs[1].owner is not None and
node.inputs[0].owner.inputs[1].owner.op == softmax_op and node.inputs[0].owner.inputs[1].owner.op == softmax_op and
node.inputs[1] == node.inputs[0].owner.inputs[1] and node.inputs[1] == node.inputs[0].owner.inputs[1] and
not ( not (
# skip if it will be optimized by # skip if it will be optimized by
# local_advanced_indexing_crossentropy_onehot_grad # local_advanced_indexing_crossentropy_onehot_grad
node.inputs[0].owner.op == tensor.true_div and node.inputs[0].owner.op == tensor.true_div and
node.inputs[0].owner.inputs[0].owner is not None and
isinstance(node.inputs[0].owner.inputs[0].owner.op, isinstance(node.inputs[0].owner.inputs[0].owner.op,
subtensor.AdvancedIncSubtensor))): subtensor.AdvancedIncSubtensor))):
# get parameters from unoptimized op # get parameters from unoptimized op
...@@ -785,8 +785,6 @@ def local_logsoftmax_grad(node): ...@@ -785,8 +785,6 @@ def local_logsoftmax_grad(node):
grads = tensor.alloc(grads, grads.shape[0], sm.shape[1]) grads = tensor.alloc(grads, grads.shape[0], sm.shape[1])
return [grads - tensor.sum(grads, axis=1, keepdims=True) * sm] return [grads - tensor.sum(grads, axis=1, keepdims=True) * sm]
except AttributeError:
pass
def softmax_graph(c): def softmax_graph(c):
......
Markdown 格式
0%
您将 0 人添加到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论