提交 2963f85a authored 作者: affanv14's avatar affanv14

Changed two lines which called grad to L_op

上级 0dfa0d58
......@@ -448,7 +448,7 @@ class Softmax(gof.Op):
# is the same as the grad
if None in eval_points:
return [None]
return self.grad(inputs, eval_points)
return self.L_op(inputs, [self(*inputs)], eval_points)
def infer_shape(self, node, shape):
    """Output shapes match input shapes: softmax is an elementwise-normalized
    map over its input, so it preserves the shape of each input."""
    out_shapes = shape
    return out_shapes
......@@ -1054,7 +1054,7 @@ class CrossentropySoftmaxArgmax1HotWithBias(gof.Op):
db_terms.append(db)
if not isinstance(g_sm.type, DisconnectedType):
dx, db = softmax_with_bias.grad((x, b), (g_sm, ))
dx, db = softmax_with_bias.L_op((x, b), [softmax_with_bias(x, b)], (g_sm, ))
dx_terms.append(dx)
db_terms.append(db)
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论