Commit 0e7eb0cc authored by Ian Goodfellow

fixed bug where tensor.grad could try to add DisconnectedType to other

variables
Parent dc6ecb37
@@ -681,6 +681,10 @@ def _populate_grad_dict(var_to_node_to_idx,
"encountered a NaN. " +\
term.type.why_null)
#Don't try to sum up DisconnectedType placeholders
if isinstance(term.type, DisconnectedType):
continue
terms.append(term)
#the next line is like sum(terms) but doesn't add an
#extraneous TensorConstant(0)
......
@@ -332,7 +332,7 @@ class Scalar(Type):
return '''
template <> %(mytype)s & %(mytype)s::operator=<%(othertype)s>(const %(othertype)s & y)
{ this->real=y; this->imag=0; return *this; }
-''' % dict(mytype = mytype, othertype = othertype)
+''' % dict(mytype=mytype, othertype=othertype)
def operator_eq_cplx(mytype, othertype):
return '''
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论