Commit e85ff654, authored by Frédéric Bastien

Merge pull request #1534 from lamblin/fix_disconnected_sum

Update check for non-available gradient.
...@@ -693,7 +693,7 @@ class Elemwise(Op): ...@@ -693,7 +693,7 @@ class Elemwise(Op):
#sum out the broadcasted dimensions #sum out the broadcasted dimensions
for i, ipt in enumerate(inputs): for i, ipt in enumerate(inputs):
if rval[i] is None: if isinstance(rval[i].type, (NullType, DisconnectedType)):
continue continue
# list of all the dimensions that are broadcastable for input[i] so # list of all the dimensions that are broadcastable for input[i] so
......
...@@ -197,6 +197,14 @@ class test_Broadcast(unittest.TestCase): ...@@ -197,6 +197,14 @@ class test_Broadcast(unittest.TestCase):
f(xv, yv) f(xv, yv)
assert (xv == yv).all() assert (xv == yv).all()
def test_fill_grad(self):
    """Gradient of `second` (fill) w.r.t. its second input must not fail.

    Regression test for the bug reported at
    https://groups.google.com/d/topic/theano-users/nQshB8gUA6k/discussion
    """
    broadcastable = [0, 1, 0]
    first_input = TensorType(config.floatX, broadcastable)('x')
    second_input = TensorType(config.floatX, broadcastable)('y')
    filled = tensor.second(first_input, second_input)
    # Simply building the gradient graph is the test: it used to raise.
    theano.grad(filled.sum(), second_input)
def test_weird_strides(self): def test_weird_strides(self):
if not theano.config.cxx: if not theano.config.cxx:
raise SkipTest("G++ not available, so we need to skip this test.") raise SkipTest("G++ not available, so we need to skip this test.")
......
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Register or sign in to comment