提交 8f81cc61 authored 作者: Ian Goodfellow's avatar Ian Goodfellow

removed some outdated comments

上级 a1838faa
...@@ -138,16 +138,12 @@ class test_grad_sources_inputs(unittest.TestCase): ...@@ -138,16 +138,12 @@ class test_grad_sources_inputs(unittest.TestCase):
self.assertTrue(g[i] is one) self.assertTrue(g[i] is one)
def test_unimplemented_grad_func(): def test_unimplemented_grad_func():
#tests that function compilation catches unimplemented grads in the graph # tests that function compilation catches unimplemented grads in the graph
a = theano.tensor.vector() a = theano.tensor.vector()
b = theano.gradient.grad_not_implemented(theano.tensor.add, 0, a) b = theano.gradient.grad_not_implemented(theano.tensor.add, 0, a)
try: try:
f = theano.function([a], b, on_unused_input = 'ignore') f = theano.function([a], b, on_unused_input = 'ignore')
assert 0 assert 0
#Note: it's important that the NotImplementedGradOp is caught
#at COMPILATION time, not execution time.
#If the uncomputable variable is, for example, multiplied by 0,
#it could be optimized out of the final graph.
except TypeError: except TypeError:
pass pass
...@@ -158,10 +154,6 @@ def test_undefined_grad_func(): ...@@ -158,10 +154,6 @@ def test_undefined_grad_func():
try: try:
f = theano.function([a],b, on_unused_input = 'ignore') f = theano.function([a],b, on_unused_input = 'ignore')
assert 0 assert 0
#Note: it's important that the GradUndefinedOp is caught at
#COMPILATION time, not execution time.
#If the uncomputable variable is, for example, multiplied by 0,
#it could be optimized out of the final graph.
except TypeError: except TypeError:
pass pass
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论