Commit 0b8f1da0, authored by Pascal Lamblin

Merge pull request #1633 from nouiz/debugmode

Make DebugMode raise a better error when an optimization inserts an op
......@@ -192,6 +192,10 @@ class BadThunkOutput(DebugModeError):
return ret
class OptimizationInsertError(DebugModeError):
    """Exception raised when an optimization inserts an apply node
    whose thunk raises an error when DebugMode executes it.

    Carries a message naming the optimization believed responsible,
    so the user can track down the faulty graph rewrite.
    """
    pass
class BadOptimization(DebugModeError):
"""Exception: some variable and its substitute take different
runtime values.
......@@ -1792,6 +1796,25 @@ class _Linker(gof.link.LocalLinker):
# shouldn't have put it into the list in
# the first place
thunk_py = None
except Exception, e:
# I think that only 1 optimization can
# insert a given apply node. If that is not True,
# we would need to loop over all node outputs,
# But this make the output uglier.
reason = fgraph.equivalence_tracker.reasons[
node.outputs[0]]
opt = str(reason[0][0])
msg = (
"An optimization (probably %s ) inserted an apply node that raise an error." % opt +
"The information we have about this optimizations is:" + str(reason) +
"The original exception: " + str(e))
new_e = OptimizationInsertError(msg)
exc_type, exc_value, exc_trace = sys.exc_info()
exc_type = OptimizationInsertError
exc_value = new_e
raise_with_op(node, thunk_c,
(exc_type, exc_value, exc_trace))
if thunk_py:
# check output values for type-correctness
......@@ -1869,8 +1892,24 @@ class _Linker(gof.link.LocalLinker):
## First time, with None in output_storage
try:
thunk_c()
except Exception:
raise_with_op(node, thunk_c)
except Exception, e:
# I think that only 1 optimization can
# insert a given apply node. If that is not True,
# we would need to loop over all node outputs,
# But this make the output uglier.
reason = fgraph.equivalence_tracker.reasons[
node.outputs[0]]
opt = str(reason[0][0])
msg = (
"An optimization (probably %s ) inserted an apply node that raise an error." % opt +
"The information we have about this optimizations is:" + str(reason) +
"The original exception: " + str(e))
new_e = OptimizationInsertError(msg)
exc_type, exc_value, exc_trace = sys.exc_info()
exc_type = OptimizationInsertError
exc_value = new_e
raise_with_op(node, thunk_c,
(exc_type, exc_value, exc_trace))
for r in node.outputs:
# check output values for type-correctness
......
......@@ -248,6 +248,39 @@ def test_badoptimization():
assert False
def test_badoptimization_opt_err():
    """This variant of test_badoptimization() replaces the working code
    with a new apply node that will raise an error.

    DebugMode is expected to report the failure as an
    OptimizationInsertError whose message names the optimization that
    inserted the faulty node.
    """
    @gof.local_optimizer([theano.tensor.add])
    def insert_bigger_b_add(node):
        # Deliberately broken rewrite: double the length of the last
        # input of `add`, so the inserted node has mismatched shapes
        # and should fail when its thunk runs.
        if node.op == theano.tensor.add:
            inputs = list(node.inputs)
            if inputs[-1].owner is None:
                inputs[-1] = theano.tensor.concatenate((inputs[-1],
                                                        inputs[-1]))
                return [node.op(*inputs)]
        return False
    # Register the broken optimization so DebugMode applies it.
    edb = gof.EquilibriumDB()
    edb.register('insert_bigger_b_add', insert_bigger_b_add, 'all')
    opt = edb.query('+all')
    a = theano.tensor.dvector()
    b = theano.tensor.dvector()
    f = theano.function([a, b], a + b,
                        mode=debugmode.DebugMode(optimizer=opt))
    try:
        # Evaluating the optimized graph should trigger the error path.
        f([1.0, 2.0, 3.0], [2, 3, 4],)
    except debugmode.OptimizationInsertError, e:
        # The error message must identify the offending optimization.
        assert 'insert_bigger_b_add' in e.message
        return  # TEST PASS
    assert False
def test_stochasticoptimization():
# this optimization alternates between triggering and not triggering.
......
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment