Commit 51368a6b authored by Pascal Lamblin

Merge pull request #2905 from nouiz/tests

Fix test due to new error type.
...@@ -361,7 +361,7 @@ def test_opt_gpujoin_joinvectors_negativeaxes(): ...@@ -361,7 +361,7 @@ def test_opt_gpujoin_joinvectors_negativeaxes():
f = theano.function(inputs=[], outputs=t) f = theano.function(inputs=[], outputs=t)
f() f()
assert(False) assert(False)
except ValueError: except IndexError:
assert(True) assert(True)
......
...@@ -356,15 +356,25 @@ def register_specialize(lopt, *tags, **kwargs): ...@@ -356,15 +356,25 @@ def register_specialize(lopt, *tags, **kwargs):
def register_uncanonicalize(lopt, *tags, **kwargs):
    """Register `lopt` in the 'uncanonicalize' optimization database.

    Supports two calling conventions:

    * direct: ``register_uncanonicalize(opt, 'tag1', ...)`` registers
      ``opt`` and returns it;
    * decorator factory: ``@register_uncanonicalize('extra_tag')`` — when
      the first argument is a string it is treated as an additional tag,
      and a decorator is returned that performs the actual registration.

    Parameters
    ----------
    lopt : local optimizer or str
        The optimizer to register, or an extra tag name when used as a
        decorator factory.
    tags : str
        Extra tags to register the optimizer under.
    kwargs
        May contain ``name`` to override the default ``lopt.__name__``.
    """
    # isinstance() is the idiomatic type check; `type(x) == str` would
    # also reject str subclasses.
    if isinstance(lopt, str):
        def register(inner_lopt):
            # Re-enter with the real optimizer; `lopt` becomes a tag.
            return register_uncanonicalize(inner_lopt, lopt, *tags, **kwargs)
        return register
    else:
        name = (kwargs and kwargs.pop('name')) or lopt.__name__
        compile.optdb['uncanonicalize'].register(name, lopt, 'fast_run', *tags)
        return lopt
def register_specialize_device(lopt, *tags, **kwargs):
    """Register `lopt` in the 'specialize_device' optimization database.

    Supports two calling conventions:

    * direct: ``register_specialize_device(opt, 'tag1', ...)`` registers
      ``opt`` and returns it;
    * decorator factory: ``@register_specialize_device('extra_tag')`` —
      when the first argument is a string it is treated as an additional
      tag, and a decorator is returned that performs the registration.

    Parameters
    ----------
    lopt : local optimizer or str
        The optimizer to register, or an extra tag name when used as a
        decorator factory.
    tags : str
        Extra tags to register the optimizer under.
    kwargs
        May contain ``name`` to override the default ``lopt.__name__``.
    """
    # isinstance() is the idiomatic type check; `type(x) == str` would
    # also reject str subclasses.
    if isinstance(lopt, str):
        def register(inner_lopt):
            # Re-enter with the real optimizer; `lopt` becomes a tag.
            return register_specialize_device(inner_lopt, lopt, *tags, **kwargs)
        return register
    else:
        name = (kwargs and kwargs.pop('name')) or lopt.__name__
        compile.optdb['specialize_device'].register(name, lopt, 'fast_run', *tags)
        return lopt
# Register merge_optimizer as a global opt during canonicalize # Register merge_optimizer as a global opt during canonicalize
...@@ -1240,9 +1250,10 @@ class ShapeOptimizer(Optimizer): ...@@ -1240,9 +1250,10 @@ class ShapeOptimizer(Optimizer):
    def apply(self, fgraph):
        # Intentionally a no-op: this optimizer does nothing when applied.
        # NOTE(review): presumably its effect comes from hooks registered
        # elsewhere in the class (not visible in this view) — confirm.
        pass
# Register it after merge1 optimization at 0. We don't want to track
# the shape of merged node.
theano.compile.mode.optdb.register('ShapeOpt', ShapeOptimizer(),
                                   0.1, 'fast_run', 'fast_compile')
@register_specialize @register_specialize
......
Markdown format supported
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment