提交 b9977855 authored 作者: nouiz's avatar nouiz

Merge pull request #373 from lamblin/fix_xent_thing_without_infershape

Make mlp_test test a more sensible case.
...@@ -306,27 +306,15 @@ def test_mlp(): ...@@ -306,27 +306,15 @@ def test_mlp():
theano.printing.debugprint(train_model, print_type=True) theano.printing.debugprint(train_model, print_type=True)
assert any([isinstance(i.op,T.nnet.CrossentropySoftmax1HotWithBiasDx) for i in train_model.maker.env.toposort()]) assert any([isinstance(i.op,T.nnet.CrossentropySoftmax1HotWithBiasDx) for i in train_model.maker.env.toposort()])
# Now, this case works, too!
train_model =theano.function( inputs = [index],
updates = updates2,
mode=mode.excluding('local_track_shape_i'),
givens={
x:train_set_x[index*batch_size:(index+1)*batch_size],
y:train_set_y[index*batch_size:(index+1)*batch_size]})
print
print 'MODEL 2'
theano.printing.debugprint(train_model, print_type=True)
assert any([isinstance(i.op,T.nnet.CrossentropySoftmax1HotWithBiasDx) for i in train_model.maker.env.toposort()])
# Even without FeatureShape # Even without FeatureShape
train_model =theano.function( inputs = [index], train_model =theano.function( inputs = [index],
updates = updates2, updates = updates2,
mode=mode.excluding('local_shape_to_shape_i'), mode=mode.excluding('ShapeOpt'),
givens={ givens={
x:train_set_x[index*batch_size:(index+1)*batch_size], x:train_set_x[index*batch_size:(index+1)*batch_size],
y:train_set_y[index*batch_size:(index+1)*batch_size]}) y:train_set_y[index*batch_size:(index+1)*batch_size]})
print print
print 'MODEL 3' print 'MODEL 2'
theano.printing.debugprint(train_model, print_type=True) theano.printing.debugprint(train_model, print_type=True)
assert any([isinstance(i.op,T.nnet.CrossentropySoftmax1HotWithBiasDx) for i in train_model.maker.env.toposort()]) assert any([isinstance(i.op,T.nnet.CrossentropySoftmax1HotWithBiasDx) for i in train_model.maker.env.toposort()])
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论