提交 ea902b07 authored 作者: Frederic Bastien's avatar Frederic Bastien

Remove some error printing from the Softmax optimization, for rewrites that are reverted by the env.

上级 dc84f81b
......@@ -395,7 +395,11 @@ def local_softmax_with_bias(node):
except:
#if our arguments have the wrong types, then forget about it
return
return [sm_bias]
if sm_bias.type == node.outputs[0].type:
#This condition is not always true. See the test
#nnet/tests/test_nnet.py:T_SoftmaxWithBias.test_broadcast
return [sm_bias]
def softmax_simplifier(numerators, denominators):
for numerator in list(numerators):
......
......@@ -92,6 +92,27 @@ class T_SoftmaxWithBias(unittest.TestCase):
return softmax_with_bias(a, b)[:,3]
utt.verify_grad(f, [numpy.random.rand(3,4),
numpy.random.rand(4)])
def test_broadcast(self):
    """Check that optimization does not raise for broadcasted inputs.

    softmax_with_bias does not correctly support some/all broadcasted
    input patterns, so the optimizer must decline to introduce it here
    (plain softmax must remain in the compiled graph) instead of
    raising an error during optimization.
    """
    # 3x3 constant weight matrix; dtype follows the configured floatX.
    initial_W = numpy.asarray([[0.1, 0.1, 0.1],
                               [0.1, 0.1, 0.1],
                               [0.1, 0.1, 0.1]],
                              dtype=theano.config.floatX)
    W = theano.shared(value=initial_W, name='W')
    # Scalar bias: gets broadcast against the dot product, which is the
    # pattern softmax_with_bias cannot handle.
    vbias = theano.shared(value=0.1, name='vbias')
    hid = T.vector('hid')
    f = theano.function([hid],
                        T.nnet.softmax(T.dot(hid, W.T) + vbias))
    ops = [node.op for node in f.maker.env.toposort()]
    # The broadcast-unsafe fused op must NOT have been introduced.
    assert softmax_with_bias not in ops
    assert softmax in ops
    # Must execute without error (debug prints removed).
    f([0, 1, 0])
def test_infer_shape(self):
    """The inferred output shape of softmax_with_bias matches its matrix input."""
    x = numpy.random.rand(3, 4)
    b = numpy.random.rand(4)
    shape_fn = theano.function([], outputs=softmax_with_bias(x, b).shape)
    assert all(shape_fn() == [3, 4])
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论