提交 549f125a authored 作者: Frederic's avatar Frederic

Add a test for the softmax with the new keep_dims parameter.

上级 4c2d3478
...@@ -1270,6 +1270,20 @@ class Test_softmax_opt: ...@@ -1270,6 +1270,20 @@ class Test_softmax_opt:
assert softmax in f_ops assert softmax in f_ops
f(self.rng.rand(3, 4).astype(config.floatX)) f(self.rng.rand(3, 4).astype(config.floatX))
def test_basic_keepdims(self):
    """Check that the softmax graph optimization also fires when the
    normalizing sum is written with ``keepdims=True`` instead of an
    explicit ``dimshuffle`` (as in ``test_grad`` below).
    """
    c = T.matrix()
    p_y = T.exp(c) / T.exp(c).sum(axis=1, keepdims=True)
    # The optimizer should collapse the exp/sum/true_div pattern into
    # a single softmax op -- in particular, no div op may remain.
    f = theano.function([c], p_y, mode=self.mode)
    f_ops = [n.op for n in f.maker.fgraph.toposort()]
    assert len(f_ops) == 1
    assert softmax in f_ops
    # Sanity check: the optimized function actually runs on real data.
    f(self.rng.rand(3, 4).astype(config.floatX))
def test_grad(self): def test_grad(self):
c = T.matrix() c = T.matrix()
p_y = T.exp(c) / T.exp(c).sum(axis=1).dimshuffle(0, 'x') p_y = T.exp(c) / T.exp(c).sum(axis=1).dimshuffle(0, 'x')
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论