提交 5b7ab732 authored 作者: Eric Larsen's avatar Eric Larsen 提交者: Frederic

testing infer_shape: op CrossentropySoftmax1HotWithBiasDx

上级 ac09d883
...@@ -857,6 +857,9 @@ class CrossentropySoftmax1HotWithBiasDx (gof.Op): ...@@ -857,6 +857,9 @@ class CrossentropySoftmax1HotWithBiasDx (gof.Op):
dx[i, y_idx[i]] -= dy[i] # scalar decrement dx[i, y_idx[i]] -= dy[i] # scalar decrement
output_storage[0][0] = dx output_storage[0][0] = dx
def infer_shape(self, node, shapes):
    """Return the shape of dx, which matches the softmax output sm.

    Inputs are (dy, sm, y_idx); the gradient dx computed by perform()
    is built element-wise from sm, so it shares sm's shape (shapes[1]).
    """
    dy_shape, sm_shape, y_idx_shape = shapes
    return [sm_shape]
def grad(self, inp, grads): def grad(self, inp, grads):
dy, sm, y_idx = inp dy, sm, y_idx = inp
g_dx, = grads g_dx, = grads
......
...@@ -17,12 +17,13 @@ from theano.tensor.nnet import (categorical_crossentropy, ...@@ -17,12 +17,13 @@ from theano.tensor.nnet import (categorical_crossentropy,
crossentropy_softmax_1hot_with_bias, crossentropy_softmax_1hot_with_bias,
crossentropy_softmax_1hot_with_bias_dx, crossentropy_softmax_1hot_with_bias_dx,
crossentropy_softmax_argmax_1hot_with_bias, crossentropy_softmax_argmax_1hot_with_bias,
CrossentropySoftmax1HotWithBiasDx,
sigmoid, softplus, sigmoid, softplus,
Softmax, softmax, SoftmaxWithBias, softmax_grad, Softmax, softmax, SoftmaxWithBias, softmax_grad,
softmax_with_bias, SoftmaxGrad, softmax_with_bias, SoftmaxGrad,
Prepend_scalar_constant_to_each_row, Prepend_scalar_constant_to_each_row,
Prepend_scalar_to_each_row) Prepend_scalar_to_each_row)
from theano.tensor import dmatrix, dvector from theano.tensor import dmatrix, dvector, lvector
class T_sigmoid(unittest.TestCase): class T_sigmoid(unittest.TestCase):
def setUp(self): def setUp(self):
...@@ -162,9 +163,8 @@ class T_CrossentropySoftmax1Hot(unittest.TestCase): ...@@ -162,9 +163,8 @@ class T_CrossentropySoftmax1Hot(unittest.TestCase):
return crossentropy_softmax_1hot(T.shape_padleft(a)+b, y_idx)[0] return crossentropy_softmax_1hot(T.shape_padleft(a)+b, y_idx)[0]
utt.verify_grad(f, [numpy.random.rand(4), numpy.random.rand(4)]) utt.verify_grad(f, [numpy.random.rand(4), numpy.random.rand(4)])
class T_CrossentropySoftmax1HotWithBiasDx(unittest.TestCase): class T_CrossentropySoftmax1HotWithBiasDx(utt.InferShapeTester):
def setUp(self):
utt.seed_rng()
def test0(self): def test0(self):
def f(sm): def f(sm):
return (theano.tensor.nnet.crossentropy_softmax_1hot_with_bias_dx( return (theano.tensor.nnet.crossentropy_softmax_1hot_with_bias_dx(
...@@ -186,6 +186,21 @@ class T_CrossentropySoftmax1HotWithBiasDx(unittest.TestCase): ...@@ -186,6 +186,21 @@ class T_CrossentropySoftmax1HotWithBiasDx(unittest.TestCase):
rng.randint(low=0, high=5, size=10))) rng.randint(low=0, high=5, size=10)))
utt.verify_grad(f, [rng.rand(10)]) utt.verify_grad(f, [rng.rand(10)])
def test_infer_shape(self):
    """Verify CrossentropySoftmax1HotWithBiasDx.infer_shape against the
    shape actually produced by the op (via _compile_and_check)."""
    dy = dvector()
    sm = dmatrix()
    y_idx = lvector()

    rng = numpy.random.RandomState(utt.fetch_seed())
    # Valid softmax output: rows are positive and normalized to sum to 1.
    sm_val = rng.rand(10, 5)
    sm_val /= sm_val.sum(axis=1).reshape(10, 1)
    dy_val = rng.rand(10)
    y_idx_val = rng.randint(low=0, high=5, size=10)

    self._compile_and_check(
        [dy, sm, y_idx],
        [CrossentropySoftmax1HotWithBiasDx()(dy, sm, y_idx)],
        [dy_val, sm_val, y_idx_val],
        CrossentropySoftmax1HotWithBiasDx)
class T_CrossentropySoftmaxArgmax1HotWithBias(unittest.TestCase): class T_CrossentropySoftmaxArgmax1HotWithBias(unittest.TestCase):
def setUp(self): def setUp(self):
utt.seed_rng() utt.seed_rng()
...@@ -1108,7 +1123,7 @@ class Test_softmax_opt: ...@@ -1108,7 +1123,7 @@ class Test_softmax_opt:
if __name__ == '__main__': if __name__ == '__main__':
t = T_SoftmaxGrad('setUp') t = T_CrossentropySoftmax1HotWithBiasDx('setUp')
t.setUp() t.setUp()
t.test_infer_shape() t.test_infer_shape()
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论