提交 dcdd6cce 作者: Pascal Lamblin

Move test definition at the right place (not in the middle of another one)

mq is nice, but sometimes it screws up where the patch gets applied...
上级 fd54945c
...@@ -480,13 +480,6 @@ class T_CrossentropyCategorical1Hot(unittest.TestCase): ...@@ -480,13 +480,6 @@ class T_CrossentropyCategorical1Hot(unittest.TestCase):
b = T.dvector('b') b = T.dvector('b')
y = T.lvector('y') y = T.lvector('y')
def print_graph(func):
    # Debug helper: dump the optimized (post-compilation) graph of a
    # compiled Theano function, one line per apply node in topological
    # order.
    for i, node in enumerate(func.maker.env.toposort()):
        print i, node
    # NOTE: `i` and `node` deliberately leak out of the loop (Python
    # scoping): after the loop they refer to the final apply node.
    # Last node should be the output
    print i, pprint(node.outputs[0])
    print
## Basic case ## Basic case
expressions = [ expressions = [
T.sum(-T.log(softmax(x)[T.arange(y.shape[0]), y])), T.sum(-T.log(softmax(x)[T.arange(y.shape[0]), y])),
...@@ -495,56 +488,13 @@ class T_CrossentropyCategorical1Hot(unittest.TestCase): ...@@ -495,56 +488,13 @@ class T_CrossentropyCategorical1Hot(unittest.TestCase):
T.sum(-T.log(softmax(x))[T.arange(y.shape[0]), y]) T.sum(-T.log(softmax(x))[T.arange(y.shape[0]), y])
] ]
for expr in expressions:
# Verify the optimizer worked on the expressions
f = theano.function([x,y], expr, mode=mode)
if verbose: print_graph(f)
try:
assert len(f.maker.env.toposort()) == 4
f(x_val, y_val)
except:
theano.printing.debugprint(f)
raise
# Also verify the gradient wrt x
g = theano.function([x,y], T.grad(expr, x), mode=mode)
if verbose: print_graph(g)
try:
assert len(g.maker.env.toposort()) == 4
g(x_val, y_val)
except:
theano.printing.debugprint(g)
raise
def test_xent_thing_int32(self):
verbose = 0
mode = theano.compile.mode.get_default_mode()
if mode == theano.compile.mode.get_mode('FAST_COMPILE'):
mode = 'FAST_RUN'
rng = numpy.random.RandomState(utt.fetch_seed())
x_val = rng.randn(3,5)
b_val = rng.randn(5)
y_val = numpy.asarray([2,4,1], dtype='int64')
x = T.dmatrix('x')
b = T.dvector('b')
y = T.lvector('y')
yi = T.cast(y, 'int32')
expressions = [
T.sum(-T.log(softmax(x)[T.arange(yi.shape[0]), yi])),
-T.sum(T.log(softmax(x)[T.arange(yi.shape[0]), yi])),
-T.sum(T.log(softmax(x))[T.arange(yi.shape[0]), yi]),
T.sum(-T.log(softmax(x))[T.arange(yi.shape[0]), yi])
]
for expr in expressions: for expr in expressions:
# Verify the optimizer worked on the expressions # Verify the optimizer worked on the expressions
f = theano.function([x,y], expr, mode=mode) f = theano.function([x,y], expr, mode=mode)
if verbose: if verbose:
theano.printing.debugprint(f) theano.printing.debugprint(f)
try: try:
assert len(f.maker.env.toposort()) == 5 assert len(f.maker.env.toposort()) == 4
f(x_val, y_val) f(x_val, y_val)
except: except:
theano.printing.debugprint(f) theano.printing.debugprint(f)
...@@ -555,13 +505,12 @@ class T_CrossentropyCategorical1Hot(unittest.TestCase): ...@@ -555,13 +505,12 @@ class T_CrossentropyCategorical1Hot(unittest.TestCase):
if verbose: if verbose:
theano.printing.debugprint(g) theano.printing.debugprint(g)
try: try:
assert len(g.maker.env.toposort()) == 5 assert len(g.maker.env.toposort()) == 4
g(x_val, y_val) g(x_val, y_val)
except: except:
theano.printing.debugprint(g) theano.printing.debugprint(g)
raise raise
## Test that a biased softmax is optimized correctly ## Test that a biased softmax is optimized correctly
bias_expressions = [ bias_expressions = [
T.sum(-T.log(softmax(x+b)[T.arange(y.shape[0]), y])), T.sum(-T.log(softmax(x+b)[T.arange(y.shape[0]), y])),
...@@ -645,6 +594,52 @@ class T_CrossentropyCategorical1Hot(unittest.TestCase): ...@@ -645,6 +594,52 @@ class T_CrossentropyCategorical1Hot(unittest.TestCase):
theano.printing.debugprint(g) theano.printing.debugprint(g)
raise raise
def test_xent_thing_int32(self):
verbose = 0
mode = theano.compile.mode.get_default_mode()
if mode == theano.compile.mode.get_mode('FAST_COMPILE'):
mode = 'FAST_RUN'
rng = numpy.random.RandomState(utt.fetch_seed())
x_val = rng.randn(3,5)
b_val = rng.randn(5)
y_val = numpy.asarray([2,4,1], dtype='int64')
x = T.dmatrix('x')
b = T.dvector('b')
y = T.lvector('y')
yi = T.cast(y, 'int32')
expressions = [
T.sum(-T.log(softmax(x)[T.arange(yi.shape[0]), yi])),
-T.sum(T.log(softmax(x)[T.arange(yi.shape[0]), yi])),
-T.sum(T.log(softmax(x))[T.arange(yi.shape[0]), yi]),
T.sum(-T.log(softmax(x))[T.arange(yi.shape[0]), yi])
]
for expr in expressions:
# Verify the optimizer worked on the expressions
f = theano.function([x,y], expr, mode=mode)
if verbose:
theano.printing.debugprint(f)
try:
assert len(f.maker.env.toposort()) == 5
f(x_val, y_val)
except:
theano.printing.debugprint(f)
raise
# Also verify the gradient wrt x
g = theano.function([x,y], T.grad(expr, x), mode=mode)
if verbose:
theano.printing.debugprint(g)
try:
assert len(g.maker.env.toposort()) == 5
g(x_val, y_val)
except:
theano.printing.debugprint(g)
raise
def test_optimize_xent_vector(self): def test_optimize_xent_vector(self):
verbose = 0 verbose = 0
mode = theano.compile.mode.get_default_mode() mode = theano.compile.mode.get_default_mode()
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论