Commit 93b50c70 authored by Vincent Michalski

added unit tests checking for stack traces for nnet.py optimizations

Parent 8ed70923
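For context, a minimal standalone sketch (not part of this commit; the softmax example and variable names are illustrative) of the pattern the new assertions follow: compiling a Theano function runs the nnet graph optimizations, which may replace output variables, and each replacement output's tag should still carry a 'trace' attribute pointing back to the user code that built the original graph.

import numpy
import theano
import theano.tensor.nnet  # make tensor.nnet available
from theano import tensor, config

x = tensor.matrix('x')
sm = tensor.nnet.softmax(x)
f = theano.function([x], sm)       # compilation triggers the optimizations
out = f.maker.fgraph.outputs[0]    # possibly a variable introduced by an optimization
assert hasattr(out.tag, 'trace')   # the stack trace should have been carried over
f(numpy.random.rand(2, 3).astype(config.floatX))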
@@ -139,6 +139,14 @@ class T_SoftmaxWithBias(utt.InferShapeTester):
f([0, 1, 0])
# print f.maker.fgraph.toposort()
def test_softmax_with_bias_trace(self):
a = theano.shared(
numpy.random.randn(3).astype(config.floatX))
b = theano.shared(numpy.float32(numpy.random.randn()))
sm = T.softmax(a + b)
f = theano.function([], sm)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
def test_infer_shape(self):
admat = matrix()
advec = vector()
@@ -242,6 +250,7 @@ class T_LogSoftmax(utt.InferShapeTester):
sm = tensor.nnet.softmax(x)
logsm = tensor.log(sm)
f = theano.function([x], logsm)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
assert isinstance(f.maker.fgraph.outputs[0].owner.op,
theano.tensor.nnet.nnet.LogSoftmax)
@@ -265,6 +274,8 @@ class T_LogSoftmax(utt.InferShapeTester):
return logsm
# We set step to 0.1 because for big values we need a big epsilon
utt.verify_grad(myfunc, [a], eps=0.1, mode=m)
f = theano.function([], myfunc(a))
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
class T_SoftmaxGrad(utt.InferShapeTester):
@@ -642,6 +653,7 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
fgraph = gof.FunctionGraph(
[x, one_of_n],
[g_x])
self.assertTrue(hasattr(fgraph.outputs[0].tag, 'trace'))
# print 'BEFORE'
# for node in fgraph.toposort():
@@ -737,6 +749,7 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
for expr in expressions:
# Verify the optimizer worked on the expressions
f = theano.function([x, y], expr, mode=mode)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
if verbose:
theano.printing.debugprint(f)
try:
@@ -752,6 +765,7 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
# Also verify the gradient wrt x
g = theano.function([x, y], T.grad(expr, x), mode=mode)
self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
if verbose:
theano.printing.debugprint(g)
try:
@@ -774,6 +788,7 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
for expr in bias_expressions:
f = theano.function([x, b, y], expr, mode=mode)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
if verbose:
theano.printing.debugprint(f)
try:
@@ -785,6 +800,7 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
theano.printing.debugprint(f)
raise
g = theano.function([x, b, y], T.grad(expr, x), mode=mode)
self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
if verbose:
theano.printing.debugprint(g)
try:
@@ -807,6 +823,7 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
for expr in mean_expressions:
f = theano.function([x, y], expr, mode=mode)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
if verbose:
theano.printing.debugprint(f)
try:
@@ -821,6 +838,7 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
raise
g = theano.function([x, y], T.grad(expr, x), mode=mode)
self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
if verbose:
theano.printing.debugprint(g)
try:
@@ -844,6 +862,7 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
for expr in mean_bias_expressions:
f = theano.function([x, b, y], expr, mode=mode)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
if verbose:
theano.printing.debugprint(f)
try:
@@ -856,6 +875,7 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
theano.printing.debugprint(f)
raise
g = theano.function([x, b, y], T.grad(expr, x), mode=mode)
self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
if verbose:
theano.printing.debugprint(g)
try:
@@ -1269,6 +1289,7 @@ def test_argmax_pushdown():
fgraph = gof.FunctionGraph(
[x],
[out])
assert hasattr(fgraph.outputs[0].tag, 'trace')
backup = config.warn.argmax_pushdown_bug
config.warn.argmax_pushdown_bug = False
@@ -1621,7 +1642,6 @@ def test_h_softmax():
#############
x_mat = numpy.random.normal(size=(batch_size, input_size)).astype(floatX)
y_mat = numpy.random.randint(0, output_size, batch_size).astype('int32')
tg_output = fun_output_tg(x_mat, y_mat)
all_outputs = fun_output(x_mat)
...