提交 c720cafa authored 作者: AdeB's avatar AdeB 提交者: Pascal Lamblin

Replace former stack asserts in nnet/tests/ by the helper function check_stack_trace

上级 9b320500
......@@ -7,6 +7,7 @@ from nose.tools import assert_raises
import theano
from theano import tensor
from theano.gof.opt import check_stack_trace
from theano.tests import unittest_tools as utt
from theano.tensor.nnet import corr, abstract_conv as conv
from theano.tensor.nnet.abstract_conv import get_conv_output_shape
......@@ -134,7 +135,7 @@ class BaseTestConv2d(unittest.TestCase):
assert any([isinstance(n.op, target_op) for n
in f.maker.fgraph.toposort()])
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
self.assertTrue(check_stack_trace(f, ops_to_check='all'))
res_ref = numpy.array(f_ref())
res = numpy.array(f())
utt.assert_allclose(res_ref, res)
......@@ -177,7 +178,7 @@ class BaseTestConv2d(unittest.TestCase):
subsample=subsample,
conv_mode=conv_mode)
f = theano.function([], c, mode=mode)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
self.assertTrue(check_stack_trace(f, ops_to_check='all'))
f_ref = theano.function([], c_ref, mode='FAST_RUN')
if target_op is not None:
......@@ -227,7 +228,7 @@ class BaseTestConv2d(unittest.TestCase):
border_mode=border_mode, subsample=subsample,
conv_mode=conv_mode)
f = theano.function([], c, mode=mode)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
self.assertTrue(check_stack_trace(f, ops_to_check='all'))
f_ref = theano.function([], c_ref, mode='FAST_RUN')
if target_op is not None:
......
......@@ -10,6 +10,7 @@ except ImportError:
from six.moves import xrange
import theano
from theano.gof.opt import check_stack_trace
from theano.tensor.nnet.conv3d2d import *
import theano.tests.unittest_tools as utt
......@@ -73,10 +74,10 @@ def pyconv3d(signals, filters):
r_i += o_i[Tf2:o_i_sh0-Tf2, Hf2:-Hf2, Wf2:-Wf2]
return rval
def check_diagonal_subtensor_view_traces(fn):
for apply_node in fn.maker.fgraph.apply_nodes:
if isinstance(apply_node.op, (DiagonalSubtensor, IncDiagonalSubtensor)):
assert hasattr(apply_node.outputs[0].tag, 'trace')
assert check_stack_trace(fn, [DiagonalSubtensor, IncDiagonalSubtensor])
def test_conv3d(mode=mode_without_gpu, shared=theano.tensor._shared):
if ndimage is None:
......
......@@ -10,6 +10,7 @@ from theano import config
from theano import tensor as T
from theano import tensor
from theano import gof
from theano.gof.opt import check_stack_trace
from theano.tests import unittest_tools as utt
from theano import printing
from theano.tensor.nnet import (categorical_crossentropy,
......@@ -150,8 +151,7 @@ class T_SoftmaxWithBias(utt.InferShapeTester):
b = theano.shared(numpy.float32(numpy.random.randn()))
sm = T.nnet.softmax(a + b)
f = theano.function([], sm)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
print('f.maker.fgraph.outputs[0]: {0}'.format(f.maker.fgraph.outputs[0], ))
assert check_stack_trace(f, ops_to_check='last')
def test_infer_shape(self):
admat = matrix()
......@@ -256,9 +256,10 @@ class T_LogSoftmax(utt.InferShapeTester):
sm = tensor.nnet.softmax(x)
logsm = tensor.log(sm)
f = theano.function([x], logsm)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
assert isinstance(f.maker.fgraph.outputs[0].owner.op,
theano.tensor.nnet.nnet.LogSoftmax)
assert check_stack_trace(
f, ops_to_check=theano.tensor.nnet.nnet.LogSoftmax)
def test_local_softmax_grad_optimization_and_big_input(self):
"""Test the Logsoftmax's grad substitution.
......@@ -272,7 +273,8 @@ class T_LogSoftmax(utt.InferShapeTester):
m.check_isfinite = False
# some inputs that are large to make the gradient explode in the non
# optimized case
a = numpy.exp(10 * numpy.random.rand(5, 10).astype(theano.config.floatX))
a = numpy.exp(
10 * numpy.random.rand(5, 10).astype(theano.config.floatX))
def myfunc(x):
sm = tensor.nnet.softmax(x)
......@@ -281,7 +283,7 @@ class T_LogSoftmax(utt.InferShapeTester):
# We set step to 0.1 because for big values we need a big epsilon
utt.verify_grad(myfunc, [a], eps=0.1, mode=m)
f = theano.function([], myfunc(a))
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
assert check_stack_trace(f, ops_to_check='last')
class T_SoftmaxGrad(utt.InferShapeTester):
......@@ -659,7 +661,7 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
fgraph = gof.FunctionGraph(
[x, one_of_n],
[g_x])
self.assertTrue(hasattr(fgraph.outputs[0].tag, 'trace'))
assert check_stack_trace(fgraph, ops_to_check='last')
# print 'BEFORE'
# for node in fgraph.toposort():
......@@ -755,7 +757,8 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
for expr in expressions:
# Verify the optimizer worked on the expressions
f = theano.function([x, y], expr, mode=mode)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
assert check_stack_trace(
f, ops_to_check=crossentropy_softmax_argmax_1hot_with_bias)
if verbose:
theano.printing.debugprint(f)
try:
......@@ -771,7 +774,9 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
# Also verify the gradient wrt x
g = theano.function([x, y], T.grad(expr, x), mode=mode)
self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
assert check_stack_trace(
g, ops_to_check=[crossentropy_softmax_1hot_with_bias_dx,
softmax_op])
if verbose:
theano.printing.debugprint(g)
try:
......@@ -794,7 +799,8 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
for expr in bias_expressions:
f = theano.function([x, b, y], expr, mode=mode)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
assert check_stack_trace(
f, ops_to_check=crossentropy_softmax_argmax_1hot_with_bias)
if verbose:
theano.printing.debugprint(f)
try:
......@@ -806,7 +812,9 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
theano.printing.debugprint(f)
raise
g = theano.function([x, b, y], T.grad(expr, x), mode=mode)
self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
assert check_stack_trace(
g, ops_to_check=[crossentropy_softmax_1hot_with_bias_dx,
softmax_with_bias])
if verbose:
theano.printing.debugprint(g)
try:
......@@ -829,7 +837,8 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
for expr in mean_expressions:
f = theano.function([x, y], expr, mode=mode)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
assert check_stack_trace(
f, ops_to_check=[crossentropy_softmax_argmax_1hot_with_bias])
if verbose:
theano.printing.debugprint(f)
try:
......@@ -844,7 +853,9 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
raise
g = theano.function([x, y], T.grad(expr, x), mode=mode)
self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
assert check_stack_trace(
g, ops_to_check=[crossentropy_softmax_1hot_with_bias_dx,
softmax_op])
if verbose:
theano.printing.debugprint(g)
try:
......@@ -868,7 +879,8 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
for expr in mean_bias_expressions:
f = theano.function([x, b, y], expr, mode=mode)
self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
assert check_stack_trace(
f, ops_to_check=crossentropy_softmax_argmax_1hot_with_bias)
if verbose:
theano.printing.debugprint(f)
try:
......@@ -881,7 +893,9 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
theano.printing.debugprint(f)
raise
g = theano.function([x, b, y], T.grad(expr, x), mode=mode)
self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
assert check_stack_trace(
g, ops_to_check=[crossentropy_softmax_1hot_with_bias_dx,
softmax_with_bias])
if verbose:
theano.printing.debugprint(g)
try:
......@@ -1295,7 +1309,7 @@ def test_argmax_pushdown():
fgraph = gof.FunctionGraph(
[x],
[out])
assert hasattr(fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(fgraph, ops_to_check='all')
backup = config.warn.argmax_pushdown_bug
config.warn.argmax_pushdown_bug = False
......
from __future__ import absolute_import, print_function, division
import theano
from theano import tensor
from theano.tensor.nnet.blocksparse import sparse_block_dot
from theano.gof.opt import check_stack_trace
from theano.tensor.nnet.blocksparse import sparse_block_dot, \
sparse_block_gemv_inplace, sparse_block_outer_inplace
def test_blocksparse_inplace_gemv_opt():
......@@ -14,7 +16,7 @@ def test_blocksparse_inplace_gemv_opt():
o = sparse_block_dot(W, h, iIdx, b, oIdx)
f = theano.function([W, h, iIdx, b, oIdx], o)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(f, ops_to_check=sparse_block_gemv_inplace)
if theano.config.mode == "FAST_COMPILE":
assert not f.maker.fgraph.toposort()[-1].op.inplace
......@@ -35,7 +37,7 @@ def test_blocksparse_inplace_outer_opt():
f = theano.function([W, h, iIdx, b, oIdx],
[o, tensor.grad(o.sum(), wrt=W)])
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(f, ops_to_check=sparse_block_outer_inplace)
if theano.config.mode == "FAST_COMPILE":
assert not f.maker.fgraph.toposort()[-1].op.inplace
......
......@@ -8,6 +8,7 @@ import theano.tensor.inplace
from theano.tensor import basic as tensor
from theano import tensor as T
from theano import config
from theano.gof.opt import check_stack_trace
from theano.tests import unittest_tools as utt
from theano.tensor.nnet import (sigmoid, sigmoid_inplace,
softplus, ultra_fast_sigmoid, hard_sigmoid)
......@@ -126,40 +127,35 @@ class T_sigmoid_opts(unittest.TestCase):
# tests inv_1_plus_exp
f = theano.function([x], T.fill(x, 1.0) / (1 + T.exp(-x)), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(f, ops_to_check=sigmoid)
assert [node.op for node in f.maker.fgraph.toposort()] == [sigmoid]
f(data)
f = theano.function([x], T.fill(x, 1.0) / (2 + T.exp(-x)), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid]
f(data)
f = theano.function([x], T.fill(x, 1.0) / (1 - T.exp(-x)), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid]
f(data)
f = theano.function([x], T.fill(x, 1.1) / (1 + T.exp(-x)), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid]
f(data)
# tests inv_1_plus_exp with neg
f = theano.function([x], T.fill(x, -1.0) / (1 + T.exp(-x)), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(
f, ops_to_check=[sigmoid, theano.tensor.inplace.neg_inplace])
assert [node.op for node in f.maker.fgraph.toposort()] == [sigmoid,
theano.tensor.inplace.neg_inplace]
f(data)
f = theano.function([x], T.fill(x, -1.0) / (1 - T.exp(-x)), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid,
theano.tensor.inplace.neg_inplace]
f(data)
f = theano.function([x], T.fill(x, -1.0) / (2 + T.exp(-x)), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid,
theano.tensor.inplace.neg_inplace]
f(data)
f = theano.function([x], T.fill(x, -1.1) / (1 + T.exp(-x)), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid,
theano.tensor.inplace.neg_inplace]
f(data)
......@@ -170,37 +166,32 @@ class T_sigmoid_opts(unittest.TestCase):
# = - (sigm(x) * sigm(x))
f = theano.function([x], (T.fill(x, -1.0) * T.exp(x)) /
((1 + T.exp(x)) * (1 + T.exp(-x))), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(f, ops_to_check=[sigmoid, T.mul])
assert [node.op for node in f.maker.fgraph.toposort()] == [sigmoid,
T.mul]
f(data)
f = theano.function([x], (T.fill(x, -1.1) * T.exp(x)) /
((1 + T.exp(x)) * (1 + T.exp(-x))), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid,
T.mul, theano.tensor.inplace.neg_inplace]
f(data)
f = theano.function([x], (T.fill(x, -1.0) * T.exp(x)) /
((2 + T.exp(x)) * (1 + T.exp(-x))), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid,
T.mul, theano.tensor.inplace.neg_inplace]
f(data)
f = theano.function([x], (T.fill(x, -1.0) * T.exp(x)) /
((1 + T.exp(x)) * (2 + T.exp(-x))), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid,
T.mul, theano.tensor.inplace.neg_inplace]
f(data)
f = theano.function([x], (T.fill(x, -1.0) * T.exp(x)) /
((1 + T.exp(x)) * (1 + T.exp(x))), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid,
T.mul, theano.tensor.inplace.neg_inplace]
f(data)
f = theano.function([x], (T.fill(x, -1.0) * T.exp(x)) /
((1 + T.exp(x)) * (2 + T.exp(-x))), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert [node.op for node in f.maker.fgraph.toposort()] != [sigmoid,
T.mul, theano.tensor.inplace.neg_inplace]
f(data)
......@@ -218,13 +209,13 @@ class T_sigmoid_opts(unittest.TestCase):
# tests exp_over_1_plus_exp
f = theano.function([x], 1 - T.exp(x) / (1 + T.exp(x)), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(f, ops_to_check=[tensor.neg, sigmoid_inplace])
assert [node.op for node in f.maker.fgraph.toposort()] == [
tensor.neg, sigmoid_inplace]
# tests inv_1_plus_exp
f = theano.function([x], 1 - T.fill(x, 1.0) / (1 + T.exp(-x)), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(f, ops_to_check=[tensor.neg, sigmoid_inplace])
assert [node.op for node in f.maker.fgraph.toposort()] == [tensor.neg,
sigmoid_inplace]
......@@ -237,19 +228,17 @@ class T_sigmoid_opts(unittest.TestCase):
def match(func, ops):
# print [node.op.scalar_op for node in func.maker.fgraph.toposort()]
assert [node.op for node in func.maker.fgraph.toposort()] == ops
assert check_stack_trace(f, ops_to_check=ops)
m = self.get_mode(excluding=['local_elemwise_fusion', 'inplace'])
x, y = tensor.vectors('x', 'y')
f = theano.function([x], sigmoid(-x) * tensor.exp(x), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
match(f, [sigmoid])
f = theano.function([x], sigmoid(x) * tensor.exp(-x), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
match(f, [tensor.neg, sigmoid])
f = theano.function([x], -(-(-(sigmoid(x)))) * tensor.exp(-x), mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
match(f, [tensor.neg, sigmoid, tensor.neg])
f = theano.function(
......@@ -257,7 +246,6 @@ class T_sigmoid_opts(unittest.TestCase):
(sigmoid(x) * sigmoid(-y) * -tensor.exp(-x) *
tensor.exp(x * y) * tensor.exp(y)),
mode=m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
match(f, [sigmoid, tensor.mul, tensor.neg, tensor.exp, sigmoid,
tensor.mul])
......@@ -328,14 +316,14 @@ class T_sigmoid_opts(unittest.TestCase):
mode = self.get_mode('local_ultra_fast_sigmoid')
f = theano.function([x], s, mode=mode)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(f, ops_to_check=sigmoid)
topo = f.maker.fgraph.toposort()
assert len(topo) == 1
assert topo[0].op == sigmoid
mode = self.get_mode().including('local_ultra_fast_sigmoid')
f = theano.function([x], s, mode=mode)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(f, ops_to_check=ultra_fast_sigmoid)
topo = f.maker.fgraph.toposort()
assert topo[0].op == ultra_fast_sigmoid
assert len(topo) == 1
......@@ -347,14 +335,14 @@ class T_sigmoid_opts(unittest.TestCase):
mode = self.get_mode('local_hard_sigmoid')
f = theano.function([x], s, mode=mode)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(f, ops_to_check=sigmoid)
topo = f.maker.fgraph.toposort()
assert topo[0].op == sigmoid
assert len(topo) == 1
mode = self.get_mode().including('local_hard_sigmoid')
f = theano.function([x], s, mode=mode)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(f, ops_to_check='all')
topo = f.maker.fgraph.toposort()
assert len(topo) > 1
assert not any([n.op == sigmoid for n in topo])
......@@ -377,13 +365,14 @@ class T_softplus_opts(unittest.TestCase):
out = T.log(sigmoid(x))
f = theano.function([x], out, mode=self.m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
ops_to_check = [theano.scalar.Neg,
theano.tensor.nnet.sigm.ScalarSoftplus,
theano.scalar.Neg]
assert check_stack_trace(f, ops_to_check=ops_to_check)
topo = f.maker.fgraph.toposort()
assert len(topo) == 3
assert isinstance(topo[0].op.scalar_op, theano.scalar.Neg)
assert isinstance(topo[1].op.scalar_op,
theano.tensor.nnet.sigm.ScalarSoftplus)
assert isinstance(topo[2].op.scalar_op, theano.scalar.Neg)
for i, op in enumerate(ops_to_check):
assert isinstance(topo[i].op.scalar_op, op)
f(numpy.random.rand(54).astype(config.floatX))
def test_log1msigm_to_softplus(self):
......@@ -401,7 +390,9 @@ class T_softplus_opts(unittest.TestCase):
# Same test with a flatten
out = T.log(1 - T.flatten(sigmoid(x)))
f = theano.function([x], out, mode=self.m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(
f, ops_to_check=[theano.tensor.nnet.sigm.ScalarSoftplus,
theano.scalar.Neg])
topo = f.maker.fgraph.toposort()
assert len(topo) == 3
assert tensor.is_flat(topo[0].outputs[0])
......@@ -430,7 +421,8 @@ class T_softplus_opts(unittest.TestCase):
out = T.log(1 + T.exp(x))
f = theano.function([x], out, mode=self.m)
assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
assert check_stack_trace(
f, ops_to_check=theano.tensor.nnet.sigm.ScalarSoftplus)
topo = f.maker.fgraph.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op.scalar_op,
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论