提交 b4b80693 authored 作者: Thomas Mesnard's avatar Thomas Mesnard

Try to fix Travis problems

上级 ac8b48d0
......@@ -10,6 +10,7 @@ from theano.compile import function
from theano.compile import UnusedInputError
from theano.gof import MissingInputError
from theano.compat import exc_message
from theano.tests.unittest_tools import SkipTest
from theano import tensor
from theano import tensor as T
......@@ -41,7 +42,7 @@ class T_function(unittest.TestCase):
rval = fn()
if rval == []:
raise SkipTest("Not yet implemented")
#raise KnownFailureTest('See #254: Using None as function output leads to [] return value')
# See #254: Using None as function output leads to [] return value
else:
assert rval is None
......
......@@ -71,7 +71,6 @@ def may_fail(msg, EClass):
except Exception as e:
if isinstance(e, EClass):
raise SkipTest("Not yet implemented")
# raise KnownFailureTest(msg, e)
raise
wrapper.__name__ = f.__name__
return wrapper
......
......@@ -15,6 +15,7 @@ import theano.sandbox.rng_mrg
from theano import tensor
from theano.compile.pfunc import rebuild_collect_shared
from theano.tests import unittest_tools as utt
from theano.tests.unittest_tools import SkipTest
from numpy.testing.noseclasses import KnownFailureTest
from test_utils import *
......@@ -473,8 +474,8 @@ class TestScan(unittest.TestCase):
# error is marked as KnownFailure
raise SkipTest("Not yet implemented")
#raise KnownFailureTest('Work-in-progress sandbox ScanOp is not fully '
# 'functional yet')
# Work-in-progress sandbox ScanOp is not fully
# functional yet
def f_pow2(x_tm1):
    """One scan step of the recurrence x_t = 2 * x_{t-1}: double the previous state."""
    doubled = x_tm1 * 2
    return doubled
......@@ -510,9 +511,9 @@ class TestScan(unittest.TestCase):
# place (even when told not to by DebugMode). As this op will change
# soon, and it is in the sandbox and not for user consumption, the
# error is marked as KnownFailure
raise KnownFailureTest('Work-in-progress sandbox ScanOp is not fully '
'functional yet')
raise SkipTest("Not yet implemented")
# Work-in-progress sandbox ScanOp is not fully
# functional yet
def f_rnn(u_t, x_tm1, W_in, W):
    """One step of a simple linear RNN.

    Combines the weighted current input with the weighted previous
    state: x_t = u_t * W_in + x_{t-1} * W.
    """
    # Keep operand order as in the original expression.
    input_part = u_t * W_in
    recurrent_part = x_tm1 * W
    return input_part + recurrent_part
......
......@@ -3274,9 +3274,8 @@ class T_Scan(unittest.TestCase):
def test_alloc_inputs2(self):
raise SkipTest("Not yet implemented")
# raise KnownFailureTest((
# "This tests depends on an optimization for scan "
# "that has not been implemented yet."))
# This test depends on an optimization for scan
# that has not been implemented yet.
W1 = tensor.matrix()
W2 = tensor.matrix()
h0 = tensor.vector()
......@@ -3504,8 +3503,8 @@ class T_Scan(unittest.TestCase):
def test_infershape_seq_shorter_nsteps(self):
raise SkipTest("Not yet implemented")
# raise KnownFailureTest('This is a generic problem with infershape'
# ' that has to be discussed and figured out')
# This is a generic problem with infershape
# that has to be discussed and figured out
x = tensor.vector('x')
[o1, o2], _ = theano.scan(lambda x, y: (x + 1, y + x),
sequences=x,
......
......@@ -6123,9 +6123,7 @@ class test_arithmetic_cast(unittest.TestCase):
# in progress), so in the meantime we just
# mark this test as a known failure.
raise SkipTest("Not yet implemented")
# raise KnownFailureTest('Known issue with '
# 'numpy >= 1.6.x see #761')
# Known issue with numpy >= 1.6.x, see #761
# In any other situation: something wrong is
# going on!
assert False
......
......@@ -1559,8 +1559,8 @@ def test_log_add():
assert numpy.allclose(f([10000], [10000]), 20000)
except AssertionError:
raise SkipTest("Not yet implemented")
# raise KnownFailureTest(('log(add(exp)) is not stabilized when adding '
# 'more than 2 elements, see #623'))
# log(add(exp)) is not stabilized when adding
# more than 2 elements, see #623
# TODO: test that the optimization works in the presence of broadcasting.
......@@ -4027,9 +4027,8 @@ def test_constant_get_stabilized():
except (AssertionError, theano.compile.debugmode.InvalidValueError):
raise SkipTest("Not yet implemented")
# raise KnownFailureTest((
# "Theano optimizes constant before stabilization. "
# "This breaks stabilization optimization in some cases. See #504."))
# Theano optimizes constant before stabilization.
# This breaks stabilization optimization in some cases. See #504.
class T_local_switch_sink(unittest.TestCase):
......@@ -4287,8 +4286,7 @@ class T_local_erfc(unittest.TestCase):
# TODO: fix this problem
if theano.config.floatX == "float32" and theano.config.mode in ["DebugMode", "DEBUG_MODE"]:
raise SkipTest("Not yet implemented")
# raise KnownFailureTest(
# "the python code upcast somewhere internally some value of float32 to python float for part of its computation. That make that the c and python code don't generate the same value. You can ignore this error.")
# The Python code internally upcasts some float32 values to Python floats for part of its computation. That makes the C and Python code generate different values. You can ignore this error.
assert all(numpy.isfinite(f(val)))
def test_local_grad_log_erfc_neg(self):
......
......@@ -21,6 +21,7 @@ import numpy
from theano.gof import Op, Apply
from theano.gradient import grad_undefined
from numpy.testing.noseclasses import KnownFailureTest
from theano.tests.unittest_tools import SkipTest
from theano.tensor.signal.downsample import DownsampleFactorMax
from theano.tensor.nnet import conv
......@@ -191,9 +192,9 @@ class RopLop_checker(unittest.TestCase):
if known_fail:
raise SkipTest("Not yet implemented")
# raise KnownFailureTest("Rop doesn't handle non-differentiable "
# "inputs correctly. Bug exposed by fixing Add.grad"
# " method.")
# Rop doesn't handle non-differentiable
# inputs correctly. Bug exposed by fixing Add.grad
# method.
class test_RopLop(RopLop_checker):
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论