提交 03e7a048 authored 作者: Frederic's avatar Frederic

Remove the useless theano.tensor.blas_scipy.optimizations_enabled mechanism.

We should use optimization tags to enable or disable them. The old mechanism could also cause problems in tests: if a test raised an error, tearDown would not restore the flag, per James's comments.
上级 3a96775d
......@@ -264,14 +264,11 @@ class TestGpuGemv(TestCase, BaseGemv,
class TestGpuGer(TestGer):
def setUp(self):
self.mode = mode_with_gpu
self.mode = self.mode.excluding('c_blas')
dtype = self.dtype = 'float32' # optimization isn't dtype-dependent
self.A = tensor.tensor(dtype=dtype, broadcastable=(False, False))
self.a = tensor.tensor(dtype=dtype, broadcastable=())
self.x = tensor.tensor(dtype=dtype, broadcastable=(False,))
self.y = tensor.tensor(dtype=dtype, broadcastable=(False,))
self.origval = theano.tensor.blas_scipy.optimizations_enabled
theano.tensor.blas_scipy.optimizations_enabled = False
self.ger = gpu_ger_no_inplace
self.ger_destructive = gpu_ger_inplace
self.gemm = tcn.blas.gpu_gemm_no_inplace
......
......@@ -23,10 +23,9 @@ try:
numpy.dtype('complex64'):scipy.linalg.blas.fblas.cgeru,
numpy.dtype('complex128'):scipy.linalg.blas.fblas.zgeru,
}
optimizations_enabled = True
except ImportError, e:
have_fblas = False
optimizations_enabled = False
class ScipyGer(Ger):
......@@ -62,13 +61,11 @@ class ScipyGer(Ger):
@local_optimizer([ger, ger_destructive])
def use_scipy_ger(node):
    """Replace a plain `ger` node with the non-destructive SciPy version.

    Returns a one-element replacement list, or None when the SciPy
    optimizations are disabled or the node is not a plain `ger`.
    """
    if optimizations_enabled and node.op == ger:
        return [ScipyGer(False)(*node.inputs)]
@local_optimizer([ScipyGer(False)])
def make_ger_destructive(node):
    """Swap a non-destructive ScipyGer node for its in-place counterpart."""
    # Guard clauses: bail out (returning None) unless the SciPy
    # optimizations are on and this really is the non-destructive op.
    if not optimizations_enabled:
        return
    if node.op != ScipyGer(False):
        return
    return [ScipyGer(True)(*node.inputs)]
......
......@@ -1284,21 +1284,16 @@ class TestGer(TestCase, unittest_tools.TestOptimizationMixin):
def setUp(self):
    """Build symbolic float64 inputs and a BLAS-free compilation mode.

    The mode excludes both the C and SciPy BLAS optimizations so the
    tests exercise the generic Ger/Gemm implementations.
    """
    self.mode = theano.compile.get_default_mode().including('fast_run')
    # One excluding() call covers both BLAS backends; the former
    # intermediate excluding('c_blas') assignment was dead code
    # (immediately overwritten by this line).
    self.mode = self.mode.excluding('c_blas', 'scipy_blas')
    dtype = self.dtype = 'float64'  # optimization isn't dtype-dependent
    self.A = T.tensor(dtype=dtype, broadcastable=(False, False))
    self.a = T.tensor(dtype=dtype, broadcastable=())
    self.x = T.tensor(dtype=dtype, broadcastable=(False,))
    self.y = T.tensor(dtype=dtype, broadcastable=(False,))
    # Save and clear the module-level SciPy flag; tearDown restores it.
    # NOTE(review): this flag looks redundant with excluding('scipy_blas')
    # above -- confirm whether it can be dropped together with tearDown.
    self.origval = theano.tensor.blas_scipy.optimizations_enabled
    theano.tensor.blas_scipy.optimizations_enabled = False
    self.ger = ger
    self.ger_destructive = ger_destructive
    self.gemm = gemm_no_inplace
def tearDown(self):
    """Restore the module-level SciPy-BLAS flag saved in setUp."""
    theano.tensor.blas_scipy.optimizations_enabled = self.origval
def function(self, inputs, outputs, updates=None):
    """Compile ``inputs -> outputs`` under this test's BLAS-free mode.

    ``updates`` defaults to None instead of a shared mutable ``{}``
    (mutable default arguments are a classic Python pitfall); an
    empty dict is substituted per call, preserving the old behavior.
    """
    if updates is None:
        updates = {}
    return theano.function(inputs, outputs, self.mode, updates=updates)
......
......@@ -21,7 +21,7 @@ class TestScipyGer(TestCase, TestOptimizationMixin):
self.Aval = numpy.ones((2,3), dtype=dtype)
self.xval = numpy.asarray([1,2], dtype=dtype)
self.yval = numpy.asarray([1.5,2.7,3.9], dtype=dtype)
if not theano.tensor.blas_scipy.optimizations_enabled:
if not theano.tensor.blas_scipy.have_fblas:
self.SkipTest()
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论