Commit 08ca46e2 authored by James Bergstra, committed by Frederic

disable CBLAS optimizations for scipy blas tests

Parent: fe3fedb0
@@ -444,13 +444,13 @@ def make_c_gemv_destructive(node):
 # Optimizers
 #######
-blas_optdb.register('c_blas',
+blas_optdb.register('use_c_blas',
         EquilibriumOptimizer([
             use_c_ger,
             use_c_gemv,
             ],
             max_use_ratio=5),
-        20, 'fast_run')
+        20, 'fast_run', 'c_blas')
 print 'BLAS_OPTDB'
 print blas_optdb
@@ -461,4 +461,4 @@ optdb.register('c_blas_destructive',
             make_c_gemv_destructive,
             ],
             max_use_ratio=5),
-        70.0, 'fast_run', 'inplace')
+        70.0, 'fast_run', 'inplace', 'c_blas')
@@ -9,7 +9,9 @@ from test_blas import TestCase, TestOptimizationMixin, gemm_no_inplace
 class TestScipyGer(TestCase, TestOptimizationMixin):
     def setUp(self):
-        self.mode = theano.compile.get_default_mode().including('fast_run')
+        self.mode = theano.compile.get_default_mode()
+        self.mode = self.mode.including('fast_run')
+        self.mode = self.mode.excluding('c_blas')  # c_blas trumps scipy Ops
         dtype = self.dtype = 'float64'  # optimization isn't dtype-dependent
         self.A = tensor.tensor(dtype=dtype, broadcastable=(False, False))
         self.a = tensor.tensor(dtype=dtype, broadcastable=())
@@ -21,6 +23,7 @@ class TestScipyGer(TestCase, TestOptimizationMixin):
         if not theano.tensor.blas_scipy.optimizations_enabled:
             self.SkipTest()
     def function(self, inputs, outputs):
         return theano.function(inputs, outputs, self.mode)
(NOTE: this hunk adds one line per its header; the added line is not identifiable from the collapsed rendering — all visible lines are unchanged context.)
......
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Register or sign in to comment