提交 7d3e5d67 authored 作者: Frederic Bastien's avatar Frederic Bastien

Fix a CUDA test bug by distinguishing between the presence of CUDA and whether CUDA is actually available.

- Side effect: the CUDA tests are no longer enabled by default; they will be enabled on the build bot. - Also, the use() function no longer has a default value for its device argument.
上级 daf4ff52
...@@ -18,16 +18,19 @@ def debug(*msg): ...@@ -18,16 +18,19 @@ def debug(*msg):
# Compile cuda_ndarray.cu # Compile cuda_ndarray.cu
# This need that nvcc (part of cuda) is installed. If it is not, a warning is # This need that nvcc (part of cuda) is installed. If it is not, a warning is
# printed and this module will not be working properly (we set `enable_cuda` # printed and this module will not be working properly (we set `cuda_available`
# to False). # to False).
# This variable is True by default, and set to False if something goes wrong # This variable is True by default, and set to False if something goes wrong
# when trying to initialize cuda. # when trying to initialize cuda.
enable_cuda = True cuda_available = True
# Global variable to avoid displaying the same warning multiple times. # Global variable to avoid displaying the same warning multiple times.
cuda_warning_is_displayed = False cuda_warning_is_displayed = False
#This variable is set to True when we enable the cuda.(i.e. when use() is called)
cuda_enabled = False
# Code factorized within a function so that it may be called from multiple # Code factorized within a function so that it may be called from multiple
# places (which is not currently the case, but may be useful in the future). # places (which is not currently the case, but may be useful in the future).
def set_cuda_disabled(): def set_cuda_disabled():
...@@ -38,8 +41,8 @@ def set_cuda_disabled(): ...@@ -38,8 +41,8 @@ def set_cuda_disabled():
Note that there is no point calling this function from outside of Note that there is no point calling this function from outside of
`cuda.__init__`, since it has no effect once the module is loaded. `cuda.__init__`, since it has no effect once the module is loaded.
""" """
global enable_cuda, cuda_warning_is_displayed global cuda_available, cuda_warning_is_displayed
enable_cuda = False cuda_available = False
if not cuda_warning_is_displayed: if not cuda_warning_is_displayed:
cuda_warning_is_displayed = True cuda_warning_is_displayed = True
warning('Cuda is disabled, cuda-based code will thus not be ' warning('Cuda is disabled, cuda-based code will thus not be '
...@@ -70,7 +73,7 @@ try: ...@@ -70,7 +73,7 @@ try:
if not nvcc_compiler.is_nvcc_available(): if not nvcc_compiler.is_nvcc_available():
set_cuda_disabled() set_cuda_disabled()
if enable_cuda: if cuda_available:
code = open(os.path.join(cuda_path, "cuda_ndarray.cu")).read() code = open(os.path.join(cuda_path, "cuda_ndarray.cu")).read()
if not os.path.exists(cuda_ndarray_loc): if not os.path.exists(cuda_ndarray_loc):
...@@ -84,7 +87,7 @@ except Exception, e: ...@@ -84,7 +87,7 @@ except Exception, e:
error( "Failed to compile cuda_ndarray.cu: %s" % str(e)) error( "Failed to compile cuda_ndarray.cu: %s" % str(e))
set_cuda_disabled() set_cuda_disabled()
if enable_cuda: if cuda_available:
#check if their is an old cuda_ndarray that was loading instead of the one we compiled! #check if their is an old cuda_ndarray that was loading instead of the one we compiled!
import cuda_ndarray.cuda_ndarray import cuda_ndarray.cuda_ndarray
if os.path.join(config.compiledir,'cuda_ndarray','cuda_ndarray.so')!=cuda_ndarray.cuda_ndarray.__file__: if os.path.join(config.compiledir,'cuda_ndarray','cuda_ndarray.so')!=cuda_ndarray.cuda_ndarray.__file__:
...@@ -104,7 +107,8 @@ if enable_cuda: ...@@ -104,7 +107,8 @@ if enable_cuda:
import cuda_ndarray import cuda_ndarray
def use(device=config.device): def use(device):
global cuda_enabled, enabled_cuda
if device.startswith('gpu'): if device.startswith('gpu'):
device = int(device[3:]) device = int(device[3:])
elif device == 'cpu': elif device == 'cpu':
...@@ -122,8 +126,10 @@ def use(device=config.device): ...@@ -122,8 +126,10 @@ def use(device=config.device):
gpu_init(device) gpu_init(device)
handle_shared_float32(True) handle_shared_float32(True)
use.device_number = device use.device_number = device
cuda_enabled = True
except RuntimeError, e: except RuntimeError, e:
_logger.warning("ERROR: Not using GPU. Initialisation of device %i failed. %s" %(device, e)) _logger.warning("ERROR: Not using GPU. Initialisation of device %i failed. %s" %(device, e))
enabled_cuda = False
elif use.device_number != device: elif use.device_number != device:
logging.getLogger('theano.sandbox.cuda').warning("WARNING: ignoring call to use(%s), GPU number %i is already in use." %(str(device), use.device_number)) logging.getLogger('theano.sandbox.cuda').warning("WARNING: ignoring call to use(%s), GPU number %i is already in use." %(str(device), use.device_number))
optdb.add_tags('gpu', optdb.add_tags('gpu',
...@@ -144,5 +150,6 @@ def handle_shared_float32(tf): ...@@ -144,5 +150,6 @@ def handle_shared_float32(tf):
else: else:
raise NotImplementedError('removing our handler') raise NotImplementedError('removing our handler')
if enable_cuda and config.device.startswith('gpu'): if cuda_available and config.device.startswith('gpu'):
use() use(config.device)
...@@ -11,7 +11,7 @@ import theano.tensor as T ...@@ -11,7 +11,7 @@ import theano.tensor as T
# Skip test if cuda_ndarray is not available. # Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest from nose.plugins.skip import SkipTest
import theano.sandbox.cuda as cuda_ndarray import theano.sandbox.cuda as cuda_ndarray
if cuda_ndarray.enable_cuda == False: if cuda_ndarray.cuda_enabled == False:
raise SkipTest('Optional package cuda disabled') raise SkipTest('Optional package cuda disabled')
import theano.sandbox.cuda as tcn import theano.sandbox.cuda as tcn
......
...@@ -270,7 +270,7 @@ def test_bench_elemwise(n_iter=1000, **kwargs): ...@@ -270,7 +270,7 @@ def test_bench_elemwise(n_iter=1000, **kwargs):
# Skip test if cuda_ndarray is not available. # Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest from nose.plugins.skip import SkipTest
import theano.sandbox.cuda as cuda_ndarray import theano.sandbox.cuda as cuda_ndarray
if cuda_ndarray.enable_cuda == False: if cuda_ndarray.cuda_enabled == False:
raise SkipTest('Optional package cuda disabled') raise SkipTest('Optional package cuda disabled')
import theano.sandbox.cuda import theano.sandbox.cuda
theano.sandbox.cuda.use() theano.sandbox.cuda.use()
......
...@@ -8,7 +8,7 @@ import numpy ...@@ -8,7 +8,7 @@ import numpy
# Skip test if cuda_ndarray is not available. # Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest from nose.plugins.skip import SkipTest
import theano.sandbox.cuda as cuda_ndarray import theano.sandbox.cuda as cuda_ndarray
if cuda_ndarray.enable_cuda == False: if cuda_ndarray.cuda_enabled == False:
raise SkipTest('Optional package cuda disabled') raise SkipTest('Optional package cuda disabled')
import theano.sandbox.cuda as tcn import theano.sandbox.cuda as tcn
......
...@@ -5,7 +5,7 @@ import theano ...@@ -5,7 +5,7 @@ import theano
# Skip test if cuda_ndarray is not available. # Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest from nose.plugins.skip import SkipTest
import theano.sandbox.cuda as cuda_ndarray import theano.sandbox.cuda as cuda_ndarray
if cuda_ndarray.enable_cuda == False: if cuda_ndarray.cuda_enabled == False:
raise SkipTest('Optional package cuda disabled') raise SkipTest('Optional package cuda disabled')
cuda_tensor4 = cuda_ndarray.CudaNdarrayType([False]*4) cuda_tensor4 = cuda_ndarray.CudaNdarrayType([False]*4)
......
...@@ -3,7 +3,7 @@ import theano ...@@ -3,7 +3,7 @@ import theano
import theano.sandbox.cuda as cuda_ndarray import theano.sandbox.cuda as cuda_ndarray
# Skip test if cuda_ndarray is not available. # Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest from nose.plugins.skip import SkipTest
if cuda_ndarray.enable_cuda == False: if cuda_ndarray.cuda_enabled == False:
raise SkipTest('Optional package cuda disabled') raise SkipTest('Optional package cuda disabled')
import numpy import numpy
......
...@@ -14,7 +14,7 @@ import numpy ...@@ -14,7 +14,7 @@ import numpy
# Skip test if cuda_ndarray is not available. # Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest from nose.plugins.skip import SkipTest
import theano.sandbox.cuda as cuda_ndarray import theano.sandbox.cuda as cuda_ndarray
if cuda_ndarray.enable_cuda == False: if cuda_ndarray.cuda_enabled == False:
raise SkipTest('Optional package cuda disabled') raise SkipTest('Optional package cuda disabled')
import theano.sandbox.cuda as tcn import theano.sandbox.cuda as tcn
......
...@@ -8,7 +8,7 @@ import numpy ...@@ -8,7 +8,7 @@ import numpy
# Skip test if cuda_ndarray is not available. # Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest from nose.plugins.skip import SkipTest
import theano.sandbox.cuda as cuda_ndarray import theano.sandbox.cuda as cuda_ndarray
if cuda_ndarray.enable_cuda == False: if cuda_ndarray.cuda_available == False:
raise SkipTest('Optional package cuda disabled') raise SkipTest('Optional package cuda disabled')
import theano.compile.mode import theano.compile.mode
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论