提交 afd11ed2 authored 作者: Frédéric Bastien's avatar Frédéric Bastien 提交者: GitHub

Merge pull request #5682 from nouiz/dnn_enabled

dnn.enabled=no_check
...@@ -30,7 +30,7 @@ For Ubuntu 16.04 with cuda 7.5 ...@@ -30,7 +30,7 @@ For Ubuntu 16.04 with cuda 7.5
.. code-block:: bash .. code-block:: bash
sudo apt-get install python-numpy python-scipy python-dev python-pip python-nose g++ libopenblas-dev git sudo apt-get install python-numpy python-scipy python-dev python-pip python-nose g++ libopenblas-dev git graphviz
sudo pip install Theano sudo pip install Theano
# CUDA 7.5 doesn't support the default g++ version. Install a supported version and make it the default.
......
...@@ -692,6 +692,8 @@ import theano and print the config variable, as in: ...@@ -692,6 +692,8 @@ import theano and print the config variable, as in:
If ``'False'``, do not use cuDNN or check if it is available. If ``'False'``, do not use cuDNN or check if it is available.
If ``'no_check'``, assume cuDNN is present and that the header and library versions match (so less compilation at context init).
.. attribute:: config.conv.assert_shape .. attribute:: config.conv.assert_shape
If True, AbstractConv* ops will verify that user-provided If True, AbstractConv* ops will verify that user-provided
......
...@@ -670,7 +670,7 @@ class Function(object): ...@@ -670,7 +670,7 @@ class Function(object):
if name: if name:
message = name message = name
else: else:
message = str(maker.profile.message) + " copy" message = str(profile.message) + " copy"
profile = theano.compile.profiling.ProfileStats(message=message) profile = theano.compile.profiling.ProfileStats(message=message)
# profile -> object # profile -> object
elif type(profile) == str: elif type(profile) == str:
......
...@@ -414,8 +414,9 @@ AddConfigVar('dnn.enabled', ...@@ -414,8 +414,9 @@ AddConfigVar('dnn.enabled',
"'auto', use cuDNN if available, but silently fall back" "'auto', use cuDNN if available, but silently fall back"
" to not using it if not present." " to not using it if not present."
" If True and cuDNN can not be used, raise an error." " If True and cuDNN can not be used, raise an error."
" If False, disable cudnn", " If False, disable cudnn even if present."
EnumStr("auto", "True", "False"), " If no_check, assume present and the version between header and library match (so less compilation at context init)",
EnumStr("auto", "True", "False", "no_check"),
in_c_key=False) in_c_key=False)
# This flag determines whether or not to raise error/warning message if # This flag determines whether or not to raise error/warning message if
......
...@@ -152,7 +152,10 @@ def dnn_present(): ...@@ -152,7 +152,10 @@ def dnn_present():
dnn_present.avail = False dnn_present.avail = False
return False return False
dnn_present.avail, dnn_present.msg = _dnn_check_compile() if config.dnn.enabled == "no_check":
dnn_present.avail, dnn_present.msg = True, "presence check disabled by dnn.enabled flag"
else:
dnn_present.avail, dnn_present.msg = _dnn_check_compile()
if dnn_present.avail: if dnn_present.avail:
dnn_present.avail, dnn_present.msg = _dnn_check_version() dnn_present.avail, dnn_present.msg = _dnn_check_version()
if not dnn_present.avail: if not dnn_present.avail:
......
...@@ -282,6 +282,8 @@ def dnn_available(): ...@@ -282,6 +282,8 @@ def dnn_available():
if dnn_available.avail is None and not cuda_available: if dnn_available.avail is None and not cuda_available:
dnn_available.msg = "CUDA not available" dnn_available.msg = "CUDA not available"
dnn_available.avail = False dnn_available.avail = False
elif config.dnn.enabled == "no_check":
raise RuntimeError("The old gpu back-end does not support the flag dnn.enabled=no_check")
elif dnn_available.avail is None: elif dnn_available.avail is None:
dev = active_device_number() dev = active_device_number()
if device_properties(dev)['major'] < 3: if device_properties(dev)['major'] < 3:
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论