提交 8cd8e5c2 authored 作者: Frederic Bastien's avatar Frederic Bastien

Add more Theano flags as not being in the c key.

上级 cff49dcb
......@@ -42,6 +42,7 @@ AddConfigVar('floatX',
"Note: float16 support is experimental, use at your own risk.",
EnumStr('float64', 'float32', 'float16',
convert=floatX_convert,),
in_c_key=False
)
AddConfigVar('warn_float64',
......@@ -343,11 +344,15 @@ def default_dnn_path(suffix):
AddConfigVar('dnn.include_path',
"Location of the cudnn header (defaults to the cuda root)",
StrParam(default_dnn_path('include')))
StrParam(default_dnn_path('include')),
# Added elsewhere in the c key only when needed.
in_c_key=False)
AddConfigVar('dnn.library_path',
"Location of the cudnn header (defaults to the cuda root)",
StrParam(default_dnn_path('lib' if sys.platform == 'darwin' else 'lib64')))
StrParam(default_dnn_path('lib' if sys.platform == 'darwin' else 'lib64')),
# Added elsewhere in the c key only when needed.
in_c_key=False)
AddConfigVar('dnn.enabled',
"'auto', use cuDNN if available, but silently fall back"
......@@ -543,7 +548,9 @@ AddConfigVar(
AddConfigVar(
'lib.amdlibm',
"Use amd's amdlibm numerical library",
BoolParam(False))
BoolParam(False),
# Added elsewhere in the c key only when needed.
in_c_key=False)
AddConfigVar(
'gpuelemwise.sync',
......@@ -863,7 +870,8 @@ AddConfigVar(
"any optimized graph and its optimization. Actually slow downs a lot "
"the first optimization, and could possibly still contains some bugs. "
"Use at your own risks.",
BoolParam(False))
BoolParam(False),
in_c_key=False)
def good_seed_param(seed):
......@@ -1072,7 +1080,9 @@ AddConfigVar('optdb.max_use_ratio',
AddConfigVar('gcc.cxxflags',
"Extra compiler flags for gcc",
StrParam(""))
StrParam(""),
# Added elsewhere in the c key only when needed.
in_c_key=False)
AddConfigVar(
'cmodule.mac_framework_link',
......@@ -1096,7 +1106,8 @@ AddConfigVar('cmodule.remove_gxx_opt',
AddConfigVar('cmodule.compilation_warning',
"If True, will print compilation warnings.",
BoolParam(False))
BoolParam(False),
in_c_key=False)
AddConfigVar('cmodule.preload_cache',
......@@ -1315,7 +1326,9 @@ def try_blas_flag(flags):
AddConfigVar('blas.ldflags',
"lib[s] to include for [Fortran] level-3 blas implementation",
StrParam(default_blas_ldflags))
StrParam(default_blas_ldflags),
# Added elsewhere in the c key only when needed.
in_c_key=False)
AddConfigVar(
'metaopt.verbose',
......@@ -1399,12 +1412,14 @@ AddConfigVar(
AddConfigVar('scan.allow_gc',
"Allow/disallow gc inside of Scan (default: False)",
BoolParam(False))
BoolParam(False),
in_c_key=False)
AddConfigVar('scan.allow_output_prealloc',
"Allow/disallow memory preallocation for outputs inside of scan "
"(default: True)",
BoolParam(True))
BoolParam(True),
in_c_key=False)
AddConfigVar('pycuda.init',
"""If True, always initialize PyCUDA when Theano want to
......@@ -1419,7 +1434,9 @@ AddConfigVar('pycuda.init',
AddConfigVar('cublas.lib',
"""Name of the cuda blas library for the linker.""",
StrParam('cublas'))
StrParam('cublas'),
# Added elsewhere in the c key only when needed.
in_c_key=False)
AddConfigVar('lib.cnmem',
"""Do we enable CNMeM or not (a faster CUDA memory allocator).
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论