提交 68137d93 authored 作者: Olivier Delalleau's avatar Olivier Delalleau

Added an option to config variables to specify that they do not affect C code

This avoids adding extra keys into the cache whenever these options change.
上级 49e6f8c3
...@@ -35,7 +35,8 @@ AddConfigVar('device', ...@@ -35,7 +35,8 @@ AddConfigVar('device',
'gpu4', 'gpu5', 'gpu6', 'gpu7', 'gpu4', 'gpu5', 'gpu6', 'gpu7',
'gpu8', 'gpu9', 'gpu10', 'gpu11', 'gpu8', 'gpu9', 'gpu10', 'gpu11',
'gpu12', 'gpu13', 'gpu14', 'gpu15', 'gpu12', 'gpu13', 'gpu14', 'gpu15',
allow_override=False) allow_override=False),
in_c_key=False,
) )
AddConfigVar('init_gpu_device', AddConfigVar('init_gpu_device',
...@@ -48,13 +49,13 @@ AddConfigVar('init_gpu_device', ...@@ -48,13 +49,13 @@ AddConfigVar('init_gpu_device',
'gpu4', 'gpu5', 'gpu6', 'gpu7', 'gpu4', 'gpu5', 'gpu6', 'gpu7',
'gpu8', 'gpu9', 'gpu10', 'gpu11', 'gpu8', 'gpu9', 'gpu10', 'gpu11',
'gpu12', 'gpu13', 'gpu14', 'gpu15', 'gpu12', 'gpu13', 'gpu14', 'gpu15',
allow_override=False) allow_override=False),
) in_c_key=False)
AddConfigVar('force_device', AddConfigVar('force_device',
"Raise an error if we can't use the specified device", "Raise an error if we can't use the specified device",
BoolParam(False, allow_override=False) BoolParam(False, allow_override=False),
) in_c_key=False)
#Don't add FAST_RUN_NOGC to this list(as well as other ALL CAPS short cut) #Don't add FAST_RUN_NOGC to this list(as well as other ALL CAPS short cut)
#The way to get FAST_RUN_NOGC is with the flag 'linker=c|py_nogc' #The way to get FAST_RUN_NOGC is with the flag 'linker=c|py_nogc'
...@@ -62,7 +63,8 @@ AddConfigVar('force_device', ...@@ -62,7 +63,8 @@ AddConfigVar('force_device',
AddConfigVar('mode', AddConfigVar('mode',
"Default compilation mode", "Default compilation mode",
EnumStr('Mode', 'ProfileMode', 'DebugMode', 'FAST_RUN', EnumStr('Mode', 'ProfileMode', 'DebugMode', 'FAST_RUN',
'FAST_COMPILE', 'PROFILE_MODE', 'DEBUG_MODE')) 'FAST_COMPILE', 'PROFILE_MODE', 'DEBUG_MODE'),
in_c_key=False)
# Test whether or not gcc is present: disable C code if it is not # Test whether or not gcc is present: disable C code if it is not
try: try:
...@@ -70,12 +72,14 @@ try: ...@@ -70,12 +72,14 @@ try:
# Keep the default linker the same as the one for the mode FAST_RUN # Keep the default linker the same as the one for the mode FAST_RUN
AddConfigVar('linker', AddConfigVar('linker',
"Default linker used if the theano flags mode is Mode or ProfileMode", "Default linker used if the theano flags mode is Mode or ProfileMode",
EnumStr('c|py', 'py', 'c', 'c|py_nogc', 'c&py')) EnumStr('c|py', 'py', 'c', 'c|py_nogc', 'c&py'),
in_c_key=False)
except OSError: except OSError:
# gcc is not present, linker should default to python only # gcc is not present, linker should default to python only
AddConfigVar('linker', AddConfigVar('linker',
"Default linker used if the theano flags mode is Mode or ProfileMode", "Default linker used if the theano flags mode is Mode or ProfileMode",
EnumStr('py', 'c|py', 'c', 'c|py_nogc', 'c&py')) EnumStr('py', 'c|py', 'c', 'c|py_nogc', 'c&py'),
in_c_key=False)
warning('GCC not detected ! Theano will be unable to execute optimized '+ warning('GCC not detected ! Theano will be unable to execute optimized '+
'C-implementations (for both CPU and GPU) and will default to '+ 'C-implementations (for both CPU and GPU) and will default to '+
'Python implementations. Performance will be severely degraded.') 'Python implementations. Performance will be severely degraded.')
...@@ -83,32 +87,39 @@ except OSError: ...@@ -83,32 +87,39 @@ except OSError:
#Keep the default optimizer the same as the one for the mode FAST_RUN #Keep the default optimizer the same as the one for the mode FAST_RUN
AddConfigVar('optimizer', AddConfigVar('optimizer',
"Default optimizer. If not None, will use this linker with the Mode object(not ProfileMode or DebugMode)", "Default optimizer. If not None, will use this linker with the Mode object(not ProfileMode or DebugMode)",
EnumStr('fast_run', 'merge', 'fast_compile', 'None')) EnumStr('fast_run', 'merge', 'fast_compile', 'None'),
in_c_key=False)
AddConfigVar('on_opt_error', AddConfigVar('on_opt_error',
"What to do when an optimization crashes: warn and skip it, or raise the exception", "What to do when an optimization crashes: warn and skip it, or raise the exception",
EnumStr('warn', 'raise')) EnumStr('warn', 'raise'),
in_c_key=False)
AddConfigVar('home', AddConfigVar('home',
"User home directory", "User home directory",
StrParam(os.getenv("HOME", os.path.expanduser('~')))) StrParam(os.getenv("HOME", os.path.expanduser('~'))),
in_c_key=False)
#This expanduser works on windows (see discussion on theano-users, July 13 2010) #This expanduser works on windows (see discussion on theano-users, July 13 2010)
AddConfigVar('nocleanup', AddConfigVar('nocleanup',
"Suppress the deletion of code files that did not compile cleanly", "Suppress the deletion of code files that did not compile cleanly",
BoolParam(False)) BoolParam(False),
in_c_key=False)
AddConfigVar('tensor.cmp_sloppy', AddConfigVar('tensor.cmp_sloppy',
"Relax tensor._allclose (0) not at all, (1) a bit, (2) more", "Relax tensor._allclose (0) not at all, (1) a bit, (2) more",
IntParam(0, lambda i: i in (0,1,2))) IntParam(0, lambda i: i in (0,1,2)),
in_c_key=False)
AddConfigVar('tensor.local_elemwise_fusion', AddConfigVar('tensor.local_elemwise_fusion',
"Enable or not in fast_run mode(fast_run optimization) the elemwise fusion optimization", "Enable or not in fast_run mode(fast_run optimization) the elemwise fusion optimization",
BoolParam(True)) BoolParam(True),
in_c_key=False)
AddConfigVar('gpu.local_elemwise_fusion', AddConfigVar('gpu.local_elemwise_fusion',
"Enable or not in fast_run mode(fast_run optimization) the gpu elemwise fusion optimization", "Enable or not in fast_run mode(fast_run optimization) the gpu elemwise fusion optimization",
BoolParam(True)) BoolParam(True),
in_c_key=False)
#http://developer.amd.com/CPU/LIBRARIES/LIBM/Pages/default.aspx #http://developer.amd.com/CPU/LIBRARIES/LIBM/Pages/default.aspx
AddConfigVar('lib.amdlibm', AddConfigVar('lib.amdlibm',
...@@ -145,41 +156,47 @@ AddConfigVar('numpy.seterr_all', ...@@ -145,41 +156,47 @@ AddConfigVar('numpy.seterr_all',
"by the following flags: seterr_divide, seterr_over, " "by the following flags: seterr_divide, seterr_over, "
"seterr_under and seterr_invalid."), "seterr_under and seterr_invalid."),
EnumStr('ignore', 'warn', 'raise', 'call', 'print', 'log', 'None', EnumStr('ignore', 'warn', 'raise', 'call', 'print', 'log', 'None',
allow_override=False)) allow_override=False),
in_c_key=False)
AddConfigVar('numpy.seterr_divide', AddConfigVar('numpy.seterr_divide',
("Sets numpy's behavior for division by zero, see numpy.seterr. " ("Sets numpy's behavior for division by zero, see numpy.seterr. "
"'None' means using the default, defined by numpy.seterr_all."), "'None' means using the default, defined by numpy.seterr_all."),
EnumStr('None', 'ignore', 'warn', 'raise', 'call', 'print', 'log', EnumStr('None', 'ignore', 'warn', 'raise', 'call', 'print', 'log',
allow_override=False)) allow_override=False),
in_c_key=False)
AddConfigVar('numpy.seterr_over', AddConfigVar('numpy.seterr_over',
("Sets numpy's behavior for floating-point overflow, " ("Sets numpy's behavior for floating-point overflow, "
"see numpy.seterr. " "see numpy.seterr. "
"'None' means using the default, defined by numpy.seterr_all."), "'None' means using the default, defined by numpy.seterr_all."),
EnumStr('None', 'ignore', 'warn', 'raise', 'call', 'print', 'log', EnumStr('None', 'ignore', 'warn', 'raise', 'call', 'print', 'log',
allow_override=False)) allow_override=False),
in_c_key=False)
AddConfigVar('numpy.seterr_under', AddConfigVar('numpy.seterr_under',
("Sets numpy's behavior for floating-point underflow, " ("Sets numpy's behavior for floating-point underflow, "
"see numpy.seterr. " "see numpy.seterr. "
"'None' means using the default, defined by numpy.seterr_all."), "'None' means using the default, defined by numpy.seterr_all."),
EnumStr('None', 'ignore', 'warn', 'raise', 'call', 'print', 'log', EnumStr('None', 'ignore', 'warn', 'raise', 'call', 'print', 'log',
allow_override=False)) allow_override=False),
in_c_key=False)
AddConfigVar('numpy.seterr_invalid', AddConfigVar('numpy.seterr_invalid',
("Sets numpy's behavior for invalid floating-point operation, " ("Sets numpy's behavior for invalid floating-point operation, "
"see numpy.seterr. " "see numpy.seterr. "
"'None' means using the default, defined by numpy.seterr_all."), "'None' means using the default, defined by numpy.seterr_all."),
EnumStr('None', 'ignore', 'warn', 'raise', 'call', 'print', 'log', EnumStr('None', 'ignore', 'warn', 'raise', 'call', 'print', 'log',
allow_override=False)) allow_override=False),
in_c_key=False)
### ###
### To disable some warning about old bug that are fixed now. ### To disable some warning about old bug that are fixed now.
### ###
AddConfigVar('warn.ignore_bug_before', AddConfigVar('warn.ignore_bug_before',
"If 'None', we warn about all Theano bugs found by default. If 'all', we don't warn about Theano bugs found by default. If a version, we print only the warnings relative to Theano bugs found after that version. Warning for specific bugs can be configured with specific [warn] flags.", "If 'None', we warn about all Theano bugs found by default. If 'all', we don't warn about Theano bugs found by default. If a version, we print only the warnings relative to Theano bugs found after that version. Warning for specific bugs can be configured with specific [warn] flags.",
EnumStr('None', 'all', '0.3', allow_override=False)) EnumStr('None', 'all', '0.3', allow_override=False),
in_c_key=False)
default_0_3 = True default_0_3 = True
if config.warn.ignore_bug_before == 'None': if config.warn.ignore_bug_before == 'None':
...@@ -192,16 +209,20 @@ elif config.warn.ignore_bug_before >= '0.3': ...@@ -192,16 +209,20 @@ elif config.warn.ignore_bug_before >= '0.3':
AddConfigVar('warn.argmax_pushdown_bug', AddConfigVar('warn.argmax_pushdown_bug',
"Warn if in past version of Theano we generated a bug with the optimisation theano.tensor.nnet.nnet.local_argmax_pushdown optimization. Was fixed 27 may 2010", "Warn if in past version of Theano we generated a bug with the optimisation theano.tensor.nnet.nnet.local_argmax_pushdown optimization. Was fixed 27 may 2010",
BoolParam(default_0_3)) BoolParam(default_0_3),
in_c_key=False)
AddConfigVar('warn.gpusum_01_011_0111_bug', AddConfigVar('warn.gpusum_01_011_0111_bug',
"Warn if we are in a case where old version of Theano had a silent bug with GpuSum pattern 01,011 and 0111 when the first dimensions was bigger then 4096. Was fixed 31 may 2010", "Warn if we are in a case where old version of Theano had a silent bug with GpuSum pattern 01,011 and 0111 when the first dimensions was bigger then 4096. Was fixed 31 may 2010",
BoolParam(default_0_3)) BoolParam(default_0_3),
in_c_key=False)
AddConfigVar('warn.sum_sum_bug', AddConfigVar('warn.sum_sum_bug',
"Warn if we are in a case where Theano version between version 9923a40c7b7a and the 2 august 2010(fixed date), generated an error in that case. This happen when their is 2 consecutive sum in the graph, bad code was generated. Was fixed 2 August 2010", "Warn if we are in a case where Theano version between version 9923a40c7b7a and the 2 august 2010(fixed date), generated an error in that case. This happen when their is 2 consecutive sum in the graph, bad code was generated. Was fixed 2 August 2010",
BoolParam(default_0_3)) BoolParam(default_0_3),
in_c_key=False)
AddConfigVar('warn.sum_div_dimshuffle_bug', AddConfigVar('warn.sum_div_dimshuffle_bug',
"Warn if previous versions of Theano (between rev. 3bd9b789f5e8, 2010-06-16, and cfc6322e5ad4, 2010-08-03) would have given incorrect result. This bug was triggered by sum of division of dimshuffled tensors.", "Warn if previous versions of Theano (between rev. 3bd9b789f5e8, 2010-06-16, and cfc6322e5ad4, 2010-08-03) would have given incorrect result. This bug was triggered by sum of division of dimshuffled tensors.",
BoolParam(default_0_3)) BoolParam(default_0_3),
in_c_key=False)
...@@ -111,9 +111,13 @@ def get_config_md5(): ...@@ -111,9 +111,13 @@ def get_config_md5():
Return a string md5 of the current config options. It should be such that Return a string md5 of the current config options. It should be such that
we can safely assume that two different config setups will lead to two we can safely assume that two different config setups will lead to two
different strings. different strings.
We only take into account config options for which `in_c_key` is True.
""" """
all_opts = sorted(_config_var_list, key=lambda cv: cv.fullname) all_opts = sorted([c for c in _config_var_list if c.in_c_key],
return theano.gof.cc.hash_from_code('\n'.join(['%s = %s' % (cv.fullname, cv.val) for cv in all_opts])) key=lambda cv: cv.fullname)
return theano.gof.cc.hash_from_code('\n'.join(
['%s = %s' % (cv.fullname, cv.val) for cv in all_opts]))
class TheanoConfigParser(object): class TheanoConfigParser(object):
...@@ -138,17 +142,27 @@ config = TheanoConfigParser() ...@@ -138,17 +142,27 @@ config = TheanoConfigParser()
# - The subtrees provide the same interface as the root # - The subtrees provide the same interface as the root
# - ConfigParser subclasses control get/set of config properties to guard against craziness. # - ConfigParser subclasses control get/set of config properties to guard against craziness.
def AddConfigVar(name, doc, configparam, root=config): def AddConfigVar(name, doc, configparam, root=config, in_c_key=True):
"""Add a new variable to theano.config """Add a new variable to theano.config
:type name: string for form "[section0.[section1.[etc]]].option" :type name: string for form "[section0.[section1.[etc]]].option"
:param name: the full name for this configuration variable. :param name: the full name for this configuration variable.
:type doc: string :type doc: string
:param doc: What does this variable specify? :param doc: What does this variable specify?
:type configparam: ConfigParam instance :type configparam: ConfigParam instance
:param configparam: an object for getting and setting this configuration parameter :param configparam: an object for getting and setting this configuration parameter
:type root: object :type root: object
:param root: used for recursive calls -- don't provide an argument for this parameter. :param root: used for recursive calls -- do not provide an argument for this parameter.
:type in_c_key: boolean
:param in_c_key: If True, then whenever this config option changes, the
key associated to compiled C modules also changes, i.e. it may trigger a
compilation of these modules (this compilation will only be partial if it
turns out that the generated C code is unchanged). Set this option to False
only if you are confident this option should not affect C code compilation.
:returns: None :returns: None
""" """
...@@ -169,11 +183,13 @@ def AddConfigVar(name, doc, configparam, root=config): ...@@ -169,11 +183,13 @@ def AddConfigVar(name, doc, configparam, root=config):
newroot = getattr(root, sections[0]) newroot = getattr(root, sections[0])
if not getattr(newroot, '_i_am_a_config_class', False) or isinstance(newroot, type): if not getattr(newroot, '_i_am_a_config_class', False) or isinstance(newroot, type):
raise TypeError('Internal config nodes must be config class instances', newroot) raise TypeError('Internal config nodes must be config class instances', newroot)
return AddConfigVar('.'.join(sections[1:]), doc, configparam, root=newroot) return AddConfigVar('.'.join(sections[1:]), doc, configparam,
root=newroot, in_c_key=in_c_key)
else: else:
if hasattr(root, name): if hasattr(root, name):
raise AttributeError('This name is already taken', configparam.fullname) raise AttributeError('This name is already taken', configparam.fullname)
configparam.doc = doc configparam.doc = doc
configparam.in_c_key = in_c_key
configparam.__get__() # trigger a read of the value from config files and env vars configparam.__get__() # trigger a read of the value from config files and env vars
setattr(root.__class__, sections[0], configparam) setattr(root.__class__, sections[0], configparam)
_config_var_list.append(configparam) _config_var_list.append(configparam)
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论