提交 d6f4ec71 authored 作者: Pascal Lamblin's avatar Pascal Lamblin

Remove other config variables from the config md5 used by cmodule

上级 fb21c2b5
......@@ -10,13 +10,16 @@ _logger = logging.getLogger('theano.compile.mode')
AddConfigVar('optimizer_excluding',
"When using the default mode, we will remove optimizer with that tag. Separate many tags with ':'.",
StrParam("", allow_override=False))
StrParam("", allow_override=False),
in_c_key=False)
AddConfigVar('optimizer_including',
"When using the default mode, we will add optimizer with that tag. Separate many tags with ':'.",
StrParam("", allow_override=False))
StrParam("", allow_override=False),
in_c_key=False)
AddConfigVar('optimizer_requiring',
"When using the default mode, we will require optimizer with that tag. Separate many tags with ':'.",
StrParam("", allow_override=False))
StrParam("", allow_override=False),
in_c_key=False)
def check_equal(x, y):
"""
......
......@@ -12,20 +12,24 @@ import_time = time.time()
AddConfigVar('ProfileMode.n_apply_to_print',
"Number of apply instances to print by default",
IntParam(15, lambda i: i > 0))
IntParam(15, lambda i: i > 0),
in_c_key=False)
AddConfigVar('ProfileMode.n_ops_to_print',
"Number of ops to print by default",
IntParam(20, lambda i: i > 0))
IntParam(20, lambda i: i > 0),
in_c_key=False)
AddConfigVar('ProfileMode.min_memory_size',
"""For the memory profile, do not print apply nodes if the size
of their outputs (in bytes) is lower then this threshold""",
IntParam(1024, lambda i: i >= 0))
IntParam(1024, lambda i: i >= 0),
in_c_key=False)
AddConfigVar('ProfileMode.profile_memory',
"""Enable profiling of memory used by Theano functions""",
BoolParam(False))
BoolParam(False),
in_c_key=False)
class Profile_Maker(FunctionMaker):
def create(self, input_storage=None, trustme=False):
......
......@@ -26,7 +26,8 @@ AddConfigVar('shared.value_borrows',
" variables, but use the .get_value() and .set_value() methods."
" False: shared variables 'value' property is guaranteed to not"
" alias theano-managed memory. True: no guarantee, but faster."),
BoolParam(True))
BoolParam(True),
in_c_key=False)
class SharedVariable(Variable):
"""
......
......@@ -32,7 +32,7 @@ AddConfigVar('int_division',
"What to do when one computes x / y, where both x and y are of "
"integer types",
EnumStr('int', 'raise', 'floatX'),
)
in_c_key=False)
#gpu mean let the driver select the gpu. Needed in case of gpu in exclusive mode.
#gpuX mean use the gpu number X.
......@@ -136,7 +136,8 @@ AddConfigVar('lib.amdlibm',
AddConfigVar('op.set_flops',
"currently used only in ConvOp. The profile mode will print the flops/s for the op.",
BoolParam(False))
BoolParam(False),
in_c_key=False)
AddConfigVar('nvcc.fastmath',
"",
......@@ -148,7 +149,8 @@ AddConfigVar('gpuelemwise.sync',
AddConfigVar('traceback.limit',
"The number of stack to trace. -1 mean all.",
IntParam(5))
IntParam(5),
in_c_key=False)
AddConfigVar('experimental.mrg',
"Another random number generator that work on the gpu",
......
......@@ -26,7 +26,8 @@ _logger = logging.getLogger('theano.gof.opt')
AddConfigVar('time_seq_optimizer',
"Should SeqOptimizer print the time taked by each of its optimizer",
BoolParam(False))
BoolParam(False),
in_c_key=False)
from theano.gof import deque
import destroyhandler as dh
......
......@@ -10,8 +10,8 @@ from theano.configparser import TheanoConfigParser, AddConfigVar, FloatParam
from theano import config
AddConfigVar('optdb.position_cutoff',
'Where to stop eariler during optimization. It represent the position of the optimizer where to stop.',
FloatParam(float('inf'))
)
FloatParam(float('inf')),
in_c_key=False)
class DB(object):
def __hash__(self):
......
......@@ -104,7 +104,8 @@ def broadcast_like(value, template, env, dtype=None):
theano.configparser.AddConfigVar('tensor.insert_inplace_optimizer_validate_nb',
"-1: auto, if graph have less then 500 nodes 1, else 10",
theano.configparser.IntParam(-1))
theano.configparser.IntParam(-1),
in_c_key=False)
def insert_inplace_optimizer_op(OP):
"""
......@@ -1103,12 +1104,12 @@ def local_alloc_elemwise(node):
theano.configparser.AddConfigVar('experimental.local_alloc_elemwise',
"If True enable the experimental optimization local_alloc_elemwise",
theano.configparser.BoolParam(False),
)
in_c_key=False)
#This version if faster but not as save.
theano.configparser.AddConfigVar('experimental.local_alloc_elemwise_assert',
"If False enable the experimental optimization local_alloc_elemwise but WITHOUT assert into the graph!",
theano.configparser.BoolParam(True),
)
in_c_key=False)
if theano.config.experimental.local_alloc_elemwise:
#enabled by default when the lifter of assert is done.
register_specialize(local_alloc_elemwise)
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论