Commit db2e6901 authored by Pascal Lamblin

Yet more pep8 fixes.

Parent 9ac04ab4
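Every change in this commit is a PEP 8 cleanup: wrapping help strings past 79 characters with implicitly concatenated string literals, adding spaces after commas and `#`, dropping imports that appear unused, and fixing blank lines. The wrapping pattern, as a standalone sketch (the variable name `help_text` is hypothetical, the string is taken from the diff below):

```python
# Adjacent string literals inside parentheses are concatenated at compile
# time, so a long message can be split across source lines without
# changing its value or adding any runtime cost.
help_text = ("Default linker used if the theano flags mode is Mode "
             "or ProfileMode")
assert help_text == ("Default linker used if the theano flags mode"
                     " is Mode or ProfileMode")
```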
@@ -95,7 +95,6 @@ import scalar
 #import sparse
 import gradient
 from gradient import Rop, Lop, grad
-import gof
 if config.device.startswith('gpu') or config.init_gpu_device.startswith('gpu'):
     import theano.sandbox.cuda
...
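The removed `import gof` appears redundant at this point in the module (presumably the reason a pep8 pass dropped it). The surviving guard shows the pattern of importing an optional backend only when the configuration asks for it. A minimal standalone sketch of that pattern, with a hypothetical `_Config` stub standing in for `theano.config`:

```python
# Hypothetical stand-in for theano.config, just to make the sketch runnable.
class _Config(object):
    device = 'cpu'
    init_gpu_device = ''

config = _Config()

# Import the CUDA backend only when a gpu device is requested, so CPU-only
# installs never pay the import cost (or hit an ImportError).
if config.device.startswith('gpu') or config.init_gpu_device.startswith('gpu'):
    import theano.sandbox.cuda  # side-effect import; needs Theano with CUDA
```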
 import os
 import logging
 import subprocess
-import sys
 from theano.configparser import (
-    AddConfigVar, BoolParam, ConfigParam, EnumStr, IntParam, FloatParam,
-    StrParam, TheanoConfigParser)
+    AddConfigVar, BoolParam, ConfigParam, EnumStr, IntParam,
+    TheanoConfigParser)
 
 _logger = logging.getLogger('theano.configdefaults')
@@ -41,10 +40,13 @@ AddConfigVar('int_division',
         EnumStr('int', 'raise', 'floatX'),
         in_c_key=False)
 
-#gpu mean let the driver select the gpu. Needed in case of gpu in exclusive mode.
-#gpuX mean use the gpu number X.
+# gpu means let the driver select the gpu. Needed in case of gpu in
+# exclusive mode.
+# gpuX mean use the gpu number X.
 AddConfigVar('device',
-        "Default device for computations. If gpu*, change the default to try to move computation to it and to put shared variable of float32 on it.",
+        ("Default device for computations. If gpu*, change the default to try "
+         "to move computation to it and to put shared variable of float32 "
+         "on it."),
         EnumStr('cpu', 'gpu',
                 'gpu0', 'gpu1', 'gpu2', 'gpu3',
                 'gpu4', 'gpu5', 'gpu6', 'gpu7',
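One detail of the wrapping above is easy to get wrong: the trailing space inside each fragment. Implicit concatenation inserts nothing between the literals, so dropping that space silently glues words together. A quick illustration (hypothetical snippet, not part of the commit):

```python
good = ("change the default to try "
        "to move computation")
bad = ("change the default to try"   # missing trailing space...
       "to move computation")
assert good == "change the default to try to move computation"
assert bad == "change the default to tryto move computation"  # ...no error
```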
@@ -93,14 +95,16 @@ try:
                          stdin=dummy_stdin.fileno())
     # Keep the default linker the same as the one for the mode FAST_RUN
     AddConfigVar('linker',
-            "Default linker used if the theano flags mode is Mode or ProfileMode",
+            ("Default linker used if the theano flags mode is Mode "
+             "or ProfileMode"),
             EnumStr('c|py', 'py', 'c', 'c|py_nogc', 'c&py',
                     'vm', 'cvm', 'vm_nogc', 'cvm_nogc'),
             in_c_key=False)
 except OSError:
     # g++ is not present, linker should default to python only
     AddConfigVar('linker',
-            "Default linker used if the theano flags mode is Mode or ProfileMode",
+            ("Default linker used if the theano flags mode is Mode "
+             "or ProfileMode"),
             EnumStr('py', 'c|py', 'c', 'c|py_nogc', 'c&py',
                     'vm', 'cvm', 'vm_nogc', 'cvm_nogc'),
             in_c_key=False)
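The two branches differ only in which linker comes first, which suggests the first value passed to EnumStr serves as the default: 'c|py' when a g++ subprocess can be launched, 'py' when the launch raises OSError. A hedged standalone sketch of that probe (simplified; the real code around this hunk also manages a dummy stdin):

```python
import subprocess

def gxx_available():
    """Return True if a g++ binary can be launched, False otherwise."""
    try:
        p = subprocess.Popen(['g++', '--version'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        p.communicate()
        return True
    except OSError:
        # The executable was not found; C code cannot be compiled.
        return False

# Mirrors the default-ordering logic implied by the diff above.
default_linker = 'c|py' if gxx_available() else 'py'
```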
@@ -113,12 +117,14 @@ del dummy_stdin
 
 #Keep the default optimizer the same as the one for the mode FAST_RUN
 AddConfigVar('optimizer',
-        "Default optimizer. If not None, will use this linker with the Mode object(not ProfileMode or DebugMode)",
+        ("Default optimizer. If not None, will use this linker with the Mode "
+         "object (not ProfileMode or DebugMode)"),
         EnumStr('fast_run', 'merge', 'fast_compile', 'None'),
         in_c_key=False)
 
 AddConfigVar('on_opt_error',
-        "What to do when an optimization crashes: warn and skip it, or raise the exception",
+        ("What to do when an optimization crashes: warn and skip it, or raise "
+         "the exception"),
         EnumStr('warn', 'raise'),
         in_c_key=False)
@@ -160,16 +166,18 @@ AddConfigVar('nocleanup',
 # changed at runtime.
 AddConfigVar('tensor.cmp_sloppy',
         "Relax tensor._allclose (0) not at all, (1) a bit, (2) more",
-        IntParam(0, lambda i: i in (0,1,2), allow_override=False),
+        IntParam(0, lambda i: i in (0, 1, 2), allow_override=False),
         in_c_key=False)
 
 AddConfigVar('tensor.local_elemwise_fusion',
-        "Enable or not in fast_run mode(fast_run optimization) the elemwise fusion optimization",
+        ("Enable or not in fast_run mode(fast_run optimization) the elemwise "
+         "fusion optimization"),
         BoolParam(True),
         in_c_key=False)
 
 AddConfigVar('gpu.local_elemwise_fusion',
-        "Enable or not in fast_run mode(fast_run optimization) the gpu elemwise fusion optimization",
+        ("Enable or not in fast_run mode(fast_run optimization) the gpu "
+         "elemwise fusion optimization"),
         BoolParam(True),
         in_c_key=False)
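The `IntParam(0, lambda i: i in (0, 1, 2), allow_override=False)` call above pairs a default value with a validity predicate and a freeze switch. A toy stand-in sketching the assumed semantics (this is not Theano's actual IntParam implementation):

```python
class IntParamSketch(object):
    """Toy stand-in for theano.configparser.IntParam (assumed behavior)."""
    def __init__(self, default, is_valid=lambda i: True, allow_override=True):
        self.is_valid = is_valid
        self.allow_override = allow_override
        self.value = None
        self._set(default)

    def _set(self, value):
        value = int(value)
        if not self.is_valid(value):
            raise ValueError('invalid value: %r' % value)
        if self.value is not None and not self.allow_override:
            raise AttributeError('this parameter cannot be overridden')
        self.value = value

p = IntParamSketch(0, lambda i: i in (0, 1, 2), allow_override=False)
print(p.value)  # 0
try:
    p._set(2)
except AttributeError as e:
    print(e)  # allow_override=False rejects any later assignment
```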
@@ -179,7 +187,8 @@ AddConfigVar('lib.amdlibm',
         BoolParam(False))
 
 AddConfigVar('op.set_flops',
-        "currently used only in ConvOp. The profile mode will print the flops/s for the op.",
+        ("currently used only in ConvOp. The profile mode will print the "
+         "flops/s for the op."),
         BoolParam(False),
         in_c_key=False)
@@ -244,8 +253,14 @@ AddConfigVar('numpy.seterr_invalid',
 ### To disable some warning about old bug that are fixed now.
 ###
 AddConfigVar('warn.ignore_bug_before',
-        "If 'None', we warn about all Theano bugs found by default. If 'all', we don't warn about Theano bugs found by default. If a version, we print only the warnings relative to Theano bugs found after that version. Warning for specific bugs can be configured with specific [warn] flags.",
-        EnumStr('None', 'all', '0.3','0.4', '0.4.1', '0.5', allow_override=False),
+        ("If 'None', we warn about all Theano bugs found by default. "
+         "If 'all', we don't warn about Theano bugs found by default. "
+         "If a version, we print only the warnings relative to Theano "
+         "bugs found after that version. "
+         "Warning for specific bugs can be configured with specific "
+         "[warn] flags."),
+        EnumStr('None', 'all', '0.3', '0.4', '0.4.1', '0.5',
+                allow_override=False),
         in_c_key=False)
@@ -263,34 +278,48 @@ def warn_default(version):
 
 AddConfigVar('warn.argmax_pushdown_bug',
-        "Warn if in past version of Theano we generated a bug with the theano.tensor.nnet.nnet.local_argmax_pushdown optimization. Was fixed 27 may 2010",
+        ("Warn if in past version of Theano we generated a bug with the "
+         "theano.tensor.nnet.nnet.local_argmax_pushdown optimization. "
+         "Was fixed 27 may 2010"),
         BoolParam(warn_default('0.3')),
         in_c_key=False)
 
 AddConfigVar('warn.gpusum_01_011_0111_bug',
-        "Warn if we are in a case where old version of Theano had a silent bug with GpuSum pattern 01,011 and 0111 when the first dimensions was bigger then 4096. Was fixed 31 may 2010",
+        ("Warn if we are in a case where old version of Theano had a "
+         "silent bug with GpuSum pattern 01,011 and 0111 when the first "
+         "dimensions was bigger then 4096. Was fixed 31 may 2010"),
         BoolParam(warn_default('0.3')),
         in_c_key=False)
 
 AddConfigVar('warn.sum_sum_bug',
-        "Warn if we are in a case where Theano version between version 9923a40c7b7a and the 2 august 2010(fixed date), generated an error in that case. This happen when their is 2 consecutive sum in the graph, bad code was generated. Was fixed 2 August 2010",
+        ("Warn if we are in a case where Theano version between version "
+         "9923a40c7b7a and the 2 august 2010 (fixed date), generated an "
+         "error in that case. This happens when there are 2 consecutive "
+         "sums in the graph, bad code was generated. "
+         "Was fixed 2 August 2010"),
         BoolParam(warn_default('0.3')),
         in_c_key=False)
 
 AddConfigVar('warn.sum_div_dimshuffle_bug',
-        "Warn if previous versions of Theano (between rev. 3bd9b789f5e8, 2010-06-16, and cfc6322e5ad4, 2010-08-03) would have given incorrect result. This bug was triggered by sum of division of dimshuffled tensors.",
+        ("Warn if previous versions of Theano (between rev. "
+         "3bd9b789f5e8, 2010-06-16, and cfc6322e5ad4, 2010-08-03) "
+         "would have given incorrect result. This bug was triggered by "
+         "sum of division of dimshuffled tensors."),
         BoolParam(warn_default('0.3')),
         in_c_key=False)
 
 AddConfigVar('warn.subtensor_merge_bug',
         "Warn if previous versions of Theano (before 0.5rc2) could have given "
-        "incorrect results when indexing into a subtensor with negative stride "
-        "(for instance, for instance, x[a:b:-1][c]).",
+        "incorrect results when indexing into a subtensor with negative "
+        "stride (for instance, for instance, x[a:b:-1][c]).",
         BoolParam(warn_default('0.5')),
         in_c_key=False)
 
 AddConfigVar('compute_test_value',
-        "If 'True', Theano will run each op at graph build time, using Constants, SharedVariables and the tag 'test_value' as inputs to the function. This helps the user track down problems in the graph before it gets optimized.",
+        ("If 'True', Theano will run each op at graph build time, using "
+         "Constants, SharedVariables and the tag 'test_value' as inputs "
+         "to the function. This helps the user track down problems in the "
+         "graph before it gets optimized."),
         EnumStr('off', 'ignore', 'warn', 'raise'),
         in_c_key=False)
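The hunk header above references `warn_default`, and the repeated `BoolParam(warn_default('0.3'))` defaults suggest it maps the `warn.ignore_bug_before` setting to a per-bug default. A hedged reconstruction of its logic, inferred from the names in this diff rather than copied from the source (`ignore_bug_before` stands in for `config.warn.ignore_bug_before`):

```python
def warn_default(version, ignore_bug_before='None'):
    """Guess: should warnings about a bug fixed in `version` be on?"""
    if ignore_bug_before == 'None':
        return True    # default: warn about every known old bug
    if ignore_bug_before == 'all':
        return False   # silence all old-bug warnings
    # Only warn about bugs fixed after the configured version; plain
    # string comparison is a simplification that happens to work for
    # the version values listed in the EnumStr above.
    return ignore_bug_before < version

assert warn_default('0.3') is True
assert warn_default('0.3', ignore_bug_before='all') is False
assert warn_default('0.5', ignore_bug_before='0.3') is True
```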
@@ -310,5 +339,5 @@ AddConfigVar('exception_verbosity',
         A. Elemwise{add_no_inplace}
         B. log_likelihood_v_given_h
         C. log_likelihood_h""",
-        EnumStr('low','high'),
+        EnumStr('low', 'high'),
         in_c_key=False)
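For the `compute_test_value` flag reformatted above, typical usage tags each input with a test value so errors surface while the graph is still being built. A short usage sketch (the shapes and names here are made up):

```python
import numpy
import theano
import theano.tensor as T

theano.config.compute_test_value = 'raise'  # error on missing test values

x = T.matrix('x')
x.tag.test_value = numpy.ones((3, 4), dtype=theano.config.floatX)
y = T.matrix('y')
y.tag.test_value = numpy.ones((4, 2), dtype=theano.config.floatX)

# The dot product is evaluated on the test values as the graph is built,
# so a shape mismatch would be reported here rather than at run time.
z = T.dot(x, y)
print(z.tag.test_value.shape)  # (3, 2)
```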
Diff collapsed.
@@ -12,6 +12,7 @@ from theano.compile.sharedvalue import SharedVariable
 import logging
 logger = logging.getLogger('theano.updates')
 
+
 class Updates(dict):
     """
     Dict-like mapping from SharedVariable keys to their new values.
@@ -30,7 +31,9 @@ class Updates(dict):
             return super(Updates, self).__setitem__(key, value)
         else:
-            raise TypeError('Updates keys must inherit from SharedVariable', key)
+            raise TypeError('Updates keys must inherit from SharedVariable',
+                            key)
 
+
     def update(self, other):
         for key, val in dict(other).iteritems():
             if key in self:
@@ -50,4 +53,3 @@ class Updates(dict):
         rval.update(other)
         rval.update(self)
         return rval
-
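The Updates class touched above enforces that every key is a SharedVariable, as the wrapped TypeError shows. A small usage sketch (the variable names are illustrative; the import path follows the `theano.updates` logger name in this diff):

```python
import numpy
import theano
import theano.tensor as T
from theano.updates import Updates

w = theano.shared(numpy.zeros(3, dtype=theano.config.floatX), name='w')
step = T.vector('step')

ups = Updates()
ups[w] = w + step            # accepted: the key is a SharedVariable

try:
    ups[T.vector('v')] = w   # a plain symbolic variable is not a valid key
except TypeError as e:
    print(e)                 # 'Updates keys must inherit from SharedVariable'
```

The `update` method shown in the diff also iterates over the other mapping and checks `if key in self:`, apparently so that colliding keys can be reported through the module logger instead of being overwritten silently.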