提交 ebbbd91d authored 作者: Frederic Bastien's avatar Frederic Bastien

moved theano.compile.mode.default_{mode,linker,optimizer} to the config system as…

moved theano.compile.mode.default_{mode,linker,optimizer} to the config system as {mode,linker,optimizer}. Also added that when config.mode is Mode, ProfileMode or DebugMode, mode.get_default_mode returns this mode initialized with config.linker and config.optimizer.
上级 3dac2fd1
......@@ -261,8 +261,8 @@ Example:
>>> minstance = m.make(mode='DEBUG_MODE')
Whenever possible, unit tests should omit this parameter. Leaving
out the mode will ensure that unit tests use the default mode
(defined in compile.mode.default_mode). This default_mode is set to
out the mode will ensure that unit tests use the default mode.
This default mode is set to
the configuration variable :attr:`config.mode`, which defaults to
'FAST_RUN', and can be set by various mechanisms (see :mod:`config`).
......
......@@ -22,8 +22,7 @@ Theano defines the following modes by name:
but can identify many kinds of problems.
The default mode is typically ``FAST_RUN``, but it can be controlled via the
configuration variable :attr:`config.mode`, which can in turn be overridden by
setting ``theano.compile.mode.default_mode`` directly, which can in turn be
configuration variable :attr:`config.mode`, which can be
overridden by passing the keyword argument to :func:`theano.function`.
.. TODO::
......
......@@ -10,11 +10,12 @@ purpose of it is to hack it to investigate what your own particular program is d
.. code-block:: python
from theano.gof.link import WrapLinkerMany
from theano import config
from theano.compile.mode import (Mode, register_mode, predefined_modes, predefined_linkers,
predefined_optimizers, default_linker, default_optimizer)
predefined_optimizers)
class StepMode(Mode):
def __init__(self, linker=default_linker, optimizer=default_optimizer):
def __init__(self, linker=config.linker, optimizer=config.optimizer):
def blah(i, node, th):
# This function will be run for each node in your compiled program.
......
......@@ -22,9 +22,8 @@ Theano defines the following modes by name:
but can identify many kinds of problems.
The default mode is typically ``FAST_RUN``, but it can be controlled via
the configuration variable :attr:`config.mode`, which can in turn be
overridden by setting `theano.compile.mode.default_mode` directly,
which can in turn be overridden by passing the keyword argument to
the configuration variable :attr:`config.mode`,
which can be overridden by passing the keyword argument to
:func:`theano.function <function.function>`.
================= =============================================================== ===============================================================================
......
......@@ -698,7 +698,7 @@ class FunctionMaker(object):
their output value directly
:param mode: a Mode instance telling FunctionMaker how to optimize and link. None
means to use the `default_mode`.
means to use the `config.mode`.
:param accept_inplace: True iff it is acceptable to have inplace operations
in the graph from the inputs to the outputs
......@@ -864,7 +864,7 @@ def orig_function(inputs, outputs, mode=None, accept_inplace = False, name=None)
itself or a list of one or more return values)
:param mode: a descriptive string or a Mode instance. (Default of None means to use
`mode.default_mode` (See below for descriptive string list).
`config.mode` (See below for descriptive string list).
:param name: an optional name for this fct. If used, the profile mode will print the time spent in this fct.
......
......@@ -45,8 +45,6 @@ predefined_linkers = {
'c&py' : gof.DualLinker(checker = check_equal)
}
#Keep default_linker the same as the one for default_mode
default_linker = 'c|py'
def register_linker(name, linker):
"""Add a `Linker` which can be referred to by `name` in `Mode`."""
......@@ -72,8 +70,6 @@ predefined_optimizers = {
'fast_run_stable' : OPT_FAST_RUN_STABLE,
'fast_compile' : OPT_FAST_COMPILE
}
#Keep default_optimizer the same as the one for default_mode
default_optimizer = 'fast_run'
def register_optimizer(name, opt):
"""Add a `Optimizer` which can be referred to by `name` in `Mode`."""
......@@ -155,7 +151,7 @@ class Mode(object):
predefined_modes.
"""
def __init__(self, linker = default_linker, optimizer = default_optimizer):
def __init__(self, linker = config.linker, optimizer = config.optimizer):
self.__setstate__((linker, optimizer))
def __getstate__(self):
......@@ -220,20 +216,19 @@ predefined_modes = {'FAST_COMPILE': FAST_COMPILE,
'FAST_RUN_NOGC':FAST_RUN_NOGC,
'SANITY_CHECK': SANITY_CHECK}
# The default mode used by functions and modules is read from the configuration.
# keep default_mode.optimizer==default_optimizer and default_mode.linker==default_linker!
default_mode = config.mode
def get_mode(string):
if string is None: string = default_mode
if string is None: string = config.mode
if not isinstance(string, str): return string #it is already a mode...
if not predefined_modes.has_key(string):
raise Exception("No predefixed mode exist for string: %s"%string)
return predefined_modes[string]
def get_default_mode():
return get_mode(default_mode)
if config.mode in ['Mode','ProfileMode','DebugMode']:
return Mode(linker=config.linker, optimizer=config.optimizer)
return get_mode(config.mode)
default_mode = config.mode
def register_mode(name, mode):
"""Add a `Mode` which can be referred to by `name` in `function`."""
......
......@@ -87,7 +87,7 @@ class Component(object):
be called.
"""
if mode is None:
mode = get_mode.default_mode
mode = get_mode.get_default_mode()
memo = {}
self.allocate(memo)
rval = self.build(mode, memo)
......@@ -101,7 +101,7 @@ class Component(object):
arguments and the keyword arguments. If 'mode' is in the
keyword arguments it will be passed to build().
"""
mode = kwargs.pop('mode', get_mode.default_mode)
mode = kwargs.pop('mode', get_mode.get_default_mode())
rval = self.make_no_init(mode)
if hasattr(rval, 'initialize'):
rval.initialize(*args, **kwargs)
......@@ -1141,7 +1141,7 @@ class Module(ComponentDict):
"""
self.make_module_instance(args,kwargs)
mode = kwargs.pop('mode', get_mode.default_mode)
mode = kwargs.pop('mode', get_mode.get_default_mode())
rval = self.make_no_init(mode)
if hasattr(rval, 'initialize'):
rval.initialize(*args, **kwargs)
......
......@@ -2,7 +2,7 @@ import time, atexit, copy
from theano.gof.link import WrapLinker
from theano.gof.cutils import run_cthunk
from theano.compile.mode import Mode, register_mode, predefined_modes, predefined_linkers, predefined_optimizers, default_linker, default_optimizer
from theano.compile.mode import Mode, register_mode, predefined_modes, predefined_linkers, predefined_optimizers
from theano.gof.cc import OpWiseCLinker
from theano.gof.python25 import any
from theano import gof
......@@ -30,7 +30,7 @@ class Profile_Maker(FunctionMaker):
return ret
class ProfileMode(Mode):
def __init__(self, linker=default_linker, optimizer=default_optimizer):
def __init__(self, linker=config.linker, optimizer=config.optimizer):
local_time = [0.0]
apply_time = {}
apply_call = {}
......
import unittest
from theano import gof
from theano import gof,config
from theano import compile
from theano.scalar import *
......@@ -389,10 +389,14 @@ class T_picklefunction(unittest.TestCase):
sm = T.dmatrix('s')
f = function([a, x, s, xm, sm], ((a.T.T)*(tensor.dot(xm, (sm.T.T.T)) + x).T * (x/x) + s))
old_default_mode = compile.mode.default_mode
old_default_mode = config.mode
old_default_opt = config.optimizer
old_default_link = config.linker
try:
str_f = cPickle.dumps(f)
compile.mode.default_mode = mode_module.Mode(linker='py', optimizer=None)
config.mode = 'Mode'
config.linker = 'py'
config.optimizer = 'None'
g = cPickle.loads(str_f)
#print g.maker.mode
#print compile.mode.default_mode
......@@ -400,8 +404,10 @@ class T_picklefunction(unittest.TestCase):
if e[0].startswith('DebugMode is not pickl'):
g = 'ok'
finally:
compile.mode.default_mode = old_default_mode
config.mode = old_default_mode
config.optimizer = old_default_opt
config.linker = old_default_link
if g == 'ok':
return
......@@ -545,7 +551,7 @@ if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite)
#</boilerplate>
elif 0:
theano.compile.mode.default_mode = 'FAST_COMPILE'
theano.config.mode = 'FAST_COMPILE'
t = T_picklefunction()
def fu(b):
assert b
......
......@@ -40,7 +40,6 @@ class NNet(object):
class TestNnet(unittest.TestCase):
def test_nnet(self):
#theano.compile.default_mode = 'FAST_RUN'
rng = numpy.random.RandomState(1827)
data = rng.rand(10, 4)
nnet = NNet(n_input = 3, n_hidden = 10)
......
......@@ -4,7 +4,7 @@
__docformat__ = "restructuredtext en"
import cPickle, numpy, unittest
from theano.compile.mode import default_mode
from theano import config
from theano.compile.module import *
from theano.compile.function_module import AliasedMemoryError
import theano.tensor as T
......@@ -435,7 +435,7 @@ class T_module(unittest.TestCase):
"""Test that we can manipulate the mutable, strict, etc. flags (see SymbolicInput) of
Method inputs"""
if default_mode == 'FAST_COMPILE':
if config.mode == 'FAST_COMPILE':
return
M = Module()
......@@ -696,8 +696,8 @@ def test_method_implicit_ticket_384():
raise
def get_mode():
if default_mode != 'DEBUG_MODE':
mode = default_mode
if config.mode != 'DEBUG_MODE':
mode = config.mode
else: mode = 'FAST_RUN'
return mode
......
......@@ -443,6 +443,6 @@ class Test_pfunc(unittest.TestCase):
self.failUnlessRaises(TypeError, pfunc, [], x)
if __name__ == '__main__':
theano.compile.mode.default_mode = 'FAST_COMPILE'
theano.config.mode = 'FAST_COMPILE'
Test_pfunc().test_default_scalar_container()
......@@ -13,9 +13,20 @@ AddConfigVar('device',
EnumStr('cpu', *['gpu%i'%i for i in range(4)])
)
# keep the default mode.optimizer==config.optimizer and mode.linker==config.linker!
AddConfigVar('mode',
"Default compilation mode",
EnumStr('FAST_RUN', 'FAST_COMPILE', 'PROFILE_MODE', 'DEBUG_MODE'))
EnumStr('FAST_RUN', 'FAST_COMPILE', 'PROFILE_MODE', 'DEBUG_MODE', 'Mode', 'ProfileMode', 'DebugMode'))
#Keep the default linker the same as the one for the mode
AddConfigVar('linker',
"Default linker. If not None, will use this linker with the Mode object(not ProfileMode or DebugMode)",
EnumStr('c|py', 'py', 'c', 'c|py_nogc', 'c&py'))
#Keep the default optimizer the same as the one for the mode
AddConfigVar('optimizer',
"Default optimizer. If not None, will use this linker with the Mode object(not ProfileMode or DebugMode)",
EnumStr('fast_run', 'merge', 'fast_compile', 'None'))
AddConfigVar('home',
"User home directory",
......
......@@ -14,14 +14,14 @@ from theano.gof.python25 import any, all
from theano import gof
from theano.tensor.elemwise import DimShuffle
from theano.compile.mode import default_mode
from theano.compile.mode import get_default_mode
from theano import function
from theano.tests import unittest_tools as utt
### seed random number generator so that unittests are deterministic ###
utt.seed_rng()
def inplace_func(inputs, outputs, mode=default_mode):
def inplace_func(inputs, outputs, mode=get_default_mode()):
return function(inputs, outputs, mode=mode, accept_inplace=True)
......
......@@ -4,7 +4,7 @@ import theano
from theano import tensor as T
from theano.tensor import nnet as NN
from theano.compile import module
from theano.compile.mode import default_mode
from theano.compile.mode import get_default_mode
from theano import tensor as T, sparse as S
import numpy as N
import sys
......@@ -524,7 +524,7 @@ def test_naacl_model(iters_per_unsup=3, iters_per_sup=3,
if optimizer:
mode = theano.Mode(linker='c|py', optimizer=optimizer)
else: mode = default_mode
else: mode = get_default_mode()
if realistic:
m = create_realistic(compile_mode=mode)
else:
......
......@@ -202,7 +202,7 @@ class test_canonize(unittest.TestCase):
#We must be sure that the Canonizer is working, but that we don't have other
# optimisation that could hide bug in the Canonizer as local_elemwise_fusion
mode=compile.mode.predefined_modes[compile.mode.default_mode]
mode=compile.mode.get_default_mode()
old_optimizer = mode._optimizer
try:
mode._optimizer=gof.Query(["canonicalize"])
......@@ -282,7 +282,7 @@ class test_canonize(unittest.TestCase):
#We must be sure that the Canonizer is working, but that we don't have other
# optimisation that could hide bug in the Canonizer as local_elemwise_fusion
mode=compile.mode.predefined_modes[compile.mode.default_mode]
mode=compile.mode.get_default_mode()
mode._optimizer=gof.Query(["canonicalize"])
mode._optimizer=mode._optimizer.excluding('local_elemwise_fusion')
for id, [g, sym_inputs, val_inputs, nb_elemwise, out_dtype] in enumerate(cases):
......@@ -327,7 +327,7 @@ class test_canonize(unittest.TestCase):
#We must be sure that the Canonizer is working, but that we don't have other
# optimisation that could hide bug in the Canonizer as local_elemwise_fusion
mode=compile.mode.predefined_modes[compile.mode.default_mode]
mode=compile.mode.get_default_mode()
old_optimizer = mode._optimizer
try:
mode._optimizer=gof.Query(["canonicalize"])
......@@ -474,7 +474,7 @@ class test_canonize(unittest.TestCase):
fvv = theano._asarray(numpy.random.rand(shp[0]),dtype='float32').reshape(1,shp[0])
#We must be sure that the Canonizer is working, but that we don't have other
# optimisation that could hide bug in the Canonizer as local_elemwise_fusion
mode=compile.mode.predefined_modes[compile.mode.default_mode]
mode=compile.mode.get_default_mode()
old_optimizer = mode._optimizer
try:
mode._optimizer=gof.Query(["canonicalize"])
......@@ -907,7 +907,7 @@ class test_fusion(unittest.TestCase):
#g.owner.inputs[0] is out... make owner a weakref?
def test_log1p():
m = theano.compile.default_mode
m = theano.config.mode
if m == 'FAST_COMPILE':
m = 'FAST_RUN'
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论