提交 9cfb0b69 authored 作者: David Warde-Farley's avatar David Warde-Farley

LOTS of mutable default argument fixes.

上级 c223445c
...@@ -37,7 +37,7 @@ In this way, we could express something like Logistic Regression like this: ...@@ -37,7 +37,7 @@ In this way, we could express something like Logistic Regression like this:
def sample(self, n): def sample(self, n):
"""[Symbolically] draw a sample of size n""" """[Symbolically] draw a sample of size n"""
def density(self, pt, givens={}): def density(self, pt, givens=None):
"""Conditional Density/Probability of P(self=pt) """Conditional Density/Probability of P(self=pt)
Implicitly conditioned on knowing the values of all variables Implicitly conditioned on knowing the values of all variables
......
...@@ -149,7 +149,7 @@ Corner cases and exotic examples can be found in the tests. ...@@ -149,7 +149,7 @@ Corner cases and exotic examples can be found in the tests.
.. code-block:: python .. code-block:: python
def pfunc(params, outputs, mode=None, givens={}, updates=[]) def pfunc(params, outputs, mode=None, givens=None, updates=None)
"""Function-constructor for graphs with shared variables. """Function-constructor for graphs with shared variables.
:type params: list of either Variable or Param instances. :type params: list of either Variable or Param instances.
......
...@@ -1417,7 +1417,9 @@ class _Linker(gof.link.LocalLinker): ...@@ -1417,7 +1417,9 @@ class _Linker(gof.link.LocalLinker):
self.env = None self.env = None
self.maker = maker self.maker = maker
def accept(self, env, no_recycling=[]): def accept(self, env, no_recycling=None):
if no_recycling is None:
no_recycling = []
if self.env is not None and self.env is not env: if self.env is not None and self.env is not env:
assert type(self) is _Linker assert type(self) is _Linker
return type(self)(self.env, self.maker).accept(env, no_recycling) return type(self)(self.env, self.maker).accept(env, no_recycling)
......
...@@ -11,7 +11,7 @@ from profiling import ProfileStats ...@@ -11,7 +11,7 @@ from profiling import ProfileStats
from pfunc import pfunc from pfunc import pfunc
from numpy import any #for it to work in python 2.4 from numpy import any #for it to work in python 2.4
def function(inputs, outputs=None, mode=None, updates=[], givens=[], def function(inputs, outputs=None, mode=None, updates=None, givens=None,
no_default_updates=False, accept_inplace=False, name=None, no_default_updates=False, accept_inplace=False, name=None,
rebuild_strict=True, allow_input_downcast=None, profile=None, rebuild_strict=True, allow_input_downcast=None, profile=None,
on_unused_input='raise'): on_unused_input='raise'):
...@@ -80,7 +80,11 @@ def function(inputs, outputs=None, mode=None, updates=[], givens=[], ...@@ -80,7 +80,11 @@ def function(inputs, outputs=None, mode=None, updates=[], givens=[],
""" """
#tuple are used in some tests, as we accepted them in the past #tuple are used in some tests, as we accepted them in the past
#I prefer to allow it as they act the same as list for what they are used. #I prefer to allow it as they act the same as list for what they are used.
if not isinstance(inputs,(list,tuple)): if updates is None:
updates = []
if givens is None:
givens = []
if not isinstance(inputs, (list, tuple)):
raise Exception("Inputs variable of a Theano function should be contained in a list, even when there is a single input.") raise Exception("Inputs variable of a Theano function should be contained in a list, even when there is a single input.")
# compute some features of the arguments: # compute some features of the arguments:
......
...@@ -263,7 +263,7 @@ class Method(Component): ...@@ -263,7 +263,7 @@ class Method(Component):
mode=None mode=None
"""This will override the Module compilation mode for this Method""" """This will override the Module compilation mode for this Method"""
def __init__(self, inputs, outputs, updates = {}, mode=None): def __init__(self, inputs, outputs, updates=None, mode=None):
"""Initialize attributes """Initialize attributes
:param inputs: value for `Method.inputs` :param inputs: value for `Method.inputs`
...@@ -283,6 +283,8 @@ class Method(Component): ...@@ -283,6 +283,8 @@ class Method(Component):
:type mode: None or any mode accepted by `compile.function` :type mode: None or any mode accepted by `compile.function`
""" """
if updates is None:
updates = {}
super(Method, self).__init__() super(Method, self).__init__()
self.inputs = inputs self.inputs = inputs
self.outputs = outputs self.outputs = outputs
...@@ -589,7 +591,7 @@ class Composite(Component): ...@@ -589,7 +591,7 @@ class Composite(Component):
else: else:
yield component yield component
def flat_components_map(self, include_self = False, path = []): def flat_components_map(self, include_self=False, path=None):
""" """
Generator that yields (path, component) pairs in a flattened Generator that yields (path, component) pairs in a flattened
hierarchy of composites and components, where path is a hierarchy of composites and components, where path is a
...@@ -600,6 +602,8 @@ class Composite(Component): ...@@ -600,6 +602,8 @@ class Composite(Component):
If include_self is True, the list will include the Composite If include_self is True, the list will include the Composite
instances, else it will only yield the list of leaves. instances, else it will only yield the list of leaves.
""" """
if path is None:
path = []
if include_self: if include_self:
yield path, self yield path, self
for name, component in self.components_map(): for name, component in self.components_map():
...@@ -758,7 +762,9 @@ class ComponentList(Composite): ...@@ -758,7 +762,9 @@ class ComponentList(Composite):
member.name = '%s.%i' % (name, i) member.name = '%s.%i' % (name, i)
def default_initialize(self, init = {}, **kwinit): def default_initialize(self, init=None, **kwinit):
if init is None:
init = {}
for k, initv in dict(init, **kwinit).iteritems(): for k, initv in dict(init, **kwinit).iteritems():
self[k] = initv self[k] = initv
...@@ -788,7 +794,9 @@ class ComponentDictInstance(ComponentDictInstanceNoInit): ...@@ -788,7 +794,9 @@ class ComponentDictInstance(ComponentDictInstanceNoInit):
ComponentDictInstance is meant to be instantiated by ComponentDict. ComponentDictInstance is meant to be instantiated by ComponentDict.
""" """
def initialize(self, init={}, **kwinit): def initialize(self, init=None, **kwinit):
if init is None:
init = {}
for k, initv in dict(init, **kwinit).iteritems(): for k, initv in dict(init, **kwinit).iteritems():
self[k] = initv self[k] = initv
...@@ -797,7 +805,9 @@ class ComponentDictInstance(ComponentDictInstanceNoInit): ...@@ -797,7 +805,9 @@ class ComponentDictInstance(ComponentDictInstanceNoInit):
class ComponentDict(Composite): class ComponentDict(Composite):
InstanceType = ComponentDictInstance # Type used by build() to make the instance InstanceType = ComponentDictInstance # Type used by build() to make the instance
def __init__(self, components = {}, **kwcomponents): def __init__(self, components=None, **kwcomponents):
if components is None:
components = {}
super(ComponentDict, self).__init__() super(ComponentDict, self).__init__()
components = dict(components, **kwcomponents) components = dict(components, **kwcomponents)
for val in components.itervalues(): for val in components.itervalues():
...@@ -1077,10 +1087,12 @@ class Module(ComponentDict): ...@@ -1077,10 +1087,12 @@ class Module(ComponentDict):
memo[self] = inst memo[self] = inst
return inst return inst
def _instance_initialize(self, inst, init = {}, **kwinit): def _instance_initialize(self, inst, init=None, **kwinit):
""" """
Default initialization method. Default initialization method.
""" """
if init is None:
init = {}
for name, value in chain(init.iteritems(), kwinit.iteritems()): for name, value in chain(init.iteritems(), kwinit.iteritems()):
inst[name] = value inst[name] = value
......
...@@ -322,7 +322,7 @@ class Param(object): ...@@ -322,7 +322,7 @@ class Param(object):
self.implicit = implicit self.implicit = implicit
def pfunc(params, outputs=None, mode=None, updates=[], givens=[], def pfunc(params, outputs=None, mode=None, updates=None, givens=None,
no_default_updates=False, accept_inplace=False, name=None, no_default_updates=False, accept_inplace=False, name=None,
rebuild_strict=True, allow_input_downcast=None, rebuild_strict=True, allow_input_downcast=None,
profile=None, on_unused_input='raise'): profile=None, on_unused_input='raise'):
...@@ -405,6 +405,10 @@ def pfunc(params, outputs=None, mode=None, updates=[], givens=[], ...@@ -405,6 +405,10 @@ def pfunc(params, outputs=None, mode=None, updates=[], givens=[],
# Then it clones the outputs and the update expressions. This rebuilds a computation graph # Then it clones the outputs and the update expressions. This rebuilds a computation graph
# from the inputs and the givens. # from the inputs and the givens.
# #
if updates is None:
updates = []
if givens is None:
givens = []
if profile is None: if profile is None:
profile = config.profile profile = config.profile
# profile -> True or False # profile -> True or False
......
...@@ -402,8 +402,10 @@ class CLinker(link.Linker): ...@@ -402,8 +402,10 @@ class CLinker(link.Linker):
def __init__(self): def __init__(self):
self.env = None self.env = None
def accept(self, env, no_recycling=[]): def accept(self, env, no_recycling=None):
"""WRITEME""" """WRITEME"""
if no_recycling is None:
no_recycling = []
if self.env is not None and self.env is not env: if self.env is not None and self.env is not env:
return type(self)().accept(env, no_recycling) return type(self)().accept(env, no_recycling)
#raise Exception("Cannot accept from a Linker that is already" #raise Exception("Cannot accept from a Linker that is already"
...@@ -987,12 +989,18 @@ class CLinker(link.Linker): ...@@ -987,12 +989,18 @@ class CLinker(link.Linker):
) )
@staticmethod @staticmethod
def cmodule_key_(env, no_recycling, compile_args=[], libraries=[], def cmodule_key_(env, no_recycling, compile_args=None, libraries=None,
header_dirs=[], insert_config_md5=True): header_dirs=None, insert_config_md5=True):
""" """
Do the actual computation of cmodule_key in a static method Do the actual computation of cmodule_key in a static method
to allow it to be reused in scalar.Composite.__eq__ to allow it to be reused in scalar.Composite.__eq__
""" """
if compile_args is None:
compile_args = []
if libraries is None:
libraries = []
if header_dirs is None:
header_dirs = []
order = list(env.toposort()) order = list(env.toposort())
#set of variables that have been computed by nodes we have #set of variables that have been computed by nodes we have
# seen 'so far' in the loop below # seen 'so far' in the loop below
...@@ -1381,7 +1389,9 @@ class OpWiseCLinker(link.LocalLinker): ...@@ -1381,7 +1389,9 @@ class OpWiseCLinker(link.LocalLinker):
self.nice_errors = nice_errors self.nice_errors = nice_errors
self.allow_gc = allow_gc self.allow_gc = allow_gc
def accept(self, env, no_recycling=[]): def accept(self, env, no_recycling=None):
if no_recycling is None:
no_recycling = []
if self.env is not None and self.env is not env: if self.env is not None and self.env is not env:
return type(self)(self.fallback_on_perform).accept(env, return type(self)(self.fallback_on_perform).accept(env,
no_recycling) no_recycling)
...@@ -1519,7 +1529,9 @@ class DualLinker(link.Linker): ...@@ -1519,7 +1529,9 @@ class DualLinker(link.Linker):
self.env = None self.env = None
self.checker = checker self.checker = checker
def accept(self, env, no_recycling=[]): def accept(self, env, no_recycling=None):
if no_recycling is None:
no_recycling = []
if self.env is not None and self.env is not env: if self.env is not None and self.env is not env:
return type(self)(self.checker).accept(env, no_recycling) return type(self)(self.checker).accept(env, no_recycling)
# raise Exception("Cannot accept from a Linker that is already " # raise Exception("Cannot accept from a Linker that is already "
......
...@@ -80,7 +80,7 @@ class Env(utils.object2): ...@@ -80,7 +80,7 @@ class Env(utils.object2):
### Special ### ### Special ###
# TODO: document which things that features can do to the env # TODO: document which things that features can do to the env
def __init__(self, inputs, outputs, features=[]): def __init__(self, inputs, outputs, features=None):
""" """
Create an Env which operates on the subgraph bound by the inputs and outputs Create an Env which operates on the subgraph bound by the inputs and outputs
sets. sets.
...@@ -92,6 +92,10 @@ class Env(utils.object2): ...@@ -92,6 +92,10 @@ class Env(utils.object2):
""" """
if features is None:
features = []
# XXX: What the hell? Was the features argument never used?
self._features = [] self._features = []
# All nodes in the subgraph defined by inputs and outputs are cached in nodes # All nodes in the subgraph defined by inputs and outputs are cached in nodes
......
...@@ -675,9 +675,11 @@ def general_toposort(r_out, deps, debug_print = False): ...@@ -675,9 +675,11 @@ def general_toposort(r_out, deps, debug_print = False):
return rlist return rlist
def io_toposort(i, o, orderings = {}): def io_toposort(i, o, orderings=None):
"""WRITEME """WRITEME
""" """
if orderings is None:
orderings = {}
#the inputs are used only here in the function that decides what 'predecessors' to explore #the inputs are used only here in the function that decides what 'predecessors' to explore
iset = set(i) iset = set(i)
def deps(obj): def deps(obj):
...@@ -701,7 +703,7 @@ default_node_formatter = lambda op, argstrings: "%s(%s)" % (op.op, ...@@ -701,7 +703,7 @@ default_node_formatter = lambda op, argstrings: "%s(%s)" % (op.op,
", ".join(argstrings)) ", ".join(argstrings))
def is_same_graph(var1, var2, givens={}, debug=False): def is_same_graph(var1, var2, givens=None, debug=False):
""" """
Return True iff Variables `var1` and `var2` perform the same computation. Return True iff Variables `var1` and `var2` perform the same computation.
...@@ -740,6 +742,8 @@ def is_same_graph(var1, var2, givens={}, debug=False): ...@@ -740,6 +742,8 @@ def is_same_graph(var1, var2, givens={}, debug=False):
====== ====== ====== ====== ====== ====== ====== ======
""" """
# Lazy import. # Lazy import.
if givens is None:
givens = {}
global equal_computations, is_same_graph_with_merge global equal_computations, is_same_graph_with_merge
if equal_computations is None: if equal_computations is None:
from theano.gof.opt import is_same_graph_with_merge from theano.gof.opt import is_same_graph_with_merge
......
...@@ -299,7 +299,8 @@ def map_storage(env, order, input_storage, output_storage): ...@@ -299,7 +299,8 @@ def map_storage(env, order, input_storage, output_storage):
return input_storage, output_storage, storage_map return input_storage, output_storage, storage_map
def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling = [], profiler = None, nice_errors = True): def streamline(env, thunks, order, post_thunk_old_storage=None,
no_recycling=None, profiler=None, nice_errors=True):
"""WRITEME """WRITEME
:param env: :param env:
...@@ -320,6 +321,8 @@ def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling = ...@@ -320,6 +321,8 @@ def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling =
:param nice_errors: run in such a way that the double-traceback is printed. This costs a :param nice_errors: run in such a way that the double-traceback is printed. This costs a
bit of performance in the inner python loop. bit of performance in the inner python loop.
""" """
if no_recycling is None:
no_recycling = []
if profiler is not None: if profiler is not None:
raise NotImplementedError() raise NotImplementedError()
...@@ -419,7 +422,7 @@ class PerformLinker(LocalLinker): ...@@ -419,7 +422,7 @@ class PerformLinker(LocalLinker):
self.env = None self.env = None
self.allow_gc = allow_gc self.allow_gc = allow_gc
def accept(self, env, no_recycling = []): def accept(self, env, no_recycling=None):
""" """
:param env: a PerformLinker can have accepted one Env instance at a time. :param env: a PerformLinker can have accepted one Env instance at a time.
...@@ -427,6 +430,8 @@ class PerformLinker(LocalLinker): ...@@ -427,6 +430,8 @@ class PerformLinker(LocalLinker):
:returns: self (TODO: WHY? Who calls this function?) :returns: self (TODO: WHY? Who calls this function?)
""" """
if no_recycling is None:
no_recycling = []
if self.env is not None and self.env is not env: if self.env is not None and self.env is not env:
return type(self)().accept(env, no_recycling) return type(self)().accept(env, no_recycling)
#raise Exception("Cannot accept from a Linker that is already tied to another Env.") #raise Exception("Cannot accept from a Linker that is already tied to another Env.")
...@@ -548,7 +553,7 @@ class WrapLinker(Linker): ...@@ -548,7 +553,7 @@ class WrapLinker(Linker):
self.linkers = linkers self.linkers = linkers
self.wrapper = wrapper self.wrapper = wrapper
def accept(self, env, no_recycling = []): def accept(self, env, no_recycling=None):
""" """
@type env: gof.Env @type env: gof.Env
@param env: the env which we will link @param env: the env which we will link
...@@ -560,6 +565,8 @@ class WrapLinker(Linker): ...@@ -560,6 +565,8 @@ class WrapLinker(Linker):
the computation to avoid reusing it. the computation to avoid reusing it.
""" """
if no_recycling is None:
no_recycling = []
if self.env is not None and self.env is not env: if self.env is not None and self.env is not env:
return type(self)(self.linkers, self.wrapper).accept(env, no_recycling) return type(self)(self.linkers, self.wrapper).accept(env, no_recycling)
......
...@@ -348,12 +348,14 @@ class MergeOptimizer(Optimizer): ...@@ -348,12 +348,14 @@ class MergeOptimizer(Optimizer):
merge_optimizer = MergeOptimizer() merge_optimizer = MergeOptimizer()
def is_same_graph_with_merge(var1, var2, givens={}): def is_same_graph_with_merge(var1, var2, givens=None):
""" """
Merge-based implementation of `theano.gof.graph.is_same_graph`. Merge-based implementation of `theano.gof.graph.is_same_graph`.
See help on `theano.gof.graph.is_same_graph` for additional documentation. See help on `theano.gof.graph.is_same_graph` for additional documentation.
""" """
if givens is None:
givens = {}
# Copy variables since the MergeOptimizer will modify them. # Copy variables since the MergeOptimizer will modify them.
copied = copy.deepcopy([var1, var2, givens]) copied = copy.deepcopy([var1, var2, givens])
vars = copied[0:2] vars = copied[0:2]
...@@ -475,7 +477,9 @@ class LocalOptimizer(object): ...@@ -475,7 +477,9 @@ class LocalOptimizer(object):
class FromFunctionLocalOptimizer(LocalOptimizer): class FromFunctionLocalOptimizer(LocalOptimizer):
"""WRITEME""" """WRITEME"""
def __init__(self, fn, tracks = []): def __init__(self, fn, tracks=None):
if tracks is None:
tracks = []
self.transform = fn self.transform = fn
self._tracks = tracks self._tracks = tracks
def tracks(self): def tracks(self):
......
...@@ -40,9 +40,18 @@ def MyValue(data): ...@@ -40,9 +40,18 @@ def MyValue(data):
class MyOp(Op): class MyOp(Op):
def __init__(self, nin, name, vmap = {}, dmap = {}, nout = 1, def __init__(self, nin, name, vmap=None, dmap=None, nout=1,
destroyhandler_tolerate_same = [], destroyhandler_tolerate_same=None,
destroyhandler_tolerate_aliased = []): destroyhandler_tolerate_aliased=None):
if vmap is None:
vmap = {}
if dmap is None:
dmap = {}
if destroyhandler_tolerate_same is None:
destroyhandler_tolerate_same = []
if destroyhandler_tolerate_aliased is None:
destroyhandler_tolerate_aliased = []
self.nin = nin self.nin = nin
self.nout = nout self.nout = nout
self.name = name self.name = name
......
...@@ -31,9 +31,9 @@ def MyVariable(name): ...@@ -31,9 +31,9 @@ def MyVariable(name):
class MyOp(Op): class MyOp(Op):
def __init__(self, name, dmap = {}, x = None): def __init__(self, name, dmap=None, x=None):
self.name = name self.name = name
self.destroy_map = dmap self.destroy_map = dmap if dmap is not None else {}
self.x = x self.x = x
def make_node(self, *inputs): def make_node(self, *inputs):
......
...@@ -429,7 +429,7 @@ class VM_Linker(link.LocalLinker): ...@@ -429,7 +429,7 @@ class VM_Linker(link.LocalLinker):
self.callback = callback self.callback = callback
self.updated_vars = {} self.updated_vars = {}
def accept(self, env, no_recycling=[]): def accept(self, env, no_recycling=None):
""" """
:param env: a PerformLinker can have accepted one Env instance :param env: a PerformLinker can have accepted one Env instance
at a time. at a time.
...@@ -438,6 +438,8 @@ class VM_Linker(link.LocalLinker): ...@@ -438,6 +438,8 @@ class VM_Linker(link.LocalLinker):
:returns: self (TODO: WHY? Who calls this function?) :returns: self (TODO: WHY? Who calls this function?)
""" """
if no_recycling is None:
no_recycling = []
if self.env is not None and self.env is not env: if self.env is not None and self.env is not env:
return type(self)().accept(env, no_recycling) return type(self)().accept(env, no_recycling)
self.env = env self.env = env
......
...@@ -88,10 +88,12 @@ class PycudaElemwiseKernelOp(GpuOp): ...@@ -88,10 +88,12 @@ class PycudaElemwiseKernelOp(GpuOp):
nin = property(lambda self: self.scalar_op.nin) nin = property(lambda self: self.scalar_op.nin)
nout = property(lambda self: self.scalar_op.nout) nout = property(lambda self: self.scalar_op.nout)
def __init__(self, scalar_op, inplace_pattern={}, name=None): def __init__(self, scalar_op, inplace_pattern=None, name=None):
if inplace_pattern is None:
inplace_pattern = {}
self.name = name self.name = name
self.scalar_op = scalar_op self.scalar_op = scalar_op
self.inplace_pattern = None self.inplace_pattern = inplace_pattern
def __str__(self): def __str__(self):
if self.name is None: if self.name is None:
...@@ -172,10 +174,12 @@ class PycudaElemwiseSourceModuleOp(GpuOp): ...@@ -172,10 +174,12 @@ class PycudaElemwiseSourceModuleOp(GpuOp):
nin = property(lambda self: self.scalar_op.nin) nin = property(lambda self: self.scalar_op.nin)
nout = property(lambda self: self.scalar_op.nout) nout = property(lambda self: self.scalar_op.nout)
def __init__(self, scalar_op, inplace_pattern={}, name=None): def __init__(self, scalar_op, inplace_pattern=None, name=None):
if inplace_pattern is None:
inplace_pattern = {}
self.name = name self.name = name
self.scalar_op = scalar_op self.scalar_op = scalar_op
self.inplace_pattern = None self.inplace_pattern = inplace_pattern
def __str__(self): def __str__(self):
if self.name is None: if self.name is None:
...@@ -264,10 +268,12 @@ class PycudaElemwiseSourceModuleMakeThunkOp(Op): ...@@ -264,10 +268,12 @@ class PycudaElemwiseSourceModuleMakeThunkOp(Op):
nin = property(lambda self: self.scalar_op.nin) nin = property(lambda self: self.scalar_op.nin)
nout = property(lambda self: self.scalar_op.nout) nout = property(lambda self: self.scalar_op.nout)
def __init__(self, scalar_op, inplace_pattern={}, name=None): def __init__(self, scalar_op, inplace_pattern=None, name=None):
if inplace_pattern is None:
inplace_pattern = {}
self.name = name self.name = name
self.scalar_op = scalar_op self.scalar_op = scalar_op
self.inplace_pattern = None self.inplace_pattern = inplace_pattern
def __str__(self): def __str__(self):
if self.name is None: if self.name is None:
......
...@@ -170,14 +170,18 @@ class Print(Op): ...@@ -170,14 +170,18 @@ class Print(Op):
class PrinterState(gof.utils.scratchpad): class PrinterState(gof.utils.scratchpad):
def __init__(self, props={}, **more_props): def __init__(self, props=None, **more_props):
if props is None:
props = {}
if isinstance(props, gof.utils.scratchpad): if isinstance(props, gof.utils.scratchpad):
self.__update__(props) self.__update__(props)
else: else:
self.__dict__.update(props) self.__dict__.update(props)
self.__dict__.update(more_props) self.__dict__.update(more_props)
def clone(self, props={}, **more_props): def clone(self, props=None, **more_props):
if props is None:
props = {}
return PrinterState(self, **dict(props, **more_props)) return PrinterState(self, **dict(props, **more_props))
...@@ -359,8 +363,10 @@ class PPrinter: ...@@ -359,8 +363,10 @@ class PPrinter:
cp.assign(condition, printer) cp.assign(condition, printer)
return cp return cp
def process_graph(self, inputs, outputs, updates={}, def process_graph(self, inputs, outputs, updates=None,
display_inputs=False): display_inputs=False):
if updates is None:
updates = {}
if not isinstance(inputs, (list, tuple)): if not isinstance(inputs, (list, tuple)):
inputs = [inputs] inputs = [inputs]
if not isinstance(outputs, (list, tuple)): if not isinstance(outputs, (list, tuple)):
......
...@@ -130,10 +130,12 @@ class GpuElemwise(GpuOp): ...@@ -130,10 +130,12 @@ class GpuElemwise(GpuOp):
nin = property(lambda self: self.scalar_op.nin) nin = property(lambda self: self.scalar_op.nin)
nout = property(lambda self: self.scalar_op.nout) nout = property(lambda self: self.scalar_op.nout)
def __init__(self, scalar_op, inplace_pattern={}, sync=None): def __init__(self, scalar_op, inplace_pattern=None, sync=None):
#TODO-- this looks like a bug-- either we should use the sync argument #TODO-- this looks like a bug-- either we should use the sync argument
# or get rid of it, we shouldn't let the client think they can control # or get rid of it, we shouldn't let the client think they can control
#sync when they can't #sync when they can't
if inplace_pattern is None:
inplace_pattern = {}
sync = config.gpuelemwise.sync sync = config.gpuelemwise.sync
self.scalar_op = scalar_op self.scalar_op = scalar_op
......
...@@ -39,11 +39,13 @@ class NaiveAlgo(object): ...@@ -39,11 +39,13 @@ class NaiveAlgo(object):
#cache_version = () #cache_version = ()
cache_version = (15, verbose) cache_version = (15, verbose)
def __init__(self, scalar_op, sync=True, inplace_pattern={}): def __init__(self, scalar_op, sync=True, inplace_pattern=None):
""" """
:param scalar_op: the scalar operation to execute on each element. :param scalar_op: the scalar operation to execute on each element.
:param sync: if True, will wait after the kernel launch and check for error call. :param sync: if True, will wait after the kernel launch and check for error call.
""" """
if inplace_pattern is None:
inplace_pattern = {}
try: try:
code = scalar_op.c_support_code_apply(None, "nodename") code = scalar_op.c_support_code_apply(None, "nodename")
if code: if code:
......
...@@ -54,9 +54,14 @@ class Kouh2008(object): ...@@ -54,9 +54,14 @@ class Kouh2008(object):
_logger.debug('output dtype %s' % output.dtype) _logger.debug('output dtype %s' % output.dtype)
@classmethod @classmethod
def new_expbounds(cls, rng, x_list, n_out, dtype=None, params=[], updates=[], exponent_range=(1.0, 3.0)): def new_expbounds(cls, rng, x_list, n_out, dtype=None, params=None,
updates=None, exponent_range=(1.0, 3.0)):
""" """
""" """
if params is None:
params = []
if updates is None:
updates = []
if dtype is None: if dtype is None:
dtype = x_list[0].dtype dtype = x_list[0].dtype
n_terms = len(x_list) n_terms = len(x_list)
......
...@@ -42,10 +42,12 @@ class DebugLinker(gof.WrapLinker): ...@@ -42,10 +42,12 @@ class DebugLinker(gof.WrapLinker):
if compare_variables is not None: if compare_variables is not None:
self.debug_post.append(self.compare_variables) self.debug_post.append(self.compare_variables)
def accept(self, env, no_recycling = []): def accept(self, env, no_recycling=None):
if no_recycling is None:
no_recycling = []
return gof.WrapLinker.accept(self, return gof.WrapLinker.accept(self,
env = env, env=env,
no_recycling = no_recycling) no_recycling=no_recycling)
def store_value(self, i, node, *thunks): def store_value(self, i, node, *thunks):
th1 = thunks[0] th1 = thunks[0]
...@@ -165,7 +167,9 @@ def numpy_compare(a, b, tolerance = 1e-6): ...@@ -165,7 +167,9 @@ def numpy_compare(a, b, tolerance = 1e-6):
return a == b return a == b
def numpy_debug_linker(pre, post = []): def numpy_debug_linker(pre, post=None):
if post is None:
post = []
return DebugLinker([gof.OpWiseCLinker], return DebugLinker([gof.OpWiseCLinker],
pre, pre,
post, post,
......
...@@ -96,10 +96,12 @@ def compile_fn(f, path_locals, common_inputs): ...@@ -96,10 +96,12 @@ def compile_fn(f, path_locals, common_inputs):
updated = [] updated = []
return compiled_f, updated return compiled_f, updated
def compile(smod, initial_values={}): def compile(smod, initial_values=None):
""" """
:type values: dictionary Variable -> value :type values: dictionary Variable -> value
""" """
if initial_values is None:
initial_values = {}
def sym_items(mod): def sym_items(mod):
for k in mod.__dict__: for k in mod.__dict__:
if k in ['__module__', 'build_graph', '__doc__']: if k in ['__module__', 'build_graph', '__doc__']:
......
...@@ -281,8 +281,10 @@ def test_consistency_GPU_parallel(): ...@@ -281,8 +281,10 @@ def test_consistency_GPU_parallel():
samples = numpy.array(samples).flatten() samples = numpy.array(samples).flatten()
assert(numpy.allclose(samples, java_samples)) assert(numpy.allclose(samples, java_samples))
def basictest(f, steps, sample_size, prefix="", allow_01=False, inputs=[], def basictest(f, steps, sample_size, prefix="", allow_01=False, inputs=None,
target_avg=0.5, target_std=None, mean_rtol=0.01): target_avg=0.5, target_std=None, mean_rtol=0.01):
if inputs is None:
inputs = []
dt = 0.0 dt = 0.0
avg_std = 0.0 avg_std = 0.0
......
...@@ -91,16 +91,18 @@ class RVal(object): ...@@ -91,16 +91,18 @@ class RVal(object):
"""A Return-Value object for a `symbolic_fn` """ """A Return-Value object for a `symbolic_fn` """
outputs = [] outputs = []
"""The method will compute values for the variables in this list""" """The method will compute values for the variables in this list"""
updates = {} updates = {}
"""The method will update module variables in this dictionary """The method will update module variables in this dictionary
For items ``(k,v)`` in this dictionary, ``k`` must be a `symbolic_member` of some module. For items ``(k,v)`` in this dictionary, ``k`` must be a `symbolic_member` of some module.
On each call to this compiled function, the value of ``k`` will be replaced with the On each call to this compiled function, the value of ``k`` will be replaced with the
computed value of the Variable ``v``. computed value of the Variable ``v``.
""" """
def __init__(self, outputs, updates={}): def __init__(self, outputs, updates=None):
if updates is None:
updates = {}
self.outputs = outputs self.outputs = outputs
assert type(updates) is dict assert type(updates) is dict
self.updates = updates self.updates = updates
......
...@@ -454,7 +454,7 @@ def infer_shape(outs, inputs, input_shapes): ...@@ -454,7 +454,7 @@ def infer_shape(outs, inputs, input_shapes):
class Validator(object): class Validator(object):
def __init__(self, valid=[], invalid=[], valid_equivalent={}): def __init__(self, valid=None, invalid=None, valid_equivalent=None):
''' '''
Check if variables can be expressed without using variables in invalid. Check if variables can be expressed without using variables in invalid.
...@@ -462,6 +462,13 @@ class Validator(object): ...@@ -462,6 +462,13 @@ class Validator(object):
variables to valid ones that can be used instead. variables to valid ones that can be used instead.
''' '''
if valid is None:
valid = []
if invalid is None:
invalid = []
if valid_equivalent is None:
valid_equivalent = {}
# Nodes that are valid to have in the graph computing outputs # Nodes that are valid to have in the graph computing outputs
self.valid = set(valid) self.valid = set(valid)
......
...@@ -88,7 +88,9 @@ rk = RandomKit('rk', 0xBAD5EED) ...@@ -88,7 +88,9 @@ rk = RandomKit('rk', 0xBAD5EED)
class RModule(Module): class RModule(Module):
"""Module providing random number streams in Theano graphs.""" """Module providing random number streams in Theano graphs."""
def __init__(self, components = {}, **kwcomponents): def __init__(self, components=None, **kwcomponents):
if components is None:
components = {}
super(RModule, self).__init__(components, **kwcomponents) super(RModule, self).__init__(components, **kwcomponents)
self.random = RandomKit('rkit') self.random = RandomKit('rkit')
self._rkit = KitComponent(self.random) self._rkit = KitComponent(self.random)
......
...@@ -430,7 +430,7 @@ class Elemwise(Op): ...@@ -430,7 +430,7 @@ class Elemwise(Op):
Elemwise(log)(rand(3, 4, 5)) Elemwise(log)(rand(3, 4, 5))
""" """
def __init__(self, scalar_op, inplace_pattern={}, name=None, def __init__(self, scalar_op, inplace_pattern=None, name=None,
nfunc_spec=None): nfunc_spec=None):
""" """
Usage: Elemwise(scalar_op, inplace_pattern = {}) Usage: Elemwise(scalar_op, inplace_pattern = {})
...@@ -451,6 +451,8 @@ class Elemwise(Op): ...@@ -451,6 +451,8 @@ class Elemwise(Op):
NOTE: as of now, the sign of the nout field is ignored (some work NOTE: as of now, the sign of the nout field is ignored (some work
needs to be done to resize the destinations when needed). needs to be done to resize the destinations when needed).
""" """
if inplace_pattern is None:
inplace_pattern = {}
self.name = name self.name = name
self.scalar_op = scalar_op self.scalar_op = scalar_op
self.inplace_pattern = inplace_pattern self.inplace_pattern = inplace_pattern
......
...@@ -31,7 +31,7 @@ class T_softplus(unittest.TestCase): ...@@ -31,7 +31,7 @@ class T_softplus(unittest.TestCase):
class T_sigmoid_opts(unittest.TestCase): class T_sigmoid_opts(unittest.TestCase):
def get_mode(self, excluding=[]): def get_mode(self, excluding=None):
""" """
Return appropriate mode for the tests. Return appropriate mode for the tests.
...@@ -41,6 +41,8 @@ class T_sigmoid_opts(unittest.TestCase): ...@@ -41,6 +41,8 @@ class T_sigmoid_opts(unittest.TestCase):
set to 'FAST_COMPILE' (in which case it is replaced by the 'FAST_RUN' set to 'FAST_COMPILE' (in which case it is replaced by the 'FAST_RUN'
mode), without the optimizations specified in `excluding`. mode), without the optimizations specified in `excluding`.
""" """
if excluding is None:
excluding = []
m = theano.config.mode m = theano.config.mode
if m == 'FAST_COMPILE': if m == 'FAST_COMPILE':
mode = theano.compile.mode.get_mode('FAST_RUN') mode = theano.compile.mode.get_mode('FAST_RUN')
......
...@@ -173,9 +173,19 @@ def safe_make_node(op, *inputs): ...@@ -173,9 +173,19 @@ def safe_make_node(op, *inputs):
return node.owner return node.owner
def makeTester(name, op, expected, checks={}, good={}, bad_build={}, def makeTester(name, op, expected, checks=None, good=None, bad_build=None,
bad_runtime={}, grad={}, mode=None, grad_rtol=None, bad_runtime=None, grad=None, mode=None, grad_rtol=None,
eps=1e-10, skip=False): eps=1e-10, skip=False):
if checks is None:
checks = {}
if good is None:
good = {}
if bad_build is None:
bad_build = {}
if bad_runtime is None:
bad_runtime = {}
if grad is None:
grad = {}
if grad is True: if grad is True:
grad = good grad = good
...@@ -400,7 +410,9 @@ def rand_of_dtype(shape, dtype): ...@@ -400,7 +410,9 @@ def rand_of_dtype(shape, dtype):
raise TypeError() raise TypeError()
def makeBroadcastTester(op, expected, checks={}, name=None, **kwargs): def makeBroadcastTester(op, expected, checks=None, name=None, **kwargs):
if checks is None:
checks = {}
if name is None: if name is None:
name = str(op) name = str(op)
# Here we ensure the test name matches the name of the variable defined in # Here we ensure the test name matches the name of the variable defined in
...@@ -575,10 +587,12 @@ MulInplaceTester = makeBroadcastTester(op = inplace.mul_inplace, ...@@ -575,10 +587,12 @@ MulInplaceTester = makeBroadcastTester(op = inplace.mul_inplace,
inplace = True) inplace = True)
def copymod(dct, without=[], **kwargs): def copymod(dct, without=None, **kwargs):
"""Return dct but with the keys named by args removed, and with """Return dct but with the keys named by args removed, and with
kwargs added. kwargs added.
""" """
if without is None:
without = []
rval = copy(dct) rval = copy(dct)
for a in without: for a in without:
if a in rval: if a in rval:
......
...@@ -1427,7 +1427,9 @@ class TestGer(TestCase, unittest_tools.TestOptimizationMixin): ...@@ -1427,7 +1427,9 @@ class TestGer(TestCase, unittest_tools.TestOptimizationMixin):
self.ger_destructive = ger_destructive self.ger_destructive = ger_destructive
self.gemm = gemm_no_inplace self.gemm = gemm_no_inplace
def function(self, inputs, outputs, updates=None):
    """Compile a Theano function in this test's mode.

    :param inputs: input Variables for the compiled function.
    :param outputs: output Variable(s) to compute.
    :param updates: optional dict of shared-variable updates; defaults
        to an empty dict.

    :returns: the compiled theano function.
    """
    # `None` sentinel instead of a mutable `{}` default, which would be
    # shared (and potentially mutated) across calls.
    if updates is None:
        updates = {}
    # `self.mode` is set up by the test fixture — presumably in setUp;
    # confirm against the enclosing TestGer class (header not in view).
    return theano.function(inputs, outputs, self.mode, updates=updates)
def b(self, bval): def b(self, bval):
......
...@@ -21,11 +21,13 @@ class MyType(Type): ...@@ -21,11 +21,13 @@ class MyType(Type):
class MyOp(Op): class MyOp(Op):
def __init__(self, name, dmap=None, x=None):
    """Construct a test Op.

    :param name: display name of the Op.
    :param dmap: destroy map for the Op (output index -> input indices);
        defaults to an empty dict.
    :param x: arbitrary extra attribute stored on the instance.
    """
    # `None` sentinel instead of a mutable `{}` default: a shared default
    # dict would leak destroy-map entries between Op instances.
    if dmap is None:
        dmap = {}
    self.name = name
    self.destroy_map = dmap
    self.x = x
def make_node(self, *inputs): def make_node(self, *inputs):
inputs = map(as_variable, inputs) inputs = map(as_variable, inputs)
for input in inputs: for input in inputs:
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论