# Commit bcfe70c7, authored by Arnaud Bergeron
#
# Remove remnants of theano modules that were deleted in 0.7.
#
# Parent commit: 43d58c19
from __future__ import print_function
import copy, inspect
import theano
import theano.tensor as T
from six import string_types, add_metaclass, iteritems
from six.moves import xrange
#import klass
def symbolic(f):
    """Tag `f` as symbolic and return it unchanged.

    The InitGraph metaclass keeps tagged callables when it installs a
    module's build_graph results as class attributes.
    """
    setattr(f, '__is_symbolic', True)
    return f
class InitGraph(type):
    """Metaclass for SymbolicModule.

    At class-creation time it calls ``cls.build_graph()`` and installs the
    symbolic entries of the returned dict as class attributes: objects tagged
    by @symbolic, nested SymbolicModule subclasses, and theano Variables whose
    key is not underscore-prefixed.  Anonymous Variables are named after their
    key; callables are installed as staticmethods.
    """

    def __init__(cls, name, bases, dct):
        super(InitGraph, cls).__init__(name, bases, dct)

        def keep(k, v):
            # BUG FIX: the previous predicate opened with an unconditional
            # `return True`, making the real checks below unreachable, and the
            # caller then did `list(filter(key, val))` -- i.e. list(True) --
            # which raised TypeError for any non-empty build_graph result.
            if getattr(v, '__is_symbolic', False):
                return True
            # guard issubclass: values are usually not classes
            if isinstance(v, type) and issubclass(v, SymbolicModule):
                return True
            return isinstance(v, theano.Variable) and not k.startswith('_')

        build_graph_rval = cls.build_graph()
        if not isinstance(build_graph_rval, dict):
            raise TypeError('%s.build_graph did not return dictionary' % cls)
        for key, val in iteritems(build_graph_rval):
            if not keep(key, val):
                continue
            # give anonymous theano Variables the attribute name
            if isinstance(val, theano.Variable) and val.name is None:
                val.name = key
            if callable(val):
                # functions (and nested module classes) become staticmethods
                setattr(cls, key, staticmethod(val))
            else:
                setattr(cls, key, val)
# installs class attributes from build_graph after declaration
@add_metaclass(InitGraph)
class SymbolicModule(object):
    """Base class for symbolic modules.

    The InitGraph metaclass calls ``build_graph()`` at class-creation time and
    installs the returned dict entries as class attributes.

    Calling a SymbolicModule *class* does not create an instance: ``__new__``
    returns a brand-new subclass whose ``build_graph`` re-runs this class's
    ``build_graph`` with the call's keyword arguments baked in.
    """

    # if we call this "constructor", it will return a new SymbolicModule
    # NOTE: `self` here is really the class being called; no instance is ever
    # constructed.
    def __new__(self, **kwargs):
        class SymMod(SymbolicModule):
            @staticmethod
            def build_graph(*bg_args, **bg_kwargs):
                # this one is like self.build_graph,
                # except that the kwargs are automatically inserted
                kwcopy = copy.copy(kwargs)
                kwcopy.update(bg_kwargs)
                return self.build_graph(*bg_args, **kwcopy)
        setattr(SymMod, '__name__', self.__name__ + '_derived')
        return SymMod

    @staticmethod
    def build_graph():
        # default: nothing to install
        return {}
def issymbolicmodule(thing):
    """Return True when `thing` is a SymbolicModule subclass, else False.

    issubclass raises for non-class arguments, so any exception is read as
    "not a module".
    """
    try:
        result = issubclass(thing, SymbolicModule)
    except Exception:
        result = False
    return result
def issymbolicmethod(thing):
    """Return the @symbolicmethod marker of `thing`, or False if absent."""
    try:
        return thing.__symbolic_method
    except AttributeError:
        return False
def symbolic_module(f):
    """Decorator: wrap graph-builder `f` as a SymbolicModule subclass.

    Equivalent to ``class SymMod(SymbolicModule): build_graph =
    staticmethod(f)``; three-argument type() delegates class creation to the
    most-derived metaclass (InitGraph), which runs `f` immediately.
    """
    return type('SymMod', (SymbolicModule,), {'build_graph': staticmethod(f)})
def symbolicmethod(f):
    """Mark `f` as a symbolic method (see issymbolicmethod) and return it."""
    setattr(f, '__symbolic_method', True)
    return f
class CompiledModule(object):
    """Runtime counterpart of a SymbolicModule; compile() fills in attributes."""
def compile_fn(f, path_locals, common_inputs):
    """Compile the symbolic method `f` into a theano function.

    :param f: no-default callable returning the symbolic outputs.
    :param path_locals: namespace (name -> Variable) of the enclosing module;
        assumed to hold the Variables named by f's arguments -- TODO confirm.
    :param common_inputs: dict Variable -> theano.In of module-level inputs.
    :returns: (compiled theano function, list of updates -- currently always
        empty).
    :raises NotImplementedError: if `f` declares default argument values.
    """
    # NOTE: getargspec is removed in Python 3.11; this file targets the
    # py2/six era it was written in.
    args, varargs, varkw, defaults = inspect.getargspec(f)
    if defaults:
        # this could be handled correctly (defaults trumping path_locals),
        # but is not implemented
        raise NotImplementedError()
    # Make new inputs for the variables named in args.  This creates new
    # storage for these arguments; the common storage doesn't get messed with.
    # BUG FIX: `In` was an unbound name (never imported) -> use theano.In.
    inputs = [theano.In(path_locals.get(name, name)) for name in args]
    inputs.extend(v for k, v in common_inputs.items() if k not in args)
    outputs = f()
    compiled_f = theano.function(inputs, outputs)
    updated = []
    return compiled_f, updated
def compile(smod, initial_values=None):
    """Compile a SymbolicModule class into a CompiledModule instance.

    Walks the (possibly nested) module, creates theano inputs for every root
    Variable, compiles every @symbolicmethod, and mirrors the module layout
    onto a fresh CompiledModule.

    :param smod: a SymbolicModule subclass (not an instance).
    :type initial_values: dictionary Variable -> value
    :note: NOTE(review): `initial_values` is never used below -- looks like an
        unfinished feature; confirm before relying on it.
    :note: shadows the builtin `compile`; callers use this name.
    """
    if initial_values is None:
        initial_values = {}

    def sym_items(mod):
        # yield (name, value) pairs of a module's own attributes, skipping
        # bookkeeping entries
        for k in mod.__dict__:
            if k in ['__module__', 'build_graph', '__doc__']:
                pass
            else:
                yield k, getattr(mod, k)

    def walker(root):
        # depth-first generator of (enclosing module __dict__, value) pairs
        def modwalker(path_locals, values):
            for val in values:
                yield path_locals, val
                if isinstance(val, list):
                    for s in modwalker(path_locals, val):
                        yield s
                elif isinstance(val, dict):
                    for s in modwalker(path_locals, val.values()):
                        yield s
                elif issymbolicmodule(val):
                    # descend into a nested module with its own namespace
                    for s in modwalker(val.__dict__, [v for k, v in sym_items(val)]):
                        yield s
                elif isinstance(val, (string_types, int, float)):
                    pass
                elif isinstance(val, theano.Variable):
                    pass
                elif issymbolicmethod(val):
                    pass
                else:
                    # check for weird objects that we would like to disallow;
                    # not all objects can be transfered by the clone mechanism below
                    raise TypeError((val, type(val), getattr(val, '__name__')))
        for blah in modwalker(root.__dict__, [v for k, v in sym_items(root)]):
            yield blah

    # Locate all the starting nodes (owner-less Variables), and create
    # container entries for their values
    inputs = {}
    for path_locals, val in walker(smod):
        if isinstance(val, theano.Variable) and (val.owner is None) and (val not in inputs):
            # NOTE(review): Container(val, ['a']) seeds the storage with the
            # string 'a' -- presumably a placeholder; confirm intent.
            inputs[val] = theano.In(val, value=theano.gof.Container(val, ['a']))
    # NOTE(review): this assert is vacuous -- it compares a dict's length with
    # the length of its own item list.
    assert len(inputs) == len([v for v in inputs.items()])

    # Locate all the functions to compile, and compile them
    compiled_functions = {}
    for path_locals, val in walker(smod):
        if issymbolicmethod(val):
            f, update_expressions = compile_fn(val, path_locals, inputs)
            compiled_functions[val] = f

    # Now replicate the nested structure of the SymbolicModule smod
    # with CompiledModules instead
    reflected = {}

    def reflect(thing):
        # UNHASHABLE TYPES
        if isinstance(thing, list):
            return [reflect(e) for e in thing]
        if isinstance(thing, dict):
            raise NotImplementedError()
        # HASHABLE TYPES
        if thing not in reflected:
            if issymbolicmodule(thing):
                class CMod(CompiledModule):
                    pass
                setattr(CMod, '__name__', thing.__name__ + '_compiled')
                # TODO: consider an instance of the class, or the class itself?
                # which is easier for copying?
                cmod = CMod()
                # register before recursing so shared submodules terminate
                reflected[thing] = cmod
                for key, val in sym_items(thing):
                    setattr(CMod, key, reflect(val))
            elif isinstance(thing, (string_types, int, float)):
                reflected[thing] = thing
            elif isinstance(thing, theano.Variable):
                if thing.owner is None:
                    # expose root Variables as read/write properties backed by
                    # the shared containers created above
                    def getter(s):
                        return inputs[thing].value.value
                    def setter(s, v):
                        inputs[thing].value.storage[0] = v
                    p = property(getter, setter)
                    print(p)
                    reflected[thing] = p
                else:
                    reflected[thing] = None  # TODO: how to reflect derived results?
            elif issymbolicmethod(thing):
                reflected[thing] = compiled_functions[thing]
            else:
                # check for weird objects that we would like to disallow;
                # not all objects can be transfered by the clone mechanism below
                raise TypeError('reflecting not supported for',
                                (thing, type(thing), getattr(thing, '__name__', None)))
        return reflected[thing]

    rval = reflect(smod)
    rval.__inputs = inputs
    rval.__compiled_functions = compiled_functions
    return rval
@symbolic_module
def LR(x=None, y=None, v=None, c=None, l2_coef=None):
    """Regression module: linear prediction with MSE loss plus L2 penalty.

    Any argument left as None gets a fresh theano Variable; passing a Variable
    shares it with the caller's graph.  Returns locals(), which InitGraph
    installs as class attributes.
    """
    # our points, one point per row
    if x is None:
        x = T.dmatrix()
    # targets, one per row
    if y is None:
        y = T.dmatrix()
    # first layer weights
    if v is None:
        v = T.dmatrix()
    # first layer biases
    if c is None:
        c = T.dvector()
    # L2-penalty strength
    if l2_coef is None:
        l2_coef = T.dscalar()
    pred = T.dot(x, v) + c
    sse = T.sum((pred - y) * (pred - y))
    # mean over the number of examples (rows of y)
    mse = sse / T.shape(y)[0]
    v_l2 = T.sum(T.sum(v*v))
    loss = mse + l2_coef * v_l2
    @symbolicmethod
    def params():
        return [v, c]
    return locals()
@symbolic_module
def Layer(x=None, w=None, b=None):
    """A tanh hidden layer: y = tanh(dot(x, w) + b).

    Arguments left as None get fresh theano Variables; returns locals() for
    the InitGraph metaclass to install.
    """
    # our points, one point per row
    if x is None:
        x = T.dmatrix()
    # first layer weights
    if w is None:
        w = T.dmatrix()
    # first layer bias
    if b is None:
        b = T.dvector()
    y = T.tanh(T.dot(x, w) + b)
    @symbolicmethod
    def params(): return [w, b]
    return locals()
@symbolic_module
def NNet(x=None, y=None, n_hid_layers=2):
    """A feed-forward net: n_hid_layers tanh Layers feeding an LR output."""
    # our points, one point per row
    if x is None:
        x = T.dmatrix()
    # targets, one per row
    if y is None:
        y = T.dmatrix()
    layers = []
    _x = x
    # chain hidden layers, each consuming the previous layer's output
    for i in xrange(n_hid_layers):
        layers.append(Layer(x=_x))
        _x = layers[-1].y
    classif = LR(x=_x)
    @symbolicmethod
    def params():
        # parameters of the output module plus every hidden layer
        rval = classif.params()
        for l in layers:
            rval.extend(l.params())
        print([id(r) for r in rval])  # NOTE(review): leftover debug trace?
        return rval
    if 0:
        # disabled draft of a gradient-descent update rule
        @symbolicmethod
        def update(x, y):
            pp = params()
            gp = T.grad(classif.loss, pp)
            return dict((p, p - 0.01*g) for p, g in zip(pp, gp))
    return locals()
# Demo: compile the NNet module at import time and poke at the result.
nnet = compile(NNet)
print(nnet)
print(nnet.params())
# NOTE(review): 'finder' belongs to the old theano Module method API deleted
# in 0.7; compile() above does not provide it, and compiled params() is a
# plain theano function, so the next two statements look stale -- TODO confirm.
print(nnet.params.__dict__['finder'][NNet.layers[0].w])
nnet.params[NNet.layers[0].w] = [[6]]
print(nnet.params())
print(nnet.params())
if 0:
    # NOTE(review): dead draft (guarded by `if 0:`) of an alternative module
    # decorator; it references `klass`, whose import is commented out at the
    # top of the file, so it cannot run as-is.
    def deco(f):
        class SymMod(SymbolicModule):
            def __call__(self, *args, **kwargs):
                # return another SymbolicModule built like self
                def dummy(*dargs, **dkwargs):
                    print('args', args, dargs)
                    print('kwargs', kwargs, dkwargs)
                    return f(*args, **kwargs)
                return deco(dummy)
        locals_dict = f()
        for key, val in iteritems(locals_dict):
            if isinstance(val, theano.Variable):
                # prefer a KlassMember; fall back to KlassVariable
                try:
                    kres = klass.KlassMember(val)
                except Exception:
                    kres = klass.KlassVariable(val)
                setattr(SymMod, key, kres)
            elif callable(val) and getattr(val, '__is_symbolic'):
                setattr(SymMod, key, val)
        return SymMod()

    @deco
    def logistic_regression(
            x=T.dmatrix(),  # our points, one point per row
            y=T.dmatrix(),  # our targets
            v=T.dmatrix(),  # first layer weights
            c=T.dvector(),  # first layer bias
            l2_coef=T.dscalar()
            ):
        pred = T.dot(x, v) + c
        sse = T.sum((pred - y) * (pred - y))
        v_l2 = T.sum(T.sum(v*v))
        loss = sse + l2_coef * v_l2
        @symbolic
        def params(): return [v, c]
        # NOTE(review): `just_symbolic` only exists inside InitGraph.__init__
        # in this version of the file -- unresolved name in this draft.
        return just_symbolic(locals())

    @deco
    def tanh_layer(
            top_part=None,
            x=T.dmatrix(),  # our points, one point per row
            w=T.dmatrix(),  # first layer weights
            b=T.dvector(),  # first layer bias
            **kwargs  # other things from logistic_regression
            ):
        hid = T.tanh(T.dot(x, w) + b)
        if top_part:
            print('top_part', top_part, 'kwargs', kwargs)
            top = top_part(x=hid, **kwargs)  # SymbolicModule
            def params(): return top.params() + [w, b]
        else:
            def params(): return [w, b]
        return just_symbolic(locals())

    if 0:
        print('logistic_regression', logistic_regression)
        print('tanh_layer', tanh_layer)
        print('nnet1', nnet1)
    nnet1 = tanh_layer(logistic_regression)
    nnet2 = tanh_layer(nnet1)
    print('nnet2', nnet2)
if 0:
    # NOTE(review): a second dead design sketch (guarded by `if 0:`) for a
    # Module API.  It references undefined names (SymbolicModule_fromFn, In,
    # LogisticLayer) and shadows the SymbolicModule defined above; kept only
    # as documentation of the intended API.
    class SymbolicModule(object):
        name = "__no_name__"  # name of this module
        variable_table = {}  # map strings (names) to Variables
        method_table = {}  # map strings to compilable functions
        include_list = []
        constructor_fn = None

        def build(self):
            """Run the body of the included modules in order, using the current variables and imports
            """

        def include(self, symbolic_module, name=None):
            """This redefines the symbols in the kwargs
            """
            if name is None:
                name = symbolic_module.name

        def __init__(self, constructor_fn=None):
            """ A constructor fn builds
            - a graph on top of the variable table, and
            - compilable methods.
            """

    @SymbolicModule_fromFn
    def neural_net(
            x=T.dmatrix(),  # our points, one point per row
            y=T.dmatrix(),  # our targets
            w=T.dmatrix(),  # first layer weights
            b=T.dvector(),  # first layer bias
            v=T.dmatrix(),  # second layer weights
            c=T.dvector(),  # second layer bias
            step=T.dscalar(),  # step size for gradient descent
            l2_coef=T.dscalar()  # l2 regularization amount
            ):
        """Idea A:
        """
        hid = T.tanh(T.dot(x, w) + b)
        pred = T.dot(hid, v) + c
        sse = T.sum((pred - y) * (pred - y))
        w_l2 = T.sum(T.sum(w*w))
        v_l2 = T.sum(T.sum(v*v))
        loss = sse + l2_coef * (w_l2 + v_l2)

        def symbolic_params(cls):
            return [cls.w, cls.b, cls.v, cls.c]

        def update(cls, x, y, **kwargs):
            params = cls.symbolic_params()
            gp = T.grad(cls.loss, params)
            return [], [In(p, update=p - cls.step * g) for p, g in zip(params, gp)]

        def predict(cls, x, **kwargs):
            return cls.pred, []
        return locals()

    # at this point there is a neural_net module all built and compiled,
    # there is also a neural_net.symbolic_module which can be imported.
    @SymbolicModule_fromFn
    def PCA(
            x=T.dmatrix(),
            var_thresh=T.dscalar()
            ):
        # naive version, yes
        s, v, d = T.svd(x)
        acc = T.accumulate(v)
        npc = T.lsearch(acc, var_thresh * T.sum(v))
        y = s[:, :npc]
        # transform will map future points x into the principle components space
        transform = d[:npc, :].T / v[:npc]
        return locals()

    # at this point there is a neural_net module all built and compiled,
    # there is also a neural_net.symbolic_module which can be imported.
    # running this means:
    nnet_on_pca = neural_net(x=PCA.y, submodules=[PCA])
    #nnet_on_pca = SymbolicModule()
    # nnet_on_pca.include(PCA) #an already-instantiated Module
    # nnet_on_pca.x = nnet_on_pca.PCA.y #configure this Module
    # nnet_on_pca.build(neural_net) # instantiate this module
    nnet_on_pca = neural_net(
        substitute=dict(x=PCA.x),
        submodules=[PCA],
        add_symbols=dict(x=PCA.x)
    )
    nnet = logistic_regression(
        redefine={'x': (LogisticLayer.x, LogisticLayer.y)},
        submodule={'hid': LogisticLayer},
        add_symbols={'x': LogisticLayer.x})
def stats_collector(r, stat_name):
    """stats_collector(nnet_on_pca.x, 'mean')
    """
    # NOTE(review): `mean_collector` is not defined anywhere in this file, so
    # calling this raises NameError; `stat_name` is also unused -- presumably
    # it was meant to select the collector.  TODO confirm intent.
    return mean_collector(x=r)
from __future__ import print_function
from theano.sandbox.theano_object import *
# Global switch: when False, @run-decorated tests are neither executed nor kept.
RUN_TESTS = False


def run(TF):
    """Decorator factory for import-time tests.

    The wrapped function is called immediately only when both `TF` and the
    module-level RUN_TESTS flag are true.  The name is kept bound to the
    function only when RUN_TESTS is set; otherwise it is rebound to None.
    """
    def deco(f):
        if TF and RUN_TESTS:
            print('running test', f.__name__)
            f()
        return f if RUN_TESTS else None
    return deco
class MyModule(TheanoObject):
    """Example TheanoObject with two shared scalar members `a` and `b`."""

    def __init__(self, a=3, b=9):
        """Create the module with symbolic members initialised from a and b.

        BUG FIX: the constructor previously ignored `a` and `b` and hard-coded
        2 and 3; the test suite (MyModule(3, 4); add(5) == 12) requires the
        arguments to be honoured.
        """
        super(MyModule, self).__init__()
        self.a = self.symbolic_member(a)
        self.b = self.symbolic_member(b)
        self.c = 100  # a constant, not a symbolic member
        self.d = [self.symbolic_member(5), self.symbolic_member(6)]
        self.e = ['a', self.symbolic_member(6)]

    @symbolic_fn
    def add(self, x):
        # a + b + x, compiled on demand
        return RVal(self.a + self.b + x)

    @symbolic_fn_opts(mode='FAST_COMPILE')
    def sub(self, x):
        # returns (a - x, b - x) and decrements b by x as a side effect
        outputs = (self.a - x, self.b - x)
        updates = {self.b: self.b - x}
        return RVal(outputs, updates)

    def normal_function(self, x):
        return self.add(x) + self.sub(x)  # use numpy addition

    @symbolic_fn
    def use_submodule(self, x):
        # assumes a sibling TheanoObject has been attached as self.submodule
        return RVal(self.a + x + self.submodule.b)
@run(True)
def test_outputs():
    """Check compiled outputs and shared-member get/set behaviour."""
    # NOTE(review): this expects MyModule(3, 4) to honour its constructor
    # arguments (a=3, b=4).
    MM = MyModule(3, 4)
    assert MM.add(5) == 12
    assert MM.b.get() == 4
    MM.sub(3)
    assert MM.b.get() == 1  # test get()
    assert MM.add(5) == 9  # test that b's container is shared between add and sub
    MM.b.set(2)  # test set
    assert MM.b.get() == 2  # test get()
    assert MM.add(5) == 10  # test that b's container is shared between add and sub
@run(True)
def test_submodule():
    """Check that symbolic members of an attached submodule are shared."""
    MM = MyModule(1, 2)
    MM.submodule = MyModule(3, 4)
    assert MM.add(5) == 8
    MM.submodule.sub(7)
    assert MM.submodule.b.get() == -3
    assert MM.use_submodule(0) == -2  # self.a is 1 + self.submodule.b is -3
@run(False)
def test_misc_prints():
    """Smoke-test printing, value get/set and member deletion (disabled).

    NOTE(review): MM.value / MM.value_set / MM.del_member are not defined by
    the TheanoObject implementation shown in this file -- TODO confirm which
    API version this test targets.
    """
    MM = MyModule()
    print(MM)
    print('add', MM.add(4))
    print('b', MM.value(MM.b))
    print('sub', MM.sub(45))
    print('b', MM.value(MM.b))
    print(MM.sub(23))
    print(MM.add(9))
    print(MM.add(19))
    print('b', MM.value(MM.b))
    print('a', MM.value(MM.a))
    MM.value_set(MM.a, 6)
    MM.value_set(MM.b, 6)
    print(MM.add(6))
    # assigning over a symbolic member is expected to fail
    try:
        MM.b = 5
    except Exception as e:
        print(e)
    MM.del_member(MM.b)
    try:
        print('b', MM.value(MM.b))
    except Exception as e:
        print(e)
    # after deletion, plain attribute assignment should work again
    MM.b = 'asdffd'
    try:
        print('b', MM.value(MM.b))
    except Exception as e:
        print(e)
    try:
        print('b', MM.value(MM.b))
    except Exception as e:
        print('E', e)
    print(MM.b)
    print('a', MM.value(MM.a))
"""
DRAFT: TheanoObject
N.B. the gotcha with this design is listed in the documentation of
`TheanoObject`.
"""
from __future__ import print_function
import theano
from theano import tensor
import numpy
def theano_type(x):
    """Return a theano Type instance suitable for containing value `x`.

    Only plain ints are supported (note: `type(x) is int` deliberately
    excludes bool and other int subclasses).
    """
    if type(x) is not int:
        raise NotImplementedError()
    return tensor.lscalar
class symbolic_fn_callable(object):
    """Bound accessor for a symbolic function on a `TheanoObject`.

    Accessing a `symbolic_fn` attribute yields one of these (via ``on``).
    Calling it compiles-and-runs the function for the argument types given;
    ``outputs`` / ``updates`` / ``theano_function`` expose the corresponding
    pieces of the compilation result.

    Examples
    --------
    t.add(5)                  # run the Function compiled for x=theano_type(5)
    t.add.outputs(5)          # symbolic outputs for that signature
    t.add.updates(5)          # symbolic updates for that signature
    t.add.theano_function(5)  # the compiled Function itself
    """

    def __init__(self, fn, mode):
        self.fn = fn
        self.mode = mode

    def on(self, o_self):
        """
        Silly method to work with symbolic_fn.__get__.
        """
        self.o_self = o_self
        return self

    def run_symbolic(self, *args, **kwargs):
        # delegate to the owning TheanoObject's compile-and-cache machinery
        owner = self.o_self
        return owner._get_method_impl(self.fn, owner, args, kwargs, mode=self.mode)

    def _piece(self, key, args, kwargs):
        # fetch one entry of the compilation-result dict
        return self.run_symbolic(*args, **kwargs)[key]

    def __call__(self, *args, **kwargs):
        return self._piece('theano_function', args, kwargs)(*args, **kwargs)

    def theano_function(self, *args, **kwargs):
        return self._piece('theano_function', args, kwargs)

    def outputs(self, *args, **kwargs):
        return self._piece('outputs', args, kwargs)

    def updates(self, *args, **kwargs):
        return self._piece('updates', args, kwargs)
class symbolic_fn(object):
    """Property-like descriptor that marks a `TheanoObject` method symbolic.

    Attribute access returns the shared `symbolic_fn_callable` bound to the
    accessing instance; assignment is silently ignored.
    """

    def __init__(self, fn, mode=None):
        self.fn = fn
        self.callable = symbolic_fn_callable(fn, mode)

    def __get__(self, o_self, o_cls):
        # bind the shared callable to whichever instance is accessing it
        return self.callable.on(o_self)

    def __set__(self, o_self, new_val):
        # deliberately a no-op: symbolic functions cannot be overwritten
        pass
def symbolic_fn_opts(**kwargs):
    """
    Return a decorator for symbolic functions in a `TheanoObject`.

    `kwargs` given here are forwarded to `symbolic_fn` (and from there toward
    `theano.function`).
    """
    return lambda f: symbolic_fn(f, **kwargs)
class RVal(object):
    """A Return-Value object for a `symbolic_fn`.

    `outputs` lists the variables the method computes.  `updates` maps module
    `symbolic_member` Variables to the expressions whose computed values
    replace them on every call of the compiled function.
    """

    # class-level defaults (instances always override these in __init__)
    outputs = []
    updates = {}

    def __init__(self, outputs, updates=None):
        self.outputs = outputs
        upd = {} if updates is None else updates
        assert type(upd) is dict
        self.updates = upd
class TheanoObject(object):
    """
    Base for Theano-supported classes.

    This class provides support for symbolic_fn class attributes, compiled on
    demand so that they can be used just like normal (non-symbolic) methods.
    Symbolic functions in a TheanoObject can share member variables created
    with the `symbolic_member` method.

    Notes
    -----
    Variables not created via ``self.symbolic_member`` are *not* shared
    between symbolic functions or with the class; they are locked into the
    closure of a symbolic function when it is compiled.

    .. warning:: Do not interleave (a) changes to non-symbolic instance
        variables with (b) calls to symbolic functions that use them: each
        compilation freezes the then-current values, and later changes have
        no effect on already-compiled functions.

    :todo: Is there an efficient way of recognizing when a compiled symbolic
        function is stale with respect to the instance's current values?
    """

    def __init__(self):
        # per-instance cache: (fn, arg-type tuple) -> compilation-result dict
        self.module_method_cache = {}

    def _get_method_impl(self, fn, o_self, args, kwargs, mode):
        """
        Retrieve information about the symbolic function `fn` in TheanoObject
        instance `o_self`, evaluated on arguments `args` and `kwargs`.

        Returns
        -------
        dict with entries 'theano_function', 'outputs', 'updates', 'mode':
            the theano function compiled for these argument types, plus the
            symbolic outputs and updates it computes.

        Notes
        -----
        Return values are cached in `self.module_method_cache`, keyed by the
        function and the theano types of the arguments.
        """
        if kwargs:
            raise NotImplementedError()
        cache = self.module_method_cache
        args_types = tuple(theano_type(arg) for arg in args)
        key = (fn, args_types)
        if key not in cache:
            # fresh symbolic inputs, one per (typed) argument
            inputs = [a() for a in args_types]
            print('compiling', fn, 'for inputs', inputs)
            rval = fn(o_self, *inputs)
            print('compiling to compute outputs', rval.outputs)
            if isinstance(rval.outputs, (tuple, list)):
                all_required_inputs = theano.gof.graph.inputs(rval.outputs)
            else:
                all_required_inputs = theano.gof.graph.inputs([rval.outputs])
            # Construct In instances for the symbolic_member instances that
            # can automatically be included here (they carry their container).
            module_inputs = [theano.compile.io.In(
                    variable=v,
                    value=v._theanoclass_container,
                    mutable=(v in rval.updates),
                    update=rval.updates.get(v, None))
                for v in all_required_inputs
                if hasattr(v, '_theanoclass_container') and v not in inputs]
            # BUG FIX: `mode` was recorded in the cache but never forwarded to
            # theano.function, so symbolic_fn_opts(mode=...) had no effect.
            cache[key] = dict(
                theano_function=theano.function(inputs + module_inputs,
                                                rval.outputs, mode=mode),
                updates=rval.updates,
                outputs=rval.outputs,
                mode=mode)
        return cache[key]

    def symbolic_member(self, ival, name=None):
        """
        Create a Variable instance (with an attached Container) holding `ival`.

        When the returned Variable feeds a `symbolic_fn` without appearing as
        one of its arguments, the Container supplies (and stores) its value.
        Read the value with the Variable's ``get()``; write it with
        ``set(newval)``.

        :raises NotImplementedError: if `ival` is not a plain int.
        """
        if type(ival) is not int:
            raise NotImplementedError()
        v = tensor.lscalar(name)
        v._theanoclass_container = theano.gof.Container(
            v,
            storage=[theano._asarray(ival, dtype='int64')],
            readonly=False)
        # make sure we are not clobbering existing attributes
        assert not hasattr(v, 'set')
        assert not hasattr(v, 'get')
        v.get = lambda: v._theanoclass_container.data

        def setval_in_v(newval):
            v._theanoclass_container.data = newval
        v.set = setval_in_v
        return v
# (Web-UI residue from the code-review page, translated:)
# Markdown format
# 0%
# You are adding 0 people to this discussion. Please proceed with caution.
# Please finish editing this comment first!
# Register or sign in to post a comment.