提交 cda83c5d authored 作者: Olivier Breuleux's avatar Olivier Breuleux

expanded the structure of compile, added sanity check mode

上级 a5391d95
import function_module
from function_module import *
import mode
from mode import *
import io
from io import *
import builders
from builders import *
import unittest
import gof, gof.opt
from theano import compile
from theano.compile.function_module import *
from theano.scalar import *
from theano import tensor
from theano import tensor as T
import random
import numpy as N
from theano.compile.builders import *
class T_OpFromGraph(unittest.TestCase):
    """Tests for OpFromGraph: wrapping a Theano subgraph as a single Op."""

    def test_straightforward(self):
        # op(a, b, c) computes a + b * c on matrices.
        x, y, z = T.matrices('xyz')
        e = x + y * z
        op = OpFromGraph([x, y, z], [e], mode='FAST_RUN')
        f = op(x, y, z) - op(y, z, x)
        fn = function([x, y, z], f)
        xv, yv, zv = N.ones((2, 2)), N.ones((2, 2)) * 3, N.ones((2, 2)) * 5
        # (1 + 3*5) - (3 + 5*1) = 16 - 8 = 8.
        # BUG FIX: the original asserted with `numpy.all`, but this module
        # imports numpy only as `N`; use N.all for a guaranteed binding.
        assert N.all(8.0 == fn(xv, yv, zv))
        # Call twice: repeated evaluation must give the same result.
        assert N.all(8.0 == fn(xv, yv, zv))

    def test_size_changes(self):
        # The wrapped graph is a matrix product, so the output shape
        # differs from the input shapes and changes between calls.
        x, y, z = T.matrices('xyz')
        e = T.dot(x, y)
        op = OpFromGraph([x, y], [e], mode='FAST_RUN')
        f = op(x, op(y, z))
        fn = function([x, y, z], f)
        xv, yv, zv = N.ones((2, 3)), N.ones((3, 4)) * 3, N.ones((4, 5)) * 5
        # dot(y, z): (3,5) filled with 4*3*5 = 60; dot(x, that): 3*60 = 180.
        res = fn(xv, yv, zv)
        assert res.shape == (2, 5)
        assert N.all(180.0 == res)
        # Second call exercises re-allocation of differently-shaped storage.
        res = fn(xv, yv, zv)
        assert res.shape == (2, 5)
        assert N.all(180.0 == res)

    def test_grad(self):
        # grad_depth=2 so the OpFromGraph itself is differentiable.
        x, y, z = T.matrices('xyz')
        e = x + y * z
        op = OpFromGraph([x, y, z], [e], mode='FAST_RUN', grad_depth=2)
        f = op(x, y, z)
        f = f - T.grad(f, y)
        fn = function([x, y, z], f)
        xv, yv, zv = N.ones((2, 2)), N.ones((2, 2)) * 3, N.ones((2, 2)) * 5
        # f = (1 + 3*5) - d(x + y*z)/dy = 16 - z = 16 - 5 = 11.
        assert N.all(11.0 == fn(xv, yv, zv))
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
......@@ -147,43 +147,6 @@ def checkfor(testcase, fn, E):
class T_OpFromGraph(unittest.TestCase):
    """Tests for OpFromGraph: wrapping a Theano subgraph as a single Op."""

    def test_straightforward(self):
        # op(a, b, c) computes a + b * c on matrices.
        x, y, z = T.matrices('xyz')
        e = x + y * z
        op = OpFromGraph([x, y, z], [e], mode='FAST_RUN')
        f = op(x, y, z) - op(y, z, x)
        fn = function([x, y, z], f)
        xv, yv, zv = N.ones((2, 2)), N.ones((2, 2)) * 3, N.ones((2, 2)) * 5
        # (1 + 3*5) - (3 + 5*1) = 16 - 8 = 8.
        # BUG FIX: the original asserted with `numpy.all`, but this module
        # imports numpy only as `N`; use N.all for a guaranteed binding.
        assert N.all(8.0 == fn(xv, yv, zv))
        # Call twice: repeated evaluation must give the same result.
        assert N.all(8.0 == fn(xv, yv, zv))

    def test_size_changes(self):
        # The wrapped graph is a matrix product, so the output shape
        # differs from the input shapes and changes between calls.
        x, y, z = T.matrices('xyz')
        e = T.dot(x, y)
        op = OpFromGraph([x, y], [e], mode='FAST_RUN')
        f = op(x, op(y, z))
        fn = function([x, y, z], f)
        xv, yv, zv = N.ones((2, 3)), N.ones((3, 4)) * 3, N.ones((4, 5)) * 5
        # dot(y, z): (3,5) filled with 4*3*5 = 60; dot(x, that): 3*60 = 180.
        res = fn(xv, yv, zv)
        assert res.shape == (2, 5)
        assert N.all(180.0 == res)
        # Second call exercises re-allocation of differently-shaped storage.
        res = fn(xv, yv, zv)
        assert res.shape == (2, 5)
        assert N.all(180.0 == res)

    def test_grad(self):
        # grad_depth=2 so the OpFromGraph itself is differentiable.
        x, y, z = T.matrices('xyz')
        e = x + y * z
        op = OpFromGraph([x, y, z], [e], mode='FAST_RUN', grad_depth=2)
        f = op(x, y, z)
        f = f - T.grad(f, y)
        fn = function([x, y, z], f)
        xv, yv, zv = N.ones((2, 2)), N.ones((2, 2)) * 3, N.ones((2, 2)) * 5
        # f = (1 + 3*5) - d(x + y*z)/dy = 16 - z = 16 - 5 = 11.
        assert N.all(11.0 == fn(xv, yv, zv))
class T_function(unittest.TestCase):
def test_empty(self):
fn = function([], []) #ok
......
from .. import gof
from .. import gradient as G
from function_module import function
class OpFromGraph(gof.Op):
    """
    This creates an L{Op} from a list of input results and a list of
    output results.

    The signature is the same as the signature of L{FunctionFactory}
    and/or function and the resulting L{Op}'s perform will do the same
    operation as::

        function(inputs, outputs, **kwargs)

    Take note that the following options, if provided, must take the
    value(s) listed below:

        unpack_single = False
        borrow_outputs = False

    OpFromGraph takes an additional input, grad_depth. If grad_depth
    is n, OpFromGraph will make special Ops for gradients up to the
    nth level, allowing the user to differentiate this op up to n
    times. The parameter defaults to 1. If grad_depth == 0, the op
    will not be differentiable.

    Example:
        x, y, z = tensor.scalars('xyz')
        e = x + y * z
        op = OpFromGraph([x, y, z], [e], linker='c')
        # op behaves like a normal theano op
        e2 = op(x, y, z) + op(z, y, x)
        fn = function([x, y, z], [e2])
    """

    def __init__(self, inputs, outputs, grad_depth=1, **kwargs):
        # Compile the subgraph once; perform() calls this compiled function.
        self.fn = function(inputs, outputs, **kwargs)
        self.inputs = inputs
        self.outputs = outputs
        self.input_types = [input.type for input in inputs]
        self.output_types = [output.type for output in outputs]

        if grad_depth > 0:
            # One fresh result per output to stand for the incoming gradient.
            output_grads = [t() for t in self.output_types]
            # gd maps each input result to its gradient expression (or None
            # when the input is disconnected from every output).
            gd = G.grad_sources_inputs(zip(self.outputs, output_grads), self.inputs)
            gs = map(gd.get, self.inputs)
            self.grad_ops = []
            for g in gs:
                if g is None:
                    # Disconnected input: its gradient is None.
                    self.grad_ops.append(lambda *args: None)
                else:
                    # Each gradient is itself an OpFromGraph, one level
                    # shallower, so this op can be differentiated
                    # grad_depth times in total.
                    self.grad_ops.append(OpFromGraph(inputs + output_grads,
                                                     [g],
                                                     grad_depth=grad_depth - 1))

    def make_node(self, *inputs):
        """Check input types against the wrapped graph and build the Apply node."""
        for input, type in zip(inputs, self.input_types):
            if not type == input.type:
                # BUG FIX: the format arguments must be a tuple. The original
                # `... % type, input.type` applied % to `type` alone (garbled
                # message) and passed `input.type` as a stray TypeError arg.
                raise TypeError("Wrong type, expected %s but got %s"
                                % (type, input.type))
        return gof.Apply(self,
                         inputs,
                         [type() for type in self.output_types])

    def perform(self, node, inputs, outputs):
        """Run the compiled subgraph and store results in the output cells."""
        results = self.fn(*inputs)
        for output, result in zip(outputs, results):
            # Each `output` is a single-element storage cell.
            output[0] = result

    def grad(self, inputs, output_grads):
        """Return gradients via the pre-built gradient ops (see __init__)."""
        if hasattr(self, 'grad_ops'):
            return [go(*(inputs + output_grads)) for go in self.grad_ops]
        else:
            # grad_depth was 0: this op was built non-differentiable.
            raise NotImplementedError
差异被折叠。
class SymbolicInput(object):
    """
    A symbolic input for use with function or FunctionMaker.

    result: a Result instance; it receives a value before the function
        runs rather than being computed from its owner.
    name: any type (defaults to result.name when autoname=True). When it
        is a valid Python identifier the input may be set by keyword and
        read back as self.<name>.
    update: Result instance (default None); after each call the input's
        value is replaced by this expression's result. When None, the
        update is the input's default value.
    mutable: bool; whether the compiled function may modify the Python
        object passed for this input. Defaults to True exactly when an
        update is given, otherwise False.
    strict: bool (default False); when True the supplied value must
        already have exactly the right type, otherwise it may be cast.
    autoname: bool (default True); see the name option.
    """
    def __init__(self, result, name=None, update=None, mutable=None, strict=False, autoname=True):
        self.result = result
        if name is None and autoname:
            self.name = result.name
        else:
            self.name = name
        if not (self.name is None or isinstance(self.name, str)):
            raise TypeError("name must be a string! (got: %s)" % self.name)
        self.update = update
        if mutable is None:
            # An updated input must be writable by the function.
            self.mutable = update is not None
        else:
            self.mutable = mutable
        self.strict = strict

    def __str__(self):
        if self.update:
            return "In(%s -> %s)" % (self.result, self.update)
        return "In(%s)" % self.result

    def __repr__(self):
        return str(self)
class SymbolicInputKit(object):
    """
    Represents a group ("kit") of SymbolicInputs. If fed into function or
    FunctionMaker, only the inputs which are needed to compile the function
    properly will be taken.

    A SymbolicInputKit provides the distribute function in order to set or
    initialize several inputs from a single value. Specialized Kits should
    override it.
    """
    def __init__(self, name):
        # `name` identifies the kit (e.g. for keyword access).
        if not isinstance(name, str):
            # BUG FIX: corrected typo in the error message ('naem' -> 'name').
            raise TypeError('name must be a string (got: %s)' % name)
        self.name = name
        self.sinputs = []   # SymbolicInput instances, in insertion order
        self.results = []   # the .result of each sinput, kept in parallel

    def add_input(self, sinput):
        """
        Add a SymbolicInput to this SymbolicInputKit. It will be given the
        next available index.
        """
        self.sinputs.append(sinput)
        self.results.append(sinput.result)

    def distribute(self, value, indices, containers):
        """
        Given a list of indices corresponding to SymbolicInputs in this kit
        as well as a corresponding list of containers, initialize all the
        containers using the provided value.

        Subclasses must override this; the base class raises
        NotImplementedError.
        """
        raise NotImplementedError

    def complete(self, inputs):
        """
        Given inputs (a list of Result instances), checks through all
        the SymbolicInputs in the kit and return a sorted list of
        indices and a list of their corresponding SymbolicInputs such
        that each of them represents some result in the inputs list.

        Not all the provided inputs will have a corresponding
        SymbolicInput in the kit; those are silently skipped.
        """
        ret = []
        for input in inputs:
            try:
                i = self.results.index(input)
                ret.append((i, self.sinputs[i]))
            except ValueError:
                # This input is not part of the kit; skip it.
                pass
        ret.sort()
        # NOTE: raises ValueError (nothing to unpack) when no input matched;
        # preserved from the original behavior.
        return zip(*ret)
class In(SymbolicInput):
    """
    A SymbolicInput carrying an initial/default value.

    result: a Result instance; it receives a value before the function
        runs rather than being computed from its owner.
    name: any type (defaults to result.name when autoname=True). When it
        is a valid Python identifier the input may be set by keyword and
        read back as self.<name>.
    value: any type. The initial/default value for this input. If update
        is None, this input acts just like an argument with a default
        value in Python. If update is not None, changes to this value
        will "stick around", whether due to an update or a user's
        explicit action.
    update: Result instance (default None); after each call the input's
        value is replaced by this expression's result. When None, the
        update is the input's default value.
    mutable: bool; whether the compiled function may modify the Python
        object passed for this input. Defaults to True exactly when an
        update is given, otherwise False.
    strict: bool (default False); when True the supplied value must
        already have exactly the right type, otherwise it may be cast.
    autoname: bool (default True); see the name option.
    """
    def __init__(self, result, name=None, value=None, update=None, mutable=None, strict=False, autoname=True):
        self.value = value
        super(In, self).__init__(result, name, update, mutable, strict, autoname)
class SymbolicOutput(object):
    """
    A symbolic output for use with function or FunctionMaker.

    borrow: when True, the caller accepts a reference to the function's
        internal storage for this output; the returned value may be
        clobbered by a later call, but the function may run faster.
    """
    def __init__(self, result, borrow=False):
        self.borrow = borrow
        self.result = result

# Short public alias for SymbolicOutput.
Out = SymbolicOutput
import numpy
from .. import gof
def check_equal(x, y):
    """
    Compare the contents of two single-element storage cells (x[0] vs
    y[0]) and raise an Exception on mismatch; returns None on success.

    For numpy.ndarray values, dtype, shape and element values (absolute
    tolerance 1e-10) must all match; other values are compared with !=.
    Used internally (as the DualLinker checker).

    NOTE: the original docstring claimed this returns True/False; it
    actually raises on mismatch and returns None otherwise.
    """
    x, y = x[0], y[0]
    if isinstance(x, numpy.ndarray) and isinstance(y, numpy.ndarray):
        if x.dtype != y.dtype or x.shape != y.shape or numpy.any(abs(x - y) > 1e-10):
            raise Exception("Output mismatch.", {'performlinker': x, 'clinker': y})
    else:
        if x != y:
            raise Exception("Output mismatch.", {'performlinker': x, 'clinker': y})
# If a string is passed as the linker argument in the constructor for
# Mode, it will be used as the key to retrieve the real linker in this
# dictionary
predefined_linkers = {
    'py' : gof.PerformLinker(),    # pure-Python execution of each op
    'c' : gof.CLinker(),           # generated C code only
    'c|py' : gof.OpWiseCLinker(),  # C per op where available, Python otherwise
    'c&py' : gof.DualLinker(checker = check_equal)  # run both, compare results
    }

# Key into predefined_linkers used when Mode is built without an explicit linker.
default_linker = 'c|py'
def register_linker(name, linker):
    """Register `linker` so that `Mode` can refer to it by `name`."""
    if name not in predefined_linkers:
        predefined_linkers[name] = linker
    else:
        raise ValueError('Linker name already taken: %s' % name)
# If a string is passed as the optimizer argument in the constructor
# for Mode, it will be used as the key to retrieve the real optimizer
# in this dictionary
predefined_optimizers = {
    None : lambda env: None,          # no optimization at all
    'merge' : gof.MergeOptimizer(),   # merge duplicated computations
    }

# Key into predefined_optimizers used when Mode is built without an explicit optimizer.
default_optimizer = 'merge'
def register_optimizer(name, opt):
    """Register `opt` so that `Mode` can refer to it by `name`."""
    if name not in predefined_optimizers:
        predefined_optimizers[name] = opt
    else:
        raise ValueError('Optimizer name already taken: %s' % name)
class Mode(object):
    """
    The Mode represents a way to optimize and then link a computation
    graph.

    * optimizer -> a structure of type Optimizer. An Optimizer may
      simplify the math, put similar computations together, improve
      numerical stability and various other improvements.
    * linker -> a structure of type Linker. A Linker decides which
      implementations to use (C or Python, for example) and how to
      string them together to perform the computation.

    See predefined_linkers, predefined_optimizers and also
    predefined_modes.
    """
    def __init__(self, linker=default_linker, optimizer=default_optimizer):
        # All resolution logic lives in __setstate__ so that construction
        # and unpickling share one code path.
        self.__setstate__((linker, optimizer))

    def __getstate__(self):
        # Pickle the *provided* specs (possibly strings), not the
        # resolved linker/optimizer objects.
        return (self.provided_linker, self.provided_optimizer)

    def __setstate__(self, state):
        # BUG FIX (py3 compatibility): the original signature used tuple
        # parameter unpacking `def __setstate__(self, (linker, optimizer))`,
        # which was removed by PEP 3113; unpack explicitly instead.
        linker, optimizer = state
        self.provided_linker = linker
        self.provided_optimizer = optimizer
        # Strings (and None, for the optimizer table) are looked up in the
        # predefined registries; anything else is used as-is.
        if isinstance(linker, str) or linker is None:
            linker = predefined_linkers[linker]
        self.linker = linker
        if isinstance(optimizer, str) or optimizer is None:
            optimizer = predefined_optimizers[optimizer]
        self.optimizer = optimizer

    def __str__(self):
        return "Mode(linker = %s, optimizer = %s)" % (self.provided_linker, self.provided_optimizer)
# If a string is passed as the mode argument in function or
# FunctionMaker, the Mode will be taken from this dictionary using the
# string as the key
predefined_modes = {'FAST_COMPILE': Mode('py', 'merge')}  # Python linker + merge optimizer

# Key into predefined_modes used when no mode is given.
default_mode = 'FAST_COMPILE'
def register_mode(name, mode):
    """Register `mode` so that `function` can refer to it by `name`."""
    if name not in predefined_modes:
        predefined_modes[name] = mode
    else:
        raise ValueError('Mode name already taken: %s' % name)
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论