提交 d18ed5ad authored 作者: James Bergstra's avatar James Bergstra

comments and minor changes to compiling, linking

上级 892ca0bc
......@@ -19,6 +19,8 @@ def infer_reuse_pattern(env, outputs_to_disown):
Given an env and a list of results, returns the list of all
results which may share the same underlying data storage as any of
the specified results. Used internally by function, FunctionMaker.
This list is sometimes also referred to as no_recycling.
"""
do_not_reuse = list()
seen = set()
......@@ -130,14 +132,14 @@ class Function(object):
input.distribute(value, indices, cs)
for c in cs:
c.provided += 1
def set(c, v):
def assign(c, v):
c.data = v
setters = []
# Initialize the storage
for i, ((input, indices, sinputs), (required, refeed, value)) in enumerate(zip(self.indices, defaults)):
if indices is None: # this is true iff input is not a SymbolicInputKit
c = containers[0]
c = containers[0] #containers is being used as a stack. Here we pop off the next one.
if input.strict:
c.strict = True
if value is not None:
......@@ -155,7 +157,7 @@ class Function(object):
finder[input.name] = c if input.name not in finder else DUPLICATE
# inv_finder maps the container to the input (useful for one error message)
inv_finder[c] = input
setters.append(partial(set, c))
setters.append(partial(assign, c))
containers[:1] = []
else:
# The input is a SymbolicInputKit, so we take as many containers as the Kit provides inputs
......@@ -440,11 +442,12 @@ class FunctionMaker(object):
raise ValueError("'linker' parameter of FunctionFactory should be a Linker with an accept method " \
"or one of %s" % predefined_linkers.keys())
#the 'no_borrow' outputs are the ones for which we can't return the internal storage pointer.
no_borrow = [output for output, spec in zip(env.outputs, outputs+additional_outputs) if not spec.borrow]
if not no_borrow:
self.linker = linker.accept(env)
else:
if no_borrow:
self.linker = linker.accept(env, no_recycling = infer_reuse_pattern(env, no_borrow))
else:
self.linker = linker.accept(env)
self.indices = indices
self.inputs = inputs
......
......@@ -756,7 +756,7 @@ class OpWiseCLinker(link.LocalLinker):
no_recycling can contain a list of Results that belong to the env.
If a Result is in no_recycling, CLinker will clear the output storage
associated to it during the computation (to avoid reusing it).
associated to it prior to computation (to avoid reusing it).
"""
__cache__ = {}
......
......@@ -131,7 +131,7 @@ class Container(object):
self.type = r
else:
self.type = r.type
self.name = name or r.name
self.name = r.name if name is None else name
self.storage = storage
self.readonly = readonly
self.strict = strict
......@@ -149,7 +149,7 @@ class Container(object):
else:
self.storage[0] = self.type.filter(value)
except Exception, e:
e.args = e.args + (self.name,)
e.args = e.args + (('Container name "%s"' % self.name),)
raise
data = property(__get, __set)
value = property(__get, __set)
......@@ -160,11 +160,33 @@ class Container(object):
def map_storage(env, order, input_storage, output_storage):
"""WRITEME"""
"""Ensure there is storage for inputs, outputs, and interior nodes.
:param env: The current env. This function uses the inputs and outputs attributes.
:param order: an iterable over Apply instances (in program running order)
:param input_storage: None or existing input storage (see below)
:param output_storage: None or existing output storage (see below)
:rtype: 3-tuple
:returns: (list of storage for inputs, list of storage for outputs, and the `storage_map`)
This function iterates over the nodes in `order` and ensures that for every
input and output `Result`, there is a unique storage container. This is
returned as a dictionary Result->storage called the `storage_map`.
This function also returns `input_storage` which is a list of storages corresponding to env.inputs.
This function also returns `output_storage` which is a list of storages corresponding to env.outputs.
"""
#each Apply argument's data is stored in a list of length 1 (these lists act like pointers)
# input_storage is a list of data-containers for the inputs.
if input_storage is None:
input_storage = [[None] for input in env.inputs]
else:
assert len(env.inputs) == len(input_storage)
storage_map = {}
for r, storage in zip(env.inputs, input_storage):
storage_map[r] = storage
......@@ -172,10 +194,12 @@ def map_storage(env, order, input_storage, output_storage):
# if not isinstance(orphan, Constant):
# raise TypeError("Cannot link a graph with non-constant orphans.", orphan)
# storage_map[orphan] = [orphan.data]
if output_storage is not None:
assert len(env.outputs) == len(output_storage)
for r, storage in zip(env.outputs, output_storage):
storage_map[r] = storage
thunks = []
for node in order:
for r in node.inputs:
......@@ -196,10 +220,6 @@ def map_storage(env, order, input_storage, output_storage):
def streamline(env, thunks, order, no_recycling = [], profiler = None):
"""WRITEME"""
def clear():
for thunk in thunks:
for output in thunk.outputs:
output[0] = None
if profiler is None:
def f():
for x in no_recycling:
......@@ -218,7 +238,6 @@ def streamline(env, thunks, order, no_recycling = [], profiler = None):
profiler.profile_node(thunk, node)
profiler.profile_env(g, env)
f.profiler = profiler
f.clear = clear
return f
class LocalLinker(Linker):
......@@ -260,7 +279,7 @@ class PerformLinker(LocalLinker):
:param no_recycling: WRITEME
:returns: self (WHY? Who calls this function?)
:returns: self (TODO: WHY? Who calls this function?)
"""
if self.env is not None and self.env is not env:
return type(self)().accept(env, no_recycling)
......@@ -275,7 +294,7 @@ class PerformLinker(LocalLinker):
:param input_storage: WRITEME
:param output_storage: WRITEME
:returns: WRITEME (or see: SOMETHING)
:returns: function to run all nodes, list of input containers, list of output containers, list of thunks (for all of program), list of nodes (for all of program)
"""
env = self.env
......@@ -288,18 +307,24 @@ class PerformLinker(LocalLinker):
node_input_storage = tuple(storage_map[input] for input in node.inputs)
node_output_storage = tuple(storage_map[output] for output in node.outputs)
p = node.op.perform
# Thunk is meant to be called without arguments.
# The arguments are given as default values in the lambda expression so that
# they are captured at definition time (a plain closure captures late-bound names).
# Using the closure in a simple way didn't work.
thunk = lambda p = p, i = node_input_storage, o = node_output_storage, n = node: p(n, [x[0] for x in i], o)
thunk.inputs = node_input_storage
thunk.outputs = node_output_storage
thunk.perform = p
thunks.append(thunk)
if no_recycling is True:
if no_recycling is True:
#True is like some special code for *everything*.
#FunctionMaker always passes a list I think -JB
no_recycling = storage_map.values()
no_recycling = utils.difference(no_recycling, input_storage)
else:
no_recycling = [storage_map[r] for r in no_recycling if r not in env.inputs]
# The function that actually runs your program is one of the f's in streamline.
f = streamline(env, thunks, order, no_recycling = no_recycling, profiler = profiler)
return f, [Container(input, storage) for input, storage in zip(env.inputs, input_storage)], \
......
......@@ -1559,6 +1559,11 @@ def shape_padleft(tensor, n_ones):
pattern = ['x']*n_ones + [i for i in range(tensor.type.ndim)]
return DimShuffle(tensor.broadcastable, pattern)(tensor)
@constructor
def rightpad_shape(tensor, n_ones):
    """Return `tensor` with `n_ones` broadcastable dimensions appended.

    Equivalent to reshaping by right-padding the shape with `n_ones` 1s.
    """
    # Keep the existing axes in order, then append 'x' (broadcastable) axes.
    existing_axes = list(range(tensor.type.ndim))
    pattern = existing_axes + ['x'] * n_ones
    return DimShuffle(tensor.broadcastable, pattern)(tensor)
@constructor
def shape_padright(tensor, n_ones):
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论