# [scraper residue from the code-hosting UI, not part of the source:
#  "more files" / parent commit 7ef88361 / "diff collapsed"]
import gof
import core
class _GradD(dict):
    """Mapping from a result to the running sum of its gradient expressions."""

    def add(self, r, dr):
        """Accumulate the gradient contribution ``dr`` for the result ``r``.

        NOTE(review): the UNDEFINED test is on ``r``, not ``dr`` — confirm
        this is intended; it looks like it may have been meant to test ``dr``.
        """
        if r is core.UNDEFINED:
            self[r] = core.UNDEFINED
        else:
            # First contribution stores dr directly; later ones build a sum.
            self[r] = self[r] + dr if r in self else dr
def expand_grad(i, o, cost_derivs):
    """Backpropagate gradient expressions through the graph from o back to i.

    :param i: list of input results of the graph.
    :param o: list of output results of the graph.
    :param cost_derivs: dict mapping a result to its seed gradient expression.
    :returns: a _GradD mapping each visited result to its accumulated gradient.
    """
    grad_d = _GradD(cost_derivs)
    core.build_mode()
    try:
        # Visit ops in reverse topological order so every op sees the
        # gradients of its outputs before contributing its inputs' gradients.
        # (reversed(...) replaces the non-idiomatic .__reversed__() call.)
        for op in reversed(gof.graph.io_toposort(i, o)):
            op.update_gradient(grad_d)
    finally:
        # Always restore the mode, even if an op's gradient raises.
        core.pop_mode()
    return grad_d
def grad(cost, wrt, cost_grad = 1.0):
    """Return the gradient expression of cost with respect to wrt.

    :param cost: result to differentiate.
    :param wrt: result with respect to which the gradient is taken.
    :param cost_grad: gradient seeded at cost (default 1.0); wrapped via
        core.wrap before backpropagation.
    :returns: the gradient expression, or None if wrt received no
        gradient contribution at all.
    :raises Exception: if the gradient wrt the given result is UNDEFINED.
    """
    cost_derivs = expand_grad([wrt], [cost], {cost: core.wrap(cost_grad)})
    ret = cost_derivs.get(wrt)
    if ret is core.UNDEFINED:
        raise Exception("The gradient wrt %s is undefined." % wrt)
    return ret
from core import *
import gof
def pattern_opt(in_pattern, out_pattern):
    """Build a gof.opt.PatternOptimizer from nested op/wrapper/string patterns."""
    def _normalize(item):
        # Recursively convert wrappers to their op class; sequences are
        # normalized element-wise; strings and Op subclasses pass through.
        if isinstance(item, (list, tuple)):
            return [_normalize(elem) for elem in item]
        if isinstance(item, wrapper):
            return item.opclass
        if isinstance(item, str):
            return item
        if hasattr(item, '__bases__') and issubclass(item, gof.op.Op):
            return item
        raise TypeError("Bad input type for pattern_opt.")
    return gof.opt.PatternOptimizer(_normalize(in_pattern), _normalize(out_pattern))
def op_sub(op1, op2):
    """Build a gof.opt.OpSubOptimizer substituting op1 by op2, unwrapping wrappers."""
    def _unwrap(op):
        # A wrapper stands in for its underlying op class.
        return op.opclass if isinstance(op, wrapper) else op
    return gof.opt.OpSubOptimizer(_unwrap(op1), _unwrap(op2))
#def make_patterns(patterns):
# return [name, pattern_opt(inp, outp) for name, inp, outp in patterns]
def export_opts(opts):
    """Publish each named optimization as a module-level global.

    Entries whose name is falsy (e.g. None) are skipped.
    """
    module_ns = globals()
    for name, opt in opts:
        if name:
            module_ns[name] = opt
# double_transpose_eliminator = pattern_opt((transpose, (transpose, 'x')), 'x')
# patterns = make_patterns(patterns)
# export_patterns(patterns)
# List of optimizations to perform. They are listed in the order they are applied.
# NOTE(review): the i-prefixed ops (itwice, isqr, iadd) are presumably in-place
# variants of their plain counterparts — confirm against core.
opts = [
    # transpose(transpose(x)) -> x
    ['double_transpose_eliminator', pattern_opt((transpose, (transpose, 'x')),
                                                'x')],
    # x + x -> twice(x)
    ['addxx_to_twice', pattern_opt((add, 'x', 'x'),
                                   (twice, 'x'))],
    # twice -> itwice
    ['twice_to_itwice', op_sub(twice, itwice)],
    # x * x -> sqr(x)
    ['mulxx_to_twice', pattern_opt((mul, 'x', 'x'),
                                   (sqr, 'x'))],
    # sqr -> isqr
    ['sqr_to_isqr', op_sub(sqr, isqr)],
    # add -> iadd
    ['add_to_iadd', op_sub(add, iadd)],
    # add(x, y) -> iadd(y, x), swapping the operands
    ['add_to_iadd_reverse', pattern_opt((add, 'x', 'y'),
                                        (iadd, 'y', 'x'))],
    # drop explicit array copies
    ['remove_copies', gof.opt.OpRemover(array_copy)],
    [None, gof.lib.DummyRemover] # has to be at the end
]
export_opts(opts) # publish the optimizations performed under individual names
# Single optimizer applying all of the above in sequence, with merging.
optimizer = gof.opt.MergeOptMerge(gof.opt.SeqOptimizer([opt for name, opt in opts]))
import core
import gof
from numpy import random as r
# def rwrap(f):
# wrapped =
# def ret(self, *args):
class RandomState(gof.Op, gof.ext.IONames):
    """Op that produces a numpy.random.RandomState built from a seed input."""
    # Name of the single input, used by the gof.ext.IONames mixin.
    input_names = ['seed']
    def __init__(self, seed):
        # Wrap the raw seed into a graph result; the output is an opaque
        # Python-object result holding the RandomState instance.
        inputs = [wrap(seed)]
        outputs = [PythonR()]
        gof.Op.__init__(self, inputs, outputs)
    def thunk(self):
        # Return a callable that, when run, seeds a fresh RandomState and
        # stores it in the output's storage.
        # NOTE(review): self.out and self.seed are presumably attributes
        # synthesized by the IONames mixin from input/output names — confirm.
        def f():
            self.out.storage = r.RandomState(self.seed.storage)
        return f
class Random(object):
    """Holds a wrapped random seed/state as a graph result."""
    def __init__(self, seed):
        # BUG FIX: the original signature was __init__(seed) — `self` was
        # missing, so `self.state = ...` raised NameError on every call.
        self.state = core.wrap(seed)
# [scraper residue from the code-hosting UI comment form, not part of the
#  source: "Markdown format" / "0%" / "You added 0 to this discussion.
#  Proceed with caution." / "Please finish editing this comment first!" /
#  "Register or sign in to comment"]