提交 7eb7f38e · 作者: Olivier Breuleux

SGrad, improvements to formula

上级 82d158a2
......@@ -11,6 +11,23 @@ def _zip(*lists):
else:
return zip(*lists)
# x = ivector()
# y = ivector()
# e = x + y
# f = Formula(x = x, y = y, e = e)
# y = x + x
# g = Formula(x=x,y=y)
# x2 = x + x
# g = Formula(x=x, x2=x2)
class Formula(utils.object2):
def __init__(self, symtable_d = {}, **symtable_kwargs):
......@@ -257,11 +274,23 @@ class Formulas(utils.object2):
# class Test(Formulas):
# x = T.ivector()
# y = T.ivector()
# e = x + y + 21
# x = T.ivector()
# y = T.ivector()
# e = x + y + 21
# f1 = Formula(x = x, y = y, e = e)
# Test() -> f1.clone()
# f = Test()
# print f
# print f.reassign(x = T.ivector())
......@@ -317,6 +346,8 @@ print unicode(g)
# lr = 0.01
# def autoassociator_f(x, w, b, c):
# reconstruction = sigmoid(T.dot(sigmoid(T.dot(x, w) + b), w.T) + c)
......
......@@ -253,6 +253,11 @@ def tensor(*args, **kwargs):
def _multi(*fns):
def f2(f, names):
if isinstance(names, int):
if names == 1:
return f()
else:
return [f() for i in xrange(names)]
if len(names) == 1:
return f(names)
else:
......@@ -639,6 +644,7 @@ def transpose(x, **kwargs):
class Subtensor(Op):
"""Return a subtensor view
......@@ -908,7 +914,7 @@ class Dot(Op):
def grad(self, (x, y), (gz,)):
    # Gradient of z = dot(x, y) w.r.t. both operands:
    #   dz/dx = dot(gz, y.T)   and   dz/dy = dot(x.T, gz)
    # NOTE: tuple-unpacking parameters are Python 2 only (removed in Py3).
    return dot(gz, y.T), dot(x.T, gz)
def __str__(self):
    """Printable name of this op, shown when printing graphs."""
    # The scraped diff left both the old ('Dot') and new ('dot') return
    # lines in place, which is not valid code; keep only the post-commit
    # lower-case form.
    return "dot"
dot = Dot()
class Gemm(Op):
......@@ -1136,6 +1142,14 @@ gemm = Gemm()
# Gradient
#########################
class SGrad(gof.Op):
    """Symbolic placeholder op for the gradient of `cost` w.r.t. `wrt`.

    `make_node` only records the (cost, wrt) pair and allocates an output
    variable of the same type as `wrt`; the real gradient graph is built
    later, when `expand` is called on the resulting variable.
    """

    def make_node(self, cost, wrt):
        # The gradient has the same type as the variable differentiated
        # with respect to.
        out = wrt.type()
        return Apply(self, [cost, wrt], [out])

    def expand(self, r):
        # Recover the recorded (cost, wrt) pair and build the actual
        # gradient graph in its place.
        inputs = r.owner.inputs
        return grad(inputs[0], inputs[1])


sgrad = SGrad()
def grad(cost, wrt, g_cost=None):
"""
@type cost: L{Result}
......
Markdown 格式
0%
您将 0 人添加到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册或登录后发表评论