moved elemwise2 to elemwise

Parent commit: c7db795f
import time
import unittest
import numpy
from tensor import astensor, Tensor
from gof import ResultBase, Op, Env, modes
import gof
from gof import modes, Env
from elemwise import *
from scalar import *
class ElemwiseAdd(Elemwise):
def __init__(self, x, y):
self.inputs = (x, y)
self.outputs = [Tensor(dtype = x.dtype, broadcastable = x.broadcastable)]
def var_desc(self):
return [('x', 1), ('y', 1)], [('z', 1)]
# def destroy_map(self): from elemwise import *
# return {self.out: [self.inputs[0]]}
def c_code_foreach(self):
return "%(z)s_i = %(x)s_i + %(y)s_i;"
def inputs(): def inputs():
l1 = [[1.0, 2.0], [3.0, 4.0]] x = modes.build(Tensor('float64', (0, 0), name = 'x'))
l2 = [[3.0, 4.0], [1.0, 2.0]] y = modes.build(Tensor('float64', (1, 0), name = 'y'))
l3 = numpy.ones((2, 3)) z = modes.build(Tensor('float64', (0, 0), name = 'z'))
x = modes.build(astensor(l1, name = 'x'))
y = modes.build(astensor(l2, name = 'y'))
z = modes.build(astensor(l3, name = 'z'))
return x, y, z return x, y, z
def env(inputs, outputs, validate = True, features = []):
    return Env(inputs, outputs, features = features, consistency_check = validate)
class _test_Elemwise(unittest.TestCase): class _test_DimShuffle(unittest.TestCase):
def test_0(self): def with_linker(self, linker):
x, y, z = inputs() for xsh, shuffle, zsh in [((2, 3), (1, 'x', 0), (3, 1, 2)),
e = ElemwiseAdd(x, y).out ((1, 2, 3), (1, 2), (2, 3)),
fn, i, o = gof.CLinker(env([x, y], [e])).make_thunk(True) ((1, 2, 1, 3), (1, 3), (2, 3)),
fn() ((2, 3, 4), (2, 1, 0), (4, 3, 2)),
assert (e.data == numpy.array([[4, 6], [4, 6]])).all() ((2, 3, 4), ('x', 2, 1, 0, 'x'), (1, 4, 3, 2, 1)),
x.data.resize((1, 4)) ((1, 4, 3, 2, 1), (3, 2, 1), (2, 3, 4)),
y.data.resize((1, 4)) ((1, 1, 4), (1, 2), (1, 4))]:
fn() x = modes.build(Tensor('float64', [1 * (entry == 1) for entry in xsh], name = 'x'))
assert (e.data == numpy.array([[4, 6, 4, 6]])).all() e = DimShuffle(x, shuffle).out
# print shuffle, e.owner.grad(e.owner.inputs, e.owner.outputs).owner.new_order
f = linker(env([x], [e])).make_function(inplace=False)
assert f(numpy.ones(xsh)).shape == zsh
# def test_1(self): def test_perform(self):
# x, y, z = inputs() self.with_linker(gof.PerformLinker)
# e = ElemwiseAdd(x, y).out
# fn, i, o = gof.CLinker(env([x, y], [e])).make_thunk(True)
# fn()
# assert (e.data == numpy.array([[4, 6], [4, 6]])).all()
# x.data.resize((1, 4))
# y.data.resize((1, 4))
# fn()
# assert (e.data == numpy.array([[4, 6, 4, 6]])).all()
# def test_straightforward(self):
# x, y, z = inputs()
# e0 = DimShuffle(x, [1, 'x', 0]).out
# f = gof.PerformLinker(env([x], [e0])).make_function(inplace=True)
# assert f(numpy.ones((2, 3))).shape == (3, 1, 2)
class _test_Broadcast(unittest.TestCase):
def with_linker(self, linker):
for xsh, ysh in [((3, 5), (3, 5)),
((3, 5), (1, 5)),
((3, 5), (3, 1)),
((1, 5), (5, 1)),
((1, 1), (1, 1)),
((2, 3, 4, 5), (2, 3, 4, 5)),
((2, 3, 4, 5), (1, 3, 1, 5)),
((2, 3, 4, 5), (1, 1, 1, 1)),
((), ())]:
x = modes.build(Tensor('float64', [1 * (entry == 1) for entry in xsh], name = 'x'))
y = modes.build(Tensor('float64', [1 * (entry == 1) for entry in ysh], name = 'y'))
e = Broadcast(Add, (x, y)).out
f = linker(env([x, y], [e])).make_function(inplace = False)
# xv = numpy.array(range(numpy.product(xsh)))
# xv = xv.reshape(xsh)
# yv = numpy.array(range(numpy.product(ysh)))
# yv = yv.reshape(ysh)
xv = numpy.asarray(numpy.random.rand(*xsh))
yv = numpy.asarray(numpy.random.rand(*ysh))
zv = xv + yv
# print "AAAAAAAAAAAAAAAAAA"
# print f(xv, yv)
# print zv
# print "BBBBBBBBBBBBBBBBBB"
self.failUnless((f(xv, yv) == zv).all())
def with_linker_inplace(self, linker):
for xsh, ysh in [((5, 5), (5, 5)),
((5, 5), (1, 5)),
((5, 5), (5, 1)),
((1, 1), (1, 1)),
((2, 3, 4, 5), (2, 3, 4, 5)),
((2, 3, 4, 5), (1, 3, 1, 5)),
((2, 3, 4, 5), (1, 1, 1, 1)),
((), ())]:
x = modes.build(Tensor('float64', [1 * (entry == 1) for entry in xsh], name = 'x'))
y = modes.build(Tensor('float64', [1 * (entry == 1) for entry in ysh], name = 'y'))
e = Broadcast(Add, (x, y), {0:0}).out
f = linker(env([x, y], [e])).make_function(inplace = False)
xv = numpy.asarray(numpy.random.rand(*xsh))
yv = numpy.asarray(numpy.random.rand(*ysh))
zv = xv + yv
f(xv, yv)
self.failUnless((xv == zv).all())
def test_perform(self):
self.with_linker(gof.PerformLinker)
def test_c(self):
self.with_linker(gof.CLinker)
def test_perform_inplace(self):
self.with_linker_inplace(gof.PerformLinker)
def test_c_inplace(self):
self.with_linker_inplace(gof.CLinker)
def test_fill(self):
x = modes.build(Tensor('float64', [0, 0], name = 'x'))
y = modes.build(Tensor('float64', [1, 1], name = 'y'))
e = Broadcast(Second, (x, y), {0:0}).out
f = gof.CLinker(env([x, y], [e])).make_function(inplace = False)
xv = numpy.ones((5, 5))
yv = numpy.random.rand(1, 1)
f(xv, yv)
assert (xv == yv).all()
class _test_CAReduce(unittest.TestCase):
def with_linker(self, linker):
for xsh, tosum in [((5, 6), (0, 1)),
((5, 6), (0, )),
((5, 6), (1, )),
((5, 6), ()),
((2, 3, 4, 5), (0, 1, 3)),
((), ())]:
x = modes.build(Tensor('float64', [1 * (entry == 1) for entry in xsh], name = 'x'))
e = CAReduce(Add, [x], dimensions_to_reduce = tosum).out
f = linker(env([x], [e])).make_function(inplace = False)
xv = numpy.asarray(numpy.random.rand(*xsh))
zv = xv
for axis in reversed(sorted(tosum)):
zv = numpy.add.reduce(zv, axis)
# print "AAAAAAAAAAAAAAAAAA"
# print xsh, tosum
# print f(xv)
# print zv
# print f(xv) - zv
# print "BBBBBBBBBBBBBBBBBB"
self.failUnless((numpy.abs(f(xv) - zv) < 1e-10).all())
def test_perform(self):
self.with_linker(gof.PerformLinker)
def test_c(self):
self.with_linker(gof.CLinker)
if __name__ == '__main__':
    unittest.main()
# x = modes.build(Tensor('float64', [0, 0], name = 'x'))
# y = modes.build(Tensor('float64', [0, 0], name = 'y'))
# e = Broadcast(SquareDiff, (x, y), {0:0}).out
# f = gof.CLinker(env([x, y], [e])).make_function(inplace = False)
# xv = numpy.random.rand(1000, 1000)
# yv = numpy.random.rand(1000, 1000)
# zv = numpy.random.rand(1000, 1000)
# add = numpy.frompyfunc(lambda x, y: x + y, 2, 1)
# t0 = time.time()
# for i in xrange(100):
# xv -= yv
# xv *= xv
# # xv += yv
# print time.time() - t0
# t0 = time.time()
# for i in xrange(100):
# f(xv, yv)
# print time.time() - t0
# def test_straightforward(self):
# x, y, z = inputs()
# e0 = CAReduce(Add, [x]).out
# # print e0.owner
# f = gof.PerformLinker(env([x], [e0])).make_function(inplace=True)
# assert f(numpy.ones((2, 2))) == 4.0
##########
##########
# def test_straightforward(self):
# x, y, z = inputs()
# e0 = Broadcast(Add, (x, y)).out
# f = gof.PerformLinker(env([x, y], [e0])).make_function(inplace=True)
# assert (f(numpy.ones((2, 2)), numpy.ones((1, 2))) == numpy.ones((2, 2))*2).all()
# # for result in e0.owner.grad(e0.owner.inputs, (z, )):
# # print env([x, y, z], [result])
# def test_c(self):
# x = modes.build(Tensor('float64', (0, 0), name = 'x'))
# y = modes.build(Tensor('float64', (0, 1), name = 'y'))
# z = modes.build(Tensor('float64', (0, 0), name = 'z'))
# # x = modes.build(Tensor('float64', (), name = 'x'))
# # y = modes.build(Tensor('float64', (), name = 'y'))
# # x, y, z = inputs()
# e0 = Broadcast(Add, (x, y)).out
# f = gof.CLinker(env([x, y], [e0])).make_function(inplace=True)
# print f(numpy.ones((4, 4), order = 'f'), numpy.array([[1], [2], [3], [4]]))
# # print f(numpy.ones(()), numpy.ones(()))
# assert (f(numpy.ones((2, 2)), numpy.ones((2, 1))) == numpy.ones((2, 2))*2).all()
import time
import unittest
from gof import ResultBase, Op, Env, modes
import gof
from scalar import *
from elemwise2 import *
def inputs():
x = modes.build(Tensor('float64', (0, 0), name = 'x'))
y = modes.build(Tensor('float64', (1, 0), name = 'y'))
z = modes.build(Tensor('float64', (0, 0), name = 'z'))
return x, y, z
def env(inputs, outputs, validate = True, features = []):
return Env(inputs, outputs, features = features, consistency_check = validate)
class _test_DimShuffle(unittest.TestCase):
def with_linker(self, linker):
for xsh, shuffle, zsh in [((2, 3), (1, 'x', 0), (3, 1, 2)),
((1, 2, 3), (1, 2), (2, 3)),
((1, 2, 1, 3), (1, 3), (2, 3)),
((2, 3, 4), (2, 1, 0), (4, 3, 2)),
((2, 3, 4), ('x', 2, 1, 0, 'x'), (1, 4, 3, 2, 1)),
((1, 4, 3, 2, 1), (3, 2, 1), (2, 3, 4)),
((1, 1, 4), (1, 2), (1, 4))]:
x = modes.build(Tensor('float64', [1 * (entry == 1) for entry in xsh], name = 'x'))
e = DimShuffle(x, shuffle).out
# print shuffle, e.owner.grad(e.owner.inputs, e.owner.outputs).owner.new_order
f = linker(env([x], [e])).make_function(inplace=False)
assert f(numpy.ones(xsh)).shape == zsh
def test_perform(self):
self.with_linker(gof.PerformLinker)
# def test_straightforward(self):
# x, y, z = inputs()
# e0 = DimShuffle(x, [1, 'x', 0]).out
# f = gof.PerformLinker(env([x], [e0])).make_function(inplace=True)
# assert f(numpy.ones((2, 3))).shape == (3, 1, 2)
class _test_Broadcast(unittest.TestCase):
def with_linker(self, linker):
for xsh, ysh in [((3, 5), (3, 5)),
((3, 5), (1, 5)),
((3, 5), (3, 1)),
((1, 5), (5, 1)),
((1, 1), (1, 1)),
((2, 3, 4, 5), (2, 3, 4, 5)),
((2, 3, 4, 5), (1, 3, 1, 5)),
((2, 3, 4, 5), (1, 1, 1, 1)),
((), ())]:
x = modes.build(Tensor('float64', [1 * (entry == 1) for entry in xsh], name = 'x'))
y = modes.build(Tensor('float64', [1 * (entry == 1) for entry in ysh], name = 'y'))
e = Broadcast(Add, (x, y)).out
f = linker(env([x, y], [e])).make_function(inplace = False)
# xv = numpy.array(range(numpy.product(xsh)))
# xv = xv.reshape(xsh)
# yv = numpy.array(range(numpy.product(ysh)))
# yv = yv.reshape(ysh)
xv = numpy.asarray(numpy.random.rand(*xsh))
yv = numpy.asarray(numpy.random.rand(*ysh))
zv = xv + yv
# print "AAAAAAAAAAAAAAAAAA"
# print f(xv, yv)
# print zv
# print "BBBBBBBBBBBBBBBBBB"
self.failUnless((f(xv, yv) == zv).all())
def with_linker_inplace(self, linker):
for xsh, ysh in [((5, 5), (5, 5)),
((5, 5), (1, 5)),
((5, 5), (5, 1)),
((1, 1), (1, 1)),
((2, 3, 4, 5), (2, 3, 4, 5)),
((2, 3, 4, 5), (1, 3, 1, 5)),
((2, 3, 4, 5), (1, 1, 1, 1)),
((), ())]:
x = modes.build(Tensor('float64', [1 * (entry == 1) for entry in xsh], name = 'x'))
y = modes.build(Tensor('float64', [1 * (entry == 1) for entry in ysh], name = 'y'))
e = Broadcast(Add, (x, y), {0:0}).out
f = linker(env([x, y], [e])).make_function(inplace = False)
xv = numpy.asarray(numpy.random.rand(*xsh))
yv = numpy.asarray(numpy.random.rand(*ysh))
zv = xv + yv
f(xv, yv)
self.failUnless((xv == zv).all())
def test_perform(self):
self.with_linker(gof.PerformLinker)
def test_c(self):
self.with_linker(gof.CLinker)
def test_perform_inplace(self):
self.with_linker_inplace(gof.PerformLinker)
def test_c_inplace(self):
self.with_linker_inplace(gof.CLinker)
def test_fill(self):
x = modes.build(Tensor('float64', [0, 0], name = 'x'))
y = modes.build(Tensor('float64', [1, 1], name = 'y'))
e = Broadcast(Second, (x, y), {0:0}).out
f = gof.CLinker(env([x, y], [e])).make_function(inplace = False)
xv = numpy.ones((5, 5))
yv = numpy.random.rand(1, 1)
f(xv, yv)
assert (xv == yv).all()
class _test_CAReduce(unittest.TestCase):
def with_linker(self, linker):
for xsh, tosum in [((5, 6), (0, 1)),
((5, 6), (0, )),
((5, 6), (1, )),
((5, 6), ()),
((2, 3, 4, 5), (0, 1, 3)),
((), ())]:
x = modes.build(Tensor('float64', [1 * (entry == 1) for entry in xsh], name = 'x'))
e = CAReduce(Add, [x], dimensions_to_reduce = tosum).out
f = linker(env([x], [e])).make_function(inplace = False)
xv = numpy.asarray(numpy.random.rand(*xsh))
zv = xv
for axis in reversed(sorted(tosum)):
zv = numpy.add.reduce(zv, axis)
# print "AAAAAAAAAAAAAAAAAA"
# print xsh, tosum
# print f(xv)
# print zv
# print f(xv) - zv
# print "BBBBBBBBBBBBBBBBBB"
self.failUnless((numpy.abs(f(xv) - zv) < 1e-10).all())
def test_perform(self):
self.with_linker(gof.PerformLinker)
def test_c(self):
self.with_linker(gof.CLinker)
if __name__ == '__main__':
unittest.main()
# x = modes.build(Tensor('float64', [0, 0], name = 'x'))
# y = modes.build(Tensor('float64', [0, 0], name = 'y'))
# e = Broadcast(SquareDiff, (x, y), {0:0}).out
# f = gof.CLinker(env([x, y], [e])).make_function(inplace = False)
# xv = numpy.random.rand(1000, 1000)
# yv = numpy.random.rand(1000, 1000)
# zv = numpy.random.rand(1000, 1000)
# add = numpy.frompyfunc(lambda x, y: x + y, 2, 1)
# t0 = time.time()
# for i in xrange(100):
# xv -= yv
# xv *= xv
# # xv += yv
# print time.time() - t0
# t0 = time.time()
# for i in xrange(100):
# f(xv, yv)
# print time.time() - t0
# def test_straightforward(self):
# x, y, z = inputs()
# e0 = CAReduce(Add, [x]).out
# # print e0.owner
# f = gof.PerformLinker(env([x], [e0])).make_function(inplace=True)
# assert f(numpy.ones((2, 2))) == 4.0
##########
##########
# def test_straightforward(self):
# x, y, z = inputs()
# e0 = Broadcast(Add, (x, y)).out
# f = gof.PerformLinker(env([x, y], [e0])).make_function(inplace=True)
# assert (f(numpy.ones((2, 2)), numpy.ones((1, 2))) == numpy.ones((2, 2))*2).all()
# # for result in e0.owner.grad(e0.owner.inputs, (z, )):
# # print env([x, y, z], [result])
# def test_c(self):
# x = modes.build(Tensor('float64', (0, 0), name = 'x'))
# y = modes.build(Tensor('float64', (0, 1), name = 'y'))
# z = modes.build(Tensor('float64', (0, 0), name = 'z'))
# # x = modes.build(Tensor('float64', (), name = 'x'))
# # y = modes.build(Tensor('float64', (), name = 'y'))
# # x, y, z = inputs()
# e0 = Broadcast(Add, (x, y)).out
# f = gof.CLinker(env([x, y], [e0])).make_function(inplace=True)
# print f(numpy.ones((4, 4), order = 'f'), numpy.array([[1], [2], [3], [4]]))
# # print f(numpy.ones(()), numpy.ones(()))
# assert (f(numpy.ones((2, 2)), numpy.ones((2, 1))) == numpy.ones((2, 2))*2).all()
...@@ -9,7 +9,7 @@ import gof, gof.graph ...@@ -9,7 +9,7 @@ import gof, gof.graph
from gof.python25 import any from gof.python25 import any
import gof import gof
from elemwise2 import DimShuffle from elemwise import DimShuffle
def _numpy_checker(x, y): def _numpy_checker(x, y):
""" """
......
from gof import opt from gof import opt
from elemwise2 import Broadcast from elemwise import Broadcast
class InplaceOptimizer(opt.OpSpecificOptimizer): class InplaceOptimizer(opt.OpSpecificOptimizer):
......
...@@ -11,7 +11,7 @@ import gof.op ...@@ -11,7 +11,7 @@ import gof.op
from base_tensor import BaseTensor, BaseTensorOp from base_tensor import BaseTensor, BaseTensorOp
import blas # for gemm, dot import blas # for gemm, dot
import elemwise2 as s2t import elemwise as s2t
import scalar as scal import scalar as scal
......
Markdown format
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment