提交 dfb52b32 authored 作者: Frédéric Bastien's avatar Frédéric Bastien

Merge pull request #3155 from harlouci/props_gof_tests

Props gof tests
from __future__ import print_function
import unittest
from nose.plugins.skip import SkipTest
from nose.plugins.attrib import attr
import numpy
......@@ -83,6 +80,8 @@ def double(name):
class MyOp(Op):
__props__ = ("nin", "name")
def __init__(self, nin, name):
self.nin = nin
self.name = name
......@@ -99,14 +98,6 @@ class MyOp(Op):
def __str__(self):
    """Render this op using its user-supplied name."""
    op_label = self.name
    return op_label
def __eq__(self, other):
return (type(self) == type(other) and
self.name == other.name and
self.nin == other.nin)
def __hash__(self):
return hash(type(self)) ^ hash(self.name) ^ hash(self.nin)
def perform(self, node, inputs, out_):
    """Python-mode execution: apply self.impl to the inputs.

    Writes the result into the single output storage cell.
    """
    (output_storage,) = out_
    output_storage[0] = self.impl(*inputs)
......@@ -326,7 +317,7 @@ def test_duallinker_mismatch():
# this runs OpWiseCLinker and PerformLinker in parallel and feeds
# variables of matching operations to _my_checker to verify that they
# are the same.
res = fn(1.0, 2.0, 3.0)
fn(1.0, 2.0, 3.0)
raise Exception("An exception should have been raised here!")
except MyExc as e:
pass
......@@ -359,7 +350,7 @@ def test_c_fail_error():
lnk = OpWiseCLinker().accept(Env([y, z], [e]))
fn = lnk.make_function()
try:
res = fn(1.5, 3.0)
fn(1.5, 3.0)
except RuntimeError:
print('Yay, TEST PASSED')
return # test passed
......
......@@ -27,7 +27,8 @@ class MyOp(theano.compile.ops.DeepCopyOp):
rand = numpy.random.rand()
return ("""printf("%(rand)s\\n");""" + code) % locals()
# Else, no C code
return super(DeepCopyOp, self).c_code(node, name, inames, onames, sub)
return super(theano.compile.ops.DeepCopyOp, self).c_code(
node, name, inames, onames, sub)
def test_inter_process_cache():
......
......@@ -13,19 +13,13 @@ from theano import scalar
from theano import tensor as T
from theano.gof import Apply, Op
from theano.gof import utils
from theano.scan_module import scan
from theano.tensor.basic import _allclose
# Used in TestComputeTestValue.test_no_perform
class IncOneC(Op):
"""An Op with only a C (c_code) implementation"""
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
__props__ = ()
def make_node(self, input):
input = scalar.as_scalar(input)
......@@ -241,7 +235,7 @@ class TestComputeTestValue(unittest.TestCase):
orig_compute_test_value = theano.config.compute_test_value
try:
theano.config.compute_test_value = 'raise'
#theano.config.compute_test_value = 'warn'
# theano.config.compute_test_value = 'warn'
k = T.iscalar("k")
A = T.vector("A")
k.tag.test_value = 3
......@@ -286,7 +280,7 @@ class TestComputeTestValue(unittest.TestCase):
non_sequences=A,
n_steps=k)
assert False
except ValueError as e:
except ValueError:
# Get traceback
tb = sys.exc_info()[2]
# Get frame info 4 layers up
......@@ -343,12 +337,7 @@ class TestComputeTestValue(unittest.TestCase):
def test_no_c_code(self):
class IncOnePython(Op):
"""An Op with only a Python (perform) implementation"""
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
__props__ = ()
def make_node(self, input):
input = scalar.as_scalar(input)
......@@ -416,6 +405,6 @@ class TestComputeTestValue(unittest.TestCase):
init_Mu1 = theano.shared(
numpy.zeros((5,), dtype=config.floatX)).dimshuffle('x', 0)
f = theano.function([], outputs=[init_Mu1])
theano.function([], outputs=[init_Mu1])
finally:
theano.config.compute_test_value = orig_compute_test_value
from __future__ import print_function
import unittest
from six.moves import xrange
from theano.gof.type import Type
from theano.gof import graph
from theano.gof.graph import Variable, Apply
from theano.gof.op import Op
from theano.gof.opt import *
from theano.gof.opt import * # noqa
from theano.gof import destroyhandler
from theano.gof.fg import FunctionGraph, InconsistencyError
......@@ -15,8 +13,14 @@ from theano.gof.toolbox import ReplaceValidate
from copy import copy
PatternOptimizer = lambda p1, p2, ign=True: OpKeyOptimizer(PatternSub(p1, p2), ignore_newtrees=ign)
OpSubOptimizer = lambda op1, op2, fail=NavigatorOptimizer.warn_ignore, ign=True: TopoOptimizer(OpSub(op1, op2), ignore_newtrees=ign, failure_callback=fail)
def PatternOptimizer(p1, p2, ign=True):
    """Build an OpKeyOptimizer that rewrites pattern p1 into p2."""
    substitution = PatternSub(p1, p2)
    return OpKeyOptimizer(substitution, ignore_newtrees=ign)
def OpSubOptimizer(op1, op2, fail=NavigatorOptimizer.warn_ignore, ign=True):
    """Build a TopoOptimizer that replaces op1 applications with op2."""
    substitution = OpSub(op1, op2)
    return TopoOptimizer(substitution,
                         ignore_newtrees=ign,
                         failure_callback=fail)
def as_variable(x):
......@@ -329,7 +333,7 @@ def test_long_destroyers_loop():
add_in_place(y, z)),
add_in_place(z, x))
try:
g2 = Env(*graph.clone([x, y, z], [e2]))
Env(*graph.clone([x, y, z], [e2]))
raise Exception("Shouldn't have reached this point.")
except InconsistencyError:
pass
......@@ -349,7 +353,7 @@ def test_multi_destroyers():
x, y, z = inputs()
e = add(add_in_place(x, y), add_in_place(x, y))
try:
g = Env([x, y, z], [e])
Env([x, y, z], [e])
raise Exception("Shouldn't have reached this point.")
except InconsistencyError as e:
pass
......
......@@ -28,7 +28,7 @@ class TFunctionGraph(unittest.TestCase):
func = theano.gof.FunctionGraph([v], [v + 1])
s = pickle.dumps(func)
func2 = pickle.loads(s)
pickle.loads(s)
def test_node_outputs_not_used(self):
"""In the past, we where removing some not used variable from
......
......@@ -6,17 +6,16 @@ from itertools import count
from theano import (
clone, sparse,
shared, tensor)
sparse,
shared, tensor)
from theano.gof.graph import (
Node, Apply, Constant,
as_string, clone, general_toposort, inputs, io_toposort,
is_same_graph, Variable)
Apply,
as_string, clone, general_toposort, inputs, io_toposort,
is_same_graph, Variable)
from theano.gof.op import Op
from theano.gof.type import Type
from theano.tensor.var import TensorVariable
from theano.sandbox.cuda.var import (
CudaNdarrayVariable, CudaNdarrayConstant, CudaNdarraySharedVariable)
CudaNdarrayVariable, CudaNdarrayConstant, CudaNdarraySharedVariable)
def as_variable(x):
......@@ -45,6 +44,8 @@ def MyVariable(thingy):
class MyOp(Op):
__props__ = ()
def make_node(self, *inputs):
inputs = list(map(as_variable, inputs))
for input in inputs:
......@@ -54,9 +55,6 @@ class MyOp(Op):
outputs = [MyVariable(sum([input.type.thingy for input in inputs]))]
return Apply(self, inputs, outputs)
def __str__(self):
return self.__class__.__name__
MyOp = MyOp()
##########
......@@ -86,9 +84,11 @@ class TestInputs:
class X:
leaf_formatter = lambda self, leaf: str(leaf.type)
node_formatter = lambda self, node, argstrings: "%s(%s)" % (node.op,
", ".join(argstrings))
def leaf_formatter(self, leaf):
    """Render a graph leaf as the string form of its type."""
    leaf_type = leaf.type
    return str(leaf_type)
def node_formatter(self, node, argstrings):
    """Render an apply node as "op(arg1, arg2, ...)"."""
    joined_args = ", ".join(argstrings)
    return "%s(%s)" % (node.op, joined_args)
def str(self, inputs, outputs):
return as_string(inputs, outputs,
......@@ -118,7 +118,7 @@ class TestStr(X):
assert self.str([r1, r2, r5], node2.outputs) == ["MyOp(*1 -> MyOp(R1, R2), *1)"]
def test_cutoff(self):
    """Render node2 with node's outputs given as inputs.

    Diff residue removed: the superseded 3-variable unpacking line
    (which also bound an unused r5) was left next to its replacement;
    only the 2-variable form is kept.
    """
    r1, r2 = MyVariable(1), MyVariable(2)
    node = MyOp.make_node(r1, r2)
    node2 = MyOp.make_node(node.outputs[0], node.outputs[0])
    # The shared intermediate appears as R3 in the rendering.
    assert self.str(node.outputs, node2.outputs) == ["MyOp(R3, R3)"]
......@@ -186,7 +186,7 @@ class TestToposort:
def test_1(self):
"""Test a graph with double dependencies"""
r1, r2, r5 = MyVariable(1), MyVariable(2), MyVariable(5)
r1, r5 = MyVariable(1), MyVariable(5)
o = MyOp.make_node(r1, r1)
o2 = MyOp.make_node(o.outputs[0], r5)
all = general_toposort(o2.outputs, prenode)
......@@ -194,7 +194,7 @@ class TestToposort:
def test_2(self):
"""Test a graph where the inputs have owners"""
r1, r2, r5 = MyVariable(1), MyVariable(2), MyVariable(5)
r1, r5 = MyVariable(1), MyVariable(5)
o = MyOp.make_node(r1, r1)
r2b = o.outputs[0]
o2 = MyOp.make_node(r2b, r2b)
......@@ -215,7 +215,7 @@ class TestToposort:
def test_4(self):
"""Test inputs and outputs mixed together in a chain graph"""
r1, r2, r3, r4 = MyVariable(1), MyVariable(2), MyVariable(3), MyVariable(4)
r1, r2 = MyVariable(1), MyVariable(2)
o0 = MyOp.make_node(r1, r2)
o1 = MyOp.make_node(o0.outputs[0], r1)
all = io_toposort([r1, o0.outputs[0]], [o0.outputs[0], o1.outputs[0]])
......@@ -223,9 +223,9 @@ class TestToposort:
def test_5(self):
    """Test when outputs have clients.

    Diff residue removed: the superseded 4-variable unpacking and the
    assigned-but-unused ``o1`` binding were left beside their
    replacements; only the new-side lines are kept.  The local that
    shadowed the builtin ``all`` is renamed.
    """
    r1, r2, r4 = MyVariable(1), MyVariable(2), MyVariable(4)
    o0 = MyOp.make_node(r1, r2)
    # Give o0's output a client node; the new node itself is unused.
    MyOp.make_node(o0.outputs[0], r4)
    topo = io_toposort([], o0.outputs)
    assert topo == [o0]
......@@ -265,11 +265,11 @@ class TestIsSameGraph(unittest.TestCase):
"""
x, y, z = tensor.vectors('x', 'y', 'z')
self.check([
(x, x, (({}, True), )),
(x, y, (({}, False), ({y: x}, True), )),
(x, tensor.neg(x), (({}, False), )),
(x, tensor.neg(y), (({}, False), )),
])
(x, x, (({}, True), )),
(x, y, (({}, False), ({y: x}, True), )),
(x, tensor.neg(x), (({}, False), )),
(x, tensor.neg(y), (({}, False), )),
])
def test_full_graph(self):
"""
......@@ -278,14 +278,14 @@ class TestIsSameGraph(unittest.TestCase):
x, y, z = tensor.vectors('x', 'y', 'z')
t = x * y
self.check([
(x * 2, x * 2, (({}, True), )),
(x * 2, y * 2, (({}, False), ({y: x}, True), )),
(x * 2, y * 2, (({}, False), ({x: y}, True), )),
(x * 2, y * 3, (({}, False), ({y: x}, False), )),
(t * 2, z * 2, (({}, False), ({t: z}, True), )),
(t * 2, z * 2, (({}, False), ({z: t}, True), )),
(x * (y * z), (x * y) * z, (({}, False), )),
])
(x * 2, x * 2, (({}, True), )),
(x * 2, y * 2, (({}, False), ({y: x}, True), )),
(x * 2, y * 2, (({}, False), ({x: y}, True), )),
(x * 2, y * 3, (({}, False), ({y: x}, False), )),
(t * 2, z * 2, (({}, False), ({t: z}, True), )),
(t * 2, z * 2, (({}, False), ({z: t}, True), )),
(x * (y * z), (x * y) * z, (({}, False), )),
])
def test_merge_only(self):
"""
......@@ -294,15 +294,15 @@ class TestIsSameGraph(unittest.TestCase):
x, y, z = tensor.vectors('x', 'y', 'z')
t = x * y
self.check([
(x, t, (({}, False), ({t: x}, True))),
(t * 2, x * 2, (({}, False), ({t: x}, True), )),
(x * x, x * y, (({}, False), ({y: x}, True), )),
(x * x, x * y, (({}, False), ({y: x}, True), )),
(x * x + z, x * y + t, (({}, False),
({y: x}, False),
({y: x, t: z}, True))),
],
debug=False)
(x, t, (({}, False), ({t: x}, True))),
(t * 2, x * 2, (({}, False), ({t: x}, True), )),
(x * x, x * y, (({}, False), ({y: x}, True), )),
(x * x, x * y, (({}, False), ({y: x}, True), )),
(x * x + z, x * y + t, (({}, False),
({y: x}, False),
({y: x, t: z}, True))),
],
debug=False)
################
......@@ -317,12 +317,12 @@ class TestEval(unittest.TestCase):
self.w = 2 * self.z
def test_eval(self):
    """Variable.eval computes a value and caches its compiled function.

    Diff residue removed: the old-style assertEqual lines (spaces
    before ``:``) and the duplicated message-continuation lines were
    left beside their PEP8-formatted replacements; only one copy of
    each statement is kept.
    """
    # Substituting either the leaves or the intermediate gives 6.
    self.assertEqual(self.w.eval({self.x: 1., self.y: 2.}), 6.)
    self.assertEqual(self.w.eval({self.z: 3}), 6.)
    # eval() memoizes its compiled function on the variable...
    self.assertTrue(hasattr(self.w, "_fn_cache"),
                    "variable must have cache after eval")
    # ...but the cache must not survive pickling.
    self.assertFalse(
        hasattr(pickle.loads(pickle.dumps(self.w)), '_fn_cache'),
        "temporary functions must not be serialized")
################
......@@ -410,6 +410,3 @@ class TestAutoName:
r2 = r1.clone()
assert r1.auto_name == "auto_" + str(autoname_id)
assert r2.auto_name == "auto_" + str(autoname_id + 1)
import unittest, os
import os
import numpy
import six.moves.cPickle as pickle
from theano.compat import DictMixin, OrderedDict
import theano
import theano.tensor as T
floatX = 'float32'
def test_graph_opt_caching():
opt_db_file = theano.config.compiledir+'/optimized_graphs.pkl'
os.system('rm %s'%opt_db_file)
opt_db_file = theano.config.compiledir + '/optimized_graphs.pkl'
os.system('rm %s' % opt_db_file)
mode = theano.config.mode
if mode in ["DEBUG_MODE", "DebugMode"]:
mode = "FAST_RUN"
......@@ -30,12 +29,12 @@ def test_graph_opt_caching():
q = theano.shared(numpy.ones((10, 10), dtype=floatX))
j = T.sum(T.sum(T.sum(m ** 2 + n) + p) + q)
f2 = theano.function([m, n], j, mode=mode)
in1 = numpy.ones((10, 10), dtype=floatX)
in2 = numpy.ones((10, 10), dtype=floatX)
assert f1(in1, in2) == f2(in1, in2)
finally:
theano.config.cache_optimizations = default
if __name__ == '__main__':
test_graph_opt_caching()
......@@ -5,8 +5,7 @@ import numpy
import theano
from theano.gof.op import PureOp
from theano.gof import Apply, generic, Container
from theano.gof.link import LocalLinker, map_storage, add_clear_storage
from theano.gof import Apply, generic
from theano import function, Mode
from theano.ifelse import ifelse
import theano.tensor as T
......
......@@ -10,7 +10,7 @@ from theano.gof.type import Type
from theano.gof.op import Op
from theano.gof import fg
from theano.gof.link import *
from theano.gof.link import * # noqa
from theano.compat import cmp
......@@ -31,6 +31,9 @@ def double(name):
class MyOp(Op):
__props__ = ("nin", "name", "impl")
def __init__(self, nin, name, impl=None):
self.nin = nin
self.name = name
......@@ -175,8 +178,11 @@ def test_sort_schedule_fn():
import theano
from theano.gof.sched import sort_schedule_fn, make_depends
x = theano.tensor.matrix('x')
y = theano.tensor.dot(x[:5]*2, x.T+1).T
str_cmp = lambda a, b: cmp(str(a), str(b)) # lexicographical sort
y = theano.tensor.dot(x[:5] * 2, x.T + 1).T
def str_cmp(a, b):
    """Three-way compare of two objects by their string form (lexicographic)."""
    left, right = str(a), str(b)
    return cmp(left, right)
linker = theano.OpWiseCLinker(schedule=sort_schedule_fn(str_cmp))
mode = theano.Mode(linker=linker)
f = theano.function((x,), (y,), mode=mode)
......
......@@ -56,6 +56,8 @@ class MyType(Type):
class MyOp(Op):
__props__ = ()
def make_node(self, *inputs):
inputs = list(map(as_variable, inputs))
for input in inputs:
......@@ -70,12 +72,7 @@ MyOp = MyOp()
class NoInputOp(Op):
"""An Op to test the corner-case of an Op with no input."""
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
__props__ = ()
def make_node(self):
return Apply(self, [], [MyType('test')()])
......@@ -162,12 +159,7 @@ class TestMakeThunk(unittest.TestCase):
def test_no_c_code(self):
class IncOnePython(Op):
"""An Op with only a Python (perform) implementation"""
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
__props__ = ()
def make_node(self, input):
input = scalar.as_scalar(input)
......@@ -204,12 +196,7 @@ class TestMakeThunk(unittest.TestCase):
def test_no_perform(self):
class IncOneC(Op):
"""An Op with only a C (c_code) implementation"""
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
__props__ = ()
def make_node(self, input):
input = scalar.as_scalar(input)
......
......@@ -2,9 +2,9 @@
from theano.gof.type import Type
from theano.gof.graph import Variable, Apply, Constant
from theano.gof.op import Op
from theano.gof.opt import *
from theano.gof.opt import * # noqa
from theano.gof.fg import FunctionGraph as Env
from theano.gof.toolbox import *
from theano.gof.toolbox import * # noqa
def as_variable(x):
......@@ -53,7 +53,7 @@ class MyOp(Op):
return self.name
def __eq__(self, other):
    """Equal when identical, or both MyOp with matching non-None x.

    Diff residue removed: the commented-out variant appeared twice
    (old ``#rval`` and reformatted ``# rval`` sides); one copy kept.
    """
    # rval = (self is other) or (isinstance(other, MyOp) and self.x is not None and self.x == other.x and self.name == other.name)
    rval = (self is other) or (isinstance(other, MyOp) and
                               self.x is not None and
                               self.x == other.x)
    return rval
......@@ -84,8 +84,12 @@ def inputs():
return x, y, z
PatternOptimizer = lambda p1, p2, ign=False: OpKeyOptimizer(PatternSub(p1, p2), ignore_newtrees=ign)
TopoPatternOptimizer = lambda p1, p2, ign=True: TopoOptimizer(PatternSub(p1, p2), ignore_newtrees=ign)
def PatternOptimizer(p1, p2, ign=False):
    """OpKeyOptimizer applying the pattern substitution p1 -> p2."""
    sub = PatternSub(p1, p2)
    return OpKeyOptimizer(sub, ignore_newtrees=ign)
def TopoPatternOptimizer(p1, p2, ign=True):
    """TopoOptimizer applying the pattern substitution p1 -> p2."""
    sub = PatternSub(p1, p2)
    return TopoOptimizer(sub, ignore_newtrees=ign)
class TestPatternOptimizer:
......@@ -205,6 +209,7 @@ class TestPatternOptimizer:
x, y, z = inputs()
e = op4(op1(op2(x, y)), op1(op1(x, y)))
g = Env([x, y, z], [e])
def constraint(r):
    # Only replacing if the input was produced by an op2 node.
    producer_op = r.owner.op
    return producer_op == op2
......@@ -225,6 +230,7 @@ class TestPatternOptimizer:
x, y, z = inputs()
e = op2(op1(x, x), op1(x, y))
g = Env([x, y, z], [e])
def constraint(r):
    # Only replacing if the producing node's two inputs are distinct objects.
    first_input = r.owner.inputs[0]
    second_input = r.owner.inputs[1]
    return first_input is not second_input
......@@ -263,8 +269,8 @@ class TestPatternOptimizer:
# assert str(g) == "[Op3(x, y)]"
OpSubOptimizer = lambda op1, op2: TopoOptimizer(OpSub(op1, op2))
OpSubOptimizer = lambda op1, op2: OpKeyOptimizer(OpSub(op1, op2))
def OpSubOptimizer(op1, op2):
    """OpKeyOptimizer that substitutes op1 applications with op2."""
    replacement = OpSub(op1, op2)
    return OpKeyOptimizer(replacement)
class TestOpSubOptimizer:
......
from theano.gof.sched import (make_dependence_cmp, sort_apply_nodes,
reverse_dict, _toposort, posort)
import theano
from theano import tensor
from theano.gof.graph import io_toposort
from theano.compat import cmp
from six.moves import xrange
def test_dependence():
......@@ -22,7 +20,10 @@ def test_dependence():
def test_sort_apply_nodes():
x = tensor.matrix('x')
y = tensor.dot(x * 2, x + 1)
str_cmp = lambda a, b: cmp(str(a), str(b)) # lexicographical sort
def str_cmp(a, b):
    """Three-way compare of two objects by their string form (lexicographic)."""
    lhs = str(a)
    rhs = str(b)
    return cmp(lhs, rhs)
nodes = sort_apply_nodes([x], [y], cmps=[str_cmp])
for a, b in zip(nodes[:-1], nodes[1:]):
......@@ -39,10 +40,10 @@ def test_reverse_dict():
def test__toposort():
    """_toposort returns an order consistent with the edge sets.

    Diff residue removed: the dict-literal line and the assert
    continuation line each appeared twice (old/new indentation sides),
    which would not even parse; one copy of each is kept.
    """
    edges = {1: set((4, 6, 7)), 2: set((4, 6, 7)),
             3: set((5, 7)), 4: set((6, 7)), 5: set((7,))}
    order = _toposort(edges)
    # No earlier element may appear in a later element's edge set.
    assert not any(a in edges.get(b, ()) for i, a in enumerate(order)
                   for b in order[i:])
def test_posort_easy():
......@@ -64,5 +65,5 @@ def test_posort():
cmps = [lambda a, b: a % 10 - b % 10,
lambda a, b: (a / 10) % 2 - (b / 10) % 2,
lambda a, b: a - b]
assert posort(l, *cmps) == \
[10, 1, 11, 2, 12, 3, 13, 4, 14, 5, 15, 6, 16, 7, 17, 8, 18, 9, 19]
assert (posort(l, *cmps) ==
[10, 1, 11, 2, 12, 3, 13, 4, 14, 5, 15, 6, 16, 7, 17, 8, 18, 9, 19])
......@@ -3,8 +3,8 @@ from theano.gof.graph import Variable, Apply
from theano.gof.type import Type
from theano.gof.op import Op
from theano.gof.fg import FunctionGraph, InconsistencyError
from theano.gof.toolbox import *
from theano.gof.fg import FunctionGraph
from theano.gof.toolbox import * # noqa
def as_variable(x):
......@@ -33,6 +33,8 @@ def MyVariable(name):
class MyOp(Op):
__props__ = ("nin", "name")
def __init__(self, nin, name):
self.nin = nin
self.name = name
......@@ -72,7 +74,7 @@ class TestNodeFinder:
assert hasattr(g, 'get_nodes')
for type, num in ((add, 3), (sigmoid, 3), (dot, 2)):
if not len([x for x in g.get_nodes(type)]) == num:
if not len([t for t in g.get_nodes(type)]) == num:
raise Exception("Expected: %i times %s" % (num, type))
new_e0 = add(y, z)
assert e0.owner in g.get_nodes(dot)
......@@ -81,8 +83,5 @@ class TestNodeFinder:
assert e0.owner not in g.get_nodes(dot)
assert new_e0.owner in g.get_nodes(add)
for type, num in ((add, 4), (sigmoid, 3), (dot, 1)):
if not len([x for x in g.get_nodes(type)]) == num:
if not len([t for t in g.get_nodes(type)]) == num:
raise Exception("Expected: %i times %s" % (num, type))
......@@ -331,6 +331,9 @@ if run_memory_usage_tests:
class RunOnce(theano.Op):
__props__ = ("nb_run",)
def __init__(self):
self.nb_run = 0
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论