提交 2c7949b6 authored 作者: Pascal Lamblin's avatar Pascal Lamblin

Merge pull request #1526 from nouiz/lamblin-fix_pickle_cache_leak2

fix pickle cache leak
...@@ -14,6 +14,30 @@ from theano.scan_module import scan
from theano.tensor.basic import _allclose
# Used in TestComputeTestValue.test_no_perform
class IncOneC(Op):
    """An Op with only a C (c_code) implementation"""

    def __eq__(self, other):
        # Instances carry no state, so any two are interchangeable:
        # equality is decided by type alone.
        return type(self) == type(other)

    def __hash__(self):
        # Kept consistent with __eq__: hash on the type.
        return hash(type(self))

    def make_node(self, input):
        # Coerce the argument to a theano scalar and allocate one
        # output variable of the same scalar type.
        inp = scalar.as_scalar(input)
        return Apply(self, [inp], [inp.type()])

    def c_code_cache_version(self):
        # Version tag for the compiled-code cache; bump when c_code changes.
        return (1,)

    def c_code(self, node, name, inputs, outputs, sub):
        # C snippet computing output = input + 1.
        x, = inputs
        z, = outputs
        return "%(z)s = %(x)s + 1;" % locals()
class TestComputeTestValue(unittest.TestCase):
    def test_variable_only(self):
...@@ -338,28 +362,6 @@ class TestComputeTestValue(unittest.TestCase):
    def test_no_perform(self):
        if not theano.config.cxx:
            raise SkipTest("G++ not available, so we need to skip this test.")
# NOTE(review): this is the copy of IncOneC that this commit removes from
# inside TestComputeTestValue.test_no_perform; the definition is moved to
# module level so instances can be pickled and unpickled.
class IncOneC(Op):
    """An Op with only a C (c_code) implementation"""

    def __eq__(self, other):
        # Stateless Op: equality by type only.
        return type(self) == type(other)

    def __hash__(self):
        # Consistent with __eq__ above.
        return hash(type(self))

    def make_node(self, input):
        # Coerce to a theano scalar; the output has the same scalar type.
        input = scalar.as_scalar(input)
        output = input.type()
        return Apply(self, [input], [output])

    def c_code_cache_version(self):
        # Cache key for the compiled C code; bump when c_code changes.
        return (1,)

    def c_code(self, node, name, inputs, outputs, sub):
        # C snippet computing output = input + 1.
        x, = inputs
        z, = outputs
        return "%(z)s = %(x)s + 1;" % locals()
orig_compute_test_value = theano.config.compute_test_value
try:
...@@ -368,6 +370,8 @@ class TestComputeTestValue(unittest.TestCase):
i = scalar.int32('i')
i.tag.test_value = 3
# Class IncOneC is defined outside of the TestComputeTestValue
# so it can be pickled and unpickled
o = IncOneC()(i)
# Check that the perform function is not implemented
......
差异被折叠。
...@@ -12,10 +12,14 @@ If you do want to rewrite these tests, bear in mind:
import unittest
import theano
from theano.gof import FunctionGraph
from theano import gof
from theano.scalar.basic import (floats, float32, float64,
                                 ints, int8, int32, complex64,
                                 ComplexError, IntDiv, TrueDiv,
                                 Composite, add, div_proxy,
                                 and_, eq, neq, invert, mul)
def inputs():
...@@ -216,7 +220,7 @@ class test_div(unittest.TestCase):
d = float64()
f = float32()
#print (a//b).owner.op
assert isinstance((a//b).owner.op, IntDiv)
assert isinstance((b//a).owner.op, IntDiv)
assert isinstance((b/d).owner.op, TrueDiv)
......
# Used in T_fibby
class Fibby(theano.Op):
    """
    An arbitrarily generalized Fibbonacci sequence

    The output starts as a copy of the 1-D input, then for i >= 2
    applies y[i] = y[i - 1] * y[i - 2] + x[i] in place.
    """

    def __eq__(self, other):
        # Stateless Op: all instances are equivalent, compare by type.
        return type(self) == type(other)

    def __hash__(self):
        # Consistent with __eq__ above.
        return hash(type(self))

    def make_node(self, x):
        # Only 1-D tensors are supported (the recurrence indexes a vector).
        x_ = theano.tensor.as_tensor_variable(x)
        assert x_.ndim == 1
        # using x_.type() is dangerous, it copies x's broadcasting
        # behaviour
        return theano.Apply(self,
                            inputs=[x_],
                            outputs=[x_.type()])

    def perform(self, node, inputs, output_storage):
        # Pure-Python implementation of the recurrence.
        x, = inputs
        y = output_storage[0][0] = x.copy()
        for i in range(2, len(x)):
            y[i] = y[i - 1] * y[i - 2] + x[i]

    def c_code(self, node, name, inames, onames, sub):
        # C implementation: copy the input array into the output, then
        # run the same recurrence as perform() over elements 2..n-1.
        x, = inames
        y, = onames
        fail = sub['fail']
        return """
        Py_XDECREF(%(y)s);
        %(y)s = (PyArrayObject*)PyArray_FromArray(
            %(x)s, 0, NPY_ARRAY_ENSURECOPY);
        if (!%(y)s)
            %(fail)s;
        {//New scope needed to make compilation work
        dtype_%(y)s * y = (dtype_%(y)s*)%(y)s->data;
        dtype_%(x)s * x = (dtype_%(x)s*)%(x)s->data;
        for (int i = 2; i < %(x)s->dimensions[0]; ++i)
            y[i] = y[i-1]*y[i-2] + x[i];
        }
        """ % locals()

    def c_code_cache_version(self):
        # Cache key for the compiled C code; bump when c_code changes.
        return (1,)
class T_fibby(unittest.TestCase):
    ## All tests here belong to
    ## http://deeplearning.net/software/theano/extending/fibby.html
...@@ -888,54 +938,8 @@ class T_fibby(unittest.TestCase):
    def test_fibby_1(self):
# NOTE(review): this is the copy of Fibby that this commit removes from
# inside T_fibby.test_fibby_1. Per the diff's replacement comment: "The
# definition of class Fibby is done outside of the test, so the object
# can be pickled."
class Fibby(theano.Op):
    """
    An arbitrarily generalized Fibbonacci sequence
    """

    def __eq__(self, other):
        # Stateless Op: equality by type only.
        return type(self) == type(other)

    def __hash__(self):
        # Consistent with __eq__ above.
        return hash(type(self))

    def make_node(self, x):
        # Only 1-D tensors are supported.
        x_ = theano.tensor.as_tensor_variable(x)
        assert x_.ndim == 1
        # using x_.type() is dangerous, it copies x's broadcasting
        # behaviour
        return theano.Apply(self,
                            inputs=[x_],
                            outputs=[x_.type()])

    def perform(self, node, inputs, output_storage):
        # Pure-Python implementation: copy input, then apply the
        # recurrence y[i] = y[i-1]*y[i-2] + x[i] for i >= 2.
        x, = inputs
        y = output_storage[0][0] = x.copy()
        for i in range(2, len(x)):
            y[i] = y[i - 1] * y[i - 2] + x[i]

    def c_code(self, node, name, inames, onames, sub):
        # C implementation mirroring perform().
        x, = inames
        y, = onames
        fail = sub['fail']
        return """
        Py_XDECREF(%(y)s);
        %(y)s = (PyArrayObject*)PyArray_FromArray(
            %(x)s, 0, NPY_ARRAY_ENSURECOPY);
        if (!%(y)s)
            %(fail)s;
        {//New scope needed to make compilation work
        dtype_%(y)s * y = (dtype_%(y)s*)%(y)s->data;
        dtype_%(x)s * x = (dtype_%(x)s*)%(x)s->data;
        for (int i = 2; i < %(x)s->dimensions[0]; ++i)
            y[i] = y[i-1]*y[i-2] + x[i];
        }
        """ % locals()

    def c_code_cache_version(self):
        # Cache key for the compiled C code.
        return (1,)
fibby = Fibby()
from theano.tensor.opt import (get_scalar_constant_value,
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论