提交 90c3833e authored 作者: nouiz

Merge pull request #242 from nouiz/max_default

change the default of theano.{max,min,argmax,argmin,max_and_argmax} to t...
......@@ -18,13 +18,6 @@ from theano.gof import Container, Variable, generic
# Module-level logger for theano.compile.sharedvalue.
_logger = logging.getLogger('theano.compile.sharedvalue')
# NOTE(review): per the hunk header "@@ -18,13 +18,6" above, the
# AddConfigVar call below is DELETED by this commit: the deprecated
# 'shared.value_borrows' config flag goes away together with the
# deprecated `.value` property it controlled.
AddConfigVar('shared.value_borrows',
("DEPRECATED. You should not use the 'value' property of shared"
" variables, but use the .get_value() and .set_value() methods."
" False: shared variables 'value' property is guaranteed to not"
" alias theano-managed memory. True: no guarantee, but faster."),
BoolParam(True),
in_c_key=False)
class SharedVariable(Variable):
"""
......@@ -125,29 +118,6 @@ class SharedVariable(Variable):
cp.tag = copy.copy(self.tag)
return cp
# Deprecated getter backing the `value` property (this commit deletes it):
# warns that `.value` is deprecated, then delegates to get_value(), taking
# `borrow` from the theano.config.shared.value_borrows flag.
# NOTE(review): indentation was stripped by the web scrape; in the real
# file these lines are methods inside class SharedVariable.
def _value_get(self):
warnings.warn(("The .value property of shared variables is deprecated."
" You should use the .get_value() method instead."),
stacklevel=2)
return self.get_value(borrow=config.shared.value_borrows, return_internal_type=False)
# Deprecated setter backing the `value` property (this commit deletes it):
# warns that `.value` is deprecated, then delegates to set_value() with
# `borrow` read from the theano.config.shared.value_borrows flag.
def _value_set(self, new_value):
warnings.warn(("The .value property of shared variables is deprecated."
" You should use the .set_value() method instead."),
stacklevel=2)
return self.set_value(new_value, borrow=config.shared.value_borrows)
#TODO: USE A CONFIG VARIABLE TO set these get/set methods to the non-borrowing versions
# Semantically things are clearer when using non-borrow versions. That should be the
# default. The default support transparently (if slowly) when the 'raw' value is in a
# different memory space (e.g. GPU or other machine).
# NOTE(review): per the hunk header "@@ -125,29 +118,6", this `value`
# property (and the _value_get/_value_set helpers it wraps) is what the
# commit removes from SharedVariable.
value = property(_value_get, _value_set,
doc=("DEPRECATED. Shortcut for self.get_value() and "
"self.set_value(). "
"The `borrow` argument to these methods is read from "
"`theano.config.shared.value_borrows`. "
"You should call get_value() and set_value() directly."))
def filter_update(self, update):
"""
When this shared variable is updated by a pfunc, the update value will be run through this function.
......
差异被折叠。
......@@ -57,7 +57,7 @@ class MaxAndArgmaxOptimizer(Optimizer):
# NOTE(review): fragment of MaxAndArgmaxOptimizer; the enclosing method is
# outside this hunk ("@@ -57,7 +57,7").  If the argmax output is unused,
# replace MaxAndArgmax with the faster single-output CAReduce(maximum).
if len(node.outputs[1].clients)==0:
try:
axis=get_constant_value(node.inputs[1])
# (old line) only ValueError was caught:
except ValueError:
# (new line) the commit also catches TypeError; `, e` is the
# Python-2 exception-binding syntax:
except (ValueError, TypeError), e:
return False
new = CAReduce(scal.maximum,axis)(node.inputs[0])
......
......@@ -13,91 +13,95 @@ from theano.tests import unittest_tools as utt
# NOTE(review): scraped web diff view -- unified-diff +/- markers and
# indentation were stripped, so the pre-change (old) and post-change (new)
# lines of test_optimization appear interleaved below.  The commit's
# change: the test now loops over axis in [0, 1, -1] instead of
# hard-coding axis 0, plus PEP8 re-spacing.
class T_max_and_argmax(unittest.TestCase):
def test_optimization(self):
# (old) single-axis version:
#If we use only the max output, we should replace this op with a faster one.
mode = theano.compile.mode.get_default_mode().including('canonicalize','fast_run')
# (new) reformatted comment and mode setup:
#If we use only the max output, we should replace this op with
#a faster one.
mode = theano.compile.mode.get_default_mode().including(
'canonicalize', 'fast_run')
# (old) fixture built once, outside any loop:
data = numpy.asarray(numpy.random.rand(2,3),dtype=config.floatX)
n = tensor.matrix()
# (new) fixture rebuilt per axis inside the loop:
for axis in [0, 1, -1]:
data = numpy.asarray(numpy.random.rand(2, 3), dtype=config.floatX)
n = tensor.matrix()
# (old) max-only output, axis 0 -> expect a single CAReduce node:
f = function([n], tensor.max_and_argmax(n,0)[0], mode=mode)
topo = f.maker.env.toposort()
assert len(topo)==1
assert isinstance(topo[0].op, CAReduce)
# (new) same check for each axis:
f = function([n], tensor.max_and_argmax(n, axis)[0], mode=mode)
topo = f.maker.env.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, CAReduce)
# (old) both outputs used -> MaxAndArgmax must remain:
f = function([n], tensor.max_and_argmax(n,0), mode=mode)
topo = f.maker.env.toposort()
assert len(topo)==1
assert isinstance(topo[0].op, tensor.MaxAndArgmax)
# (new) same check for each axis:
f = function([n], tensor.max_and_argmax(n, axis), mode=mode)
topo = f.maker.env.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, tensor.MaxAndArgmax)
# NOTE(review): scraped web diff view -- unified-diff +/- markers and
# indentation were stripped, so the pre-change (old) and post-change (new)
# lines of each test appear interleaved below.  The commit's change: the
# four max() / min() optimization checks now loop over axis in [0, 1, -1]
# instead of hard-coding axis 0, plus PEP8 re-spacing.
class T_min_max(unittest.TestCase):
def setUp(self):
utt.seed_rng()
# (old) one-line mode setup:
self.mode = theano.compile.mode.get_default_mode().including('canonicalize','fast_run')
# (new) identical mode, reformatted:
self.mode = theano.compile.mode.get_default_mode().including(
'canonicalize', 'fast_run')
def test_optimization_max(self):
# (old)/(new) data construction -- spacing change only:
data = numpy.asarray(numpy.random.rand(2,3),dtype=config.floatX)
data = numpy.asarray(numpy.random.rand(2, 3), dtype=config.floatX)
n = tensor.matrix()
# (old) four max() variants, axis hard-coded to 0:
f = function([n],tensor.max(n,0), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo)==1
assert isinstance(topo[0].op,CAReduce)
f(data)
f = function([n],tensor.max(-n,0), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo)==2
assert isinstance(topo[0].op, Elemwise)
assert isinstance(topo[0].op.scalar_op, scalar.Neg)
assert isinstance(topo[1].op,CAReduce)
f(data)
f = function([n],-tensor.max(n,0), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo)==2
assert isinstance(topo[0].op,CAReduce)
assert isinstance(topo[1].op, Elemwise)
assert isinstance(topo[1].op.scalar_op, scalar.Neg)
f(data)
f = function([n],-tensor.max(-n,0), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo)==1
assert isinstance(topo[0].op,CAReduce)#min
f(data)
# (new) same four variants, run for several axis values:
for axis in [0, 1, -1]:
f = function([n], tensor.max(n, axis), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, CAReduce)
f(data)
f = function([n], tensor.max(-n, axis), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo) == 2
assert isinstance(topo[0].op, Elemwise)
assert isinstance(topo[0].op.scalar_op, scalar.Neg)
assert isinstance(topo[1].op, CAReduce)
f(data)
f = function([n], -tensor.max(n, axis), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo) == 2
assert isinstance(topo[0].op, CAReduce)
assert isinstance(topo[1].op, Elemwise)
assert isinstance(topo[1].op.scalar_op, scalar.Neg)
f(data)
f = function([n], -tensor.max(-n, axis), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, CAReduce) # min
f(data)
def test_optimization_min(self):
# (old)/(new) data construction -- spacing change only:
data = numpy.asarray(numpy.random.rand(2,3),dtype=config.floatX)
data = numpy.asarray(numpy.random.rand(2, 3), dtype=config.floatX)
n = tensor.matrix()
# (old) four min() variants, axis hard-coded to 0:
f = function([n],tensor.min(n,0), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo)==1
assert isinstance(topo[0].op,CAReduce)
f(data)
#test variant with neg to make sure we optimize correctly
f = function([n],tensor.min(-n,0), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo)==2
assert isinstance(topo[0].op,CAReduce)#max
assert isinstance(topo[1].op, Elemwise)
assert isinstance(topo[1].op.scalar_op, scalar.Neg)
f(data)
f = function([n],-tensor.min(n,0), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo)==2
assert isinstance(topo[0].op, Elemwise)
assert isinstance(topo[0].op.scalar_op, scalar.Neg)
assert isinstance(topo[1].op,CAReduce)#max
f(data)
f = function([n],-tensor.min(-n,0), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo)==1
assert isinstance(topo[0].op,CAReduce)#max
f(data)
# (new) same four variants, run for several axis values:
for axis in [0, 1, -1]:
f = function([n], tensor.min(n, axis), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, CAReduce)
f(data)
#test variant with neg to make sure we optimize correctly
f = function([n], tensor.min(-n, axis), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo) == 2
assert isinstance(topo[0].op, CAReduce) # max
assert isinstance(topo[1].op, Elemwise)
assert isinstance(topo[1].op.scalar_op, scalar.Neg)
f(data)
f = function([n], -tensor.min(n, axis), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo) == 2
assert isinstance(topo[0].op, Elemwise)
assert isinstance(topo[0].op.scalar_op, scalar.Neg)
assert isinstance(topo[1].op, CAReduce) # max
f(data)
f = function([n], -tensor.min(-n, axis), mode=self.mode)
topo = f.maker.env.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, CAReduce) # max
f(data)
......@@ -350,27 +350,6 @@ def makeSharedTester(shared_constructor_,
# NOTE(review): fragment of a test method generated inside
# makeSharedTester; the enclosing def is outside this hunk.  Per the
# "@@ -350,27 +350,6" header, the whole "Test by .value" section below
# (warning filter, try/finally, and .value get/set assertions) is DELETED
# by this commit, since the deprecated .value property is removed.
assert may_share_memory(old_data, x_shared.container.storage[0])
x_shared.get_value(borrow=True)
# Test by .value
# As we know that .value is deprecated, we filter out the warning
warnings.filterwarnings(
action='ignore',
message='The .value property of shared variables is deprecated.'
)
try:
nd += 1
old_data = x_shared.container.storage[0]
x_shared.value = nd
assert numpy.allclose(self.ref_fct(x_shared.value), self.ref_fct(self.cast_value(nd)))
assert may_share_memory(old_data, x_shared.container.storage[0]) == self.set_value_inplace
finally:
# Restore the default behavior.
# TODO There is a cleaner way to do this in Python 2.6, once
# Theano drops support of Python 2.4 and 2.5.
warnings.filterwarnings(
action='default',
message='The .value property of shared variables is deprecated.'
)
# Test by set_value with borrow=False
nd += 1
old_data = x_shared.container.storage[0]
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论