提交 819412de authored 作者: Razvan Pascanu's avatar Razvan Pascanu

Fixed the tests for reduce, foldl and foldr following Fred's suggestion to

check if they still use as little memory as needed in FAST_RUN.
上级 d01b400e
...@@ -6,6 +6,8 @@ import theano ...@@ -6,6 +6,8 @@ import theano
import theano.sandbox.rng_mrg import theano.sandbox.rng_mrg
from theano import tensor from theano import tensor
from theano.tests import unittest_tools as utt from theano.tests import unittest_tools as utt
from theano.compile.pfunc import rebuild_collect_shared
''' '''
Questions and notes about scan that should be answered : Questions and notes about scan that should be answered :
...@@ -162,7 +164,29 @@ def scan_project_sum(*args, **kwargs): ...@@ -162,7 +164,29 @@ def scan_project_sum(*args, **kwargs):
def asarrayX(value): def asarrayX(value):
return theano._asarray(value, dtype=theano.config.floatX) return theano._asarray(value, dtype=theano.config.floatX)
def clone_optimized_graph(f):
    """Clone the optimized graph of a compiled theano function.

    Rebuilds the graph behind ``f`` (via ``rebuild_collect_shared``) and
    returns a pair ``(inputs, outputs)`` where shared variables have been
    filtered out of the input list, so the clone can be re-compiled as a
    stand-alone function.

    :param f: a compiled theano Function (``f.maker.env`` holds its graph).
    :return: tuple ``(non_shared_inputs, cloned_outputs)``.
    """
    shared_cls = theano.tensor.sharedvar.SharedVariable
    # Keep only the explicit (non-shared) inputs of the optimized graph.
    non_shared = [inp for inp in f.maker.env.inputs
                  if not isinstance(inp, shared_cls)]
    cloned_ins, cloned_outs, _ = rebuild_collect_shared(
        f.maker.env.outputs,
        non_shared,
        copy_inputs_over=False)
    # rebuild_collect_shared may re-introduce shared variables; drop them.
    fresh_inputs = [inp for inp in cloned_ins
                    if not isinstance(inp, shared_cls)]
    return (fresh_inputs, cloned_outs)
def grab_scan_node(output):
    """Recursively collect the Scan apply nodes above ``output``.

    Walks the graph through ``owner``/``inputs`` links, stopping at each
    Scan node found (its own ancestors are not searched further).

    :param output: a graph Variable; traversal follows ``output.owner``.
    :return: a list of Scan apply nodes, or ``None`` when there are none.
        (Callers rely on the ``None`` sentinel, e.g.
        ``assert scan_nodes is not None``.)
    """
    if output.owner is None:
        return None
    # Compare the op's class name as a string so we need not import Scan.
    if output.owner.op.__class__.__name__ == 'Scan':
        return [output.owner]
    rval = []
    for inp in output.owner.inputs:
        found = grab_scan_node(inp)
        if found is not None:
            rval += found
    # BUG FIX: the original tested ``rval is []`` which is always False
    # (a list literal is a brand-new object), so an empty result leaked
    # out as ``[]`` instead of the documented ``None``. Use truthiness.
    if not rval:
        return None
    return rval
class T_Scan(unittest.TestCase): class T_Scan(unittest.TestCase):
#class T_Scan(object): #class T_Scan(object):
...@@ -2044,16 +2068,16 @@ class T_Scan(unittest.TestCase): ...@@ -2044,16 +2068,16 @@ class T_Scan(unittest.TestCase):
o,_ = theano.reduce(lambda v,acc : acc+v, x, o,_ = theano.reduce(lambda v,acc : acc+v, x,
theano.tensor.constant(numpy.asarray(0.,dtype=theano.config.floatX)) theano.tensor.constant(numpy.asarray(0.,dtype=theano.config.floatX))
) )
mode = theano.compile.mode.FAST_RUN
#f1 = theano.function([],o) mode = mode.excluding('inplace')
f1 = theano.function([],o, mode= mode)
# Get the scan node inputs, outputs = clone_optimized_graph(f1)
#scan_node = [n for n in f1.maker.env.toposort()
# if n.op.__class__.__name__=='Scan'][0] scan_nodes = grab_scan_node(outputs[0])
# Check how much memory it uses assert scan_nodes is not None
# Can actually do that since things are hidden by the infershape scan_node = scan_nodes[0]
# mechanism f1 = theano.function(inputs, scan_node.inputs[2])
#assert scan_node.inputs[2].value.shape == () assert f1().shape[0] == 1
gx = theano.tensor.grad(o, x) gx = theano.tensor.grad(o, x)
f2 = theano.function([],gx) f2 = theano.function([],gx)
assert numpy.allclose( f2(), numpy.ones((10,))) assert numpy.allclose( f2(), numpy.ones((10,)))
...@@ -2067,15 +2091,16 @@ class T_Scan(unittest.TestCase): ...@@ -2067,15 +2091,16 @@ class T_Scan(unittest.TestCase):
theano.tensor.constant(numpy.asarray(0.,dtype=theano.config.floatX)) theano.tensor.constant(numpy.asarray(0.,dtype=theano.config.floatX))
) )
#f1 = theano.function([],o) mode = theano.compile.mode.FAST_RUN
mode = mode.excluding('inplace')
f1 = theano.function([],o, mode= mode)
inputs, outputs = clone_optimized_graph(f1)
# Get the scan node scan_nodes = grab_scan_node(outputs[0])
#scan_node = [n for n in f1.maker.env.toposort() assert scan_nodes is not None
# if n.op.__class__.__name__=='Scan'][0] scan_node = scan_nodes[0]
# Check how much memory it uses f1 = theano.function(inputs, scan_node.inputs[2])
# Can actually do that since things are hidden by the infershape assert f1().shape[0] == 1
# mechanism
#assert scan_node.inputs[2].value.shape == ()
gx = theano.tensor.grad(o, x) gx = theano.tensor.grad(o, x)
f2 = theano.function([],gx) f2 = theano.function([],gx)
assert numpy.allclose( f2(), numpy.ones((10,))) assert numpy.allclose( f2(), numpy.ones((10,)))
...@@ -2088,15 +2113,16 @@ class T_Scan(unittest.TestCase): ...@@ -2088,15 +2113,16 @@ class T_Scan(unittest.TestCase):
theano.tensor.constant(numpy.asarray(0.,dtype=theano.config.floatX)) theano.tensor.constant(numpy.asarray(0.,dtype=theano.config.floatX))
) )
#f1 = theano.function([],o) mode = theano.compile.mode.FAST_RUN
mode = mode.excluding('inplace')
f1 = theano.function([],o, mode= mode)
inputs, outputs = clone_optimized_graph(f1)
# Get the scan node scan_nodes = grab_scan_node(outputs[0])
#scan_node = [n for n in f1.maker.env.toposort() assert scan_nodes is not None
# if n.op.__class__.__name__=='Scan'][0] scan_node = scan_nodes[0]
# Check how much memory it uses f1 = theano.function(inputs, scan_node.inputs[2])
# Can actually do that since things are hidden by the infershape assert f1().shape[0] == 1
# mechanism
#assert scan_node.inputs[2].value.shape == ()
gx = theano.tensor.grad(o, x) gx = theano.tensor.grad(o, x)
f2 = theano.function([],gx) f2 = theano.function([],gx)
assert numpy.allclose( f2(), numpy.ones((10,))) assert numpy.allclose( f2(), numpy.ones((10,)))
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论