提交 4552f381 authored 作者: Frédéric Bastien's avatar Frédéric Bastien 提交者: GitHub

Merge pull request #6425 from nouiz/remove_node

Remove ViewOp subclass during optimization
......@@ -1333,9 +1333,12 @@ class LocalOptGroup(LocalOptimizer):
self.process_count.setdefault(o, 0)
self.applied_true.setdefault(o, 0)
self.node_created.setdefault(o, 0)
for c in o.tracks():
self.track_map[c].append(o)
tracks = o.tracks()
if tracks is None:
self.track_map[None].append(o)
else:
for c in tracks:
self.track_map[c].append(o)
def __str__(self):
return getattr(self, '__name__',
......
......@@ -39,7 +39,8 @@ def test_view():
a = rand_gpuarray(20, dtype=dtype)
g = GpuArrayType(dtype=dtype, broadcastable=(False,))('g')
f = theano.function([g], ViewOp()(g))
m = theano.compile.get_default_mode().excluding("local_view_op")
f = theano.function([g], ViewOp()(g), mode=m)
assert isinstance(f.maker.fgraph.toposort()[0].op, ViewOp)
......
......@@ -7488,25 +7488,14 @@ def local_useless_composite(node):
# # Remove consider_constant #
# ############################
# Although the ops ConsiderConstant, ZeroGrad and DisconnectedGrad
# just return the input, they should be removed from the graph to
# make sure all possible optimizations can be applied.
register_canonicalize(gof.OpRemove(theano.gradient.consider_constant_),
'fast_compile', 'fast_run',
name='remove_consider_constant')
register_canonicalize(gof.OpRemove(theano.gradient.zero_grad_),
'fast_compile', 'fast_run', name='remove_zero_grad')
register_canonicalize(gof.OpRemove(theano.gradient.disconnected_grad_),
'fast_compile', 'fast_run',
name='remove_disconnected_grad')
@register_canonicalize
@gof.local_optimizer([theano.gradient.GradClip])
def local_grad_clip(node):
if isinstance(node.op, theano.gradient.GradClip):
@register_canonicalize('fast_compile')
@register_useless('fast_compile')
@gof.local_optimizer(None)
def local_view_op(node):
    # Remove ViewOp nodes (and, via isinstance, any ViewOp subclass) from
    # the graph: ViewOp is an identity on its input, so substituting the
    # node's inputs for its outputs preserves semantics while shrinking
    # the graph.  Registered with tracks=None so the optimizer is offered
    # every node; implicitly returns None (no replacement) for non-ViewOp
    # nodes.
    if isinstance(node.op, theano.compile.ops.ViewOp):
        return node.inputs
......
......@@ -16,6 +16,7 @@ from theano.tests import unittest_tools as utt
from theano import gradient
from theano import config
from theano.gof.null_type import NullType
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
one = theano.tensor.as_tensor_variable(1.)
......@@ -784,5 +785,38 @@ def test_grad_clip():
assert np.allclose(out, (1, 4))
assert not np.allclose(out[0], out[1])
def test_grad_scale():
    # grad_scale(x, 2) is the identity in the forward pass but multiplies
    # the gradient flowing through it by 2, so:
    #   z  = d/dx (grad_scale(x, 2))**2 = 2*x * 2 = 4*x
    #   z2 = d/dx x**2                  = 2*x
    x = theano.tensor.scalar()
    z = theano.tensor.grad(gradient.grad_scale(x, 2)**2, x)
    z2 = theano.tensor.grad(x**2, x)
    f = theano.function([x], outputs=[z, z2])
    if theano.config.mode != "FAST_COMPILE":
        # Under an optimizing mode the GradScale op must have been removed
        # from the compiled graph.
        topo = f.maker.fgraph.toposort()
        assert not any([isinstance(node.op, gradient.GradScale)
                        for node in topo])
    # At x = 2: z = 4*2 = 8 and z2 = 2*2 = 4.
    out = f(2.)
    assert np.allclose(out, (8, 4))
def test_undefined_grad_opt():
    """Make sure UndefinedGrad nodes are removed from the optimized graph.

    ``zero_grad`` wraps its input so that taking the gradient through it
    yields an undefined-gradient placeholder; the canonicalization pass
    should strip those placeholder ops from the compiled function's graph.
    """
    random = RandomStreams(np.random.randint(1, 2147462579))

    pvals = theano.shared(np.random.rand(10, 20).astype(theano.config.floatX))
    pvals = pvals / pvals.sum(axis=1)
    pvals = gradient.zero_grad(pvals)

    samples = random.multinomial(pvals=pvals, n=1)
    samples = theano.tensor.cast(samples, pvals.dtype)
    samples = gradient.zero_grad(samples)

    cost = theano.tensor.sum(samples + pvals)
    grad = theano.tensor.grad(cost, samples)

    f = theano.function([], grad)
    # Removed leftover debugging call (theano.printing.debugprint) that
    # spammed the graph dump on every test run.
    assert not any([isinstance(node.op, gradient.UndefinedGrad)
                    for node in f.maker.fgraph.apply_nodes])
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
Markdown 格式
0%
您将 0 人添加到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论