Commit fbea8820, authored by Frederic

Transform all Flatten op to the new GpuReshape

Parent commit: 8bd900f8
...@@ -138,10 +138,12 @@ def local_gpureshape(node): ...@@ -138,10 +138,12 @@ def local_gpureshape(node):
@op_lifter(tensor.Flatten)
def local_gpuflatten(node):
    """Lift a host ``Flatten`` op to a ``GpuReshape``.

    Builds the target shape symbolically from the input's shape: the first
    ``outdim - 1`` dimensions are kept as-is and everything remaining is
    collapsed into a single trailing dimension via ``-1``, so
    ``Flatten(outdim=k)`` becomes a reshape to
    ``(shape[0], ..., shape[k-2], -1)``.  For ``outdim == 1`` the shape is
    just ``[-1]``, flattening the whole input to a vector.

    Parameters
    ----------
    node : Apply
        The ``Flatten`` apply node being optimized.

    Returns
    -------
    Variable
        The output of the replacement ``GpuReshape`` apply.
    """
    op = node.op
    shp = []
    if op.outdim != 1:
        # Keep the leading outdim-1 dimensions of the input unchanged.
        shp = [node.inputs[0].shape[i] for i in range(op.outdim - 1)]
    # -1 tells reshape to infer (collapse) all remaining dimensions.
    shp += [-1]
    res = GpuReshape(op.outdim, None)
    o = res(node.inputs[0], theano.tensor.as_tensor_variable(shp))
    return o
......
...@@ -34,3 +34,26 @@ def test_flatten(): ...@@ -34,3 +34,26 @@ def test_flatten():
assert res.shape == val.flatten().shape assert res.shape == val.flatten().shape
assert GpuReshape in [type(node.op) assert GpuReshape in [type(node.op)
for node in f.maker.fgraph.toposort()] for node in f.maker.fgraph.toposort()]
val = numpy.random.rand(10, 11).astype("float32")
res = f(val)
utt.assert_allclose(res, val.flatten())
assert res.shape == val.flatten().shape
assert GpuReshape in [type(node.op)
for node in f.maker.fgraph.toposort()]
f = theano.function([m], m.flatten(ndim=2), mode=mode_with_gpu)
val = numpy.random.rand(10, 11).astype("float32")
res = f(val)
utt.assert_allclose(res, val)
assert res.shape == val.shape
assert GpuReshape in [type(node.op)
for node in f.maker.fgraph.toposort()]
m = theano.tensor.tensor3()
f = theano.function([m], m.flatten(ndim=2), mode=mode_with_gpu)
val = numpy.random.rand(10, 11, 12).astype("float32")
res = f(val)
utt.assert_allclose(res, val.reshape(10, -1))
assert res.shape == val.reshape(10, -1).shape
assert GpuReshape in [type(node.op)
for node in f.maker.fgraph.toposort()]
Markdown formatting is supported
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment