提交 0bc13e93 authored 作者: Frederic's avatar Frederic

Fix GpuSplit error in the new back-end.

上级 50673363
...@@ -20,7 +20,7 @@ from theano.gof.python25 import all, any ...@@ -20,7 +20,7 @@ from theano.gof.python25 import all, any
from theano.tensor.nnet.conv import ConvOp from theano.tensor.nnet.conv import ConvOp
from theano.sandbox.gpuarray.type import GpuArrayType from theano.sandbox.gpuarray.type import GpuArrayType
from theano.sandbox.gpuarray.basic_ops import ( from theano.sandbox.gpuarray.basic_ops import (
host_from_gpu, gpu_from_host, HostFromGpu, host_from_gpu, gpu_from_host, HostFromGpu, GpuSplit,
gpu_alloc, GpuAlloc, GpuReshape, GpuEye, gpu_join, GpuJoin, gpu_alloc, GpuAlloc, GpuReshape, GpuEye, gpu_join, GpuJoin,
) )
from theano.sandbox.gpuarray.blas import gpu_dot22, GpuGemv, GpuGemm, GpuGer from theano.sandbox.gpuarray.blas import gpu_dot22, GpuGemv, GpuGemm, GpuGer
......
...@@ -356,6 +356,17 @@ class G_Join_and_Split(T_Join_and_Split): ...@@ -356,6 +356,17 @@ class G_Join_and_Split(T_Join_and_Split):
self.hide_error = theano.config.mode not in ['DebugMode', 'DEBUG_MODE'] self.hide_error = theano.config.mode not in ['DebugMode', 'DEBUG_MODE']
self.shared = gpuarray_shared_constructor self.shared = gpuarray_shared_constructor
def test_gpusplit_opt(self):
    """Check that a Split on a GPU-shared matrix is optimized into the
    GPU split op and that the resulting partitions match the input rows."""
    random_state = numpy.random.RandomState(seed=utt.fetch_seed())
    mat = self.shared(random_state.rand(4, 6).astype(self.floatX))
    # Split the 4x6 matrix along axis 0 into two 2-row halves.
    outputs = T.Split(2)(mat, 0, [2, 2])
    fn = theano.function([], outputs, mode=self.mode)
    # The optimizer must have introduced the GPU split op in the graph.
    graph_nodes = fn.maker.fgraph.toposort()
    assert any(isinstance(node.op, self.split_op) for node in graph_nodes)
    top_half, bottom_half = fn()
    data = mat.get_value(borrow=True)
    assert numpy.allclose(top_half, data[:2])
    assert numpy.allclose(bottom_half, data[2:])
def test_gpujoin_gpualloc(): def test_gpujoin_gpualloc():
a = T.fmatrix('a') a = T.fmatrix('a')
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论