提交 ef2288d6 authored 作者: Frederic's avatar Frederic

Move the GPU join test into the shared CPU/GPU join test and remove the duplicate test.

上级 13a81986
...@@ -646,36 +646,6 @@ def test_hostfromgpu_shape_i(): ...@@ -646,36 +646,6 @@ def test_hostfromgpu_shape_i():
# ----------------------------------------------------------------------- # -----------------------------------------------------------------------
import theano.sandbox.cuda as cuda_ndarray import theano.sandbox.cuda as cuda_ndarray
from theano.sandbox.cuda.basic_ops import gpu_join, GpuDimShuffle
def test_gpujoin_preserves_broadcasting():
    """GpuJoin along axis 0 must keep the broadcastable pattern of its
    inputs: joining a (2, 1, 2) with a (3, 1, 2) tensor yields (5, 1, 2)
    with the middle axis still broadcastable, and the numeric result must
    match numpy.concatenate on the hand-dimshuffled arrays.
    """
    a_vals = numpy.asarray([[1,2],[3,4]],dtype='float32')
    b_vals = numpy.asarray([[5,6,7],[8,9,10]],dtype='float32')
    shared_a = tcn.shared_constructor(a_vals)
    shared_b = tcn.shared_constructor(b_vals)

    # Input pattern [0,0]: both original axes are non-broadcastable.
    # Output pattern [1,'x',0]: transpose the two axes and insert a new
    # broadcastable axis in the middle.
    shuffle = GpuDimShuffle([0,0], [1,'x',0])
    joined = gpu_join(0, shuffle(shared_a), shuffle(shared_b))

    # The broadcastable middle axis must survive the join.
    assert joined.type.broadcastable == (False,True,False)

    fn = theano.function([], joined, mode=mode_with_gpu)
    result = fn()

    # Hand-computed dimshuffled inputs and their axis-0 concatenation.
    expected_a = numpy.asarray([[[1,3]],[[2,4]]], dtype='float32')
    expected_b = numpy.asarray([[[5,8]],[[6,9]],[[7,10]]], dtype='float32')
    expected = numpy.concatenate([expected_a,expected_b], axis=0)
    assert numpy.all(result == expected)
def test_gpujoin_assert_cndas(): def test_gpujoin_assert_cndas():
# this will end up being an ndarray, as it's float64 # this will end up being an ndarray, as it's float64
...@@ -683,7 +653,7 @@ def test_gpujoin_assert_cndas(): ...@@ -683,7 +653,7 @@ def test_gpujoin_assert_cndas():
a = theano.shared(_a) a = theano.shared(_a)
try: try:
c = gpu_join(1,a) c = cuda.basic_ops.gpu_join(1,a)
# can't "assert False" here, as we want the assertion # can't "assert False" here, as we want the assertion
# error from gpu_join # error from gpu_join
except AssertionError: except AssertionError:
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论