提交 4686f603 authored 作者: Frederic Bastien's avatar Frederic Bastien

Small update following code review

上级 84a67842
......@@ -18,7 +18,7 @@ if (not theano.gpuarray.pygpu_activated and
if not theano.gpuarray.pygpu_activated:
if init_error:
raise SkipTest(e)
raise SkipTest(init_error)
else:
raise SkipTest("pygpu disabled")
......
......@@ -166,7 +166,7 @@ def test_validate_input_types_gpuarray_backend():
def test_f16_nonzero():
try:
# To have theano.shared(x
# To have theano.shared(x) try to move on the GPU
theano.compile.shared_constructor(gpuarray_shared_constructor)
cpu_f16_nonzero(mode=mode, op_to_check=GPUA_mrg_uniform)
finally:
......@@ -178,12 +178,18 @@ def test_cpu_target_with_shared_variable():
srng = MRG_RandomStreams()
s = np.random.rand(2, 3).astype('float32')
x = gpuarray_shared_constructor(s, name='x')
y = srng.uniform(x.shape, target='cpu')
y.name = 'y'
z = (x * y).sum()
z.name = 'z'
try:
# To have theano.shared(x) try to move on the GPU
theano.compile.shared_constructor(gpuarray_shared_constructor)
y = srng.uniform(x.shape, target='cpu')
y.name = 'y'
z = (x * y).sum()
z.name = 'z'
fz = theano.function([], z, mode=mode)
fz = theano.function([], z, mode=mode)
nodes = fz.maker.fgraph.toposort()
assert not any([isinstance(node.op, GPUA_mrg_uniform) for node in nodes])
nodes = fz.maker.fgraph.toposort()
assert not any([isinstance(node.op, GPUA_mrg_uniform) for node in nodes])
finally:
theano.compile.shared_constructor(gpuarray_shared_constructor,
remove=True)
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论