提交 965366d8 authored 作者: Reyhane Askari's avatar Reyhane Askari

fixed some instances where the join object was used instead of its class

上级 0029a22d
...@@ -591,7 +591,7 @@ def local_gpua_alloc2(node): ...@@ -591,7 +591,7 @@ def local_gpua_alloc2(node):
return return
if (isinstance(node.op, tensor.Alloc) and if (isinstance(node.op, tensor.Alloc) and
all(c != 'output' and all(c != 'output' and
c.op == tensor.join and isinstance(c.op, tensor.Join) and
all(i.owner and all(i.owner and
i.owner.op in [host_from_gpu, tensor.alloc] i.owner.op in [host_from_gpu, tensor.alloc]
for i in c.inputs[1:]) for i in c.inputs[1:])
......
...@@ -2207,7 +2207,7 @@ def local_gpualloc(node): ...@@ -2207,7 +2207,7 @@ def local_gpualloc(node):
# if all clients are on gpu # if all clients are on gpu
replace = True replace = True
elif all([c != 'output' and elif all([c != 'output' and
c.op == tensor.join and isinstance(c.op, tensor.Join) and
all(i.owner and all(i.owner and
i.owner.op in [host_from_gpu, tensor.alloc] i.owner.op in [host_from_gpu, tensor.alloc]
for i in c.inputs[1:]) for i in c.inputs[1:])
......
...@@ -945,7 +945,7 @@ def test_gpujoin_gpualloc(): ...@@ -945,7 +945,7 @@ def test_gpujoin_gpualloc():
mode=mode_with_gpu) mode=mode_with_gpu)
assert sum([node.op == T.alloc for node in f.maker.fgraph.toposort()]) == 2 assert sum([node.op == T.alloc for node in f.maker.fgraph.toposort()]) == 2
assert sum([node.op == T.join_ for node in f.maker.fgraph.toposort()]) == 1 assert sum([isinstance(node.op, T.Join) for node in f.maker.fgraph.toposort()]) == 1
assert sum([isinstance(node.op, B.GpuAlloc) assert sum([isinstance(node.op, B.GpuAlloc)
for node in f_gpu.maker.fgraph.toposort()]) == 2 for node in f_gpu.maker.fgraph.toposort()]) == 2
assert sum([node.op == B.gpu_join assert sum([node.op == B.gpu_join
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论