提交 911fa71a · 作者: Nan Rosemary Ke

remove name from reshape op

上级提交: 74c0233b
...@@ -643,10 +643,7 @@ def local_gpua_contiguous(op, context_name, inputs, outputs): ...@@ -643,10 +643,7 @@ def local_gpua_contiguous(op, context_name, inputs, outputs):
@op_lifter([tensor.Reshape])
@register_opt2([tensor.Reshape], 'fast_compile')
def local_gpua_reshape(op, context_name, inputs, outputs):
    """Lift a host ``Reshape`` op to its GPU counterpart.

    Builds a ``GpuReshape`` with the same ``ndim`` as the host op.
    The op name is deliberately not propagated (always ``None``),
    matching this commit's removal of name handling from reshape.
    """
    return GpuReshape(op.ndim, None)
......
...@@ -4591,7 +4591,7 @@ class Reshape(Op): ...@@ -4591,7 +4591,7 @@ class Reshape(Op):
return Op.c_code(self, node, name, inputs, outputs, sub) return Op.c_code(self, node, name, inputs, outputs, sub)
def reshape(x, newshape, ndim=None, name=None): def reshape(x, newshape, ndim=None):
if ndim is None: if ndim is None:
newshape = as_tensor_variable(newshape) newshape = as_tensor_variable(newshape)
if newshape.ndim != 1: if newshape.ndim != 1:
...@@ -4607,7 +4607,7 @@ def reshape(x, newshape, ndim=None, name=None): ...@@ -4607,7 +4607,7 @@ def reshape(x, newshape, ndim=None, name=None):
"to know what the number of dimensions of the reshaped " "to know what the number of dimensions of the reshaped "
"variable will be. You can provide the 'ndim' keyword " "variable will be. You can provide the 'ndim' keyword "
"argument to 'reshape' to avoid this problem." % newshape) "argument to 'reshape' to avoid this problem." % newshape)
op = Reshape(ndim, name) op = Reshape(ndim)
rval = op(x, newshape) rval = op(x, newshape)
return rval return rval
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论