Commit 4aad70d8, authored by Frédéric Bastien, committed by GitHub

Merge pull request #4888 from nke001/ccw4601_cpy

ccw4601 remove name from reshape op
......@@ -643,10 +643,7 @@ def local_gpua_contiguous(op, context_name, inputs, outputs):
@op_lifter([tensor.Reshape])
@register_opt2([tensor.Reshape], 'fast_compile')
def local_gpua_reshape(op, context_name, inputs, outputs):
    """Lift a CPU ``Reshape`` op to its GPU counterpart.

    The ``name`` attribute of ``Reshape`` is deprecated, so it is no
    longer forwarded to ``GpuReshape``; only the number of output
    dimensions is needed.
    """
    # The previous version computed a 'Gpu'-prefixed name and built
    # GpuReshape(op.ndim, op.name), then immediately rebound `res` to
    # GpuReshape(op.ndim) -- the first construction was dead code and
    # has been removed.
    res = GpuReshape(op.ndim)
    return res
......@@ -665,7 +662,7 @@ def local_gpua_flatten(op, context_name, inputs, outputs):
if op.outdim != 1:
shp = [inputs[0].shape[i] for i in range(op.outdim - 1)]
shp += [-1]
res = GpuReshape(op.outdim, None)
res = GpuReshape(op.outdim)
o = res(inputs[0], theano.tensor.as_tensor_variable(shp))
return o
......
......@@ -4440,7 +4440,7 @@ class Reshape(Op):
def __init__(self, ndim, name=None):
    """Construct a Reshape op.

    Parameters
    ----------
    ndim : int
        Number of dimensions of the reshaped output.
    name : None
        Deprecated; retained only so the signature stays
        backward-compatible. Any value other than ``None`` is rejected.

    Raises
    ------
    AssertionError
        If ``name`` is not ``None`` (deprecated usage).
    """
    self.ndim = ndim
    self.name = name
    # Raise explicitly instead of using `assert` so the deprecation
    # check is not stripped when Python runs with -O.  AssertionError
    # is raised to preserve the exception type callers may catch.
    if name is not None:
        raise AssertionError(
            'name attribute for Reshape has been deprecated')
def __str__(self):
    """Render as the class name with ndim in braces, e.g. ``Reshape{2}``."""
    return '{0}{{{1}}}'.format(self.__class__.__name__, self.ndim)
......@@ -4616,7 +4616,7 @@ class Reshape(Op):
return Op.c_code(self, node, name, inputs, outputs, sub)
def reshape(x, newshape, ndim=None, name=None):
def reshape(x, newshape, ndim=None):
if ndim is None:
newshape = as_tensor_variable(newshape)
if newshape.ndim != 1:
......@@ -4632,7 +4632,7 @@ def reshape(x, newshape, ndim=None, name=None):
"to know what the number of dimensions of the reshaped "
"variable will be. You can provide the 'ndim' keyword "
"argument to 'reshape' to avoid this problem." % newshape)
op = Reshape(ndim, name)
op = Reshape(ndim)
rval = op(x, newshape)
return rval
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论