提交 d7817ddc authored 作者: Pascal Lamblin's avatar Pascal Lamblin

Revert 86b03fce2591d12e641d55d2e93d451601b369c7.

We now use ShapeFeature and SpecifyShape; tag.shape is being deprecated.
上级 278b7205
...@@ -6,7 +6,6 @@ ...@@ -6,7 +6,6 @@
import logging import logging
_logger = logging.getLogger('theano.tensor.opt') _logger = logging.getLogger('theano.tensor.opt')
import copy
import operator import operator
import itertools import itertools
import sys import sys
...@@ -573,14 +572,6 @@ class ShapeFeature(object): ...@@ -573,14 +572,6 @@ class ShapeFeature(object):
if hasattr(r.type,"broadcastable") and r.type.broadcastable[i]: if hasattr(r.type,"broadcastable") and r.type.broadcastable[i]:
return self.lscalar_one return self.lscalar_one
# NOTE: This may cause problems bacause the shape is not asserted
# there is an equivalent mechanism to do this, namely
# specify_shape that one should use
# If user provided size
#elif ( hasattr(r.tag,'shape') and
# r.tag.shape is not None and
# r.tag.shape[i] is not None):
# return T.constant(copy.copy(r.tag.shape[i]),dtype='int64')
else: else:
return Shape_i(i).make_node(r).outputs[0] return Shape_i(i).make_node(r).outputs[0]
...@@ -1093,7 +1084,6 @@ def local_alloc_elemwise(node): ...@@ -1093,7 +1084,6 @@ def local_alloc_elemwise(node):
return [node.op(*new)] return [node.op(*new)]
#TODO, global optimizer that lift the assert to the beginning of the graph. #TODO, global optimizer that lift the assert to the beginning of the graph.
#TODO, var.tag.shape to propagate the shape and lower the overhead of this op
#TODO, when all inputs can be optimized do all except one #TODO, when all inputs can be optimized do all except one
theano.configparser.AddConfigVar('experimental.local_alloc_elemwise', theano.configparser.AddConfigVar('experimental.local_alloc_elemwise',
...@@ -2741,14 +2731,8 @@ register_specialize(local_mul_specialize) ...@@ -2741,14 +2731,8 @@ register_specialize(local_mul_specialize)
@gof.local_optimizer([T.add]) @gof.local_optimizer([T.add])
def local_add_specialize(node): def local_add_specialize(node):
def fill_chain(v): def fill_chain(v):
# Not sure why this happens .. but I did not had the time to look
# into it, it probably has something to do with the dtype I'm
# providing the tag.shape of my variable
out = _fill_chain(v, node.inputs) out = _fill_chain(v, node.inputs)
if out[0].dtype != node.outputs[0].dtype: return out
return [T.cast(out[0], dtype = node.outputs[0].dtype)]
else:
return out
#here, we are past the point of canonicalization, so we don't want to put in un-necessary fills. #here, we are past the point of canonicalization, so we don't want to put in un-necessary fills.
if node.op == T.add: if node.op == T.add:
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论