提交 46903b1e authored 作者: Frederic Bastien's avatar Frederic Bastien

Add an UnShapeOpt optimizer that removes the shape feature. To enable it, use optimizer_including=UnShapeOpt

上级 f14af145
......@@ -1408,19 +1408,28 @@ class ShapeFeature(object):
class ShapeOptimizer(Optimizer):
    """Optimizer whose only job is to install a ShapeFeature on the fgraph.

    The class performs no graph rewriting itself: the attached
    ShapeFeature does the shape tracking as other optimizations run.
    """

    def __init__(self):
        # Plain delegation to the base Optimizer constructor.
        super(ShapeOptimizer, self).__init__()

    def add_requirements(self, fgraph):
        """Attach a fresh ShapeFeature to ``fgraph``."""
        fgraph.attach_feature(ShapeFeature())

    def apply(self, fgraph):
        """Intentionally a no-op; the feature added above does the work."""
        pass
class UnShapeOptimizer(Optimizer):
    """Optimizer that removes any ShapeFeature attached to the fgraph.

    Counterpart of ShapeOptimizer: once shape tracking is no longer
    needed, detaching the feature avoids the bookkeeping cost of
    maintaining shape information for subsequent optimizations.
    """

    def apply(self, fgraph):
        """Detach every ShapeFeature instance from ``fgraph``.

        Iterate over a snapshot of ``fgraph._features``: calling
        ``remove_feature`` mutates that list, and removing elements from
        a list while iterating it directly would skip entries.
        """
        for feature in list(fgraph._features):
            if isinstance(feature, ShapeFeature):
                fgraph.remove_feature(feature)
# Register ShapeOptimizer just after the merge1 optimization, which runs
# at position 0. We don't want to track the shape of merged nodes.
theano.compile.mode.optdb.register('ShapeOpt', ShapeOptimizer(),
0.1, 'fast_run', 'fast_compile')
# UnShapeOptimizer is not enabled by default for now (no 'fast_run' /
# 'fast_compile' tags): some cross-entropy optimizations still use the
# shape_feature. Those run at step 2.01, uncanonicalize runs at step 3,
# and the move-to-GPU pass runs at 48.5 — so position 10 seems reasonable
# (after the shape users, before the GPU transfer).
theano.compile.mode.optdb.register('UnShapeOpt', UnShapeOptimizer(),
10)
def local_elemwise_alloc_op(ElemwiseOP, AllocOP, DimShuffleOP):
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论