提交 9dc99a12 authored 作者: James Bergstra's avatar James Bergstra

Replaced print statements with logger calls in tensor.opt

上级 4bcb0779
...@@ -3,6 +3,8 @@ ...@@ -3,6 +3,8 @@
# TODO: intelligent merge for mul/add # TODO: intelligent merge for mul/add
# TODO: 0*x -> 0 # TODO: 0*x -> 0
import logging
_logger = logging.getLogger('theano.tensor.opt')
from theano import gof from theano import gof
from theano.gof import opt, InconsistencyError, TopoOptimizer, graph from theano.gof import opt, InconsistencyError, TopoOptimizer, graph
...@@ -829,7 +831,8 @@ class Canonizer(gof.LocalOptimizer): ...@@ -829,7 +831,8 @@ class Canonizer(gof.LocalOptimizer):
if new.type == out.type: if new.type == out.type:
return [new] return [new]
else: else:
print >> sys.stderr, 'CANONIZE FAILED: new, out = ', new, ',', out, 'types', new.type, ',', out.type _logger.warning(' '.join(('CANONIZE FAILED: new, out = ', new, ',', out, 'types',
new.type, ',', out.type)))
return False return False
def __str__(self): def __str__(self):
...@@ -1262,7 +1265,7 @@ def local_elemwise_fusion(node): ...@@ -1262,7 +1265,7 @@ def local_elemwise_fusion(node):
except NotImplementedError: except NotImplementedError:
catch = True catch = True
if catch: if catch:
print "OPTIMISATION WARNING: ",i.owner.op.scalar_op,"don't implement the c_code fonction. This is not fast and disable the fusion of loop." _logger.info("%s does not implement the c_code function. As well as being potentially slow, this disables loop fusion." % str(i.owner.op.scalar_op))
do_fusion=False do_fusion=False
if do_fusion: if do_fusion:
...@@ -1288,10 +1291,10 @@ def local_elemwise_fusion(node): ...@@ -1288,10 +1291,10 @@ def local_elemwise_fusion(node):
["x" for x in s_g], ["x" for x in s_g],
"z",{}) "z",{})
except MethodNotDefined: except MethodNotDefined:
print "OPTIMISATION WARNING: ",i.owner.op.scalar_op,"don't implement the c_code fonction. This is not fast and disable the fusion of loop." _logger.info("%s does not implement the c_code function. As well as being potentially slow, this disables loop fusion." % str(i.owner.op.scalar_op))
return False return False
except NotImplementedError: except NotImplementedError:
print "OPTIMISATION WARNING: ",s_new_out.owner.op,"don't implement the c_code fonction. This disable the fusion of loop." _logger.info("%s does not implement the c_code function. As well as being potentially slow, this disables loop fusion." % str(s_new_out.op.scalar_op))
return False return False
#create the composite op. #create the composite op.
...@@ -1309,10 +1312,10 @@ flags=os.getenv('THEANO_FLAGS',None) ...@@ -1309,10 +1312,10 @@ flags=os.getenv('THEANO_FLAGS',None)
if flags: if flags:
flags=flags.split(',') flags=flags.split(',')
if 'local_elemwise_fusion' in flags: if 'local_elemwise_fusion' in flags:
print "Will fusion elemwise" _logger.debug("enabling optimization: fusion elemwise")
register_specialize(local_elemwise_fusion) register_specialize(local_elemwise_fusion)
else: else:
print "Won't fuse elemwise" _logger.debug("not enabling optimization: fusion elemwise")
# def make_composite(inputs, outputs): # def make_composite(inputs, outputs):
# scalar_inputs = [scalar.Scalar(dtype = i.type.dtype)() for i in inputs] # scalar_inputs = [scalar.Scalar(dtype = i.type.dtype)() for i in inputs]
......
Markdown 格式
0%
您将 0 人添加到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论