提交 6e63a578 authored 作者: nouiz's avatar nouiz

Merge pull request #292 from delallea/minor

Minor fixes
......@@ -163,7 +163,9 @@ Others:
* Fixed some import problems.
* Filtering update. (James)
* On Windows, the default compiledir changed to be local to the computer/user and not transferred with roaming profile. (Sebastian Urban)
* New theano flag "on_shape_error". Default to "warn" (same as previous behavior): it print a warning when an error occur when infering the shape of some apply node. The other accepted value is "raise" to raise an error when this happen.
* New theano flag "on_shape_error". Defaults to "warn" (same as previous behavior):
it prints a warning when an error occurs when inferring the shape of some apply node.
The other accepted value is "raise" to raise an error when this happens. (Frederic)
* The buildbot now raises optimization/shape errors instead of just printing a warning. (Frederic)
Reviewers (alphabetical order):
......
......@@ -163,6 +163,9 @@ Others:
* Fixed some import problems.
* Filtering update. (James)
* On Windows, the default compiledir changed to be local to the computer/user and not transferred with roaming profile. (Sebastian Urban)
* New theano flag "on_shape_error". Defaults to "warn" (same as previous behavior):
it prints a warning when an error occurs when inferring the shape of some apply node.
The other accepted value is "raise" to raise an error when this happens. (Frederic)
* The buildbot now raises optimization/shape errors instead of just printing a warning. (Frederic)
Reviewers (alphabetical order):
......
......@@ -249,7 +249,7 @@ import theano and print the config variable, as in:
Default: 'warn'
When an exception is raised when infering the shape of some apply
When an exception is raised when inferring the shape of some apply
node, either warn the user and use a default value ('warn'), or
raise the exception ('raise').
......
......@@ -15,6 +15,7 @@ __contact__ = "Razvan Pascanu <r.pascanu@gmail>"
import itertools
import logging
import time
from itertools import izip
import numpy
......@@ -285,7 +286,7 @@ class Scan(PureOp):
# If everything went OK up to here, there is still one thing to
# check. Namely, do the internal graph represent same
# computations
for self_in, other_in in zip(self.inputs, other.inputs):
for self_in, other_in in izip(self.inputs, other.inputs):
if self_in.type != other_in.type:
return False
......@@ -909,12 +910,12 @@ class Scan(PureOp):
# Here, we build a list inner_ins_shape, such that inner_ins_shape[i]
# is the shape of self.inputs[i]
for inp, inp_shp in zip(node.inputs, input_shapes):
for inp, inp_shp in izip(node.inputs, input_shapes):
assert inp_shp is None or len(inp_shp) == inp.ndim
# sequences
# We skip iputs_shapes[0] as it is the total or current number
# of iteration
# of iterations.
seqs_shape = [x[1:] for x in input_shapes[1:1 + self.n_seqs]]
# mit_mot, mit_sot, sit_sot
......@@ -938,7 +939,7 @@ class Scan(PureOp):
# node.inputs
inner_non_sequences = self.inputs[len(seqs_shape) + len(outs_shape):]
out_equivalent = {}
for in_ns, out_ns in zip(inner_non_sequences, node.inputs[offset:]):
for in_ns, out_ns in izip(inner_non_sequences, node.inputs[offset:]):
out_equivalent[in_ns] = out_ns
if self.as_while:
self_outs = self.outputs[:-1]
......@@ -971,7 +972,7 @@ class Scan(PureOp):
r = node.outputs[n_outs + x]
assert r.ndim == 1 + len(out_shape_x)
shp = [node.inputs[offset + self.n_shared_outs + x]]
for i, shp_i in zip(xrange(1, r.ndim), out_shape_x):
for i, shp_i in izip(xrange(1, r.ndim), out_shape_x):
# Validate shp_i. v_shape_i is either None (if invalid),
# or a (variable, Boolean) tuple. The Boolean indicates
# whether variable is shp_i (if True), or a valid
......@@ -993,7 +994,7 @@ class Scan(PureOp):
# leading dimension so we replace it for every entry with Shape_i
if self.as_while:
scan_outs = [(Shape_i(0)(o),)+x[1:]
for o, x in zip(node.outputs,scan_outs)]
for o, x in izip(node.outputs, scan_outs)]
return scan_outs
### GRAD FUNCTION
......
......@@ -377,9 +377,9 @@ class ScanSaveMem(gof.Optimizer):
if hasattr(env, 'shape_feature'):
shape_of = node.env.shape_feature.shape_of
else:
# Each call site of shape_of is in a try..except
# That use a default version when the variable is not
# in the dictionary
# Each access to shape_of is in a try..except block in order to
# use a default version when the variable is not in the shape_of
# dictionary.
shape_of = {}
# 1. Initialization of variables
# Note 1) We do not actually care about outputs representing shared
......
......@@ -15,6 +15,7 @@ __contact__ = "Razvan Pascanu <r.pascanu@gmail>"
import copy
import logging
from itertools import izip
import numpy
......@@ -291,7 +292,7 @@ def equal_computations(xs, ys, in_xs=None, in_ys=None):
if in_ys is None:
in_ys = []
for x, y in zip(xs, ys):
for x, y in izip(xs, ys):
if x.owner and not y.owner:
return False
if y.owner and not x.owner:
......@@ -301,7 +302,7 @@ def equal_computations(xs, ys, in_xs=None, in_ys=None):
return False
if len(in_xs) != len(in_ys):
return False
for _x, _y in zip(in_xs, in_ys):
for _x, _y in izip(in_xs, in_ys):
if _x.type != _y.type:
return False
......@@ -313,7 +314,7 @@ def equal_computations(xs, ys, in_xs=None, in_ys=None):
n_nodes = len(nds_x)
cont = True
idx = 0
for dx, dy in zip(xs, ys):
for dx, dy in izip(xs, ys):
if not dx.owner or not dy.owner:
if dy.owner or dx.owner:
return False
......@@ -338,7 +339,7 @@ def equal_computations(xs, ys, in_xs=None, in_ys=None):
elif len(nd_x.outputs) != len(nd_y.outputs):
cont = False
else:
for dx, dy in zip(nd_x.inputs, nd_y.inputs):
for dx, dy in izip(nd_x.inputs, nd_y.inputs):
if (dx, dy) not in common:
if dx != dy:
if (isinstance(dx, tensor.Constant) and
......@@ -353,7 +354,7 @@ def equal_computations(xs, ys, in_xs=None, in_ys=None):
cont = False
if cont:
for dx, dy in zip(nd_x.outputs, nd_y.outputs):
for dx, dy in izip(nd_x.outputs, nd_y.outputs):
common.add((dx, dy))
idx += 1
......@@ -372,7 +373,7 @@ def infer_shape(outs, inputs, input_shapes):
# inside. We don't use the full ShapeFeature interface, but we
# let it initialize itself with an empty env, otherwise we will
# need to do it manually
for inp, inp_shp in zip(inputs, input_shapes):
for inp, inp_shp in izip(inputs, input_shapes):
if inp_shp is not None and len(inp_shp) != inp.ndim:
assert len(inp_shp) == inp.ndim
......@@ -380,7 +381,7 @@ def infer_shape(outs, inputs, input_shapes):
shape_feature.on_attach(theano.gof.Env([], []))
# Initialize shape_of with the input shapes
for inp, inp_shp in zip(inputs, input_shapes):
for inp, inp_shp in izip(inputs, input_shapes):
shape_feature.set_shape(inp, inp_shp)
def local_traverse(out):
......@@ -689,7 +690,7 @@ def reconstruct_graph(inputs, outputs, tag=None):
tag = ''
nw_inputs = [safe_new(x, tag) for x in inputs]
givens = {}
for nw_x, x in zip(nw_inputs, inputs):
for nw_x, x in izip(nw_inputs, inputs):
givens[x] = nw_x
allinputs = theano.gof.graph.inputs(outputs)
for inp in allinputs:
......
......@@ -786,9 +786,10 @@ class ShapeFeature(object):
self.shape_of[r] = s
else:
if r.ndim != len(s):
raise ShapeError("Something infered a shape with %d dimensions"
" for a variable with %d dimensions." % (
len(s), r.ndim))
raise ShapeError(
"Something inferred a shape with %d dimensions "
"for a variable with %d dimensions." % (
len(s), r.ndim))
shape_vars = [self.unpack(s_i) for s_i in s]
self.shape_of[r] = tuple(shape_vars)
......@@ -919,7 +920,7 @@ class ShapeFeature(object):
'supported, and one should now use tensor.ShapeError '
'instead. The original exception message is: %s' % e)
except Exception, e:
msg = ('Failed to infer_shape from Op %s.\nInput shapes:'
msg = ('Failed to infer_shape from Op %s.\nInput shapes: '
'%s\nException encountered during infer_shape: '
'%s\nException message: %s\nTraceback: %s') % (
node.op, [self.shape_of[r] for r in node.inputs],
......@@ -927,7 +928,7 @@ class ShapeFeature(object):
if config.on_shape_error == "raise":
raise Exception(msg)
else:
_logger.error(msg)
_logger.warning(msg)
o_shapes = self.default_infer_shape(
node, [self.shape_of[r] for r in node.inputs])
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论