Commit 3144103b authored by Frederic Bastien

Don't catch KeyboardInterrupt (use `except Exception:` instead of a bare `except:`) and remove unused imports.

Parent 4a0f0342
......@@ -36,11 +36,9 @@ AddConfigVar('gcc.cxxflags',
StrParam(""))
# gof imports
from env import Env
import graph
import link
import utils
import op
from compilelock import get_lock, release_lock
......@@ -899,7 +897,7 @@ class CLinker(link.Linker):
isig = (i.signature(), topological_pos, i_idx)
try:
hash(isig)
except: #generic constants don't have a hashable signature
except Exception: #generic constants don't have a hashable signature
error_on_play[0] = True
return None
constant_ids[id(i)] = isig
......@@ -1145,7 +1143,7 @@ def _execute(cthunk, init_tasks, tasks, error_storage):
else:
exc_value = exc_type(_exc_value, task)
exc_value.__thunk_trace__ = trace # this can be used to retrieve the location the Op was declared
except:
except Exception:
print >> sys.stderr, 'ERROR retrieving error_storage', error_storage
raise
......@@ -1350,7 +1348,7 @@ class DualLinker(link.Linker):
thunk2()
for output1, output2 in zip(thunk1.outputs, thunk2.outputs):
self.checker(output1, output2)
except:
except Exception:
link.raise_with_op(node1)
return f, i1, o1
......@@ -887,7 +887,7 @@ class ModuleCache(object):
# the same process.
self.module_hash_to_key_data[module_hash] = key_data
except:
except Exception:
# This may happen e.g. when an Op has no C implementation. In
# any case, we do not want to keep around the temporary work
# directory, as it may cause trouble if we create too many of
......
......@@ -270,9 +270,9 @@ class Unlocker(object):
# not exist), we still want to try and remove the directory.
try:
self.os.remove(self.os.path.join(self.tmp_dir, 'lock'))
except:
except Exception:
pass
try:
self.os.rmdir(self.tmp_dir)
except:
except Exception:
pass
......@@ -226,7 +226,7 @@ class DestroyHandlerHelper2(toolbox.Bookkeeper):
droot, impact, root_destroyer = self.refresh_droot_impact()
try:
return [root_destroyer[droot[r]]]
except:
except Exception:
return []
env.destroyers = get_destroyers_of
......
......@@ -283,7 +283,7 @@ def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling =
thunk()
for old_s in old_storage:
old_s[0] = None
except:
except Exception:
raise_with_op(node)
f = streamline_default_f
elif nice_errors:
......@@ -294,7 +294,7 @@ def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling =
try:
for thunk, node in thunk_node_list:
thunk()
except:
except Exception:
raise_with_op(node)
f = streamline_nice_errors_f
else:
......@@ -554,7 +554,7 @@ class WrapLinker(Linker):
for i, (thunks, node) in enumerate(zip(thunk_groups, order)):
try:
wrapper(i, node, *thunks)
except:
except Exception:
raise_with_op(node)
f.thunk_groups = thunk_groups
......
......@@ -211,7 +211,7 @@ class _metadict:
def __setitem__(self, item, value):
try:
self.d[item] = value
except:
except Exception:
for i, (key,val) in enumerate(self.l):
if key == item:
self.l[i] = (item, value)
......@@ -220,14 +220,14 @@ class _metadict:
def get(self, item, default):
try:
return self.d[item]
except:
except Exception:
for item2, value in self.l:
try:
if item == item2:
return value
if item.equals(item2):
return value
except:
except Exception:
if item is item2:
return value
else:
......@@ -954,7 +954,7 @@ class TopoOptimizer(NavigatorOptimizer):
node = q.popleft()
current_node = node
self.process_node(env, node)
except:
except Exception:
self.detach_updater(env, u)
raise
self.detach_updater(env, u)
......@@ -988,7 +988,7 @@ class OpKeyOptimizer(NavigatorOptimizer):
node = q.pop()
current_node = node
self.process_node(env, node)
except:
except Exception:
self.detach_updater(env, u)
raise
self.detach_updater(env, u)
......@@ -1004,8 +1004,6 @@ class OpKeyOptimizer(NavigatorOptimizer):
from utils import D
class ChangeTracker:
def __init__(self):
self.changed = False
......
......@@ -342,7 +342,7 @@ class MatrixInverse(Op):
def perform(self, node, (x,), (z, )):
try:
z[0] = numpy.linalg.inv(x).astype(x.dtype)
except:
except Exception:
print 'Failed to invert', node.inputs[0]
raise
def grad(self, inputs, g_outputs):
......@@ -470,7 +470,7 @@ class Det(Op):
def perform(self, node, (x,), (z, )):
try:
z[0] = numpy.asarray(scipy.linalg.det(x), dtype=x.dtype)
except:
except Exception:
print 'Failed to compute determinant', x
raise
def grad(self, inputs, g_outputs):
......
......@@ -278,7 +278,7 @@ if 0:
if isinstance(val, theano.Variable):
try:
kres = klass.KlassMember(val)
except:
except Exception:
kres = klass.KlassVariable(val)
setattr(SymMod, key, kres)
elif callable(val) and getattr(val, '__is_symbolic'):
......
......@@ -341,7 +341,7 @@ def ldflags(libs=True, flags=False, libs_dir=False, include_dir=False):
try:
t0, t1, t2 = t[0:3]
assert t0 == '-'
except:
except Exception:
raise ValueError('invalid token in config.blas.ldflags', t)
if libs_dir and t1 == 'L':
rval.append(t[2:])
......@@ -997,7 +997,7 @@ def _gemm_canonicalize(r, scale, rval, maxclients):
return scale*thing
try:
r.type.broadcastable
except:
except Exception:
return None
if ((r.type.ndim not in (1, 2)) or
......@@ -1083,7 +1083,7 @@ def _factor_canonicalized(lst):
while i < len(lst)-1:
try:
s_i,M_i = lst[i]
except:
except Exception:
i += 1
continue
......@@ -1091,7 +1091,7 @@ def _factor_canonicalized(lst):
while j < len(lst):
try:
s_j,M_j = lst[j]
except:
except Exception:
j += 1
continue
......@@ -1113,7 +1113,7 @@ def _gemm_from_factored_list(lst):
try:
s, M = sM
return True
except:
except Exception:
return False
lst = [(T.cast(sM[0],sM[1].type.dtype), sM[1])
for sM in lst if is_pair(sM)]
......@@ -1135,7 +1135,7 @@ def _gemm_from_factored_list(lst):
if gemm_of_sM_list:
def item_to_var(t):
try: s,M = t
except: return t
except Exception: return t
if s == 1: return M
if s == -1: return -M
return s*M
......
......@@ -89,7 +89,7 @@ def conv2d(input, filters, image_shape=None, filter_shape=None,
if image_shape and filter_shape:
try:
assert image_shape[1]==filter_shape[1]
except:
except Exception:
print 'image ', image_shape, ' filters ', filter_shape
raise
......
......@@ -406,7 +406,7 @@ def local_softmax_with_bias(node):
try:
sm_bias = softmax_with_bias(non_vector_sum, vector_sum)
except:
except Exception:
#if our arguments have the wrong types, then forget about it
return
......@@ -1149,7 +1149,7 @@ def local_advanced_indexing_crossentropy_onehot(node):
if isinstance(node.op, tensor.AdvancedSubtensor):
try:
log, rows, labels = node.inputs
except:
except Exception:
pass
if log and log.owner and log.owner.op == tensor.log:
sm = log.owner.inputs[0]
......@@ -1160,7 +1160,7 @@ def local_advanced_indexing_crossentropy_onehot(node):
if pre_log and isinstance(pre_log.op, tensor.AdvancedSubtensor):
try:
sm, rows, labels = pre_log.inputs
except:
except Exception:
pass
......@@ -1187,7 +1187,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
sm = None
try:
d_sm, sm = node.inputs
except:
except Exception:
return
if (sm is not None) and sm.owner and (sm.owner.op in (softmax, softmax_with_bias)):
......@@ -1245,7 +1245,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
if d_sm.owner and isinstance(d_sm.owner.op, tensor.AdvancedIncSubtensor):
try:
z, incr, rows, labels = d_sm.owner.inputs
except:
except Exception:
return
# Check that z == zeros_like(softmax(x))
# We know z has the right size because z has the same size as d_sm,
......@@ -1313,7 +1313,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
if adv_subtensor is not None:
try:
maybe_sm, maybe_rows, maybe_labels = adv_subtensor.owner.inputs
except:
except Exception:
return
if not (maybe_sm is sm and maybe_rows is rows and maybe_labels is labels):
......@@ -1336,7 +1336,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
# AdvIncSubtensor(zeros, grad_nll, arange(len(y)), y) / softmax
try:
num, denom = d_sm.owner.inputs
except:
except Exception:
return
if denom != sm:
......@@ -1346,7 +1346,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
if num.owner and isinstance(num.owner.op, tensor.AdvancedIncSubtensor):
try:
z, incr, rows, labels = num.owner.inputs
except:
except Exception:
return
# Check z is zeros_like(log(sm))
......@@ -1499,7 +1499,7 @@ class Prepend_scalar_constant_to_each_row(gof.Op):
if output[0].shape!=new_shape:
try:
output[0].resize(new_shape)
except:
except Exception:
output[0]=numpy.empty(new_shape, dtype=mat.dtype)
out=output[0]
......@@ -1544,7 +1544,7 @@ class Prepend_scalar_to_each_row(gof.Op):
if output[0].shape!=new_shape:
try:
output[0].resize(new_shape)
except:
except Exception:
output[0]=numpy.empty(new_shape, dtype=mat.dtype)
out=output[0]
out[:,0].fill(val)
......
......@@ -78,7 +78,7 @@ def scalarconsts_rest(inputs):
v = get_constant_value(i)
consts.append(v)
origconsts.append(i)
except:
except Exception:
nonconsts.append(i)
return consts, origconsts, nonconsts
......@@ -937,7 +937,7 @@ def local_subtensor_make_vector(node):
if x.owner and x.owner.op == make_vector:
try:
idx, = node.op.idx_list
except:
except Exception:
#'how can you have multiple indexes into a shape?'
raise
......@@ -954,7 +954,7 @@ def local_subtensor_make_vector(node):
try:
v = get_constant_value(idx)
return [x.owner.inputs[v]]
except:
except Exception:
pass
else:
# it is a slice of ints and/or Variables
......@@ -964,7 +964,7 @@ def local_subtensor_make_vector(node):
return [make_vector(*x.owner.inputs.__getitem__(idx))]
except TypeError:
pass
except:
except Exception:
_logger.error('failed to index with "%s"' % str(idx))
raise
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论