Commit 829da692, authored by Razvan Pascanu

merge; no conflicts

...@@ -36,11 +36,9 @@ AddConfigVar('gcc.cxxflags', ...@@ -36,11 +36,9 @@ AddConfigVar('gcc.cxxflags',
StrParam("")) StrParam(""))
# gof imports # gof imports
from env import Env
import graph import graph
import link import link
import utils import utils
import op
from compilelock import get_lock, release_lock from compilelock import get_lock, release_lock
...@@ -899,7 +897,7 @@ class CLinker(link.Linker): ...@@ -899,7 +897,7 @@ class CLinker(link.Linker):
isig = (i.signature(), topological_pos, i_idx) isig = (i.signature(), topological_pos, i_idx)
try: try:
hash(isig) hash(isig)
except: #generic constants don't have a hashable signature except Exception: #generic constants don't have a hashable signature
error_on_play[0] = True error_on_play[0] = True
return None return None
constant_ids[id(i)] = isig constant_ids[id(i)] = isig
...@@ -1145,7 +1143,7 @@ def _execute(cthunk, init_tasks, tasks, error_storage): ...@@ -1145,7 +1143,7 @@ def _execute(cthunk, init_tasks, tasks, error_storage):
else: else:
exc_value = exc_type(_exc_value, task) exc_value = exc_type(_exc_value, task)
exc_value.__thunk_trace__ = trace # this can be used to retrieve the location the Op was declared exc_value.__thunk_trace__ = trace # this can be used to retrieve the location the Op was declared
except: except Exception:
print >> sys.stderr, 'ERROR retrieving error_storage', error_storage print >> sys.stderr, 'ERROR retrieving error_storage', error_storage
raise raise
...@@ -1350,7 +1348,7 @@ class DualLinker(link.Linker): ...@@ -1350,7 +1348,7 @@ class DualLinker(link.Linker):
thunk2() thunk2()
for output1, output2 in zip(thunk1.outputs, thunk2.outputs): for output1, output2 in zip(thunk1.outputs, thunk2.outputs):
self.checker(output1, output2) self.checker(output1, output2)
except: except Exception:
link.raise_with_op(node1) link.raise_with_op(node1)
return f, i1, o1 return f, i1, o1
...@@ -887,7 +887,7 @@ class ModuleCache(object): ...@@ -887,7 +887,7 @@ class ModuleCache(object):
# the same process. # the same process.
self.module_hash_to_key_data[module_hash] = key_data self.module_hash_to_key_data[module_hash] = key_data
except: except Exception:
# This may happen e.g. when an Op has no C implementation. In # This may happen e.g. when an Op has no C implementation. In
# any case, we do not want to keep around the temporary work # any case, we do not want to keep around the temporary work
# directory, as it may cause trouble if we create too many of # directory, as it may cause trouble if we create too many of
......
...@@ -270,9 +270,9 @@ class Unlocker(object): ...@@ -270,9 +270,9 @@ class Unlocker(object):
# not exist), we still want to try and remove the directory. # not exist), we still want to try and remove the directory.
try: try:
self.os.remove(self.os.path.join(self.tmp_dir, 'lock')) self.os.remove(self.os.path.join(self.tmp_dir, 'lock'))
except: except Exception:
pass pass
try: try:
self.os.rmdir(self.tmp_dir) self.os.rmdir(self.tmp_dir)
except: except Exception:
pass pass
...@@ -226,7 +226,7 @@ class DestroyHandlerHelper2(toolbox.Bookkeeper): ...@@ -226,7 +226,7 @@ class DestroyHandlerHelper2(toolbox.Bookkeeper):
droot, impact, root_destroyer = self.refresh_droot_impact() droot, impact, root_destroyer = self.refresh_droot_impact()
try: try:
return [root_destroyer[droot[r]]] return [root_destroyer[droot[r]]]
except: except Exception:
return [] return []
env.destroyers = get_destroyers_of env.destroyers = get_destroyers_of
......
...@@ -283,7 +283,7 @@ def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling = ...@@ -283,7 +283,7 @@ def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling =
thunk() thunk()
for old_s in old_storage: for old_s in old_storage:
old_s[0] = None old_s[0] = None
except: except Exception:
raise_with_op(node) raise_with_op(node)
f = streamline_default_f f = streamline_default_f
elif nice_errors: elif nice_errors:
...@@ -294,7 +294,7 @@ def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling = ...@@ -294,7 +294,7 @@ def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling =
try: try:
for thunk, node in thunk_node_list: for thunk, node in thunk_node_list:
thunk() thunk()
except: except Exception:
raise_with_op(node) raise_with_op(node)
f = streamline_nice_errors_f f = streamline_nice_errors_f
else: else:
...@@ -554,7 +554,7 @@ class WrapLinker(Linker): ...@@ -554,7 +554,7 @@ class WrapLinker(Linker):
for i, (thunks, node) in enumerate(zip(thunk_groups, order)): for i, (thunks, node) in enumerate(zip(thunk_groups, order)):
try: try:
wrapper(i, node, *thunks) wrapper(i, node, *thunks)
except: except Exception:
raise_with_op(node) raise_with_op(node)
f.thunk_groups = thunk_groups f.thunk_groups = thunk_groups
......
...@@ -211,7 +211,7 @@ class _metadict: ...@@ -211,7 +211,7 @@ class _metadict:
def __setitem__(self, item, value): def __setitem__(self, item, value):
try: try:
self.d[item] = value self.d[item] = value
except: except Exception:
for i, (key,val) in enumerate(self.l): for i, (key,val) in enumerate(self.l):
if key == item: if key == item:
self.l[i] = (item, value) self.l[i] = (item, value)
...@@ -220,14 +220,14 @@ class _metadict: ...@@ -220,14 +220,14 @@ class _metadict:
def get(self, item, default): def get(self, item, default):
try: try:
return self.d[item] return self.d[item]
except: except Exception:
for item2, value in self.l: for item2, value in self.l:
try: try:
if item == item2: if item == item2:
return value return value
if item.equals(item2): if item.equals(item2):
return value return value
except: except Exception:
if item is item2: if item is item2:
return value return value
else: else:
...@@ -954,7 +954,7 @@ class TopoOptimizer(NavigatorOptimizer): ...@@ -954,7 +954,7 @@ class TopoOptimizer(NavigatorOptimizer):
node = q.popleft() node = q.popleft()
current_node = node current_node = node
self.process_node(env, node) self.process_node(env, node)
except: except Exception:
self.detach_updater(env, u) self.detach_updater(env, u)
raise raise
self.detach_updater(env, u) self.detach_updater(env, u)
...@@ -988,7 +988,7 @@ class OpKeyOptimizer(NavigatorOptimizer): ...@@ -988,7 +988,7 @@ class OpKeyOptimizer(NavigatorOptimizer):
node = q.pop() node = q.pop()
current_node = node current_node = node
self.process_node(env, node) self.process_node(env, node)
except: except Exception:
self.detach_updater(env, u) self.detach_updater(env, u)
raise raise
self.detach_updater(env, u) self.detach_updater(env, u)
...@@ -1004,8 +1004,6 @@ class OpKeyOptimizer(NavigatorOptimizer): ...@@ -1004,8 +1004,6 @@ class OpKeyOptimizer(NavigatorOptimizer):
from utils import D
class ChangeTracker: class ChangeTracker:
def __init__(self): def __init__(self):
self.changed = False self.changed = False
......
...@@ -702,7 +702,7 @@ def pydotprint_variables(vars, ...@@ -702,7 +702,7 @@ def pydotprint_variables(vars,
config.device + '.' + format) config.device + '.' + format)
try: try:
import pydot as pd import pydot as pd
except: except ImportError:
print "failed to import pydot. Yous must install pydot for this function to work." print "failed to import pydot. Yous must install pydot for this function to work."
return return
g=pd.Dot() g=pd.Dot()
......
...@@ -264,7 +264,7 @@ def nvcc_module_compile_str( ...@@ -264,7 +264,7 @@ def nvcc_module_compile_str(
continue continue
if l[l.index(':'):].startswith(': warning: label'): if l[l.index(':'):].startswith(': warning: label'):
continue continue
except: except Exception:
pass pass
print >> sys.stderr, l print >> sys.stderr, l
print >> sys.stderr, '===============================' print >> sys.stderr, '==============================='
......
import sys, time import sys, time
from theano.compile.sharedvalue import shared
from theano.compile.pfunc import pfunc from theano.compile.pfunc import pfunc
from theano import tensor from theano import tensor
import numpy import numpy
import theano.sandbox.cuda as tcn import theano.sandbox.cuda as tcn
from theano.sandbox.cuda.basic_ops import host_from_gpu, gpu_from_host
def compare_fns(fns, input, reps=10): def compare_fns(fns, input, reps=10):
times = {} times = {}
...@@ -15,7 +13,7 @@ def compare_fns(fns, input, reps=10): ...@@ -15,7 +13,7 @@ def compare_fns(fns, input, reps=10):
print 'TOPOSORT', implname print 'TOPOSORT', implname
for i, n in enumerate(impl.maker.env.toposort()): for i, n in enumerate(impl.maker.env.toposort()):
print i, n print i, n
except: except Exception:
pass pass
t0 = time.time() t0 = time.time()
for i in xrange(reps): for i in xrange(reps):
......
...@@ -12,7 +12,7 @@ from theano.gof.opt import Optimizer ...@@ -12,7 +12,7 @@ from theano.gof.opt import Optimizer
try: try:
import scipy.linalg import scipy.linalg
except: except ImportError:
pass # some ops (e.g. Cholesky) won't work pass # some ops (e.g. Cholesky) won't work
class Hint(Op): class Hint(Op):
...@@ -342,7 +342,7 @@ class MatrixInverse(Op): ...@@ -342,7 +342,7 @@ class MatrixInverse(Op):
def perform(self, node, (x,), (z, )): def perform(self, node, (x,), (z, )):
try: try:
z[0] = numpy.linalg.inv(x).astype(x.dtype) z[0] = numpy.linalg.inv(x).astype(x.dtype)
except: except Exception:
print 'Failed to invert', node.inputs[0] print 'Failed to invert', node.inputs[0]
raise raise
def grad(self, inputs, g_outputs): def grad(self, inputs, g_outputs):
...@@ -470,7 +470,7 @@ class Det(Op): ...@@ -470,7 +470,7 @@ class Det(Op):
def perform(self, node, (x,), (z, )): def perform(self, node, (x,), (z, )):
try: try:
z[0] = numpy.asarray(scipy.linalg.det(x), dtype=x.dtype) z[0] = numpy.asarray(scipy.linalg.det(x), dtype=x.dtype)
except: except Exception:
print 'Failed to compute determinant', x print 'Failed to compute determinant', x
raise raise
def grad(self, inputs, g_outputs): def grad(self, inputs, g_outputs):
......
...@@ -61,7 +61,7 @@ class SymbolicModule(object): ...@@ -61,7 +61,7 @@ class SymbolicModule(object):
def issymbolicmodule(thing): def issymbolicmodule(thing):
try: try:
return issubclass(thing, SymbolicModule) return issubclass(thing, SymbolicModule)
except: except Exception:
return False return False
def issymbolicmethod(thing): def issymbolicmethod(thing):
...@@ -278,7 +278,7 @@ if 0: ...@@ -278,7 +278,7 @@ if 0:
if isinstance(val, theano.Variable): if isinstance(val, theano.Variable):
try: try:
kres = klass.KlassMember(val) kres = klass.KlassMember(val)
except: except Exception:
kres = klass.KlassVariable(val) kres = klass.KlassVariable(val)
setattr(SymMod, key, kres) setattr(SymMod, key, kres)
elif callable(val) and getattr(val, '__is_symbolic'): elif callable(val) and getattr(val, '__is_symbolic'):
...@@ -431,4 +431,3 @@ if 0: ...@@ -431,4 +431,3 @@ if 0:
"""stats_collector(nnet_on_pca.x, 'mean') """stats_collector(nnet_on_pca.x, 'mean')
""" """
return mean_collector(x=r) return mean_collector(x=r)
...@@ -1104,7 +1104,7 @@ class Scan(PureOp): ...@@ -1104,7 +1104,7 @@ class Scan(PureOp):
# states) it is more of a safety check ( all random # states) it is more of a safety check ( all random
# states should be after n_outs_not_shared ... # states should be after n_outs_not_shared ...
g_outs[i] = tensor.zeros_like(scan_outputs[i]) g_outs[i] = tensor.zeros_like(scan_outputs[i])
except: except Exception:
g_outs[i] = theano.tensor.constant( g_outs[i] = theano.tensor.constant(
numpy.array(0, theano.config.floatX)) numpy.array(0, theano.config.floatX))
......
...@@ -341,7 +341,7 @@ def ldflags(libs=True, flags=False, libs_dir=False, include_dir=False): ...@@ -341,7 +341,7 @@ def ldflags(libs=True, flags=False, libs_dir=False, include_dir=False):
try: try:
t0, t1, t2 = t[0:3] t0, t1, t2 = t[0:3]
assert t0 == '-' assert t0 == '-'
except: except Exception:
raise ValueError('invalid token in config.blas.ldflags', t) raise ValueError('invalid token in config.blas.ldflags', t)
if libs_dir and t1 == 'L': if libs_dir and t1 == 'L':
rval.append(t[2:]) rval.append(t[2:])
...@@ -997,7 +997,7 @@ def _gemm_canonicalize(r, scale, rval, maxclients): ...@@ -997,7 +997,7 @@ def _gemm_canonicalize(r, scale, rval, maxclients):
return scale*thing return scale*thing
try: try:
r.type.broadcastable r.type.broadcastable
except: except Exception:
return None return None
if ((r.type.ndim not in (1, 2)) or if ((r.type.ndim not in (1, 2)) or
...@@ -1083,7 +1083,7 @@ def _factor_canonicalized(lst): ...@@ -1083,7 +1083,7 @@ def _factor_canonicalized(lst):
while i < len(lst)-1: while i < len(lst)-1:
try: try:
s_i,M_i = lst[i] s_i,M_i = lst[i]
except: except Exception:
i += 1 i += 1
continue continue
...@@ -1091,7 +1091,7 @@ def _factor_canonicalized(lst): ...@@ -1091,7 +1091,7 @@ def _factor_canonicalized(lst):
while j < len(lst): while j < len(lst):
try: try:
s_j,M_j = lst[j] s_j,M_j = lst[j]
except: except Exception:
j += 1 j += 1
continue continue
...@@ -1113,7 +1113,7 @@ def _gemm_from_factored_list(lst): ...@@ -1113,7 +1113,7 @@ def _gemm_from_factored_list(lst):
try: try:
s, M = sM s, M = sM
return True return True
except: except Exception:
return False return False
lst = [(T.cast(sM[0],sM[1].type.dtype), sM[1]) lst = [(T.cast(sM[0],sM[1].type.dtype), sM[1])
for sM in lst if is_pair(sM)] for sM in lst if is_pair(sM)]
...@@ -1135,7 +1135,7 @@ def _gemm_from_factored_list(lst): ...@@ -1135,7 +1135,7 @@ def _gemm_from_factored_list(lst):
if gemm_of_sM_list: if gemm_of_sM_list:
def item_to_var(t): def item_to_var(t):
try: s,M = t try: s,M = t
except: return t except Exception: return t
if s == 1: return M if s == 1: return M
if s == -1: return -M if s == -1: return -M
return s*M return s*M
...@@ -1608,4 +1608,3 @@ from opt import register_specialize, register_canonicalize ...@@ -1608,4 +1608,3 @@ from opt import register_specialize, register_canonicalize
def local_print_as_we_go_along(node): def local_print_as_we_go_along(node):
if node.op in (T.sub, T.add): if node.op in (T.sub, T.add):
debugprint(node) debugprint(node)
...@@ -89,7 +89,7 @@ def conv2d(input, filters, image_shape=None, filter_shape=None, ...@@ -89,7 +89,7 @@ def conv2d(input, filters, image_shape=None, filter_shape=None,
if image_shape and filter_shape: if image_shape and filter_shape:
try: try:
assert image_shape[1]==filter_shape[1] assert image_shape[1]==filter_shape[1]
except: except Exception:
print 'image ', image_shape, ' filters ', filter_shape print 'image ', image_shape, ' filters ', filter_shape
raise raise
......
...@@ -406,7 +406,7 @@ def local_softmax_with_bias(node): ...@@ -406,7 +406,7 @@ def local_softmax_with_bias(node):
try: try:
sm_bias = softmax_with_bias(non_vector_sum, vector_sum) sm_bias = softmax_with_bias(non_vector_sum, vector_sum)
except: except Exception:
#if our arguments have the wrong types, then forget about it #if our arguments have the wrong types, then forget about it
return return
...@@ -1149,7 +1149,7 @@ def local_advanced_indexing_crossentropy_onehot(node): ...@@ -1149,7 +1149,7 @@ def local_advanced_indexing_crossentropy_onehot(node):
if isinstance(node.op, tensor.AdvancedSubtensor): if isinstance(node.op, tensor.AdvancedSubtensor):
try: try:
log, rows, labels = node.inputs log, rows, labels = node.inputs
except: except Exception:
pass pass
if log and log.owner and log.owner.op == tensor.log: if log and log.owner and log.owner.op == tensor.log:
sm = log.owner.inputs[0] sm = log.owner.inputs[0]
...@@ -1160,7 +1160,7 @@ def local_advanced_indexing_crossentropy_onehot(node): ...@@ -1160,7 +1160,7 @@ def local_advanced_indexing_crossentropy_onehot(node):
if pre_log and isinstance(pre_log.op, tensor.AdvancedSubtensor): if pre_log and isinstance(pre_log.op, tensor.AdvancedSubtensor):
try: try:
sm, rows, labels = pre_log.inputs sm, rows, labels = pre_log.inputs
except: except Exception:
pass pass
...@@ -1187,7 +1187,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node): ...@@ -1187,7 +1187,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
sm = None sm = None
try: try:
d_sm, sm = node.inputs d_sm, sm = node.inputs
except: except Exception:
return return
if (sm is not None) and sm.owner and (sm.owner.op in (softmax, softmax_with_bias)): if (sm is not None) and sm.owner and (sm.owner.op in (softmax, softmax_with_bias)):
...@@ -1245,7 +1245,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node): ...@@ -1245,7 +1245,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
if d_sm.owner and isinstance(d_sm.owner.op, tensor.AdvancedIncSubtensor): if d_sm.owner and isinstance(d_sm.owner.op, tensor.AdvancedIncSubtensor):
try: try:
z, incr, rows, labels = d_sm.owner.inputs z, incr, rows, labels = d_sm.owner.inputs
except: except Exception:
return return
# Check that z == zeros_like(softmax(x)) # Check that z == zeros_like(softmax(x))
# We know z has the right size because z has the same size as d_sm, # We know z has the right size because z has the same size as d_sm,
...@@ -1313,7 +1313,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node): ...@@ -1313,7 +1313,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
if adv_subtensor is not None: if adv_subtensor is not None:
try: try:
maybe_sm, maybe_rows, maybe_labels = adv_subtensor.owner.inputs maybe_sm, maybe_rows, maybe_labels = adv_subtensor.owner.inputs
except: except Exception:
return return
if not (maybe_sm is sm and maybe_rows is rows and maybe_labels is labels): if not (maybe_sm is sm and maybe_rows is rows and maybe_labels is labels):
...@@ -1336,7 +1336,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node): ...@@ -1336,7 +1336,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
# AdvIncSubtensor(zeros, grad_nll, arange(len(y)), y) / softmax # AdvIncSubtensor(zeros, grad_nll, arange(len(y)), y) / softmax
try: try:
num, denom = d_sm.owner.inputs num, denom = d_sm.owner.inputs
except: except Exception:
return return
if denom != sm: if denom != sm:
...@@ -1346,7 +1346,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node): ...@@ -1346,7 +1346,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
if num.owner and isinstance(num.owner.op, tensor.AdvancedIncSubtensor): if num.owner and isinstance(num.owner.op, tensor.AdvancedIncSubtensor):
try: try:
z, incr, rows, labels = num.owner.inputs z, incr, rows, labels = num.owner.inputs
except: except Exception:
return return
# Check z is zeros_like(log(sm)) # Check z is zeros_like(log(sm))
...@@ -1499,7 +1499,7 @@ class Prepend_scalar_constant_to_each_row(gof.Op): ...@@ -1499,7 +1499,7 @@ class Prepend_scalar_constant_to_each_row(gof.Op):
if output[0].shape!=new_shape: if output[0].shape!=new_shape:
try: try:
output[0].resize(new_shape) output[0].resize(new_shape)
except: except Exception:
output[0]=numpy.empty(new_shape, dtype=mat.dtype) output[0]=numpy.empty(new_shape, dtype=mat.dtype)
out=output[0] out=output[0]
...@@ -1544,7 +1544,7 @@ class Prepend_scalar_to_each_row(gof.Op): ...@@ -1544,7 +1544,7 @@ class Prepend_scalar_to_each_row(gof.Op):
if output[0].shape!=new_shape: if output[0].shape!=new_shape:
try: try:
output[0].resize(new_shape) output[0].resize(new_shape)
except: except Exception:
output[0]=numpy.empty(new_shape, dtype=mat.dtype) output[0]=numpy.empty(new_shape, dtype=mat.dtype)
out=output[0] out=output[0]
out[:,0].fill(val) out[:,0].fill(val)
......
...@@ -78,7 +78,7 @@ def scalarconsts_rest(inputs): ...@@ -78,7 +78,7 @@ def scalarconsts_rest(inputs):
v = get_constant_value(i) v = get_constant_value(i)
consts.append(v) consts.append(v)
origconsts.append(i) origconsts.append(i)
except: except Exception:
nonconsts.append(i) nonconsts.append(i)
return consts, origconsts, nonconsts return consts, origconsts, nonconsts
...@@ -937,7 +937,7 @@ def local_subtensor_make_vector(node): ...@@ -937,7 +937,7 @@ def local_subtensor_make_vector(node):
if x.owner and x.owner.op == make_vector: if x.owner and x.owner.op == make_vector:
try: try:
idx, = node.op.idx_list idx, = node.op.idx_list
except: except Exception:
#'how can you have multiple indexes into a shape?' #'how can you have multiple indexes into a shape?'
raise raise
...@@ -954,7 +954,7 @@ def local_subtensor_make_vector(node): ...@@ -954,7 +954,7 @@ def local_subtensor_make_vector(node):
try: try:
v = get_constant_value(idx) v = get_constant_value(idx)
return [x.owner.inputs[v]] return [x.owner.inputs[v]]
except: except Exception:
pass pass
else: else:
# it is a slice of ints and/or Variables # it is a slice of ints and/or Variables
...@@ -964,7 +964,7 @@ def local_subtensor_make_vector(node): ...@@ -964,7 +964,7 @@ def local_subtensor_make_vector(node):
return [make_vector(*x.owner.inputs.__getitem__(idx))] return [make_vector(*x.owner.inputs.__getitem__(idx))]
except TypeError: except TypeError:
pass pass
except: except Exception:
_logger.error('failed to index with "%s"' % str(idx)) _logger.error('failed to index with "%s"' % str(idx))
raise raise
......
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Register or sign in to post a comment