Commit 829da692 authored by Razvan Pascanu

merge; no conflicts
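Aside from whitespace and import cleanups, the bulk of this merge narrows bare "except:" clauses to "except Exception:" (and, around optional imports, "except ImportError:"). A minimal sketch of the rationale, not part of the commit: since Python 2.5, KeyboardInterrupt and SystemExit derive from BaseException rather than Exception, so a bare handler silently swallows Ctrl-C while "except Exception:" lets it propagate.

def swallow_everything():
    try:
        raise KeyboardInterrupt
    except:                # bare: also catches KeyboardInterrupt/SystemExit
        pass               # the interrupt is silently eaten here

def swallow_errors_only():
    try:
        raise KeyboardInterrupt
    except Exception:      # ordinary errors only: the interrupt escapes
        pass

swallow_everything()       # returns normally; a real Ctrl-C would be lost
try:
    swallow_errors_only()
except KeyboardInterrupt:
    print 'KeyboardInterrupt propagated as intended'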

@@ -36,11 +36,9 @@ AddConfigVar('gcc.cxxflags',
StrParam(""))
# gof imports
from env import Env
import graph
import link
import utils
import op
from compilelock import get_lock, release_lock
@@ -899,7 +897,7 @@ class CLinker(link.Linker):
isig = (i.signature(), topological_pos, i_idx)
try:
hash(isig)
-except: #generic constants don't have a hashable signature
+except Exception: #generic constants don't have a hashable signature
error_on_play[0] = True
return None
constant_ids[id(i)] = isig
@@ -1145,7 +1143,7 @@ def _execute(cthunk, init_tasks, tasks, error_storage):
else:
exc_value = exc_type(_exc_value, task)
exc_value.__thunk_trace__ = trace # this can be used to retrieve the location the Op was declared
-except:
+except Exception:
print >> sys.stderr, 'ERROR retrieving error_storage', error_storage
raise
@@ -1350,7 +1348,7 @@ class DualLinker(link.Linker):
thunk2()
for output1, output2 in zip(thunk1.outputs, thunk2.outputs):
self.checker(output1, output2)
-except:
+except Exception:
link.raise_with_op(node1)
return f, i1, o1
@@ -887,7 +887,7 @@ class ModuleCache(object):
# the same process.
self.module_hash_to_key_data[module_hash] = key_data
-except:
+except Exception:
# This may happen e.g. when an Op has no C implementation. In
# any case, we do not want to keep around the temporary work
# directory, as it may cause trouble if we create too many of
@@ -270,9 +270,9 @@ class Unlocker(object):
# not exist), we still want to try and remove the directory.
try:
self.os.remove(self.os.path.join(self.tmp_dir, 'lock'))
-except:
+except Exception:
pass
try:
self.os.rmdir(self.tmp_dir)
-except:
+except Exception:
pass
@@ -226,7 +226,7 @@ class DestroyHandlerHelper2(toolbox.Bookkeeper):
droot, impact, root_destroyer = self.refresh_droot_impact()
try:
return [root_destroyer[droot[r]]]
-except:
+except Exception:
return []
env.destroyers = get_destroyers_of
@@ -283,7 +283,7 @@ def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling =
thunk()
for old_s in old_storage:
old_s[0] = None
-except:
+except Exception:
raise_with_op(node)
f = streamline_default_f
elif nice_errors:
@@ -294,7 +294,7 @@ def streamline(env, thunks, order, post_thunk_old_storage = None, no_recycling =
try:
for thunk, node in thunk_node_list:
thunk()
-except:
+except Exception:
raise_with_op(node)
f = streamline_nice_errors_f
else:
@@ -554,7 +554,7 @@ class WrapLinker(Linker):
for i, (thunks, node) in enumerate(zip(thunk_groups, order)):
try:
wrapper(i, node, *thunks)
-except:
+except Exception:
raise_with_op(node)
f.thunk_groups = thunk_groups
@@ -211,7 +211,7 @@ class _metadict:
def __setitem__(self, item, value):
try:
self.d[item] = value
-except:
+except Exception:
for i, (key,val) in enumerate(self.l):
if key == item:
self.l[i] = (item, value)
@@ -220,14 +220,14 @@ class _metadict:
def get(self, item, default):
try:
return self.d[item]
-except:
+except Exception:
for item2, value in self.l:
try:
if item == item2:
return value
if item.equals(item2):
return value
-except:
+except Exception:
if item is item2:
return value
else:
@@ -954,7 +954,7 @@ class TopoOptimizer(NavigatorOptimizer):
node = q.popleft()
current_node = node
self.process_node(env, node)
-except:
+except Exception:
self.detach_updater(env, u)
raise
self.detach_updater(env, u)
@@ -988,7 +988,7 @@ class OpKeyOptimizer(NavigatorOptimizer):
node = q.pop()
current_node = node
self.process_node(env, node)
-except:
+except Exception:
self.detach_updater(env, u)
raise
self.detach_updater(env, u)
@@ -1004,8 +1004,6 @@ class OpKeyOptimizer(NavigatorOptimizer):
from utils import D
class ChangeTracker:
def __init__(self):
self.changed = False
@@ -702,7 +702,7 @@ def pydotprint_variables(vars,
config.device + '.' + format)
try:
import pydot as pd
-except:
+except ImportError:
print "failed to import pydot. Yous must install pydot for this function to work."
return
g=pd.Dot()
@@ -264,7 +264,7 @@ def nvcc_module_compile_str(
continue
if l[l.index(':'):].startswith(': warning: label'):
continue
-except:
+except Exception:
pass
print >> sys.stderr, l
print >> sys.stderr, '==============================='
import sys, time
from theano.compile.sharedvalue import shared
from theano.compile.pfunc import pfunc
from theano import tensor
import numpy
import theano.sandbox.cuda as tcn
from theano.sandbox.cuda.basic_ops import host_from_gpu, gpu_from_host
def compare_fns(fns, input, reps=10):
times = {}
@@ -15,7 +13,7 @@ def compare_fns(fns, input, reps=10):
print 'TOPOSORT', implname
for i, n in enumerate(impl.maker.env.toposort()):
print i, n
-except:
+except Exception:
pass
t0 = time.time()
for i in xrange(reps):
@@ -12,7 +12,7 @@ from theano.gof.opt import Optimizer
try:
import scipy.linalg
-except:
+except ImportError:
pass # some ops (e.g. Cholesky) won't work
class Hint(Op):
@@ -342,7 +342,7 @@ class MatrixInverse(Op):
def perform(self, node, (x,), (z, )):
try:
z[0] = numpy.linalg.inv(x).astype(x.dtype)
-except:
+except Exception:
print 'Failed to invert', node.inputs[0]
raise
def grad(self, inputs, g_outputs):
@@ -470,7 +470,7 @@ class Det(Op):
def perform(self, node, (x,), (z, )):
try:
z[0] = numpy.asarray(scipy.linalg.det(x), dtype=x.dtype)
-except:
+except Exception:
print 'Failed to compute determinant', x
raise
def grad(self, inputs, g_outputs):
@@ -40,14 +40,14 @@ class InitGraph(type):
class SymbolicModule(object):
#installs class attributes from build_graph after declaration
-__metaclass__ = InitGraph
+__metaclass__ = InitGraph
#if we call this function, it will return a new SymbolicModule
def __new__(self, **kwargs):
class SymMod(SymbolicModule):
@staticmethod
def build_graph(*bg_args, **bg_kwargs):
-#this one is like self.build_graph,
+#this one is like self.build_graph,
#except that the kwargs are automatically inserted
kwcopy = copy.copy(kwargs)
kwcopy.update(bg_kwargs)
@@ -55,13 +55,13 @@ class SymbolicModule(object):
setattr(SymMod, '__name__', self.__name__ + '_derived')
return SymMod
@staticmethod
-def build_graph():
+def build_graph():
return {}
def issymbolicmodule(thing):
try:
return issubclass(thing, SymbolicModule)
-except:
+except Exception:
return False
def issymbolicmethod(thing):
@@ -87,7 +87,7 @@ def compile_fn(f, path_locals, common_inputs):
#make new inputs for the vars named in args
# this has the effect of creating new storage for these arguments
# The common storage doesn't get messed with.
-inputs = [In(path_locals.get(name,name)) for name in args]
+inputs = [In(path_locals.get(name,name)) for name in args]
inputs.extend([v for k,v in common_inputs.items() if k not in args])
outputs = f()
#print 'inputs', inputs
@@ -136,7 +136,7 @@ def compile(smod, initial_values={}):
inputs = {}
for path_locals, val in walker(smod):
if isinstance(val, theano.Variable) and (val.owner is None) and (val not in inputs):
-inputs[val] = theano.In(val, value=theano.gof.Container(val, ['a']))
+inputs[val] = theano.In(val, value=theano.gof.Container(val, ['a']))
assert len(inputs) == len([v for v in inputs.items()])
@@ -188,7 +188,7 @@ def compile(smod, initial_values={}):
else :
# check for weird objects that we would like to disallow
# not all objects can be transfered by the clone mechanism below
-raise TypeError('reflecting not supported for',
+raise TypeError('reflecting not supported for',
(thing, type(thing), getattr(thing, '__name__', None)))
return reflected[thing]
rval = reflect(smod)
@@ -278,7 +278,7 @@ if 0:
if isinstance(val, theano.Variable):
try:
kres = klass.KlassMember(val)
-except:
+except Exception:
kres = klass.KlassVariable(val)
setattr(SymMod, key, kres)
elif callable(val) and getattr(val, '__is_symbolic'):
@@ -320,7 +320,7 @@ if 0:
else:
def params(): return [w, b]
return just_symbolic(locals())
if 0:
print 'logistic_regression', logistic_regression
print 'tanh_layer', tanh_layer
@@ -349,7 +349,7 @@ if 0:
name = symbolic_module.name if name is None else name
def __init__(self, constructor_fn=None):
""" A constructor fn builds
""" A constructor fn builds
- a graph on top of the variable table, and
- compilable methods.
"""
@@ -387,7 +387,7 @@ if 0:
return locals()
-#at this point there is a neural_net module all built and compiled,
+#at this point there is a neural_net module all built and compiled,
# there is also a neural_net.symbolic_module which can be imported.
@SymbolicModule_fromFn
@@ -404,7 +404,7 @@ if 0:
transform = d[:npc,:].T / v[:npc]
return locals()
-#at this point there is a neural_net module all built and compiled,
+#at this point there is a neural_net module all built and compiled,
# there is also a neural_net.symbolic_module which can be imported.
@@ -431,4 +431,3 @@ if 0:
"""stats_collector(nnet_on_pca.x, 'mean')
"""
return mean_collector(x=r)
@@ -1104,7 +1104,7 @@ class Scan(PureOp):
# states) it is more of a safety check ( all random
# states should be after n_outs_not_shared ...
g_outs[i] = tensor.zeros_like(scan_outputs[i])
-except:
+except Exception:
g_outs[i] = theano.tensor.constant(
numpy.array(0, theano.config.floatX))
@@ -341,7 +341,7 @@ def ldflags(libs=True, flags=False, libs_dir=False, include_dir=False):
try:
t0, t1, t2 = t[0:3]
assert t0 == '-'
-except:
+except Exception:
raise ValueError('invalid token in config.blas.ldflags', t)
if libs_dir and t1 == 'L':
rval.append(t[2:])
@@ -997,7 +997,7 @@ def _gemm_canonicalize(r, scale, rval, maxclients):
return scale*thing
try:
r.type.broadcastable
-except:
+except Exception:
return None
if ((r.type.ndim not in (1, 2)) or
@@ -1083,7 +1083,7 @@ def _factor_canonicalized(lst):
while i < len(lst)-1:
try:
s_i,M_i = lst[i]
-except:
+except Exception:
i += 1
continue
@@ -1091,7 +1091,7 @@ def _factor_canonicalized(lst):
while j < len(lst):
try:
s_j,M_j = lst[j]
-except:
+except Exception:
j += 1
continue
@@ -1113,7 +1113,7 @@ def _gemm_from_factored_list(lst):
try:
s, M = sM
return True
-except:
+except Exception:
return False
lst = [(T.cast(sM[0],sM[1].type.dtype), sM[1])
for sM in lst if is_pair(sM)]
@@ -1135,7 +1135,7 @@ def _gemm_from_factored_list(lst):
if gemm_of_sM_list:
def item_to_var(t):
try: s,M = t
-except: return t
+except Exception: return t
if s == 1: return M
if s == -1: return -M
return s*M
@@ -1608,4 +1608,3 @@ from opt import register_specialize, register_canonicalize
def local_print_as_we_go_along(node):
if node.op in (T.sub, T.add):
debugprint(node)
@@ -89,7 +89,7 @@ def conv2d(input, filters, image_shape=None, filter_shape=None,
if image_shape and filter_shape:
try:
assert image_shape[1]==filter_shape[1]
-except:
+except Exception:
print 'image ', image_shape, ' filters ', filter_shape
raise
@@ -406,7 +406,7 @@ def local_softmax_with_bias(node):
try:
sm_bias = softmax_with_bias(non_vector_sum, vector_sum)
-except:
+except Exception:
#if our arguments have the wrong types, then forget about it
return
@@ -1149,7 +1149,7 @@ def local_advanced_indexing_crossentropy_onehot(node):
if isinstance(node.op, tensor.AdvancedSubtensor):
try:
log, rows, labels = node.inputs
-except:
+except Exception:
pass
if log and log.owner and log.owner.op == tensor.log:
sm = log.owner.inputs[0]
@@ -1160,7 +1160,7 @@ def local_advanced_indexing_crossentropy_onehot(node):
if pre_log and isinstance(pre_log.op, tensor.AdvancedSubtensor):
try:
sm, rows, labels = pre_log.inputs
-except:
+except Exception:
pass
@@ -1187,7 +1187,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
sm = None
try:
d_sm, sm = node.inputs
-except:
+except Exception:
return
if (sm is not None) and sm.owner and (sm.owner.op in (softmax, softmax_with_bias)):
@@ -1245,7 +1245,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
if d_sm.owner and isinstance(d_sm.owner.op, tensor.AdvancedIncSubtensor):
try:
z, incr, rows, labels = d_sm.owner.inputs
-except:
+except Exception:
return
# Check that z == zeros_like(softmax(x))
# We know z has the right size because z has the same size as d_sm,
@@ -1313,7 +1313,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
if adv_subtensor is not None:
try:
maybe_sm, maybe_rows, maybe_labels = adv_subtensor.owner.inputs
-except:
+except Exception:
return
if not (maybe_sm is sm and maybe_rows is rows and maybe_labels is labels):
@@ -1336,7 +1336,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
# AdvIncSubtensor(zeros, grad_nll, arange(len(y)), y) / softmax
try:
num, denom = d_sm.owner.inputs
-except:
+except Exception:
return
if denom != sm:
@@ -1346,7 +1346,7 @@ def local_advanced_indexing_crossentropy_onehot_grad(node):
if num.owner and isinstance(num.owner.op, tensor.AdvancedIncSubtensor):
try:
z, incr, rows, labels = num.owner.inputs
-except:
+except Exception:
return
# Check z is zeros_like(log(sm))
@@ -1499,7 +1499,7 @@ class Prepend_scalar_constant_to_each_row(gof.Op):
if output[0].shape!=new_shape:
try:
output[0].resize(new_shape)
-except:
+except Exception:
output[0]=numpy.empty(new_shape, dtype=mat.dtype)
out=output[0]
@@ -1544,7 +1544,7 @@ class Prepend_scalar_to_each_row(gof.Op):
if output[0].shape!=new_shape:
try:
output[0].resize(new_shape)
-except:
+except Exception:
output[0]=numpy.empty(new_shape, dtype=mat.dtype)
out=output[0]
out[:,0].fill(val)
@@ -78,7 +78,7 @@ def scalarconsts_rest(inputs):
v = get_constant_value(i)
consts.append(v)
origconsts.append(i)
-except:
+except Exception:
nonconsts.append(i)
return consts, origconsts, nonconsts
@@ -937,7 +937,7 @@ def local_subtensor_make_vector(node):
if x.owner and x.owner.op == make_vector:
try:
idx, = node.op.idx_list
-except:
+except Exception:
#'how can you have multiple indexes into a shape?'
raise
@@ -954,7 +954,7 @@ def local_subtensor_make_vector(node):
try:
v = get_constant_value(idx)
return [x.owner.inputs[v]]
-except:
+except Exception:
pass
else:
# it is a slice of ints and/or Variables
@@ -964,7 +964,7 @@ def local_subtensor_make_vector(node):
return [make_vector(*x.owner.inputs.__getitem__(idx))]
except TypeError:
pass
-except:
+except Exception:
_logger.error('failed to index with "%s"' % str(idx))
raise
@@ -1291,7 +1291,7 @@ def local_useless_subtensor(node):
# tracker keep. Subtensor accept any scalar int{8,16,32,64}
# as index type.
assert str(length_pos.type.dtype) == "int64"
-assert str(length_pos_shape_i.type.dtype) in ["int8", "int16",
+assert str(length_pos_shape_i.type.dtype) in ["int8", "int16",
"int32", "int64"]
# We already know that start and step are not variables
# and so they don't appear in the input of the node
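Two hunks above (the pydot import in pydotprint_variables and the scipy.linalg import in the sandbox linalg ops) narrow the handler further, to "except ImportError:", the usual pattern for optional dependencies. A minimal sketch of that pattern; the have_scipy flag and determinant wrapper are illustrative, not from the commit:

try:
    import scipy.linalg
    have_scipy = True
except ImportError:
    # Only a missing module is treated as "scipy absent"; any other
    # failure raised during import still propagates instead of being masked.
    have_scipy = False

def determinant(x):
    # illustrative wrapper: fail early and clearly when the optional
    # dependency is unavailable
    if not have_scipy:
        raise RuntimeError('scipy is required to compute the determinant')
    return scipy.linalg.det(x)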