提交 4566125b authored 作者: Olivier Delalleau's avatar Olivier Delalleau

Merge pull request #226 from nouiz/remove_deprecated

Remove deprecated stuff
......@@ -312,15 +312,9 @@ class Mode(object):
# string as the key
FAST_COMPILE = Mode('py', 'fast_compile')
FAST_RUN = Mode('c|py', 'fast_run')
FAST_RUN_NOGC = Mode("c|py_nogc", 'fast_run')
STABILIZE = Mode("c|py", OPT_STABILIZE)
# The strings 'FAST_RUN_NOGC' and 'STABILIZE' are deprecated,
# the modes in question should be built when needed.
predefined_modes = {'FAST_COMPILE': FAST_COMPILE,
'FAST_RUN': FAST_RUN,
'FAST_RUN_NOGC':FAST_RUN_NOGC,
'STABILIZE': STABILIZE,
}
instanciated_default_mode=None
......@@ -355,17 +349,6 @@ def get_mode(orig_string):
from profilemode import ProfileMode,prof_mode_instance_to_print
ret = eval(string+'(linker=config.linker, optimizer=config.optimizer)')
elif predefined_modes.has_key(string):
# 'FAST_RUN_NOGC' and 'STABILIZE' are deprecated
if string == 'FAST_RUN_NOGC':
warnings.warn("Using the string 'FAST_RUN_NOGC' as a mode is "
"deprecated, you should use the object "
"Mode(linker='c|py_nogc') instead.",
stacklevel=5)
elif string == 'STABILIZE':
warnings.warn("Using the string 'STABILIZE' as a mode is "
"deprecated, you should use the object "
"Mode(optimizer='stabilize') instead.",
stacklevel=5)
ret = predefined_modes[string]
else:
raise Exception("No predefined mode exist for string: %s"%string)
......
......@@ -26,6 +26,11 @@ AddConfigVar('cast_policy',
),
)
# Python 2.x defines int / int to return an int, and int // int to return an int.
# Python 3.x defines int / int to return a float, and int // int to return an int.
# numpy 1.6.1 behaves like Python 2.x. We should not change this behavior faster
# than numpy does. When we make the transition, we should create int_warn
# and floatX_warn options.
AddConfigVar('int_division',
"What to do when one computes x / y, where both x and y are of "
"integer types",
......
差异被折叠。
......@@ -4175,10 +4175,6 @@ class Join(Op):
outputs = [output_maker(bcastable)]
node = Apply(self, inputs, outputs)
if python_any(not x.type.broadcastable[0] for x in orig):
node.tag.shape_zero = None
else:
node.tag.shape_zero = len(orig)
return node
def perform(self, node, axis_and_tensors, out_):
......@@ -4225,17 +4221,6 @@ class Join(Op):
[slice(None)] * (n_dims - axis - 1)] \
for k in xrange(len(sizes_along_axis))]
def vec_length(self, node):
    """Guess the length of a Join Variable.

    Reads the ``shape_zero`` tag stashed on the owning Apply node at
    make_node time; ``None`` there means the length could not be
    determined statically.
    """
    # Only meaningful for variables produced by a Join op.
    assert isinstance(node.owner.op, Join)
    if node.ndim != 1:
        raise TypeError('argument must be symbolic vector')
    length = node.owner.tag.shape_zero
    if length is None:
        raise ValueError("could not determine vector length")
    return length
def infer_shape(self, node, ishapes):
# ishapes[0] contains the size of the axis on which we join
# Join op should get at least one input to join
......@@ -4427,11 +4412,6 @@ def get_vector_length(v):
return 1
if isinstance(v, gof.Constant) and v.type.ndim == 1:
return len(v.data)
if v.owner and isinstance(v.owner.op, Join):
try:
return join.vec_length(v)
except ValueError:
pass
if v.owner and isinstance(v.owner.op, theano.tensor.opt.MakeVector):
return len(v.owner.inputs)
if v.owner and isinstance(v.owner.op, Shape):
......
......@@ -259,9 +259,8 @@ def grad(cost, wrt, g_cost=None, consider_constant=None, warn_type=False,
:return: symbolic expression of gradient of `cost` with respect to `wrt`.
If an element of `wrt` is not differentiable with respect
to the output, then a zero variable is returned.
If `wrt` is a list/tuple longer than 1, a list will be returned.
DEPRECATION: In Theano 0.5, grad will return an object of the same
type as `wrt`: a list/tuple or TensorVariable in all case.
It returns an object of same type as `wrt`: a list/tuple
or TensorVariable in all cases.
This function is a wrapper around the more general function
``theano.gradient.grad_sources_inputs``.
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论