提交 459d004b，作者：carriepl

Merge pull request #2750 from SinaHonari/issue2500

Tracking Ops with grad methods returning None
......@@ -385,8 +385,8 @@ class Shape_i(gof.Op):
return [()]
# NOTE(review): this span is a scraped diff hunk with the +/- markers lost —
# the first `return [None]` is the OLD body (removed) and the
# grad_not_implemented return is the NEW body; only one belongs in the file.
def grad(self, inp, grads):
# OLD: bare None for an unimplemented gradient (deprecated convention).
return [None]
# NEW: report the missing gradient explicitly via grad_not_implemented.
return [theano.gradient.grad_not_implemented(op=self, x_pos=0, x=inp[0],
comment="No gradient for the shape of a matrix is implemented.")]
def shape_i(var, i, fgraph=None):
"""Equivalent of var.shape[i], but apply if possible the shape
......
......@@ -341,12 +341,6 @@ class ScanOp(PureOp):
rval.lazy = False
return rval
def grad(self, args, g_outs):
    """Gradient is deliberately not provided for ScanOp.

    Returns None — the same implicit result the original `pass` body
    produced — signalling that no gradient expression is available.
    """
    return None
def R_op(self, inputs, eval_points):
    """R-operator (Rop) is not provided for this Op.

    Yields None, exactly as the original empty (`pass`) body did.
    """
    return None
@theano.compile.profilemode.register_profiler_printer
def profile_printer(fct_name, compile_time, fct_call_time, fct_call,
......
......@@ -1155,8 +1155,22 @@ class InRange(LogicalComparison):
return ("%(z)s = %(x)s %(cmp1)s %(low)s &&"
" %(x)s %(cmp2)s %(hi)s;" % locals())
def get_grad(self, elem):
# Helper returning the zero gradient for one input of InRange, or raising
# for complex inputs (no gradient is defined over the complex field).
if elem.type in complex_types:
# NOTE(review): the string literal below is continued with a trailing
# backslash, so the message embeds the next line verbatim — including
# whatever leading spacing the original (indentation-stripped) file had.
msg = "No gradient implemented for complex numbers in\
class scalar.basic.InRange"
raise NotImplementedError(msg)
elif elem.type in discrete_types:
# Discrete (integer) inputs: zero gradient promoted to the configured
# float dtype so downstream arithmetic stays in floating point.
return elem.zeros_like().astype(theano.config.floatX)
else:
# Continuous inputs keep their own dtype.
return elem.zeros_like()
# NOTE(review): scraped diff hunk — `return None, None, None` is the OLD
# body and the loop below is the NEW body. With both present the loop is
# dead code; only the new version belongs in the merged file.
def grad(self, (x, low, hi), (gz, )):
# OLD behavior (Python 2 tuple-parameter syntax): no gradients at all.
return None, None, None
grads = []
for elem in [x, low, hi]:
# NOTE(review): bare `get_grad` is unbound here — presumably should be
# self.get_grad(elem); verify against the merged commit.
grads.append(get_grad(elem))
return grads
inopenrange = InRange(True, True)
inclosedrange = InRange(False, False)
......@@ -2928,7 +2942,8 @@ class Imag(UnaryScalarOp):
elif x.type in float_types:
return [second(x, 0)]
else:
return [None]
return [x.zeros_like(dtype=theano.config.floatX)]
imag = Imag(real_out, name='imag')
......@@ -2959,7 +2974,7 @@ class Angle(UnaryScalarOp):
elif c in float_types:
return [cast(second(x, 0), x.type.dtype)]
else:
return [None]
return [c.zeros_like(dtype=theano.config.floatX)]
angle = Angle(specific_out(float64), name='angle')
......
......@@ -832,12 +832,24 @@ class Cast(gof.op.Op):
out[0] = x.astype(self.out_type)
# NOTE(review): scraped diff hunk — the first if/else (returning [None]) is
# the OLD Cast.grad body; everything from the second `gz = ...` on is the
# NEW body. Only the new version belongs in the merged file.
def grad(self, inputs, outputs_gradients):
# OLD: pass the re-cast gradient through only for continuous dtypes.
if inputs[0].dtype in tensor.continuous_dtypes:
gz = outputs_gradients[0]
return [Cast(inputs[0].dtype)(gz)]
else:
return [None]
# NEW: complex dtypes raise; discrete dtypes yield explicit zeros.
gz = outputs_gradients[0]
if gz.dtype in complex_dtypes:
raise NotImplementedError("grad not implemented for complex types")
if inputs[0].dtype in complex_dtypes:
raise NotImplementedError("grad not implemented for complex types")
if gz.dtype in discrete_dtypes:
if inputs[0].dtype in discrete_dtypes:
# Both sides discrete: zero gradient in the configured float dtype.
return [inputs[0].zeros_like(dtype=theano.config.floatX)]
else:
# Continuous input, discrete upstream gradient: zeros in input dtype.
return [inputs[0].zeros_like()]
else:
if inputs[0].dtype in discrete_dtypes:
# NOTE(review): discrete input with continuous gz returns gz uncast —
# confirm this is the intended convention in the merged commit.
return [gz]
else:
# Continuous/continuous: cast the gradient back to the input dtype.
return [Cast(inputs[0].dtype)(gz)]
def infer_shape(self, node, ins_shapes):
    """A cast changes only the dtype, so the input shapes pass through."""
    unchanged = ins_shapes
    return unchanged
......@@ -1647,8 +1659,7 @@ class SpSum(gof.op.Op):
def grad(self, (x,), (gz,)):
if x.dtype not in continuous_dtypes:
return [None]
return [x.zeros_like(dtype=theano.config.floatX)]
if self.structured:
if self.axis is None:
r = gz * theano.sparse.sp_ones_like(x)
......
import numpy
import theano
import scipy.sparse
from theano import gof, tensor
......@@ -79,7 +80,10 @@ class Poisson(gof.op.Op):
out[0].eliminate_zeros()
# NOTE(review): scraped diff hunk — `return [None]` is the OLD body and the
# grad_undefined return is its NEW replacement; only one belongs in the file.
def grad(self, inputs, outputs_gradients):
return [None]
# Backslash-continued string: the comment text embeds the next line's
# leading spacing verbatim (indentation was lost in the scrape).
comment = "No gradient exists for class Poisson in\
theano/sparse/sandbox/sp2.py"
return [theano.gradient.grad_undefined(op=self, x_pos=0, x=inputs[0],
comment=comment)]
def infer_shape(self, node, ins_shapes):
    """Poisson sampling is elementwise: the output keeps the input shape."""
    shapes_out = ins_shapes
    return shapes_out
......@@ -131,8 +135,19 @@ class Binomial(gof.op.Op):
csx_matrix = getattr(scipy.sparse, self.format + '_matrix')
out[0] = csx_matrix(binomial, dtype=self.dtype)
def connection_pattern(self, node):
    """n and p are connected to the output; the shape input is not."""
    pattern = [[True], [True], [False]]
    return pattern
# NOTE(review): scraped diff hunk — `return None, None, None` is the OLD
# body; the grad_undefined/disconnected return below is the NEW one.
def grad(self, (n, p, shape, ), (gz,)):
# OLD (Python 2 tuple parameters): no gradients at all.
return None, None, None
# Backslash-continued strings embed the next line's spacing verbatim.
comment_n = "No gradient exists for the number of samples in class\
Binomial of theano/sparse/sandbox/sp2.py"
comment_p = "No gradient exists for the prob of success in class\
Binomial of theano/sparse/sandbox/sp2.py"
# NOTE(review): `theano.gradient.disconnected_type()` — Theano's public API
# spells this DisconnectedType()(); confirm against the merged commit.
return [theano.gradient.grad_undefined(op=self, x_pos=0, x=n,
comment=comment_n),
theano.gradient.grad_undefined(op=self, x_pos=1, x=p,
comment=comment_p),
theano.gradient.disconnected_type()]
def infer_shape(self, node, ins_shapes):
    """Output shape comes from the explicit `shape` input (third input)."""
    shp = node.inputs[2]
    return [(shp[0], shp[1])]
......@@ -202,7 +217,14 @@ class Multinomial(gof.op.Op):
out[0].data[k:l] = numpy.random.multinomial(n[i], p.data[k:l])
# NOTE(review): scraped diff hunk — `return [None, None]` is the OLD body
# and the pair of grad_undefined values below is the NEW body.
def grad(self, inputs, outputs_gradients):
return [None, None]
# Backslash-continued strings embed the next line's spacing verbatim.
comment_n = "No gradient exists for the number of samples in class\
Multinomial of theano/sparse/sandbox/sp2.py"
comment_p = "No gradient exists for the prob of success in class\
Multinomial of theano/sparse/sandbox/sp2.py"
return [theano.gradient.grad_undefined(op=self, x_pos=0, x=inputs[0],
comment=comment_n),
theano.gradient.grad_undefined(op=self, x_pos=1, x=inputs[1],
comment=comment_p)]
def infer_shape(self, node, ins_shapes):
    """The multinomial sample matches the shape of the probability input."""
    p_shape = ins_shapes[1]
    return [p_shape]
......
Markdown 格式
0%
您将添加 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册或者登录后再发表评论