Commit a206d3f0 authored by Frédéric Bastien, committed by GitHub

Merge pull request #5567 from matt-graham/modified_bessel_functions

Adding scipy.special modified Bessel function ops
from __future__ import absolute_import, print_function, division
# definitions of theano.scalar ops that have their python implementation taken from scipy
# as scipy is not always available, we treat them separately
# Definitions of theano.scalar ops that have their python implementation taken
# from SciPy. As SciPy is not always available, we treat them separately.
import numpy
import theano
from theano.gradient import grad_not_implemented
from theano.scalar.basic import (UnaryScalarOp, BinaryScalarOp,
exp, upgrade_to_float,
upgrade_to_float64,
......@@ -373,9 +375,33 @@ class Chi2SF(BinaryScalarOp):
chi2sf = Chi2SF(upgrade_to_float64, name='chi2sf')
class Jv(BinaryScalarOp):
    """
    Bessel function of the first kind of order v (real).

    The Python implementation delegates to ``scipy.special.jv``.  When SciPy
    is unavailable, it falls back to the parent ``impl``, which raises.
    """

    @staticmethod
    def st_impl(v, x):
        # Static so the SciPy evaluation can be reused without an op instance.
        return scipy.special.jv(v, x)

    def impl(self, v, x):
        if imported_scipy_special:
            return self.st_impl(v, x)
        else:
            # `return` added: the parent impl raises when no Python
            # implementation exists, but the fallback should still be
            # returned for consistency with the SciPy branch.
            return super(Jv, self).impl(v, x)

    def grad(self, inputs, grads):
        # d/dx J_v(x) = (J_{v-1}(x) - J_{v+1}(x)) / 2.
        # The gradient with respect to the order v is not implemented.
        v, x = inputs
        gz, = grads
        return [grad_not_implemented(self, 0, v),
                gz * (jv(v - 1, x) - jv(v + 1, x)) / 2.]

jv = Jv(upgrade_to_float, name='jv')
class J1(UnaryScalarOp):
"""
Bessel function of the 1'th kind
Bessel function of the first kind of order 1.
"""
@staticmethod
......@@ -388,8 +414,10 @@ class J1(UnaryScalarOp):
else:
super(J1, self).impl(x)
def grad(self, inp, grads):
raise NotImplementedError()
def grad(self, inputs, grads):
x, = inputs
gz, = grads
return [gz * (j0(x) - jv(2, x)) / 2.]
def c_code(self, node, name, inp, out, sub):
x, = inp
......@@ -398,12 +426,13 @@ class J1(UnaryScalarOp):
return """%(z)s =
j1(%(x)s);""" % locals()
raise NotImplementedError('only floating point is implemented')
j1 = J1(upgrade_to_float, name='j1')
class J0(UnaryScalarOp):
"""
Bessel function of the 0'th kind
Bessel function of the first kind of order 0.
"""
@staticmethod
......@@ -428,4 +457,75 @@ class J0(UnaryScalarOp):
return """%(z)s =
j0(%(x)s);""" % locals()
raise NotImplementedError('only floating point is implemented')
j0 = J0(upgrade_to_float, name='j0')
class Iv(BinaryScalarOp):
    """
    Modified Bessel function of the first kind of order v (real).

    The Python implementation delegates to ``scipy.special.iv``.  When SciPy
    is unavailable, it falls back to the parent ``impl``, which raises.
    """

    @staticmethod
    def st_impl(v, x):
        # Static so the SciPy evaluation can be reused without an op instance.
        return scipy.special.iv(v, x)

    def impl(self, v, x):
        if imported_scipy_special:
            return self.st_impl(v, x)
        else:
            # `return` added: the parent impl raises when no Python
            # implementation exists, but the fallback should still be
            # returned for consistency with the SciPy branch.
            return super(Iv, self).impl(v, x)

    def grad(self, inputs, grads):
        # d/dx I_v(x) = (I_{v-1}(x) + I_{v+1}(x)) / 2.
        # The gradient with respect to the order v is not implemented.
        v, x = inputs
        gz, = grads
        return [grad_not_implemented(self, 0, v),
                gz * (iv(v - 1, x) + iv(v + 1, x)) / 2.]

iv = Iv(upgrade_to_float, name='iv')
class I1(UnaryScalarOp):
    """
    Modified Bessel function of the first kind of order 1.

    The Python implementation delegates to ``scipy.special.i1``.  When SciPy
    is unavailable, it falls back to the parent ``impl``, which raises.
    """

    @staticmethod
    def st_impl(x):
        # Static so the SciPy evaluation can be reused without an op instance.
        return scipy.special.i1(x)

    def impl(self, x):
        if imported_scipy_special:
            return self.st_impl(x)
        else:
            # `return` added: the parent impl raises when no Python
            # implementation exists, but the fallback should still be
            # returned for consistency with the SciPy branch.
            return super(I1, self).impl(x)

    def grad(self, inputs, grads):
        # d/dx I_1(x) = (I_0(x) + I_2(x)) / 2.
        x, = inputs
        gz, = grads
        return [gz * (i0(x) + iv(2, x)) / 2.]

i1 = I1(upgrade_to_float, name='i1')
class I0(UnaryScalarOp):
    """
    Modified Bessel function of the first kind of order 0.

    The Python implementation delegates to ``scipy.special.i0``.  When SciPy
    is unavailable, it falls back to the parent ``impl``, which raises.
    """

    @staticmethod
    def st_impl(x):
        # Static so the SciPy evaluation can be reused without an op instance.
        return scipy.special.i0(x)

    def impl(self, x):
        if imported_scipy_special:
            return self.st_impl(x)
        else:
            # `return` added: the parent impl raises when no Python
            # implementation exists, but the fallback should still be
            # returned for consistency with the SciPy branch.
            return super(I0, self).impl(x)

    def grad(self, inputs, grads):
        # d/dx I_0(x) = I_1(x).  Parameter renamed `inp` -> `inputs` for
        # consistency with the sibling Bessel op classes (called
        # positionally by theano.gradient, so this is caller-compatible).
        x, = inputs
        gz, = grads
        return [gz * i1(x)]

i0 = I0(upgrade_to_float, name='i0')
......@@ -2300,13 +2300,33 @@ def chi2sf(x, k):
@_scal_elemwise
def j0(a):
"""Bessel function of the 0'th kind"""
def j0(x):
"""Bessel function of the first kind of order 0."""
@_scal_elemwise
def j1(a):
"""Bessel function of the 1'th kind"""
def j1(x):
"""Bessel function of the first kind of order 1."""
@_scal_elemwise
def jv(v, x):
"""Bessel function of the first kind of order v (real)."""
@_scal_elemwise
def i0(x):
"""Modified Bessel function of the first kind of order 0."""
@_scal_elemwise
def i1(x):
"""Modified Bessel function of the first kind of order 1."""
@_scal_elemwise
def iv(v, x):
"""Modified Bessel function of the first kind of order v (real)."""
@_scal_elemwise
......
......@@ -281,13 +281,33 @@ def chi2sf_inplace(x, k):
@_scal_inplace
def j0_inplace(a):
"""Bessel function of the 0'th kind"""
def j0_inplace(x):
"""Bessel function of the first kind of order 0."""
@_scal_inplace
def j1_inplace(a):
"""Bessel function of the 0'th kind"""
def j1_inplace(x):
"""Bessel function of the first kind of order 1."""
@_scal_inplace
def jv_inplace(v, x):
"""Bessel function of the first kind of order v (real)."""
@_scal_inplace
def i0_inplace(x):
"""Modified Bessel function of the first kind of order 0."""
@_scal_inplace
def i1_inplace(x):
"""Modified Bessel function of the first kind of order 1."""
@_scal_inplace
def iv_inplace(v, x):
"""Modified Bessel function of the first kind of order v (real)."""
@_scal_inplace
......
......@@ -1711,6 +1711,10 @@ if imported_scipy_special:
expected_chi2sf = scipy.stats.chi2.sf
expected_j0 = scipy.special.j0
expected_j1 = scipy.special.j1
expected_jv = scipy.special.jv
expected_i0 = scipy.special.i0
expected_i1 = scipy.special.i1
expected_iv = scipy.special.iv
skip_scipy = False
expected_erfcx = scipy.special.erfcx
else:
......@@ -1725,6 +1729,10 @@ else:
expected_chi2sf = []
expected_j0 = []
expected_j1 = []
expected_jv = []
expected_i0 = []
expected_i1 = []
expected_iv = []
skip_scipy = "scipy is not present"
ErfTester = makeBroadcastTester(
......@@ -1903,22 +1911,46 @@ Chi2SFInplaceTester = makeBroadcastTester(
skip=skip_scipy,
name='Chi2SF')
# Pre-merge test data: positive-only inputs used by the old J0/J1 testers.
_good_broadcast_unary_j = dict(
normal=(rand_ranged(0.1, 8, (2, 3)),),)
# Forward-pass inputs for the unary Bessel ops (j0/j1/i0/i1): floats over
# [-10, 10], an empty array, and signed/unsigned integer dtypes.
_good_broadcast_unary_bessel = dict(
normal=(rand_ranged(-10, 10, (2, 3)),),
empty=(numpy.asarray([], dtype=config.floatX),),
int=(randint_ranged(-10, 10, (2, 3)),),
uint8=(randint_ranged(0, 10, (2, 3)).astype('uint8'),),
uint16=(randint_ranged(0, 10, (2, 3)).astype('uint16'),))
# Gradient-check inputs for the unary Bessel ops (floats only).
_grad_broadcast_unary_bessel = dict(
normal=(rand_ranged(-10., 10., (2, 3)),),)
# Forward-pass input pairs (order v, argument x) for the binary Bessel
# ops jv/iv, including empty and integer-dtype cases.
_good_broadcast_binary_bessel = dict(
normal=(rand_ranged(-5, 5, (2, 3)),
rand_ranged(0, 10, (2, 3))),
empty=(numpy.asarray([], dtype=config.floatX),
numpy.asarray([], dtype=config.floatX)),
integers=(randint_ranged(-5, 5, (2, 3)),
randint_ranged(-10, 10, (2, 3))),
uint8=(randint_ranged(0, 5, (2, 3)).astype('uint8'),
randint_ranged(0, 10, (2, 3)).astype('uint8')),
uint16=(randint_ranged(0, 5, (2, 3)).astype('uint16'),
randint_ranged(0, 10, (2, 3)).astype('uint16')))
# Gradient-check input pairs for jv/iv; v kept >= 1 and x >= 0 — presumably
# to stay away from regions where finite differences are ill-conditioned.
_grad_broadcast_binary_bessel = dict(
normal=(rand_ranged(1, 5, (2, 3)),
rand_ranged(0, 10, (2, 3))))
# Broadcast tester for tensor.j0 against scipy.special.j0.
# Fix: the diff left both the old and new `good=`/`grad=` keyword arguments
# in place, producing repeated-keyword-argument SyntaxErrors; only the new
# bessel data sets are kept.
J0Tester = makeBroadcastTester(
    op=tensor.j0,
    expected=expected_j0,
    good=_good_broadcast_unary_bessel,
    grad=_grad_broadcast_unary_bessel,
    eps=2e-10,
    mode=mode_no_scipy,
    skip=skip_scipy)
J0InplaceTester = makeBroadcastTester(
op=inplace.j0_inplace,
expected=expected_j0,
good=_good_broadcast_unary_j,
grad=_good_broadcast_unary_j,
good=_good_broadcast_unary_bessel,
grad=_grad_broadcast_unary_bessel,
eps=2e-10,
mode=mode_no_scipy,
inplace=True,
......@@ -1927,19 +1959,124 @@ J0InplaceTester = makeBroadcastTester(
# Broadcast tester for tensor.j1 against scipy.special.j1.
# Fix: the diff left both the old and new `good=` keyword arguments in
# place, producing a repeated-keyword-argument SyntaxError; only the new
# bessel data set is kept.
J1Tester = makeBroadcastTester(
    op=tensor.j1,
    expected=expected_j1,
    good=_good_broadcast_unary_bessel,
    grad=_grad_broadcast_unary_bessel,
    eps=2e-10,
    mode=mode_no_scipy,
    skip=skip_scipy)
# In-place variant of the j1 broadcast tester.
# Fix: the diff left both the old and new `good=` keyword arguments in
# place, producing a repeated-keyword-argument SyntaxError; only the new
# bessel data set is kept.
J1InplaceTester = makeBroadcastTester(
    op=inplace.j1_inplace,
    expected=expected_j1,
    good=_good_broadcast_unary_bessel,
    grad=_grad_broadcast_unary_bessel,
    eps=2e-10,
    mode=mode_no_scipy,
    inplace=True,
    skip=skip_scipy)
# Broadcast tester for tensor.jv against scipy.special.jv.  No `grad=`
# entry: the gradient w.r.t. the order v is not implemented, so gradients
# are verified separately with v held fixed (test_verify_jv_grad).
JvTester = makeBroadcastTester(
op=tensor.jv,
expected=expected_jv,
good=_good_broadcast_binary_bessel,
eps=2e-10,
mode=mode_no_scipy,
skip=skip_scipy)
# In-place variant of the jv broadcast tester.
JvInplaceTester = makeBroadcastTester(
op=inplace.jv_inplace,
expected=expected_jv,
good=_good_broadcast_binary_bessel,
eps=2e-10,
mode=mode_no_scipy,
inplace=True,
skip=skip_scipy)
def test_verify_jv_grad():
    """Numerically check the gradient of the Jv op.

    Done outside the broadcast tester machinery because the gradient with
    respect to the first input (the order v) is not implemented; v is held
    fixed and only the gradient in x is verified.
    """
    v_val, x_val = _grad_broadcast_binary_bessel['normal']

    def jv_at_fixed_order(x):
        return tensor.jv(v_val, x)

    utt.verify_grad(jv_at_fixed_order, [x_val])
# Broadcast tester for tensor.i0 against scipy.special.i0.
I0Tester = makeBroadcastTester(
op=tensor.i0,
expected=expected_i0,
good=_good_broadcast_unary_bessel,
grad=_grad_broadcast_unary_bessel,
eps=2e-10,
mode=mode_no_scipy,
skip=skip_scipy)
# In-place variant of the i0 broadcast tester.
I0InplaceTester = makeBroadcastTester(
op=inplace.i0_inplace,
expected=expected_i0,
good=_good_broadcast_unary_bessel,
grad=_grad_broadcast_unary_bessel,
eps=2e-10,
mode=mode_no_scipy,
inplace=True,
skip=skip_scipy)
# Broadcast tester for tensor.i1 against scipy.special.i1.
I1Tester = makeBroadcastTester(
op=tensor.i1,
expected=expected_i1,
good=_good_broadcast_unary_bessel,
grad=_grad_broadcast_unary_bessel,
eps=2e-10,
mode=mode_no_scipy,
skip=skip_scipy)
# In-place variant of the i1 broadcast tester.
I1InplaceTester = makeBroadcastTester(
op=inplace.i1_inplace,
expected=expected_i1,
good=_good_broadcast_unary_bessel,
grad=_grad_broadcast_unary_bessel,
eps=2e-10,
mode=mode_no_scipy,
inplace=True,
skip=skip_scipy)
# Broadcast tester for tensor.iv against scipy.special.iv.  No `grad=`
# entry: the gradient w.r.t. the order v is not implemented, so gradients
# are verified separately with v held fixed (test_verify_iv_grad).
IvTester = makeBroadcastTester(
op=tensor.iv,
expected=expected_iv,
good=_good_broadcast_binary_bessel,
eps=2e-10,
mode=mode_no_scipy,
skip=skip_scipy)
# In-place variant of the iv broadcast tester.
IvInplaceTester = makeBroadcastTester(
op=inplace.iv_inplace,
expected=expected_iv,
good=_good_broadcast_binary_bessel,
eps=2e-10,
mode=mode_no_scipy,
inplace=True,
skip=skip_scipy)
def test_verify_iv_grad():
    """Numerically check the gradient of the Iv op.

    Done outside the broadcast tester machinery because the gradient with
    respect to the first input (the order v) is not implemented; v is held
    fixed and only the gradient in x is verified.
    """
    v_val, x_val = _grad_broadcast_binary_bessel['normal']

    def iv_at_fixed_order(x):
        return tensor.iv(v_val, x)

    utt.verify_grad(iv_at_fixed_order, [x_val])
ZerosLikeTester = makeBroadcastTester(
op=tensor.zeros_like,
expected=numpy.zeros_like,
......
Markdown format
0%
You added 0 people to this discussion. Please proceed carefully.
Please finish editing this comment first!
Register or sign in to comment