提交 005a3a02 authored 作者: ColtAllen's avatar ColtAllen 提交者: Thomas Wiecki

Add factorial and poch helpers

上级 8051ffbb
...@@ -7,7 +7,7 @@ import scipy ...@@ -7,7 +7,7 @@ import scipy
from pytensor.graph.basic import Apply from pytensor.graph.basic import Apply
from pytensor.link.c.op import COp from pytensor.link.c.op import COp
from pytensor.tensor.basic import as_tensor_variable from pytensor.tensor.basic import as_tensor_variable
from pytensor.tensor.math import neg, sum from pytensor.tensor.math import gamma, neg, sum
class SoftmaxGrad(COp): class SoftmaxGrad(COp):
...@@ -768,7 +768,25 @@ def log_softmax(c, axis=UNSET_AXIS): ...@@ -768,7 +768,25 @@ def log_softmax(c, axis=UNSET_AXIS):
return LogSoftmax(axis=axis)(c) return LogSoftmax(axis=axis)(c)
def poch(z, m):
    """Compute the Pochhammer symbol (rising factorial) of `z`.

    Evaluated as ``gamma(z + m) / gamma(z)``, which agrees with
    ``scipy.special.poch`` for positive arguments and extends
    elementwise to tensor inputs.
    """
    numerator = gamma(z + m)
    denominator = gamma(z)
    return numerator / denominator
def factorial(n):
    """Compute the factorial of a scalar or array of numbers.

    Implemented via the gamma function as ``gamma(n + 1)``, so
    non-integer inputs are also accepted.
    """
    shifted = n + 1
    return gamma(shifted)
__all__ = [ __all__ = [
"softmax", "softmax",
"log_softmax", "log_softmax",
"poch",
"factorial",
] ]
import numpy as np import numpy as np
import pytest import pytest
from scipy.special import factorial as scipy_factorial
from scipy.special import log_softmax as scipy_log_softmax from scipy.special import log_softmax as scipy_log_softmax
from scipy.special import poch as scipy_poch
from scipy.special import softmax as scipy_softmax from scipy.special import softmax as scipy_softmax
from pytensor.compile.function import function from pytensor.compile.function import function
...@@ -9,11 +11,14 @@ from pytensor.tensor.special import ( ...@@ -9,11 +11,14 @@ from pytensor.tensor.special import (
LogSoftmax, LogSoftmax,
Softmax, Softmax,
SoftmaxGrad, SoftmaxGrad,
factorial,
log_softmax, log_softmax,
poch,
softmax, softmax,
) )
from pytensor.tensor.type import matrix, tensor3, tensor4, vector from pytensor.tensor.type import matrix, tensor3, tensor4, vector, vectors
from tests import unittest_tools as utt from tests import unittest_tools as utt
from tests.tensor.utils import random_ranged
class TestSoftmax(utt.InferShapeTester): class TestSoftmax(utt.InferShapeTester):
...@@ -140,3 +145,29 @@ class TestSoftmaxGrad(utt.InferShapeTester): ...@@ -140,3 +145,29 @@ class TestSoftmaxGrad(utt.InferShapeTester):
with pytest.raises(ValueError): with pytest.raises(ValueError):
SoftmaxGrad(-4)(*x) SoftmaxGrad(-4)(*x)
def test_poch():
    """Check that `poch` matches `scipy.special.poch` on random positive inputs."""
    z_var, m_var = vectors("z", "m")
    poch_fn = function([z_var, m_var], poch(z_var, m_var))

    z_vals = random_ranged(0, 5, (2,))
    m_vals = random_ranged(0, 5, (2,))
    # Looser tolerance for float32 builds.
    if config.floatX == "float64":
        tol = 1e-7
    else:
        tol = 1e-5
    np.testing.assert_allclose(
        poch_fn(z_vals, m_vals), scipy_poch(z_vals, m_vals), rtol=tol
    )
def test_factorial():
    """Check that `factorial` matches `scipy.special.factorial` on random inputs.

    NOTE(review): the previous version parametrized over a random scalar
    ``n`` that the body immediately shadowed with a fresh random vector,
    so the parametrization ran the identical test twice over at best and
    was misleading; it is removed here.
    """
    n_var = vector("n")
    factorial_fn = function([n_var], factorial(n_var))

    n_vals = random_ranged(0, 5, (2,))
    np.testing.assert_allclose(
        factorial_fn(n_vals),
        scipy_factorial(n_vals),
        # Looser tolerance for float32 builds.
        rtol=1e-7 if config.floatX == "float64" else 1e-5,
    )
Markdown 格式
0%
您在此讨论中添加了 0 人。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论