提交 a5b73b01 authored 作者: vipulraheja's avatar vipulraheja 提交者: Arnaud Bergeron

Add selu activation

上级 e1911566
......@@ -18,7 +18,7 @@ from .nnet import (
graph_merge_softmax_with_crossentropy_softmax, h_softmax,
logsoftmax, logsoftmax_op, prepend_0_to_each_row, prepend_1_to_each_row,
prepend_scalar_to_each_row, relu, softmax, softmax_grad, softmax_graph,
softmax_op, softmax_simplifier, softmax_with_bias, elu,
softmax_op, softmax_simplifier, softmax_with_bias, elu, selu,
confusion_matrix, softsign)
from . import opt
from .conv import ConvOp
......
......@@ -2448,6 +2448,29 @@ def elu(x, alpha=1):
return tensor.switch(x > 0, x, alpha * tensor.expm1(x))
def selu(x, alpha=1):
    """Compute the element-wise Scaled Exponential Linear Unit.

    This is equivalent to ``scale * elu(x, alpha)`` using the fixed
    constants derived in [1]_ that give the activation its
    self-normalizing property.

    Parameters
    ----------
    x : symbolic tensor
        Tensor to compute the activation function for.
    alpha : ignored
        Present only for signature symmetry with `elu`; SELU always uses
        the fixed constant from [1]_, so this argument has no effect.

    Returns
    -------
    symbolic tensor
        Element-wise scaled exponential linear activation applied to `x`.

    References
    ----------
    .. [1] Klambauer, Gunter, et al.
        "Self-Normalizing Neural Networks" <https://arxiv.org/abs/1706.02515>
    """
    # Fixed constants from Klambauer et al. (2017).  Use distinct names so
    # the (inert) `alpha` parameter is not silently shadowed.
    selu_alpha = 1.6732632423543772848170429916717
    selu_scale = 1.0507009873554804934193349852946
    return selu_scale * elu(x, selu_alpha)
class ScalarSoftsign(theano.scalar.UnaryScalarOp):
"""
Softsign activation function
......
......@@ -32,6 +32,7 @@ from theano.tensor.nnet import (categorical_crossentropy,
relu,
h_softmax,
elu,
selu,
binary_crossentropy,
sigmoid_binary_crossentropy,
confusion_matrix)
......@@ -1737,6 +1738,19 @@ def test_elu():
utt.assert_allclose(y, np.where(X > 0, X, alpha * (np.exp(X) - 1)))
def test_selu():
    """selu(x) must match scale * elu-like reference with the paper constants."""
    # Reference constants from Klambauer et al. (2017).
    alpha = 1.6732632423543772848170429916717
    scale = 1.0507009873554804934193349852946

    x = matrix('x')
    prng = np.random.RandomState(theano.tests.unittest_tools.fetch_seed())
    x_val = prng.randn(20, 30).astype(config.floatX)

    actual = selu(x).eval({x: x_val})
    expected = np.where(x_val > 0,
                        scale * x_val,
                        scale * alpha * (np.exp(x_val) - 1))
    utt.assert_allclose(actual, expected)
def test_binary_crossentropy_reshape():
# Reported as https://github.com/Theano/Theano/issues/4086
a = tensor.tensor4('a')
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论