提交 2055feec authored 作者: Anish Shah's avatar Anish Shah

Exponential Linear Units (ELUs)

上级 b2362664
......@@ -16,7 +16,7 @@ from .nnet import (
graph_merge_softmax_with_crossentropy_softmax, h_softmax,
logsoftmax, logsoftmax_op, prepend_0_to_each_row, prepend_1_to_each_row,
prepend_scalar_to_each_row, relu, softmax, softmax_grad, softmax_graph,
softmax_op, softmax_simplifier, softmax_with_bias)
softmax_op, softmax_simplifier, softmax_with_bias, elu)
from . import opt
from .conv import ConvOp
from .Conv3D import *
......
......@@ -2329,3 +2329,30 @@ def h_softmax(x, batch_size, n_outputs, n_classes, n_outputs_per_class,
output_probs = target_class_probs * output_probs
return output_probs
def elu(x, alpha=1):
    """
    Compute the element-wise exponential linear activation function.

    .. versionadded:: 0.8.0

    Parameters
    ----------
    x : symbolic tensor
        Tensor to compute the activation function for.
    alpha : scalar, optional
        Scale of the negative saturation regime: for ``x <= 0`` the
        output is ``alpha * (exp(x) - 1)``. Defaults to 1.

    Returns
    -------
    symbolic tensor
        Element-wise exponential linear activation function applied to `x`.

    References
    ----------
    .. [1] Djork-Arne Clevert, Thomas Unterthiner, Sepp Hochreiter,
       "Fast and Accurate Deep Network Learning by Exponential Linear
       Units (ELUs)", <http://arxiv.org/abs/1511.07289>.
    """
    # Identity for positive inputs, scaled exponential decay towards
    # -alpha for non-positive inputs.
    return tensor.switch(x > 0, x, alpha * (tensor.exp(x) - 1))
......@@ -29,7 +29,8 @@ from theano.tensor.nnet import (categorical_crossentropy,
Prepend_scalar_constant_to_each_row,
Prepend_scalar_to_each_row,
relu,
h_softmax)
h_softmax,
elu)
from theano.tensor import matrix, vector, lvector, scalar
......@@ -1625,3 +1626,19 @@ def test_h_softmax():
# computed by fun_output.
utt.assert_allclose(
all_outputs[numpy.arange(0, batch_size), y_mat], tg_output)
def test_elu():
    """Check elu against a numpy reference, for the default and custom alphas."""
    inp = matrix('x')
    rng = numpy.random.RandomState(theano.tests.unittest_tools.fetch_seed())
    data = rng.randn(20, 30).astype(config.floatX)

    def reference(a):
        # Ground-truth ELU computed directly with numpy.
        return numpy.where(data > 0, data, a * (numpy.exp(data) - 1))

    # Base case: no explicit alpha, i.e. alpha == 1.
    utt.assert_allclose(elu(inp).eval({inp: data}), reference(1))

    # Several constant alpha values, including negative ones.
    for a in (1.5, 2, -1, -1.5, -2):
        utt.assert_allclose(elu(inp, a).eval({inp: data}), reference(a))
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论