提交 116f70fe authored 作者: Frederic's avatar Frederic

pep8

上级 025d484e
import theano
import numpy
import theano.tensor
class ScalarSoftsign(theano.scalar.UnaryScalarOp):
    """Scalar softsign Op: x / (1.0 + |x|).

    Smooth, sigmoid-like squashing function mapping the reals into
    (-1, 1).  Provides the Python implementation, the symbolic gradient
    and a C kernel for float32/float64 inputs.
    """

    @staticmethod
    def static_impl(x):
        # Pure-Python reference implementation; static so it can be used
        # without instantiating the Op.
        return x / (1.0 + abs(x))

    def impl(self, x):
        # Scalar-Op protocol entry point: delegate to the reference impl.
        return ScalarSoftsign.static_impl(x)

    def grad(self, inp, grads):
        """Return the input gradient of softsign.

        d/dx [x / (1 + |x|)] = 1 / (1 + |x|)**2, so the gradient w.r.t.
        the input is gz / (1 + |x|)**2.
        """
        x, = inp
        gz, = grads
        # NOTE(review): the guard condition here was cut out of the diff
        # this file was recovered from; a float-dtype check mirroring the
        # float-only c_code below is the natural reconstruction — confirm
        # against upstream before relying on it.
        if 'float' in x.type.dtype:
            d = (1.0 + abs(x))
            return [gz / (d * d)]
        else:
            # No gradient defined for non-float (e.g. integer) inputs.
            return NotImplemented

    def c_code(self, node, name, inp, out, sub):
        """Emit the C expression computing softsign for float inputs.

        Raises NotImplementedError for any non-float input type so the
        fallback Python implementation is used instead.
        """
        x, = inp
        z, = out
        # Only single- and double-precision floats are supported in C.
        if node.inputs[0].type in [theano.scalar.float32,
                                   theano.scalar.float64]:
            return "%(z)s = %(x)s / (1.0+fabs(%(x)s));" % locals()
        raise NotImplementedError('only floating point x is implemented')
# Module-level Op instances: the scalar softsign Op, and its elementwise
# (tensor) wrapper.  The diff this file was recovered from duplicated the
# pre- and post-pep8 assignment lines; only the wrapped (new) form is kept.
scalar_softsign = ScalarSoftsign(theano.scalar.upgrade_to_float,
                                 name='scalar_softsign')
softsign = theano.tensor.Elemwise(scalar_softsign, name='softsign')
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论