Commit 2611ec31 authored by amrithasuresh

Updated numpy as np

Parent 13cb7ed2
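
A note on the change itself: `import numpy as np` binds the standard short alias to the same module object, so every rewritten call below behaves identically to its `numpy.` spelling. A minimal sketch of the equivalence, using plain NumPy only:

import numpy
import numpy as np

# Both names are bound to one module object; the second import is a
# no-op apart from binding the alias.
assert np is numpy
assert np.exp(0.0) == numpy.exp(0.0) == 1.0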
 from __future__ import absolute_import, print_function, division
 import unittest
-import numpy
+import numpy as np
 from theano.compat import imap
 import theano.tensor.inplace
@@ -26,12 +26,12 @@ class T_sigmoid(unittest.TestCase):
         utt.seed_rng()

     def test_elemwise(self):
-        utt.verify_grad(sigmoid, [numpy.random.rand(3, 4)])
+        utt.verify_grad(sigmoid, [np.random.rand(3, 4)])

 SigmoidTester = makeBroadcastTester(
     op=sigmoid,
     expected=upcast_int8_nfunc(lambda inputs: check_floatX(
-        inputs, 1 / (1 + numpy.exp(-inputs)))),
+        inputs, 1 / (1 + np.exp(-inputs)))),
     good=copymod(_good_broadcast_unary_normal_no_complex,
                  without=['uint16']),  # The reason that 'uint16' is excluded is that
                                        # theano works well but numpy overflows resulting
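
The uint16 exclusion noted in the comment above comes from plain NumPy semantics rather than from Theano. A minimal sketch, assuming NumPy's usual unsigned wrap-around and float32 upcasting: negating an unsigned array wraps modulo 2**16, so the reference expression evaluates exp at huge arguments, overflows to inf, and collapses the sigmoid to 0 instead of values near 1.

import numpy as np

x = np.array([1, 2, 3], dtype='uint16')
neg = -x                      # wraps to [65535, 65534, 65533]
sigm = 1 / (1 + np.exp(neg))  # RuntimeWarning: overflow; exp(...) is inf
print(sigm)                   # [0. 0. 0.] instead of ~[0.73, 0.88, 0.95]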
@@ -43,7 +43,7 @@ SigmoidTester = makeBroadcastTester(
 UltraFastSigmoidTester = makeBroadcastTester(
     op=ultra_fast_sigmoid,
     expected=upcast_int8_nfunc(lambda inputs: check_floatX(
-        inputs, 1 / (1 + numpy.exp(-inputs)))),
+        inputs, 1 / (1 + np.exp(-inputs)))),
     good=copymod(_good_broadcast_unary_normal_no_complex,
                  without=['uint16']),  # numpy function overflows with uint16.
     # grad=_grad_broadcast_unary_normal,
@@ -54,7 +54,7 @@ UltraFastSigmoidTester = makeBroadcastTester(
 HardSigmoidTester = makeBroadcastTester(
     op=hard_sigmoid,
     expected=upcast_int8_nfunc(lambda inputs: check_floatX(
-        inputs, 1 / (1 + numpy.exp(-inputs)))),
+        inputs, 1 / (1 + np.exp(-inputs)))),
     good=copymod(_good_broadcast_unary_normal_no_complex,
                  without=['uint16']),  # numpy function overflows with uint16.
     # grad=_grad_broadcast_unary_normal,
@@ -66,11 +66,11 @@ HardSigmoidTester = makeBroadcastTester(
 SoftplusTester = makeBroadcastTester(
     op=softplus,
     expected=upcast_int8_nfunc(lambda inputs: check_floatX(
-        inputs, numpy.log1p(numpy.exp(inputs)))),
+        inputs, np.log1p(np.exp(inputs)))),
     good=dict(copymod(_good_broadcast_unary_normal_no_complex,
                       without=['uint8', 'uint16']),  # numpy function overflows with uint16.
-              uint8=[numpy.arange(0, 89, dtype='uint8')],  # the range differs for the newly added uint8.
-              int8=[numpy.arange(-127, 89, dtype='int8')]),
+              uint8=[np.arange(0, 89, dtype='uint8')],  # the range differs for the newly added uint8.
+              int8=[np.arange(-127, 89, dtype='int8')]),
     # grad=_grad_broadcast_unary_normal,
     name='SoftplusTester',
 )
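
Two numerical points behind the SoftplusTester setup above, shown in a hedged sketch using plain NumPy only. First, the expected value uses np.log1p(np.exp(x)) because log(1 + exp(x)) loses the entire result to rounding for strongly negative x. Second, the upper bound of 89 in the integer test ranges plausibly keeps exp within float32: exp(88) is still finite there, exp(89) is not.

import numpy as np

# Lower end: log1p preserves precision where 1 + exp(x) rounds to 1.0.
x = -40.0
print(np.log(1.0 + np.exp(x)))   # 0.0 -- exp(-40) ~ 4e-18 is absorbed by the 1.0
print(np.log1p(np.exp(x)))       # ~4.248e-18, the correct softplus value

# Upper end: float32 overflows between exp(88) and exp(89).
print(np.exp(np.float32(88.0)))  # ~1.65e38, finite
print(np.exp(np.float32(89.0)))  # inf (overflow)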
@@ -81,7 +81,7 @@ class T_softplus(unittest.TestCase):
         utt.seed_rng()

     def test_elemwise(self):
-        utt.verify_grad(softplus, [numpy.random.rand(3, 4)])
+        utt.verify_grad(softplus, [np.random.rand(3, 4)])

 class T_sigmoid_opts(unittest.TestCase):
@@ -112,7 +112,7 @@ class T_sigmoid_opts(unittest.TestCase):
         m = self.get_mode(excluding=['local_elemwise_fusion'])
         x = T.vector()
-        data = numpy.random.rand(54).astype(config.floatX)
+        data = np.random.rand(54).astype(config.floatX)

         backup = config.warn.identify_1pexp_bug
         config.warn.identify_1pexp_bug = False
@@ -321,7 +321,7 @@ class T_sigmoid_opts(unittest.TestCase):
             if not isinstance(mode, theano.compile.DebugMode):
                 f = theano.function([x, lr], ux, mode=mode)
                 ux_v = f([[50]], 0.1)
-                assert not numpy.isnan(ux_v)
+                assert not np.isnan(ux_v)

     def test_local_ultra_fast_sigmoid(self):
         x = tensor.matrix('x')
@@ -391,7 +391,7 @@ class T_softplus_opts(unittest.TestCase):
         assert isinstance(topo[1].op.scalar_op,
                           theano.tensor.nnet.sigm.ScalarSoftplus)
         assert isinstance(topo[2].op.scalar_op, theano.scalar.Neg)
-        f(numpy.random.rand(54).astype(config.floatX))
+        f(np.random.rand(54).astype(config.floatX))

     def test_log1msigm_to_softplus(self):
         x = T.matrix()
@@ -404,7 +404,7 @@ class T_softplus_opts(unittest.TestCase):
                           theano.tensor.nnet.sigm.ScalarSoftplus)
         assert isinstance(topo[1].op.scalar_op, theano.scalar.Neg)
         # assert check_stack_trace(f, ops_to_check='all')
-        f(numpy.random.rand(54, 11).astype(config.floatX))
+        f(np.random.rand(54, 11).astype(config.floatX))

         # Same test with a flatten
         out = T.log(1 - T.flatten(sigmoid(x)))
@@ -417,7 +417,7 @@ class T_softplus_opts(unittest.TestCase):
         assert isinstance(topo[1].op.scalar_op,
                           theano.tensor.nnet.sigm.ScalarSoftplus)
         assert isinstance(topo[2].op.scalar_op, theano.scalar.Neg)
-        f(numpy.random.rand(54, 11).astype(config.floatX))
+        f(np.random.rand(54, 11).astype(config.floatX))

         # Same test with a reshape
         out = T.log(1 - sigmoid(x).reshape([x.size]))
@@ -428,7 +428,7 @@ class T_softplus_opts(unittest.TestCase):
         assert any(isinstance(getattr(node.op, 'scalar_op', None),
                               theano.tensor.nnet.sigm.ScalarSoftplus)
                    for node in topo)
-        f(numpy.random.rand(54, 11).astype(config.floatX))
+        f(np.random.rand(54, 11).astype(config.floatX))

     def test_log1pexp_to_softplus(self):
         m = theano.config.mode
@@ -446,7 +446,7 @@ class T_softplus_opts(unittest.TestCase):
         assert len(topo) == 1
         assert isinstance(topo[0].op.scalar_op,
                           theano.tensor.nnet.sigm.ScalarSoftplus)
-        f(numpy.random.rand(54).astype(config.floatX))
+        f(np.random.rand(54).astype(config.floatX))

 class T_sigmoid_utils(unittest.TestCase):
......
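
For reference, the softplus rewrites exercised in T_softplus_opts above rest on two identities: log(1 - sigmoid(x)) = log(sigmoid(-x)) = -softplus(x), and log(1 + exp(x)) = softplus(x). A quick numeric check of both, in plain NumPy and independent of Theano's optimizer:

import numpy as np

x = np.random.rand(54)
sigm = 1 / (1 + np.exp(-x))
softplus = np.log1p(np.exp(x))

# The rewrite behind test_log1msigm_to_softplus.
assert np.allclose(np.log(1 - sigm), -softplus)

# The rewrite behind test_log1pexp_to_softplus.
assert np.allclose(np.log(1 + np.exp(x)), softplus)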