提交 41518f9e authored 作者: Pascal Lamblin's avatar Pascal Lamblin

Move tests to theano/tensor, call verify_grad

Calling theano.grad on the graph built from theano.scalar.clip is not working, so we have to use theano.tensor.clip instead.
上级 7ef9e747
......@@ -20,7 +20,7 @@ from theano.tests import unittest_tools as utt
from theano.scalar.basic import (floats, float32, float64,
ints, int8, int32, complex64,
ComplexError, IntDiv, TrueDiv,
Composite, add, div_proxy, clip,
Composite, add, div_proxy,
and_, eq, neq, invert, mul, Scalar)
from theano.scalar.basic import (
true_div, inv, log, log2, log10, log1p, exp, exp2, expm1, sqrt, deg2rad,
......@@ -62,41 +62,6 @@ class test_ScalarOps(unittest.TestCase):
):
self.assertTrue(fn(a, b) == a%b, (a,))
def test_clip_grad(self):
    """Regression test for issue #633: gradient of scalar clip.

    When `x` is equal to a clipping boundary, the gradient must flow
    through `x` (i.e. evaluate to 1), not through `min` or `max`.
    Note: the graphs are built with `theano.tensor.clip`, since calling
    `theano.grad` on a graph built from `theano.scalar.clip` does not
    work (see commit message).
    """
    x, y = floats('xy')

    # x used both as the value and as the upper bound: at the
    # boundary, the gradient wrt x must be 1.
    a = theano.tensor.clip(x, y, x)
    g = theano.gradient.grad(a, x)
    fn = gof.DualLinker().accept(FunctionGraph([x, y], [g])).make_function()

    # Test the other way around as well: x as the lower bound.
    a2 = theano.tensor.clip(x, x, y)
    g2 = theano.gradient.grad(a2, x)
    fn2 = gof.DualLinker().accept(FunctionGraph([x, y], [g2])).make_function()

    # Test for the equal case too: x as value and both bounds.
    a3 = theano.tensor.clip(x, x, x)
    g3 = theano.gradient.grad(a3, x)
    fn3 = gof.DualLinker().accept(FunctionGraph([x], [g3])).make_function()

    rng = np.random.RandomState(utt.fetch_seed())

    ntests = 50
    # `range` instead of Python-2-only `xrange`: identical behavior in
    # a for-loop, and compatible with Python 3.
    for i in range(ntests):
        xval = rng.rand(1)
        # To ensure that the min < x.
        yval_mn = rng.rand(1) - 1.0
        # To ensure that the max > x.
        yval_mx = rng.rand(1) + 1.0

        aval = fn(xval, yval_mn)
        aval2 = fn2(xval, yval_mx)
        aval3 = fn3(xval)
        self.assertTrue(aval == 1.)
        self.assertTrue(aval2 == 1.)
        self.assertTrue(aval3 == 1.)
class test_composite(unittest.TestCase):
......
......@@ -2997,8 +2997,10 @@ def clip(x, min, max):
"""clip x to be between min and max.
:note: When `x` is equal to the boundaries, the output is considered
to be `x`, so at these points, the gradient of the cost wrt the output
will be propagated to `x`, not to `min` nor `max`. In other words,
at these points, the gradient wrt `x` will be equal to the gradient wrt
the output, and the gradient wrt `min` and `max` will be zero.
"""
# see decorator for function body
# for grep: clamp, bound
......
......@@ -2484,6 +2484,59 @@ class T_Clip(unittest.TestCase):
c = tensor.scalar()
self.assertRaises(TypeError, clip, a, b, c)
def test_clip_repeat_grad(self):
    """Regression test for issue #633: gradient of tensor clip.

    When an entry of `x` is equal to a clipping boundary, the gradient
    wrt `x` must be 1 at that entry: the gradient of the cost wrt the
    output is propagated to `x`, not to the bound.
    """
    x, y = tensor.vectors('xy')

    # x used both as the value and as the upper bound.
    a = clip(x, y, x)
    g = theano.gradient.grad(a.sum(), x)
    fn = theano.function([x, y], [g])

    # Test the other way around as well: x as the lower bound.
    a2 = clip(x, x, y)
    g2 = theano.gradient.grad(a2.sum(), x)
    fn2 = theano.function([x, y], [g2])

    # Test for the equal case too: x as value and both bounds.
    # Use the bare `clip` for consistency with the two cases above
    # (it is the same function as `theano.tensor.clip`).
    a3 = clip(x, x, x)
    g3 = theano.gradient.grad(a3.sum(), x)
    fn3 = theano.function([x], [g3])

    rng = numpy.random.RandomState(utt.fetch_seed())

    nvals = 50
    xval = rng.rand(nvals)
    # To ensure that the min < x.
    yval_mn = rng.rand(nvals) - 1.0
    # To ensure that the max > x.
    yval_mx = rng.rand(nvals) + 1.0

    aval, = fn(xval, yval_mn)
    aval2, = fn2(xval, yval_mx)
    aval3, = fn3(xval)
    self.assertTrue(numpy.all(aval == 1.))
    self.assertTrue(numpy.all(aval2 == 1.))
    self.assertTrue(numpy.all(aval3 == 1.))
def test_clip_repeat_verify_grad(self):
    """Numerically verify the gradient of clip with repeated arguments.

    Additional tests for issue gh-633: each case passes the same
    variable in two (or all three) of clip's argument slots.
    """
    cases = (
        lambda x: clip(x, 0, x),   # x as value and upper bound
        lambda x: clip(x, x, 0),   # x as value and lower bound
        lambda x: clip(0, x, x),   # x as both bounds
        lambda x: clip(x, x, x),   # x in every slot
    )
    for op in cases:
        utt.verify_grad(op=op, pt=[rand_nonzero((3, 7))])
# TODO: consider moving this function / functionality to gradient.py
# rationale: it's tricky, and necessary every time you want to verify
# a gradient numerically
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论