提交 b1425c93 authored 作者: Frederic's avatar Frederic

Add one more assert and test the 2 fixes with those asserts by modifying

an existing test.
上级 e645134c
......@@ -1582,12 +1582,23 @@ class Scan(PureOp):
outer_inp_seqs.append(x[::-1])
if hasattr(inputs[0].tag, 'test_value'):
# Here we test that the new scan input sequences all have
# the same shape[0]. This is a property that the scan()
# fct adds and we want to keep it for all Scan ops. This is
# used in T_Scan.test_grad_multiple_outs_taps to test
# that.
for x in self.outer_mitsot_outs(outs):
if hasattr(x[::-1][:inputs[0]].tag, 'test_value'):
assert x[::-1][:inputs[0]].tag.test_value.shape[0] == inputs[0].tag.test_value
assert (x[::-1][:inputs[0]].tag.test_value.shape[0] ==
inputs[0].tag.test_value)
for x in self.outer_sitsot_outs(outs):
if hasattr(x[::-1][:-1].tag, 'test_value'):
assert x[::-1][:-1].tag.test_value.shape[0] == inputs[0].tag.test_value
assert (x[::-1][:-1].tag.test_value.shape[0] ==
inputs[0].tag.test_value)
for x in self.outer_nitsot_outs(outs):
if hasattr(x[::-1].tag, 'test_value'):
assert (x[::-1].tag.test_value.shape[0] ==
inputs[0].tag.test_value)
outer_inp_seqs += [x[::-1][:inputs[0]]
for x in self.outer_mitsot_outs(outs)]
outer_inp_seqs += [x[::-1][:-1] for x in self.outer_sitsot_outs(outs)]
......
......@@ -1545,6 +1545,12 @@ class T_Scan(unittest.TestCase):
x0 = theano.tensor.vector('x0')
y0 = theano.tensor.vector('y0')
W_in1.tag.test_value = vW_in1
u1.tag.test_value = v_u1
u2.tag.test_value = v_u2
x0.tag.test_value = v_x0
y0.tag.test_value = v_y0
def f_rnn_cmpl(u1_t,
u2_tm1,
u2_t,
......@@ -1553,33 +1559,46 @@ class T_Scan(unittest.TestCase):
y_tm1,
y_tm3,
W_in1):
return [theano.dot(u1_t, W_in1) + \
(u2_t + u2_tm1 * u2_tp1) * W_in2 + \
theano.dot(x_tm1, W),
return [theano.dot(u1_t, W_in1) +
(u2_t + u2_tm1 * u2_tp1) * W_in2 +
theano.dot(x_tm1, W),
(y_tm1 + y_tm3) * theano.dot(x_tm1, W_out),
theano.dot(u1_t, W_in1)]
cost, updates = scan_project_sum(
f_rnn_cmpl,
[u1, dict(input=u2, taps=[-1, 0, 1])],
[x0, dict(initial=y0, taps=[-1, -3]), None],
W_in1,
n_steps=None,
truncate_gradient=-1,
go_backwards=False)
vparams = [v_u1, v_u2, v_x0, v_y0, vW_in1]
params = [u1, u2, x0, y0, W_in1]
gparams = theano.tensor.grad(cost, params)
grad_fn = theano.function([u1, u2, x0, y0, W_in1],
gparams,
updates=updates,
no_default_updates=True,
allow_input_downcast=True)
cost_fn = theano.function([u1, u2, x0, y0, W_in1],
cost,
updates=updates,
no_default_updates=True,
allow_input_downcast=True)
# We change the compute_test_value[_opt] flag to run the
# asserts in Scan.grad() on the new scan input sequences related
# to outer_mitsot_outs, outer_sitsot_outs and
# outer_nitsot_outs. This allows testing an old Scan bug.
old1 = theano.config.compute_test_value
old2 = theano.config.compute_test_value_opt
theano.config.compute_test_value = 'raise'
theano.config.compute_test_value_opt = 'raise'
try:
cost, updates = scan_project_sum(
f_rnn_cmpl,
[u1, dict(input=u2, taps=[-1, 0, 1])],
[x0, dict(initial=y0, taps=[-1, -3]), None],
W_in1,
n_steps=None,
truncate_gradient=-1,
go_backwards=False)
vparams = [v_u1, v_u2, v_x0, v_y0, vW_in1]
params = [u1, u2, x0, y0, W_in1]
gparams = theano.tensor.grad(cost, params)
grad_fn = theano.function([u1, u2, x0, y0, W_in1],
gparams,
updates=updates,
no_default_updates=True,
allow_input_downcast=True)
cost_fn = theano.function([u1, u2, x0, y0, W_in1],
cost,
updates=updates,
no_default_updates=True,
allow_input_downcast=True)
finally:
theano.config.compute_test_value = old1
theano.config.compute_test_value_opt = old2
num_grad = multiple_outputs_numeric_grad(cost_fn,
[v_u1,
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论