testgroup / pytensor · Commits · 98cba59e

Commit 98cba59e, authored December 17, 2012 by Razvan Pascanu
Parent: a8358593

fix disconnected input case

Conflicts:
	theano/scan_module/scan_op.py
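For context, the "disconnected input case" is a scan whose inner function returns a value that does not feed one of the outputs; taking a gradient through such a scan used to fail. A minimal sketch mirroring the regression test added in this commit (see test_scan.py below), assuming a Theano installation from this era:

```python
import theano
import theano.tensor as tensor

v = tensor.vector('v')
m = tensor.matrix('m')
u0 = tensor.zeros((7,))

# The recurrent output `u` never depends on `v`; only `m2` does.
[u, m2], _ = theano.scan(lambda _, u: [u, v],
                         sequences=m,
                         outputs_info=[u0, None])

# Before this fix, Scan.grad returned a non-disconnected type for the
# disconnected gradient, and this call raised an exception.
tensor.grad((m * m2).sum(), v)
```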
Showing 2 changed files with 76 additions and 5 deletions:

  theano/scan_module/scan_op.py          +65 −5
  theano/scan_module/tests/test_scan.py  +11 −0
theano/scan_module/scan_op.py @ 98cba59e
```diff
@@ -1334,6 +1334,16 @@ class Scan(PureOp):
                 tmp = ils
             if any([x is not None for x in tmp]):
                 connection_pattern[iidx + 1][oidx] = True
 
+        old_conn = [[v for v in cp] for cp in connection_pattern]
+        n_outs = len(node.outputs)
+        for steps in xrange(n_outs):
+            for iidx in xrange(n_outs):
+                for jidx in xrange(n_outs):
+                    j_inp_idx = self.get_input_pos(jidx) + 1
+                    if connection_pattern[j_inp_idx][iidx] == True:
+                        for k in xrange(len(connection_pattern)):
+                            if connection_pattern[k][iidx]:
+                                connection_pattern[k][jidx] = True
         return connection_pattern
 
     ### GRAD FUNCTION
```
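In Theano's Op contract, `connection_pattern(node)` returns one boolean row per outer input (row 0 being `n_steps`) and one column per outer output; entry [i][o] says whether input i can influence output o. The loop added above sweeps this matrix repeatedly, propagating connectivity through the recurrence until a fixed point. A standalone sketch of the same sweep, with illustrative names (`out_to_in` stands in for `self.get_input_pos(j) + 1`):

```python
def close_connection_pattern(pattern, out_to_in):
    # pattern[i][o]: True when outer input i reaches outer output o.
    # out_to_in[j]: row index of the input carrying output j's state.
    n_outs = len(pattern[0])
    for _ in range(n_outs):          # repeated sweeps reach a fixed point
        for i in range(n_outs):
            for j in range(n_outs):
                if pattern[out_to_in[j]][i]:
                    # Output j's state row reaches output i, so column
                    # i's connectivity is copied into column j.
                    for k in range(len(pattern)):
                        if pattern[k][i]:
                            pattern[k][j] = True
    return pattern

# Two outputs: output 0's state enters as input row 1, output 1's as
# row 2, and output 0's state row reaches output 1 within one step.
pattern = [[False, False],   # row 0: n_steps
           [False, True],    # row 1: state of output 0
           [False, False]]   # row 2: state of output 1
print(close_connection_pattern(pattern, out_to_in=[1, 2]))
# [[False, False], [True, True], [False, False]] -- column 0 inherits
# column 1's connectivity via the recurrence.
```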
```diff
@@ -1371,17 +1381,53 @@ class Scan(PureOp):
                          self.inner_mitsot_outs(self_outputs) +
                          self.inner_sitsot_outs(self_outputs) +
                          self.inner_nitsot_outs(self_outputs))
+        scan_node = outs[0].owner
+        connection_pattern = self.connection_pattern(scan_node)
+
+        def get_inp_idx(iidx):
+            if iidx < self.n_seqs:
+                return 1 + iidx
+            oidx = 1 + self.n_seqs
+            iidx = iidx - self.n_seqs
+            for taps in self.mitmot_taps():
+                if len(taps) > iidx:
+                    return oidx
+                else:
+                    oidx += 1
+                    iidx -= len(taps)
+            for taps in self.mitsot_taps():
+                if len(taps) > iidx:
+                    return oidx
+                else:
+                    oidx += 1
+                    iidx -= len(taps)
+            if iidx < self.info['n_sit_sot']:
+                return oidx + iidx
+            else:
+                return oidx + iidx + self.info['n_nit_sot']
+
+        def get_out_idx(iidx):
+            oidx = 0
+            for taps in self.mitmot_out_taps():
+                if len(taps) > iidx:
+                    return oidx
+                else:
+                    oidx += 1
+                    iidx -= len(taps)
+            return oidx + iidx
+
         def compute_gradient(y, g_y):
             if 'int' in str(g_y.dtype):
                 raise TypeError("Gradients may never be integers but g_y "
                                 "has type " + str(g_y.type))
 
-            wrt = [x for x in theano.gof.graph.inputs([y])
-                   if x in diff_inputs]
+            odx = get_out_idx(self_outputs.index(y))
+            wrt = [x for x in theano.gof.graph.inputs([y])
+                   if (x in diff_inputs) and
+                   (connection_pattern[
+                       get_inp_idx(self_inputs.index(x))][odx])]
             grads = gradient.grad(
                 cost=None,
                 known_grads={y: g_y},
                 wrt=wrt,
                 consider_constant=wrt,
                 disconnected_inputs='ignore',
                 return_disconnected='None')
```
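Two things happen in this hunk: `get_inp_idx`/`get_out_idx` translate inner-graph positions into the outer node's input rows and output columns (hence the `+ 1` offsets for `n_steps`), and `wrt` is filtered so `gradient.grad` is only asked about variables the connection pattern says can matter. The `cost=None` plus `known_grads` calling convention used above can be exercised in isolation; a hedged sketch, assuming a working Theano:

```python
import theano.tensor as tensor
from theano import gradient

x = tensor.vector('x')
w = tensor.vector('w')          # deliberately unused, so disconnected
y = 2 * x

# Seed backpropagation with a known gradient on y instead of a scalar
# cost, the same convention compute_gradient uses above.
g_y = tensor.ones_like(y)
gx, gw = gradient.grad(cost=None,
                       known_grads={y: g_y},
                       wrt=[x, w],
                       disconnected_inputs='ignore',
                       return_disconnected='None')
print(gx)   # a symbolic gradient for x
print(gw)   # None: w is disconnected and return_disconnected='None'
```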
```diff
@@ -1757,6 +1803,20 @@ class Scan(PureOp):
                         'Depends on a shared variable'))
             else:
                 gradients.append(x[-1])
 
+        # Mask disconnected gradients
+        # Ideally we would want to assert that the gradients we are
+        # replacing do indeed evaluate to 0, though that is not practical
+        # from a computational point of view
+        # The gradients of scan are computed replacing Disconnected with 0,
+        # because through the recurrence they can become nonzero
+        for idx in xrange(len(gradients)):
+            disconnected = True
+            for kdx in xrange(len(node.outputs)):
+                if connection_pattern[idx][kdx] and \
+                   not isinstance(dC_douts[kdx].type, DisconnectedType):
+                    disconnected = False
+            if disconnected:
+                gradients[idx] = DisconnectedType()()
         return gradients
 
     def R_op(self, inputs, eval_points):
```
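The masking rule: an input's gradient survives only if the input is connected to at least one output whose incoming gradient (`dC_douts[kdx]`) is not itself disconnected; otherwise it is replaced with a `DisconnectedType` variable so callers see the disconnection. The predicate in isolation, with illustrative arguments (`DisconnectedType` is the real `theano.gradient` type; the helper name and toy data are made up):

```python
from theano import tensor
from theano.gradient import DisconnectedType

def mask_disconnected(gradients, connection_pattern, dC_douts):
    # Mirror of the loop above: gradients[idx] is kept only when input
    # idx reaches some output kdx whose upstream gradient is real.
    for idx in range(len(gradients)):
        disconnected = True
        for kdx in range(len(dC_douts)):
            if (connection_pattern[idx][kdx] and
                    not isinstance(dC_douts[kdx].type, DisconnectedType)):
                disconnected = False
        if disconnected:
            gradients[idx] = DisconnectedType()()
    return gradients

# Toy case: the only output's upstream gradient is disconnected, so the
# input's gradient is masked even though the connection exists.
g_outs = [DisconnectedType()()]
g_ins = [tensor.constant(1.0)]
print(mask_disconnected(g_ins, [[True]], g_outs))
```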
theano/scan_module/tests/test_scan.py @ 98cba59e
```diff
@@ -3295,6 +3295,17 @@ class T_Scan(unittest.TestCase):
         cost = x.sum()
         self.assertRaises(ValueError, tensor.grad, cost, y0)
 
+    def test_disconnected_gradient(self):
+        v = tensor.vector('v')
+        m = tensor.matrix('m')
+        u0 = tensor.zeros((7,))
+
+        [u, m2], _ = theano.scan(lambda _, u: [u, v],
+                                 sequences=m,
+                                 outputs_info=[u0, None])
+        # This used to raise an exception with older versions because for
+        # a disconnected gradient a non-disconnected type was returned
+        tensor.grad((m * m2).sum(), v)
+
     def test_pregreedy_optimizer(self):
         W = tensor.zeros((5, 4))
```