testgroup / pytensor · commit 9733b595

dict(zip()) -> dict(izip()) (and OrderedDict)

Authored May 04, 2015 by David Warde-Farley
Committed by Arnaud Bergeron on Jun 22, 2015
Parent: 30340e72

Showing 10 changed files with 39 additions and 39 deletions
theano/compile/builders.py                      +5  -4
theano/compile/function_module.py               +4  -4
theano/gof/opt.py                               +2  -1
theano/gradient.py                              +2  -3
theano/sandbox/scan.py                          +5  -5
theano/sandbox/scan_module/tests/test_scan.py   +1  -2
theano/scalar/basic.py                          +7  -8
theano/scan_module/scan.py                      +6  -6
theano/tensor/utils.py                          +2  -1
theano/tests/test_gradient.py                   +5  -5
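Every hunk below is the same mechanical rewrite: an eager `dict(zip(...))` or `OrderedDict(zip(...))` becomes the lazy `dict(izip(...))` / `OrderedDict(izip(...))` (one call site in `theano/scalar/basic.py` also needs `itertools.chain`, covered with its hunk). On Python 2, `zip` materializes a full list of pairs that is thrown away as soon as the dict is built; `izip` yields the pairs one at a time. The diff pulls `izip` from `theano.compat`, whose contents are not shown here; a minimal sketch of such a shim, under the usual six-style assumption, might be:

```python
# Hypothetical sketch of a theano.compat-style shim; the real module
# may differ. On Python 3 the builtin zip is already an iterator.
import sys

if sys.version_info[0] >= 3:
    izip = zip
else:
    from itertools import izip  # lazy pairs on Python 2

keys = ['a', 'b', 'c']
vals = [1, 2, 3]
# Same resulting dict, but izip never builds the intermediate list:
assert dict(izip(keys, vals)) == dict(zip(keys, vals))
```

The lazy form is single-pass, which is safe in every hunk below because each `izip` result is consumed exactly once by the enclosing constructor.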
theano/compile/builders.py

 import theano
 from theano import gof
+from theano.compat import izip
 from theano.compile.function_module import orig_function
 from theano.compile import SharedVariable, rebuild_collect_shared
 from theano.gof import ops_with_inner_function

@@ -80,8 +81,8 @@ class OpFromGraph(gof.Op):
                        if isinstance(var, SharedVariable)]
         shared_vars = [var.type() for var in self.shared_inputs]
         new = rebuild_collect_shared(outputs,
                                      inputs=inputs + shared_vars,
-                                     replace=dict(zip(self.shared_inputs,
-                                                      shared_vars)),
+                                     replace=dict(izip(self.shared_inputs,
+                                                       shared_vars)),
                                      copy_inputs_over=False)
         (new_inputs, new_outputs,
          [clone_d, update_d, update_expr, shared_inputs]) = new

@@ -143,8 +144,8 @@ class OpFromGraph(gof.Op):
             grad_ops = self.grad_ops
         else:
             gs = theano.gradient.grad(cost=None,
-                                      known_grads=dict(zip(self.new_outputs,
-                                                           output_grads)),
+                                      known_grads=dict(izip(self.new_outputs,
+                                                            output_grads)),
                                       wrt=self.new_inputs,
                                       disconnected_inputs='ignore')
theano/compile/function_module.py

@@ -1047,11 +1047,11 @@ class FunctionMaker(object):
                 t2 = removeAllFgraph(t2)
-                givens = dict(zip(gof.graph.inputs([t1]),
-                                  gof.graph.inputs([t2])))
+                givens = dict(izip(gof.graph.inputs([t1]),
+                                   gof.graph.inputs([t2])))
-                temp = dict(zip(gof.graph.inputs([t1]),
-                                gof.graph.inputs([t2])))
+                temp = dict(izip(gof.graph.inputs([t1]),
+                                 gof.graph.inputs([t2])))
                 # hack to remove inconstent entry in givens
                 # seems to work that but source of inconsistency
theano/gof/opt.py

@@ -17,6 +17,7 @@ import numpy
 import theano
 from theano import config
+from theano.compat import izip
 from theano.compat.six import string_types, iteritems
 from theano.compat.six.moves import reduce
 from theano.gof import graph, op, utils, unify, toolbox

@@ -1211,7 +1212,7 @@ class PatternSub(LocalOptimizer):
             ret = self.transform(real_node, get_nodes=False)
             if ret is not False and ret is not None:
                 assert len(real_node.outputs) == len(ret)
-                return dict(zip(real_node.outputs, ret))
+                return dict(izip(real_node.outputs, ret))
         if node.op != self.op:
             return False
theano/gradient.py

 """Driver for gradient calculations."""
 from __future__ import print_function
 import theano.compat.six.moves.builtins as builtins
-from theano.compat import izip
 import logging
 import time
 import warnings

@@ -12,7 +11,7 @@ import theano
 from theano import gof
 from theano.gof import Variable
-from theano.compat import OrderedDict
+from theano.compat import OrderedDict, izip
 from theano.compat.six.moves import xrange
 from theano.gof.null_type import NullType, null_type
 from theano.gof.op import get_debug_values

@@ -700,7 +699,7 @@ def subgraph_grad(wrt, end, start=None, cost=None, details=False):
         for i in range(len(grads)):
             grads[i] += cost_grads[i]
-    pgrads = OrderedDict(zip(params, grads))
+    pgrads = OrderedDict(izip(params, grads))
     # separate wrt from end grads:
     wrt_grads = list(pgrads[k] for k in wrt)
     end_grads = list(pgrads[k] for k in end)
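The `OrderedDict` call sites need no extra care: both `dict` and `OrderedDict` accept any iterable of key/value pairs, and `OrderedDict` records keys in exactly the order the iterator yields them. A quick check (the `params`/`grads` names are illustrative, not taken from the diff):

```python
from collections import OrderedDict

try:
    from itertools import izip  # Python 2: lazy pairs
except ImportError:
    izip = zip                  # Python 3: builtin zip is already lazy

params = ['w', 'b']
grads = [0.1, -0.2]

od = OrderedDict(izip(params, grads))
assert list(od) == params  # insertion order follows the iterator
assert od['w'] == 0.1
```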
theano/sandbox/scan.py

@@ -18,6 +18,7 @@ from theano.compile import SharedVariable, function
 from theano.compat.six import iteritems
 from theano import compile
 from theano import gof
+from theano.compat import izip
 from theano.compat import OrderedDict, ifilter
 from theano.tensor import opt
 from theano import tensor

@@ -457,8 +458,8 @@ def scan(fn,
     # as non sequences at the end of our args
     fake_nonseqs = [x.type() for x in non_seqs]
     fake_outputs = scan_utils.clone(outputs + updates.values(),
-                                    replace=dict(zip(non_seqs,
-                                                     fake_nonseqs)))
+                                    replace=dict(izip(non_seqs,
+                                                      fake_nonseqs)))
     all_inputs = ifilter(
         lambda x: (isinstance(x, gof.Variable) and
                    not isinstance(x, SharedVariable) and

@@ -567,7 +568,7 @@ def scan(fn,
                       if (not isinstance(arg, SharedVariable) and
                           not isinstance(arg, tensor.Constant))]
-    givens.update(dict(zip(other_scan_args, other_inner_args)))
+    givens.update(dict(izip(other_scan_args, other_inner_args)))
     other_shared_scan_args = [arg.variable for arg
                               in dummy_f.maker.expanded_inputs
                               if (isinstance(arg.variable, SharedVariable) and

@@ -576,8 +577,7 @@ def scan(fn,
                               in dummy_f.maker.expanded_inputs
                               if (isinstance(arg.variable, SharedVariable) and
                                   not arg.update)]
-    givens.update(dict(zip(other_shared_scan_args,
-                           other_shared_inner_args)))
+    givens.update(dict(izip(other_shared_scan_args, other_shared_inner_args)))
     ##
     # Step 6. Re-order the outputs and clone them replacing things
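A side note on the `givens.update(dict(izip(...)))` pattern above: `dict.update` itself accepts an iterable of key/value pairs, so the intermediate `dict(...)` is not strictly required; the commit deliberately keeps each call site's shape and swaps only `zip` for `izip`. A sketch of the equivalent direct form:

```python
givens = {}
pairs = zip(['a', 'b'], [1, 2])  # izip on Python 2

# update() consumes the iterator directly, no temporary dict needed:
givens.update(pairs)
assert givens == {'a': 1, 'b': 2}
```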
theano/sandbox/scan_module/tests/test_scan.py

@@ -136,8 +136,7 @@ class TestScan(unittest.TestCase):
             shared_outs = [sh * 5 for sh in shared_vars]
             states_out = [x for x in states_out]
             pure_outs = [2 for x in xrange(n_outputs)]
-            return states_out + pure_outs, dict(zip(shared_vars,
-                                                    shared_outs))
+            return states_out + pure_outs, dict(izip(shared_vars, shared_outs))

         def execute_inner_graph(*args):
             """
theano/scalar/basic.py

@@ -13,6 +13,7 @@ you probably want to use theano.tensor.[c,z,f,d,b,w,i,l,]scalar!
 """
 from __future__ import print_function
+from itertools import chain
 import math
 import warnings
 from copy import copy

@@ -21,7 +22,7 @@ from textwrap import dedent
 import numpy
 import theano
-from theano.compat import PY3, imap
+from theano.compat import PY3, imap, izip
 from theano import gof, printing
 from theano.gof import (Op, utils, Variable, Constant, Type, Apply,
                         FunctionGraph)

@@ -3425,7 +3426,7 @@ class Composite(ScalarOp):
         res2 = theano.compile.rebuild_collect_shared(
             inputs=outputs[0].owner.op.inputs,
             outputs=outputs[0].owner.op.outputs,
-            replace=dict(zip(outputs[0].owner.op.inputs, res[1]))
+            replace=dict(izip(outputs[0].owner.op.inputs, res[1]))
         )
         assert len(res2[1]) == len(outputs)
         assert len(res[0]) == len(inputs)

@@ -3461,7 +3462,7 @@ class Composite(ScalarOp):
         assert len(inputs) == self.nin
         res = theano.compile.rebuild_collect_shared(
             self.outputs,
-            replace=dict(zip(self.inputs, inputs)),
+            replace=dict(izip(self.inputs, inputs)),
             rebuild_strict=False)
         # After rebuild_collect_shared, the Variable in inputs
         # are not necessarily in the graph represented by res.

@@ -3485,11 +3486,9 @@ class Composite(ScalarOp):
         raise NotImplementedError("grad is not implemented for Composite")

     def c_code(self, node, nodename, inames, onames, sub):
-        d = dict(zip(["i%i" % i for i in xrange(len(inames))],
-                     inames) +
-                 zip(["o%i" % i for i in xrange(len(onames))],
-                     onames),
-                 **sub)
+        d = dict(chain(izip(("i%i" % i for i in xrange(len(inames))), inames),
+                       izip(("o%i" % i for i in xrange(len(onames))), onames)),
+                 **sub)
         d['nodename'] = nodename
         if not 'id' in sub:
             # The use of a dummy id is safe as the code is in a separate block.
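The `c_code` hunk is the one place where a plain token swap is not enough: the old code concatenated two `zip(...)` results with `+`, which only works when both are lists. Because `izip` returns an iterator, the two lazy streams are spliced with `itertools.chain` instead, and the inner list comprehensions become generator expressions. A standalone illustration of the equivalence (the `inames`/`onames` values are made up for the demo):

```python
from itertools import chain

inames = ['x', 'y']
onames = ['z']

# Old, eager style: on Python 2, zip returned lists, so the two
# results could simply be concatenated with +.
eager = dict(list(zip(["i%i" % i for i in range(len(inames))], inames)) +
             list(zip(["o%i" % i for i in range(len(onames))], onames)))

# New, lazy style: chain two iterators instead of adding two lists
# (izip on Python 2; the builtin zip is already lazy on Python 3).
lazy = dict(chain(zip(("i%i" % i for i in range(len(inames))), inames),
                  zip(("o%i" % i for i in range(len(onames))), onames)))

assert eager == lazy == {'i0': 'x', 'i1': 'y', 'o0': 'z'}
```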
theano/scan_module/scan.py

@@ -46,7 +46,7 @@ import logging
 import numpy
 import warnings
-from theano.compat import ifilter
+from theano.compat import ifilter, izip
 from theano.compat.six import iteritems
 from theano.compile import SharedVariable, function
 from theano import compile

@@ -790,8 +790,8 @@ def scan(fn,
     # as non sequences at the end of our args
     fake_nonseqs = [x.type() for x in non_seqs]
     fake_outputs = scan_utils.clone(outputs,
-                                    replace=OrderedDict(zip(non_seqs,
-                                                            fake_nonseqs)))
+                                    replace=OrderedDict(izip(non_seqs,
+                                                             fake_nonseqs)))
     all_inputs = ifilter(
         lambda x: (isinstance(x, gof.Variable) and
                    not isinstance(x, SharedVariable) and

@@ -915,7 +915,7 @@ def scan(fn,
                       if (not isinstance(arg, SharedVariable) and
                           not isinstance(arg, tensor.Constant))]
-    givens.update(OrderedDict(zip(other_scan_args, other_inner_args)))
+    givens.update(OrderedDict(izip(other_scan_args, other_inner_args)))
     if strict:
         non_seqs_set = set(non_sequences if non_sequences != None else [])

@@ -939,8 +939,8 @@ def scan(fn,
                               in dummy_f.maker.expanded_inputs
                               if (isinstance(arg.variable, SharedVariable) and
                                   not arg.update)]
-    givens.update(OrderedDict(zip(other_shared_scan_args,
-                                  other_shared_inner_args)))
+    givens.update(OrderedDict(izip(other_shared_scan_args,
+                                   other_shared_inner_args)))
     ##
     # Step 6. Re-order the outputs and clone them replacing things
theano/tensor/utils.py

 import numpy
 import theano
+from theano.compat import izip
 from theano.gof.cc import hash_from_code

@@ -96,7 +97,7 @@ def shape_of_variables(fgraph, input_shapes):
                           for dim in input_shapes[inp]]
     numeric_output_dims = compute_shapes(*numeric_input_dims)
-    sym_to_num_dict = dict(zip(output_dims, numeric_output_dims))
+    sym_to_num_dict = dict(izip(output_dims, numeric_output_dims))
     l = {}
     for var in fgraph.shape_feature.shape_of:
theano/tests/test_gradient.py

@@ -8,7 +8,7 @@ import numpy as np
 import theano
 from theano import gof
-from theano.compat import OrderedDict
+from theano.compat import OrderedDict, izip
 from theano.tests import unittest_tools as utt
 from theano import gradient

@@ -26,8 +26,8 @@ def grad_sources_inputs(sources, inputs):
     """
     if inputs is None:
         inputs = theano.gof.graph.inputs([source[0] for source in sources])
-    return dict(zip(inputs, theano.gradient.grad(
-        cost=None, known_grads=dict(sources), wrt=inputs, consider_constant=inputs)))
+    return dict(izip(inputs, theano.gradient.grad(
+        cost=None, known_grads=dict(sources), wrt=inputs, consider_constant=inputs)))

 class testgrad_sources_inputs(unittest.TestCase):

@@ -467,7 +467,7 @@ def test_known_grads():
     for layer in layers:
         print('Testing by separately computing ', layer)
         first = theano.tensor.grad(cost, layer, disconnected_inputs='ignore')
-        known = dict(zip(layer, first))
+        known = dict(izip(layer, first))
         full = theano.tensor.grad(cost=None, known_grads=known,
                                   wrt=inputs, disconnected_inputs='ignore')
         full = theano.function(inputs, full)

@@ -599,7 +599,7 @@ def test_subgraph_grad():
             wrt=params[i], end=grad_ends[i],
             start=next_grad, cost=costs[i]
         )
-        next_grad = OrderedDict(zip(grad_ends[i], next_grad))
+        next_grad = OrderedDict(izip(grad_ends[i], next_grad))
         param_grads.extend(param_grad)
     pgrads = theano.function(inputs, param_grads)