testgroup / pytensor / Commits

Commit d0c93bbe
Authored Jul 21, 2014 by Hengjean; committed by Frederic on Oct 21, 2014
Added flag, defaulted to False.
Parent 99157a6d
Showing 2 changed files with 105 additions and 93 deletions.

theano/compile/function_module.py   +98 -93
theano/configdefaults.py             +7  -0
theano/compile/function_module.py
@@ -1104,104 +1104,109 @@ class FunctionMaker(object):

This hunk re-indents the pre-existing graph_db lookup under the new
theano.config.cache_optimizations guard and adds an else: branch that
runs the optimizer unconditionally. The block as it stands after the
commit:

                    graph_db = {}
                print 'loaded graph_db from %s, size=%d' % (graph_db_file,
                                                            len(graph_db))
                if theano.config.cache_optimizations:
                    need_optimize = True
                    # the sole purpose of this loop is to set 'need_optimize'
                    for i, graph_old in enumerate(graph_db.keys()):
                        inputs_old = graph_old.inputs
                        outputs_old = graph_old.outputs
                        size_old = len(graph_old.apply_nodes)
                        print 'looping through graph_db %d/%d' % (
                            i + 1, len(graph_db))
                        # Some heuristics to check is the same graphs have
                        # already been optimized before.
                        if len(inputs_new) != len(inputs_old):
                            # If the inputs are of different size,
                            # two graphs are for sure different
                            print 'need to optimize, because input size is different'
                            continue
                        elif len(outputs_new) != len(outputs_old):
                            # If the inputs are of different size,
                            # two graphs are for sure different
                            print 'need to optimize, because output size is different'
                            continue
                        elif not all(input_new.type == input_old.type
                                     for input_new, input_old in
                                     zip(inputs_new, inputs_old)):
                            print 'need to optimize, because inputs are of different types'
                            continue
                        elif not all(output_new.type == output_old.type
                                     for output_new, output_old in
                                     zip(outputs_new, outputs_old)):
                            print 'need to optimize, because outputs are of different types'
                            continue
                        elif not size_old == size_new:
                            print 'need to optimize, because numbers of nodes in graph are different'
                            continue
                        else:
                            flags = []
                            for output_new, output_old, i in zip(
                                    outputs_new, outputs_old,
                                    range(len(outputs_new))):
                                print 'loop through outputs node for both graphs'
                                f2 = output_old.owner.fgraph.clone()
                                t1 = output_new
                                t2 = f2.outputs[i]

                                def removeAllFgraph(remove):
                                    if hasattr(remove, 'fgraph'):
                                        del remove.fgraph
                                    if hasattr(remove, 'owner'):
                                        if remove.owner == None:
                                            pass
                                        else:
                                            if hasattr(remove.owner, 'fgraph'):
                                                del remove.owner.fgraph
                                            if hasattr(remove.owner, 'inputs'):
                                                remove.owner.inputs = [
                                                    removeAllFgraph(i)
                                                    for i in remove.owner.inputs]
                                                for o in remove.owner.outputs:
                                                    if hasattr(o, 'fgraph'):
                                                        del o.fgraph
                                    return remove

                                t2 = removeAllFgraph(t2)

                                givens = dict(zip(gof.graph.inputs([t1]),
                                                  gof.graph.inputs([t2])))
                                temp = dict(zip(gof.graph.inputs([t1]),
                                                gof.graph.inputs([t2])))
                                for key, value in temp.iteritems():
                                    if key.type != value.type:
                                        del givens[key]
                                flag = is_same_graph(t1, t2, givens=givens)
                                flags.append(flag)
                            is_same = all(flags)
                            if is_same:
                                # found the match
                                print 'found the match, no need to optimize'
                                need_optimize = False
                                key = graph_old
                                break
                    # now optimize or not
                    if need_optimize:
                        # this is a brand new graph, optimize it,
                        # save it to graph_db
                        print 'optimizing the graph'
                        before_opt = fgraph.clone()
                        start_optimizer = time.time()
                        optimizer_profile = optimizer(fgraph)
                        end_optimizer = time.time()
                        opt_time = end_optimizer - start_optimizer
                        graph_db.update({before_opt: fgraph})
                        f = open(graph_db_file, 'w+b')
                        cPickle.dump(graph_db, f, -1)
                        f.close()
                        print 'saved into graph_db'
                    else:
                        print 'no opt, get graph from graph_db'
                        # just read the optmized graph from graph_db
                        opt_time = 0
                        self.fgraph = graph_db[key]
                        fgraph = self.fgraph
                        # release stuff
                        release_lock()
                else:
                    start_optimizer = time.time()
                    optimizer_profile = optimizer(fgraph)
                    end_optimizer = time.time()
                    opt_time = end_optimizer - start_optimizer
                print 'opt took %s' % opt_time
                if profile:
                    profile.optimizer_time += opt_time
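The hunk above is, in effect, a pickled on-disk memo table: a dict mapping each unoptimized graph to its optimized counterpart, probed with cheap size- and type-based heuristics before falling back to the expensive is_same_graph structural comparison. Below is a minimal standalone sketch of that pattern, kept in the same Python 2 idiom as the diff; optimize_fn and graphs_match are hypothetical stand-ins for the optimizer and for the heuristics-plus-is_same_graph test, not Theano APIs.

import cPickle
import time


def load_graph_db(path):
    # A missing or unreadable cache file simply means an empty cache.
    try:
        f = open(path, 'rb')
    except IOError:
        return {}
    try:
        return cPickle.load(f)
    finally:
        f.close()


def cached_optimize(graph, optimize_fn, graphs_match,
                    path='optimized_graphs.pkl'):
    # Look up a previously optimized copy of `graph`; on a miss, optimize
    # for real and persist the result, as the hunk above does.
    # Assumes graph objects are hashable and picklable.
    graph_db = load_graph_db(path)
    for i, graph_old in enumerate(graph_db.keys()):
        print 'looping through graph_db %d/%d' % (i + 1, len(graph_db))
        if graphs_match(graph, graph_old):
            print 'found the match, no need to optimize'
            return graph_db[graph_old]
    print 'optimizing the graph'
    start = time.time()
    optimized = optimize_fn(graph)
    print 'opt took %s' % (time.time() - start)
    graph_db[graph] = optimized
    f = open(path, 'w+b')
    cPickle.dump(graph_db, f, -1)  # protocol -1 = highest available
    f.close()
    print 'saved into graph_db'
    return optimized

Because the key is the whole unoptimized graph, every lookup is a linear scan with a structural comparison per surviving candidate; that is why the new config text below warns that the first optimization becomes noticeably slower.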
theano/configdefaults.py
@@ -538,3 +538,10 @@ AddConfigVar('check_input',
              "(particularly for scalars) and reduce the number of generated C "
              "files.",
              BoolParam(True))
+
+AddConfigVar('cache_optimizations',
+             "Specify if the optimization cache should be used. This cache will"
+             "any optimized graph and its optimization. Actually slow downs a lot"
+             "the first optimization, and could possibly still contains some bugs."
+             "Use at your own risks.",
+             BoolParam(False))
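Assuming a Theano checkout that includes this commit, the new variable should behave like any other config flag; a hedged usage sketch follows (the flag name is taken from the diff above, and whether runtime assignment is permitted depends on how the ConfigParam is registered):

# From the shell, for a hypothetical script:
#   THEANO_FLAGS='cache_optimizations=True' python my_script.py

# Or in code, before any function is compiled:
import theano
theano.config.cache_optimizations = True  # commit default is False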