testgroup / pytensor · Commits

Commit b7001383
Authored Jun 26, 2015 by Iulian Vlad Serban
Parent: 891236a8

    Further work on issue #3018.

Showing 5 changed files with 92 additions and 24 deletions:
    theano/gof/fg.py        +7   -5
    theano/gof/link.py      +8   -6
    theano/gof/op.py        +8   -5
    theano/gof/utils.py     +1   -1
    theano/tensor/opt.py    +68  -7
theano/gof/fg.py
@@ -333,13 +333,15 @@ class FunctionGraph(utils.object2):
             assert path is not None
             tr = getattr(r.tag, 'trace', None)
             detailed_err_msg = ""
-            if tr:
-                sio = StringIO()
-                traceback.print_list(tr, sio)
-                tr = sio.getvalue()
+            if len(tr) > 0:
                 detailed_err_msg += "\nBacktrace when the variable is created:\n"
-                detailed_err_msg += str(tr)
+                # Print separate message for each element in
+                # the list of batcktraces
+                sio = StringIO()
+                for subtr in tr:
+                    traceback.print_list(subtr, sio)
+                detailed_err_msg += str(sio.getvalue())
 
             raise MissingInputError(
                 'A variable that is an input to the graph was '
                 'neither provided as an input to the function '
...
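The rewritten branch above assumes tag.trace now holds a list of backtraces rather than a single extracted stack. A minimal, runnable sketch of how such a list is flattened into one message; format_trace_list is a hypothetical helper and the io.StringIO import stands in for whatever StringIO Theano actually imports:

    import traceback
    from io import StringIO  # stand-in for Theano's own StringIO import

    def format_trace_list(trace_list):
        # Render every stored backtrace one after the other, mirroring the
        # `for subtr in tr` loop added in this commit.
        sio = StringIO()
        for subtr in trace_list:
            traceback.print_list(subtr, sio)
        return sio.getvalue()

    # A tag.trace in the new format: a list of extracted stacks.
    trace_list = [traceback.extract_stack(), traceback.extract_stack()]
    print(format_trace_list(trace_list))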
theano/gof/link.py
@@ -156,14 +156,16 @@ def raise_with_op(node, thunk=None, exc_info=None, storage_map=None):
                      "HINT: Use another linker then the c linker to"
                      " have the inputs shapes and strides printed.")
 
     # Print node backtrace
     tr = getattr(node.outputs[0].tag, 'trace', None)
-    if tr:
-        sio = StringIO()
-        traceback.print_list(tr, sio)
-        tr = sio.getvalue()
+    if len(tr) > 0:
         detailed_err_msg += "\nBacktrace when the node is created:\n"
-        detailed_err_msg += str(tr)
+
+        # Print separate message for each element in the list of batcktraces
+        sio = StringIO()
+        for subtr in tr:
+            traceback.print_list(subtr, sio)
+        detailed_err_msg += str(sio.getvalue())
     else:
         hints.append(
             "HINT: Re-running with most Theano optimization disabled could"
...
theano/gof/op.py
@@ -459,13 +459,16 @@ class PureOp(object):
                         "For compute_test_value, one input test value does not"
                         " have the requested type.\n")
                     tr = getattr(v.tag, 'trace', None)
-                    if tr:
-                        sio = StringIO()
-                        traceback.print_list(tr, sio)
-                        tr = sio.getvalue()
+                    if len(tr) > 0:
                         detailed_err_msg += (
                             "\nBacktrace when that variable is created:\n")
-                        detailed_err_msg += str(tr)
+
+                        # Print separate message for each element in the list
+                        # of batcktraces
+                        sio = StringIO()
+                        for subtr in tr:
+                            traceback.print_list(subtr, sio)
+                        detailed_err_msg += str(sio.getvalue())
 
                     detailed_err_msg += (
                         "\nThe error when converting the test value to that"
                         " variable type:")
...
theano/gof/utils.py
@@ -94,7 +94,7 @@ def add_tag_trace(thing, user_line=1):
     # The order is from the oldest to the newest
     if len(tr) > user_line:
         tr = tr[-user_line:]
-    thing.tag.trace = tr
+    thing.tag.trace = [tr]
     return thing
...
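This one-character change is what the earlier files rely on: the stack captured when a variable is created is now stored as a one-element list, so later code can append further backtraces to it. A simplified, runnable sketch of the new behaviour, using stand-in classes rather than Theano's real Variable and tag objects:

    import traceback

    class _Tag(object):
        pass

    class _Thing(object):
        def __init__(self):
            self.tag = _Tag()

    def add_tag_trace(thing, user_line=1):
        # Capture the caller's stack, keep only the newest `user_line` frames,
        # and store the result wrapped in a list (the change in this hunk).
        tr = traceback.extract_stack()[:-1]
        if len(tr) > user_line:
            tr = tr[-user_line:]
        thing.tag.trace = [tr]
        return thing

    t = add_tag_trace(_Thing())
    assert isinstance(t.tag.trace, list) and len(t.tag.trace) == 1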
theano/tensor/opt.py
@@ -62,6 +62,53 @@ theano.configparser.AddConfigVar('on_shape_error',
 # Utilities
 
+
+def copy_stack_trace(from_var, to_var):
+    """
+    Copies the stack trace from one or more tensor variables to
+    one or more tensor variables.
+
+    :param from_var: tensor variable or list of tensor variables to
+        copy stack traces from.
+
+    :param to_var: tensor variable or list of tensor variables to
+        copy stack traces to.
+
+    .. note:: The stacktrace is assumed to be of the form of a list of lists
+        of tuples. Each tuple contains the filename, line number, function name
+        and so on. Each list of tuples contains the truples belonging to a
+        particular variable.
+    """
+
+    # Store stack traces from from_var
+    tr = []
+    if type(from_var) is list:
+        # If from_var is a list, store concatenated stack traces
+        if len(from_var) > 0:
+            for v in from_var:
+                if hasattr(v.tag, 'trace') and len(v.tag.trace) > 0:
+                    tr = tr + v.tag.trace
+    else:
+        # If from_var is not a list, it must be a single tensor
+        # variable, so just store that particular stack trace
+        if hasattr(from_var.tag, 'trace'):
+            tr = from_var.tag.trace
+
+    # Copy over stack traces to to_var
+    if type(to_var) is list:
+        # Copy over stack traces from from_var to each variable in
+        # to_var, including the stack_trace of the to_var before
+        for v in to_var:
+            if hasattr(v.tag, 'trace'):
+                v.tag.trace = v.tag.trace + tr
+            else:
+                v.tag.trace = tr
+    else:
+        # Copy over stack traces from from_var to each variable to
+        # to_var, including the stack_trace of the to_var before
+        if hasattr(to_var.tag, 'trace'):
+            to_var.tag.trace = to_var.tag.trace + tr
+        else:
+            to_var.tag.trace = tr
+
+
 def out2in(*local_opts, **kwargs):
     """WRITEME """
...
@@ -480,6 +527,7 @@ def local_dimshuffle_lift(node):
                                    op.new_order,
                                    op.inplace)(inp)
             new_inputs.append(apply_local_dimshuffle_lift(new_inp))
+        copy_stack_trace(node.outputs[0], new_inputs)
         ret = inode.op(*new_inputs, **dict(return_list=True))
         return ret
     if inode and isinstance(inode.op, DimShuffle):
...
@@ -487,6 +535,7 @@ def local_dimshuffle_lift(node):
                      op.new_order]
         inplace = op.inplace and inode.op.inplace
         iinput = inode.inputs[0]
     # remove useless dimshuffle
     if (new_order == list(range(len(new_order))) and
             len(new_order) == iinput.type.ndim):
...
@@ -494,8 +543,9 @@ def local_dimshuffle_lift(node):
     else:
         ret = op.__class__(iinput.type.broadcastable, new_order,
                            inplace)(iinput)
-    return [apply_local_dimshuffle_lift(ret)]
+    ret = apply_local_dimshuffle_lift(ret)
+    copy_stack_trace(node.outputs[0], ret)
+    return [ret]
 
 
 @register_canonicalize
 @gof.local_optimizer([T.DimShuffle])
...
@@ -528,7 +578,9 @@ def dimshuffle_as_view(node):
     if not isinstance(op, DimShuffle) or op.inplace:
         return False
     new_op = op.__class__(op.input_broadcastable, op.new_order, inplace=True)
-    return [new_op(*node.inputs)]
+    v = new_op(*node.inputs)
+    copy_stack_trace(node.outputs[0], v)
+    return [v]
 
 
 # Step 60 is the inplace optimization stage.
 compile.optdb.register('dimshuffle_as_view',
...
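The remaining hunks all repeat the call-site pattern of dimshuffle_as_view: build the replacement variable, copy the trace of the output being replaced onto it, and return it in a list. A runnable toy version of that pattern follows; every class and function name here is a stand-in for illustration, not part of the commit or of Theano's API:

    class _Tag(object):
        pass

    class _Var(object):
        def __init__(self, trace=()):
            self.tag = _Tag()
            self.tag.trace = list(trace)

    class _Node(object):
        def __init__(self, inputs, outputs):
            self.inputs = inputs
            self.outputs = outputs

    def copy_stack_trace(from_var, to_var):
        # Single-variable case only; enough for this sketch.
        to_var.tag.trace = to_var.tag.trace + from_var.tag.trace

    def toy_local_opt(node):
        # Build the replacement for node.outputs[0] (here just a fresh
        # variable), carry the original creation backtrace over, and return
        # it as a one-element list, the convention of local optimizers.
        v = _Var()
        copy_stack_trace(node.outputs[0], v)
        return [v]

    old_out = _Var(trace=[[('user.py', 7, '<module>', 'z = x.dimshuffle(0)')]])
    replacement, = toy_local_opt(_Node(inputs=[], outputs=[old_out]))
    assert replacement.tag.trace == old_out.tag.trace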
@@ -1290,7 +1342,9 @@ def local_fill_to_alloc(node):
         rval = [T.cast(v, node.outputs[0].type.dtype)]
     elif r.type.broadcastable == node.outputs[0].type.broadcastable:
         # we are broadcasting v somehow, but not r
-        rval = [broadcast_like(v, r, node.fgraph, dtype=v.dtype)]
+        o = broadcast_like(v, r, node.fgraph, dtype=v.dtype)
+        copy_stack_trace(node.outputs[0], o)
+        rval = [o]
     else:
         # we are broadcasting both v and r,
         # the output shape must be computed
...
@@ -1857,7 +1911,7 @@ theano.configparser.AddConfigVar(
     theano.configparser.BoolParam(True),
     in_c_key=False)
 
-#######################
+############################
 # Constant Canonicalization
 ############################
...
@@ -4974,7 +5028,11 @@ def constant_folding(node):
             constant = output.type.Constant
         except AttributeError:
             constant = Constant
 
-        rval.append(constant(output.type, storage_map[output][0]))
+        v = constant(output.type, storage_map[output][0])
+        copy_stack_trace(output, v)
+
+        rval.append(v)
     return rval
...
@@ -5854,7 +5912,10 @@ def local_add_mul_fusion(node):
                 isinstance(inp.owner.op.scalar_op, s_op)):
             l = list(node.inputs)
             l.remove(inp)
-            return [node.op(*(l + inp.owner.inputs))]
+            output_node = node.op(*(l + inp.owner.inputs))
+            copy_stack_trace(node.outputs[0], output_node)
+
+            return [output_node]
 
 if config.tensor.local_elemwise_fusion:
     _logger.debug("enabling optimization fusion elemwise in fast_run")
...