testgroup / pytensor · Commit e6724e8d

Authored Apr 24, 2016 by Pascal Lamblin

Merge pull request #4383 from saebrahimi/keep_stack_trace_tensor_tests/test_opt

Make stack trace tests in `tensor/tests/test_opt.py` use `check_stack_trace`.

Parents: e3569d12, 4ace8420

Showing 2 changed files with 121 additions and 109 deletions:

  theano/gof/opt.py                  +3    -3
  theano/tensor/tests/test_opt.py    +118  -106
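The change is mechanical across `theano/tensor/tests/test_opt.py`: ad-hoc `hasattr(..., 'trace')` assertions are replaced by calls to `check_stack_trace` from `theano.gof.opt`, which also verifies that the recorded trace is non-empty. A minimal sketch of the pattern, assuming a Theano install and using the same call forms that appear in the diff below (the graph compiled here is illustrative, not taken from any one test):

    from theano import function, tensor
    from theano.gof.opt import check_stack_trace

    x = tensor.matrix('x')
    f = function([x], tensor.exp(x)[0])

    # Old style used throughout the tests: only proves a 'trace' attribute
    # exists on the output's tag, even if it is empty.
    assert hasattr(f.outputs[0].variable.tag, 'trace')

    # New style: every checked node must carry a non-empty stack trace after
    # optimization. ops_to_check may be 'all', 'last', an Op class such as
    # tensor.Subtensor, or a list of classes.
    assert check_stack_trace(f, ops_to_check='all')

The assertion is expected to fail precisely when an optimization rewrites the graph without copying stack traces onto the nodes it introduces, which is what these tests are meant to catch.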
theano/gof/opt.py
@@ -2667,7 +2667,8 @@ def check_stack_trace(f_or_fgraph, ops_to_check='last', bug_print='raise'):
     # if ops_to_check is a string
     if isinstance(ops_to_check, string_types):
         if ops_to_check == 'last':
-            apply_nodes_to_check = [fgraph.outputs[0].owner]
+            apply_nodes_to_check = [fgraph.outputs[i].owner
+                                    for i in range(len(fgraph.outputs))]
         elif ops_to_check == 'all':
            apply_nodes_to_check = fgraph.apply_nodes
         else:
@@ -2714,8 +2715,7 @@ def check_stack_trace(f_or_fgraph, ops_to_check='last', bug_print='raise'):
     for node in apply_nodes_to_check:
         for output in node.outputs:
             if (not hasattr(output.tag, 'trace') or
                     not output.tag.trace):
                 return False
     return True
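One behavioural detail worth noting in the hunk above: with `ops_to_check='last'`, `check_stack_trace` previously inspected only the node producing `fgraph.outputs[0]`; after this commit it collects the owner of every output. A hedged sketch of the difference for a multi-output function (the function below is illustrative, not part of the commit):

    from theano import function, tensor
    from theano.gof.opt import check_stack_trace

    x = tensor.vector('x')
    f = function([x], [tensor.exp(x), tensor.log(x)])

    # 'last' now requires the node behind each of the two outputs to carry a
    # non-empty tag.trace; before the change only the first output was checked.
    assert check_stack_trace(f, ops_to_check='last')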
theano/tensor/tests/test_opt.py
@@ -61,6 +61,7 @@ from theano.tensor.elemwise import DimShuffle
 from theano.tests import unittest_tools as utt
 from theano.compile.mode import optdb
 from theano.compile import Mode
+from theano.gof.opt import check_stack_trace
 from nose.plugins.attrib import attr

 mode_opt = theano.config.mode
@@ -110,8 +111,7 @@ class test_dimshuffle_lift(unittest.TestCase):
         self.assertTrue(str(g) == "[DimShuffle{1,0}(DimShuffle{1,0}(x))]")
         dimshuffle_lift.optimize(g)
         self.assertTrue(str(g) == "[x]")
-        # Check stacktrace was copied over correctly after opt was applied
-        self.assertTrue(hasattr(g.outputs[0].tag, 'trace'))
+        # no need to check_stack_trace as graph is supposed to be empty

     def test_merge2(self):
         x, y, z = inputs()
@@ -123,7 +123,7 @@ class test_dimshuffle_lift(unittest.TestCase):
         dimshuffle_lift.optimize(g)
         self.assertTrue(str(g) == "[DimShuffle{0,1,x,x}(x)]", str(g))
         # Check stacktrace was copied over correctly after opt was applied
-        self.assertTrue(hasattr(g.outputs[0].tag, 'trace'))
+        self.assertTrue(check_stack_trace(g, ops_to_check='all'))

     def test_elim3(self):
         x, y, z = inputs()
@@ -135,8 +135,7 @@ class test_dimshuffle_lift(unittest.TestCase):
                         str(g))
         dimshuffle_lift.optimize(g)
         self.assertTrue(str(g) == "[x]", str(g))
-        # Check stacktrace was copied over correctly after opt was applied
-        self.assertTrue(hasattr(g.outputs[0].tag, 'trace'))
+        # no need to check_stack_trace as graph is supposed to be empty

     def test_lift(self):
         x, y, z = inputs([False] * 1, [False] * 2, [False] * 3)
@@ -164,7 +163,7 @@ class test_dimshuffle_lift(unittest.TestCase):
         self.assertTrue(str(g) in (opt_str_g_inplace, opt_str_g_noinplace),
                         str(g))
         # Check stacktrace was copied over correctly after opt was applied
-        self.assertTrue(hasattr(g.outputs[0].tag, 'trace'))
+        self.assertTrue(check_stack_trace(g, ops_to_check='all'))

     def test_recursive_lift(self):
         v = T.vector(dtype="float64")
@@ -191,7 +190,7 @@ class test_dimshuffle_lift(unittest.TestCase):
         self.assertTrue(str(new_g) == opt_str_g)
         # Check stacktrace was copied over correctly after opt was applied
-        self.assertTrue(hasattr(new_g.outputs[0].tag, 'trace'))
+        self.assertTrue(check_stack_trace(new_g, ops_to_check='all'))

     def test_useless_dimshuffle(self):
         x, _, _ = inputs()
@@ -1649,8 +1648,8 @@ def test_local_useless_slice():
     # Now test that the stack trace is copied over properly,
     # before before and after optimization.
-    assert hasattr(f_unopt.outputs[0].variable.tag, 'trace')
-    assert hasattr(f_opt.outputs[0].variable.tag, 'trace')
+    assert check_stack_trace(f_unopt, ops_to_check='all')
+    assert check_stack_trace(f_opt, ops_to_check='all')

     # test a 4d tensor
     z = tensor.tensor4('z')
@@ -1670,8 +1669,8 @@ def test_local_useless_slice():
     # Finally, test that the stack trace is copied over properly,
     # before before and after optimization.
-    assert hasattr(f_opt_check.outputs[0].variable.tag, 'trace')
-    assert hasattr(f_opt_check_apply.outputs[0].variable.tag, 'trace')
+    assert check_stack_trace(f_opt_check, ops_to_check=Subtensor)
+    assert check_stack_trace(f_opt_check_apply, ops_to_check=Subtensor)


 def test_local_useless_inc_subtensor():
     x = tensor.matrix('x')
@@ -1890,31 +1889,25 @@ class test_local_subtensor_make_vector(unittest.TestCase):
         # which requires us to add the 'canonicalize' phase.
         mode = theano.compile.mode.Mode(optimizer=None).including(
             'canonicalize_db').including("local_subtensor_make_vector")
         f = function([x, y, z], v[0], mode=mode)
-        # Check stacktrace was copied over correctly after opt was applied
-        self.assertTrue(hasattr(f.outputs[0].variable.tag, 'trace'))
-        #import ipdb; ipdb.set_trace()

         # Compile function using all optimizations in fast_compile mode,
         # including the 'local_subtensor_make_vector' optimization
         mode = theano.compile.mode.get_mode('FAST_COMPILE').including(
             "local_subtensor_make_vector")
         f = function([x, y, z], v[0], mode=mode)
-        # Check stacktrace was copied over correctly after opt was applied
-        self.assertTrue(hasattr(f.outputs[0].variable.tag, 'trace'))
+        # The two cases in this test do not check the case where
+        # local_subtensor_make_vector inserts a Subtensor node (See issue #4421)
+        # self.assertTrue(check_stack_trace(f, ops_to_check='all'))


 class test_local_subtensor_lift(unittest.TestCase):
-    def _verify_stack_trace(self, f):
-        for output in f.outputs:
-            # Check stacktrace was copied over correctly after opt was applied
-            self.assertTrue(hasattr(output.variable.tag, 'trace'))
-
     def test0(self):
         # basic test that the Op works
         x = tensor.matrix('x')
         f = function([x], tensor.exp(x)[0], mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f, ops_to_check='all'))

         prog = f.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.Subtensor)  # first subtensor
@@ -1928,7 +1921,9 @@ class test_local_subtensor_lift(unittest.TestCase):
         x = tensor.matrix('x')
         f = function([x], [tensor.exp(x)[0], tensor.exp(x)], mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f,
+                        ops_to_check=[Subtensor, tensor.Elemwise]))

         prog = f.maker.fgraph.toposort()
         assert prog[0].op == tensor.exp
@@ -1944,7 +1939,9 @@ class test_local_subtensor_lift(unittest.TestCase):
         z = tensor.matrix('z')
         f = function([x, y, z], tensor.exp(x + y + z)[0], mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f,
+                        ops_to_check=[Subtensor, tensor.DimShuffle]))

         prog = f.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.DimShuffle)
@@ -1963,7 +1960,9 @@ class test_local_subtensor_lift(unittest.TestCase):
         z = tensor.matrix('z')
         f = function([x, y, z], tensor.exp(x + y + z)[0:2], mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f,
+                        ops_to_check=[Subtensor, tensor.DimShuffle]))

         prog = f.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.DimShuffle)
@@ -1981,7 +1980,8 @@ class test_local_subtensor_lift(unittest.TestCase):
         y = tensor.vector('y')
         f = function([y], tensor.exp(y.dimshuffle(0, 'x'))[0], mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f, ops_to_check='all'))

         prog = f.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.DimShuffle)
@@ -1998,7 +1998,8 @@ class test_local_subtensor_lift(unittest.TestCase):
         y = tensor.vector('y')
         f = function([x, y], tensor.exp(x + y)[0], mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Opt doesn't apply, so no need for check_stack_trace
+        # self.assertTrue(check_stack_trace(f, ops_to_check='all'))

         prog = f.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.DimShuffle)
@@ -2016,7 +2017,8 @@ class test_local_subtensor_lift(unittest.TestCase):
         f = function([x, y], [tensor.exp(x + y)[0], tensor.exp(x + y) + x],
                      mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Opt doesn't apply, so no need for check_stack_trace
+        # self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))

         prog = f.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.DimShuffle)
@@ -2036,7 +2038,8 @@ class test_local_subtensor_lift(unittest.TestCase):
         y = tensor.scalar('y')
         f = function([x, y], tensor.exp(x + y)[0], mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))

         prog = f.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.Subtensor)
@@ -2046,7 +2049,7 @@ class test_local_subtensor_lift(unittest.TestCase):
         f([1, 2, 3], 4)  # let debugmode test something

     def test7(self):
-        # test that Subtensor(Rebroadcast(x)) gets optimized into
+        # Test that Subtensor(Rebroadcast(x)) gets optimized into
         # Rebroadcast(Subtensor(x)).

         # test basic case
@@ -2057,7 +2060,9 @@ class test_local_subtensor_lift(unittest.TestCase):
         assert newx.broadcastable == (True, False)

         f1 = function([x], newx[:2, :5], mode=mode_opt)
-        self._verify_stack_trace(f1)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f1,
+                        ops_to_check=[Subtensor, tensor.Rebroadcast]))
         prog = f1.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.Subtensor)
         assert isinstance(prog[1].op, tensor.Rebroadcast)
@@ -2071,7 +2076,9 @@ class test_local_subtensor_lift(unittest.TestCase):
         assert newy.broadcastable == (True, False, True, False)

         f2 = function([y], newy[:, 3, 0, :], mode=mode_opt)
-        self._verify_stack_trace(f2)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f2,
+                        ops_to_check=[Subtensor, tensor.Rebroadcast]))
         prog = f2.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.Subtensor)
         assert isinstance(prog[1].op, tensor.Rebroadcast)
@@ -2079,7 +2086,9 @@ class test_local_subtensor_lift(unittest.TestCase):
         # corner case 2: subtensor idx_list is shorter than resulting broadcast pattern
         f3 = function([y], newy[:, 3, 0], mode=mode_opt)
-        self._verify_stack_trace(f3)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f3,
+                        ops_to_check=[Subtensor, tensor.Rebroadcast]))
         prog = f3.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.Subtensor)
         assert isinstance(prog[1].op, tensor.Rebroadcast)
@@ -2094,7 +2103,9 @@ class test_local_subtensor_lift(unittest.TestCase):
         out = newz[:, 3, 0]
         f4 = function([z], newz[:, 3, 0], mode=mode_opt)
-        self._verify_stack_trace(f4)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f4,
+                        ops_to_check=[Subtensor, tensor.Rebroadcast]))
         prog = f4.maker.fgraph.toposort()
         assert isinstance(prog[0].op, tensor.Subtensor)
         assert isinstance(prog[1].op, tensor.Rebroadcast)
@@ -2102,11 +2113,6 @@ class test_local_subtensor_lift(unittest.TestCase):

 class test_local_subtensor_merge(unittest.TestCase):
-    def _verify_stack_trace(self, f):
-        for output in f.outputs:
-            # Check stacktrace was copied over correctly after opt was applied
-            self.assertTrue(hasattr(output.variable.tag, 'trace'))
-
     def setUp(self):
         utt.seed_rng()
         self.x_shapes = [(2, 2), (5, 3), (4, 1), (1, 2),
@@ -2121,7 +2127,8 @@ class test_local_subtensor_merge(unittest.TestCase):
             g = function([x], x[idx::][-1], mode=mode_opt.excluding(
                 'local_subtensor_merge'))
-            self._verify_stack_trace(f)
+            # Check stacktrace was copied over correctly after opt was applied
+            self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))

             topo = f.maker.fgraph.toposort()
             assert len([t for t in topo
@@ -2149,7 +2156,8 @@ class test_local_subtensor_merge(unittest.TestCase):
                          mode=mode_opt.excluding('local_subtensor_merge'))
            #theano.printing.debugprint(f, print_type=True)
-            self._verify_stack_trace(f)
+            # Check stacktrace was copied over correctly after opt was applied
+            self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))

            topo = f.maker.fgraph.toposort()
            # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
@@ -2178,7 +2186,8 @@ class test_local_subtensor_merge(unittest.TestCase):
            g = function([x], x[::-1][idx],
                         mode=mode_opt.excluding('local_subtensor_merge'))
-            self._verify_stack_trace(f)
+            # Check stacktrace was copied over correctly after opt was applied
+            self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))
            #theano.printing.debugprint(f, print_type=True)
            topo = f.maker.fgraph.toposort()
@@ -2208,7 +2217,8 @@ class test_local_subtensor_merge(unittest.TestCase):
                          mode=mode_opt.excluding('local_subtensor_merge'))
            #theano.printing.debugprint(f, print_type=True)
-            self._verify_stack_trace(f)
+            # Check stacktrace was copied over correctly after opt was applied
+            self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))

            topo = f.maker.fgraph.toposort()
            # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
@@ -2232,7 +2242,8 @@ class test_local_subtensor_merge(unittest.TestCase):
        for idx in xrange(-9, 8):
            f = function([x], x[::-1][:idx], mode=mode_opt)
-            self._verify_stack_trace(f)
+            # Check stacktrace was copied over correctly after opt was applied
+            self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))
            #theano.printing.debugprint(f, print_type=True)

            topo = f.maker.fgraph.toposort()
@@ -2252,7 +2263,8 @@ class test_local_subtensor_merge(unittest.TestCase):
        y = tensor.iscalar('y')
        f = function([x, y], x[::-1][:y], mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))
        #theano.printing.debugprint(f, print_type=True)
@@ -2275,7 +2287,8 @@ class test_local_subtensor_merge(unittest.TestCase):
            for idx2 in xrange(-7, 7):
                f = function([x], x[idx1:][:idx2], mode=mode_opt)
-                self._verify_stack_trace(f)
+                # Check stacktrace was copied over correctly after opt was applied
+                self.assertTrue(check_stack_trace(f, ops_to_check='all'))
                #theano.printing.debugprint(f, print_type=True)
                topo = f.maker.fgraph.toposort()
@@ -2296,7 +2309,8 @@ class test_local_subtensor_merge(unittest.TestCase):
        z = tensor.iscalar('y')
        f = function([x, y, z], x[y:][:z], mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))
        #theano.printing.debugprint(f, print_type=True)
@@ -2327,7 +2341,8 @@ class test_local_subtensor_merge(unittest.TestCase):
            z = x[slice(*sl1)][slice(*sl2)]
            f = function([x], z, mode=mode_opt)
-            self._verify_stack_trace(f)
+            # Check stacktrace was copied over correctly after opt was applied
+            self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))
            x_val = self.rng.uniform(size=shape).astype(config.floatX)
@@ -2346,7 +2361,8 @@ class test_local_subtensor_merge(unittest.TestCase):
        f = function([x, b1, e1, s1, b2, e2, s2], x[b1:e1:s1][b2:e2:s2],
                     mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))
        #theano.printing.debugprint(f, print_type=True)
@@ -2433,7 +2449,8 @@ class test_local_subtensor_merge(unittest.TestCase):
        i = tensor.iscalar('i')
        f = function([x, b, e, s, i], x[b:e:s][i], mode=mode_opt)
-        self._verify_stack_trace(f)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))
        #theano.printing.debugprint(f, print_type=True)
@@ -2526,8 +2543,11 @@ class test_local_subtensor_merge(unittest.TestCase):
            sub_x = x[slice1][slice2]
            f = theano.function([x] + input_vars, sub_x, mode=mode_opt)
-            self._verify_stack_trace(f)
+            # Check stacktrace was copied over correctly after opt was applied
+            # for some cases, the optimization may remove all Subtensors,
+            # which is why we pass "bug_print='ignore'".
+            self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor,
+                                              bug_print='ignore'))

            topo = f.maker.fgraph.toposort()
            # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
@@ -2586,7 +2606,8 @@ class test_local_subtensor_merge(unittest.TestCase):
            sub_x = x[symbol_slice][i]
            f = theano.function([x] + input_vars, sub_x, mode=mode_opt)
-            self._verify_stack_trace(f)
+            # Check stacktrace was copied over correctly after opt was applied
+            self.assertTrue(check_stack_trace(f, ops_to_check=Subtensor))

            topo = f.maker.fgraph.toposort()
            # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
@@ -2717,15 +2738,16 @@ class test_local_adv_sub1_adv_inc_sub1(unittest.TestCase):
        # which requires us to add the 'canonicalize' phase.
        mode = theano.compile.mode.Mode(optimizer=None).including(
            'canonicalize').including("local_adv_sub1_adv_inc_sub1")
        f = theano.function([x, y, idx], o, self.mode)
-        # Check stacktrace was copied over correctly after opt was applied
+        # The opt only removes nodes in this case, no check_stack_trace needed
        self.assertTrue(hasattr(f.outputs[0].variable.tag, 'trace'))

        # Compile function using all optimizations in fast_compile mode,
        # including the 'local_subtensor_make_vector' optimization
        mode = theano.compile.mode.get_mode('FAST_COMPILE').including(
            "local_adv_sub1_adv_inc_sub1")
        f = theano.function([x, y, idx], o, self.mode)
-        # Check stacktrace was copied over correctly after opt was applied
+        # The opt only removes nodes in this case, no check_stack_trace needed
        self.assertTrue(hasattr(f.outputs[0].variable.tag, 'trace'))
+        # See if there are use cases which add nodes and need check_stack_trace
+        # See issue #4421


 class Test_alloc_zero(unittest.TestCase):
@@ -2930,7 +2952,9 @@ def test_local_IncSubtensor_serialize():
     # Now test that the stack trace is copied over properly,
     # if we return the gradients. We need to use same mode as before.
     f = theano.function([i, j, t], dW, mode=mode)
-    assert hasattr(f.outputs[0].variable.tag, 'trace')
+    assert check_stack_trace(
+        f, ops_to_check=[tensor.IncSubtensor, tensor.AdvancedIncSubtensor,
+                         tensor.AdvancedIncSubtensor1])


 def test_local_set_to_inc_subtensor():
     v = theano.tensor.fmatrix()
@@ -2961,9 +2985,9 @@ def test_local_set_to_inc_subtensor():
     utt.assert_allclose(r1, r2)

     # Finally, test that the stack trace is copied over properly,
-    # before before and after optimization.
-    assert hasattr(f1.outputs[0].variable.tag, 'trace')
-    assert hasattr(f2.outputs[0].variable.tag, 'trace')
+    # before and after optimization.
+    assert check_stack_trace(f1, ops_to_check=tensor.AdvancedIncSubtensor1)
+    assert check_stack_trace(f2, ops_to_check='all')


 def test_local_subtensor_of_dot():
@@ -2998,14 +3022,14 @@ def test_local_subtensor_of_dot():
     f = theano.function([m1, m2, idx],
                         theano.dot(m1, m2)[idx, 1:4, :, idx:], mode=mode)
     assert test_equality(f(d1, d2, 1), numpy.dot(d1, d2)[1, 1:4, :, 1:])
     # if we return the gradients. We need to use same mode as before.
-    assert hasattr(f.outputs[0].variable.tag, 'trace')
+    assert check_stack_trace(f, ops_to_check='last')

     f = theano.function([m1, m2, idx],
                         theano.dot(m1, m2)[1:4, :, idx:, idx], mode=mode)
     assert test_equality(f(d1, d2, 1), numpy.dot(d1, d2)[1:4, :, 1:, 1])

     # Now test that the stack trace is copied over properly,
     # if we return the gradients. We need to use same mode as before.
-    assert hasattr(f.outputs[0].variable.tag, 'trace')
+    assert check_stack_trace(f, ops_to_check='last')


 class Test_local_elemwise_alloc(unittest.TestCase):
@@ -3052,11 +3076,6 @@ class Test_local_elemwise_alloc(unittest.TestCase):
                  if elem.op is not None]) == count
         )

-    def _verify_stack_trace(self, f):
-        for output in f.outputs:
-            # Check stacktrace was copied over correctly after opt was applied
-            self.assertTrue(hasattr(output.variable.tag, 'trace'))
-
     def test_remove_alloc_wo_dimshuffle(self):
         # No optimization on alloc
         func = function(
@@ -3066,7 +3085,8 @@ class Test_local_elemwise_alloc(unittest.TestCase):
         )
         self._verify_alloc_count(func, 1)
         self._verify_assert_count(func, 0)
-        self._verify_stack_trace(func)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(func, ops_to_check='all'))

         # Optimization on alloc with assert
         func = function(
@@ -3509,11 +3529,6 @@ class Test_local_useless_elemwise_comparison(unittest.TestCase):

 class Test_local_useless_alloc(unittest.TestCase):

-    def _verify_stack_trace(self, f):
-        for output in f.outputs:
-            # Check stacktrace was copied over correctly after opt was applied
-            self.assertTrue(hasattr(output.variable.tag, 'trace'))
-
     def setUp(self):
         self.rng = numpy.random.RandomState(utt.fetch_seed())
@@ -3534,7 +3549,8 @@ class Test_local_useless_alloc(unittest.TestCase):
         if isinstance(mode_opt, compile.DebugMode):
             self.assertRaises(ValueError, f)
-        self._verify_stack_trace(f)
+        # No need to check_stack_trace as the optimization
+        # local_useless_alloc only removes nodes.

     def test1(self):
         # Test that alloc never gets instantiated during optimization
@@ -3549,7 +3565,8 @@ class Test_local_useless_alloc(unittest.TestCase):
         op_classes = [node.op.__class__ for node in f.maker.fgraph.toposort()]
         assert tensor.Alloc not in op_classes
-        self._verify_stack_trace(f)
+        # No need to check_stack_trace as the optimization
+        # local_useless_alloc only removes nodes.

     def test2(self):
         # Test that alloc never gets instantiated during optimization
@@ -3568,18 +3585,12 @@ class Test_local_useless_alloc(unittest.TestCase):
         # we have to change the following we should not see tensor.Alloc
         # in op_classes and we have to change the assert.
         assert tensor.Alloc in op_classes
-        self._verify_stack_trace(f)
+        # The correct opt removes nodes, no need for check_stack_trace


 class Test_local_useless_inc_subtensor_alloc(unittest.TestCase):
     opt_name = 'local_useless_inc_subtensor_alloc'

-    def _verify_stack_trace(self, f):
-        for output in f.outputs:
-            # Check stacktrace was copied over correctly after opt was applied
-            self.assertTrue(hasattr(output.variable.tag, 'trace'))
-
     def setUp(self):
         # The optimization requires the shape feature so we need to compile in
         # FAST_RUN mode.
@@ -3617,8 +3628,9 @@ class Test_local_useless_inc_subtensor_alloc(unittest.TestCase):
         utt.assert_allclose(r1, r2)

-        self._verify_stack_trace(f1)
-        self._verify_stack_trace(f2)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f1, ops_to_check=tensor.AdvancedIncSubtensor))
+        self.assertTrue(check_stack_trace(f2, ops_to_check=tensor.AdvancedIncSubtensor))

     def test_advanced_inc_subtensor1(self):
@@ -3650,8 +3662,10 @@ class Test_local_useless_inc_subtensor_alloc(unittest.TestCase):
         utt.assert_allclose(r1, r2)

-        self._verify_stack_trace(f1)
-        self._verify_stack_trace(f2)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(
+            f1, ops_to_check=tensor.AdvancedIncSubtensor1))
+        self.assertTrue(check_stack_trace(f2, ops_to_check='all'))

     def test_incsubtensor(self):
         x = tensor.vector('x')
@@ -3679,8 +3693,9 @@ class Test_local_useless_inc_subtensor_alloc(unittest.TestCase):
         utt.assert_allclose(r1, r2)

-        self._verify_stack_trace(f1)
-        self._verify_stack_trace(f2)
+        # Check stacktrace was copied over correctly after opt was applied
+        self.assertTrue(check_stack_trace(f1, ops_to_check='last'))
+        self.assertTrue(check_stack_trace(f2, ops_to_check='last'))


 class test_shapeoptimizer(unittest.TestCase):
@@ -4048,10 +4063,9 @@ class T_Tile(unittest.TestCase):
             assert len(topo) == 1
             assert isinstance(topo[0].op, compile.DeepCopyOp)
             f(data)
-            # Check that stacktrace is copied over
-            self.assertTrue(hasattr(f.outputs[0].variable.tag, 'trace'))
-            self.assertTrue(len(f.outputs[0].variable.tag.trace) > 0)
+            # In this case the opt only removes nodes,
+            # no need to check_stack_trace
+            # See issue #4421


 def speed_local_pow_specialize_range():
@@ -4190,7 +4204,7 @@ class T_Rebroadcast(unittest.TestCase):
         e = f.maker.fgraph.toposort()
         assert len([n for n in e if isinstance(n.op, T.Rebroadcast)]) == 0
-        assert hasattr(f.outputs[0].variable.tag, 'trace')
+        assert check_stack_trace(f, ops_to_check='all')

     def test_rebroadcast_rebroadcast(self):
         mode = theano.compile.get_default_mode().including('canonicalize')
@@ -5818,8 +5832,7 @@ def test_local_join_make_vector():
                     for n in e if isinstance(n.op, Join)])
     assert f.maker.fgraph.outputs[0].dtype == config.floatX
-    assert hasattr(f.outputs[0].variable, 'tag')
-    assert hasattr(f.outputs[0].variable.tag, 'trace')
+    assert check_stack_trace(f, ops_to_check='all')


 def test_local_add_specialize():
@@ -5921,11 +5934,10 @@ def test_local_useless_split():
     assert len(graph_nonopt) == 1
     assert isinstance(graph_nonopt[0].op, tensor.Split)

-    # Check that stacktraces have been copied over properly
-    assert hasattr(f_opt.outputs[0].variable.tag, 'trace')
-    assert len(f_opt.outputs[0].variable.tag.trace) > 0
-    assert hasattr(f_nonopt.outputs[0].variable.tag, 'trace')
-    assert len(f_nonopt.outputs[0].variable.tag.trace) > 0
+    # Check if there are use cases that are not covered here
+    # and if the line below is necessary and correct (See issue #4421)
+    # assert check_stack_trace(f_opt, ops_to_check=[Assert])
+    assert check_stack_trace(f_nonopt, ops_to_check='all')


 def test_local_flatten_lift():
@@ -5998,7 +6010,7 @@ class Test_lift_transpose_through_dot(unittest.TestCase):
         sg = '[dot(DimShuffle{1,0}(b), DimShuffle{1,0}(a))]'
         assert str(g) == sg, (str(g), sg)
         # Check stacktrace was copied over correctly after opt was applied
-        self.assertTrue(hasattr(g.outputs[0].tag, 'trace'))
+        self.assertTrue(check_stack_trace(g, ops_to_check='all'))

     def test_row_matrix(self):
         a = vector('a')
@@ -6010,7 +6022,7 @@ class Test_lift_transpose_through_dot(unittest.TestCase):
         sg = '[dot(DimShuffle{1,0}(b), DimShuffle{0,x}(a))]'
         assert str(g) == sg, (str(g), sg)
         # Check stacktrace was copied over correctly after opt was applied
-        self.assertTrue(hasattr(g.outputs[0].tag, 'trace'))
+        self.assertTrue(check_stack_trace(g, ops_to_check='all'))

     def test_matrix_col(self):
         a = vector('a')
@@ -6022,7 +6034,7 @@ class Test_lift_transpose_through_dot(unittest.TestCase):
         sg = '[dot(DimShuffle{x,0}(a), DimShuffle{1,0}(b))]'
         assert str(g) == sg, (str(g), sg)
         # Check stacktrace was copied over correctly after opt was applied
-        self.assertTrue(hasattr(g.outputs[0].tag, 'trace'))
+        self.assertTrue(check_stack_trace(g, ops_to_check='all'))

 def test_local_upcast_elemwise_constant_inputs():