提交 e9a0e775 authored 作者: Ricardo's avatar Ricardo 提交者: Thomas Wiecki

Remove `warn__reduce_join` flag

上级 cc771b41
......@@ -1454,23 +1454,6 @@ def add_deprecated_configvars():
in_c_key=False,
)
config.add(
"warn__reduce_join",
(
"Your current code is fine, but Aesara versions "
"prior to 0.7 (or this development version) "
"might have given an incorrect result. "
"To disable this warning, set the Aesara flag "
"warn__reduce_join to False. The problem was an "
"optimization, that modified the pattern "
'"Reduce{scalar.op}(Join(axis=0, a, b), axis=0)", '
"did not check the reduction axis. So if the "
"reduction axis was not 0, you got a wrong answer."
),
BoolParam(_warn_default("0.7")),
in_c_key=False,
)
config.add(
"warn__inc_set_subtensor1",
(
......
......@@ -5,7 +5,6 @@
import itertools
import logging
import operator
import warnings
from functools import partial, reduce
import numpy as np
......@@ -13,7 +12,6 @@ import numpy as np
import aesara.scalar.basic as aes
import aesara.scalar.math as aes_math
from aesara.assert_op import assert_op
from aesara.configdefaults import config
from aesara.graph.basic import Constant, Variable
from aesara.graph.opt import (
LocalOptGroup,
......@@ -1609,20 +1607,7 @@ def local_reduce_join(fgraph, node):
if reduce_axis is None:
reduce_axis = tuple(range(node.inputs[0].ndim))
# I put this warning late to don't add extra warning.
if len(reduce_axis) != 1 or 0 not in reduce_axis:
if config.warn__reduce_join:
warnings.warning(
"Your current code is fine, but Aesara versions "
"prior to 0.7 (or this development version Sept 2014) "
"might have given an incorrect result for this code. "
"To disable this warning, set the Aesara flag "
"warn__reduce_join to False. The problem was an "
"optimization, that modified the pattern "
'"Reduce{scalar.op}(Join(axis=0, a, b), axis=0)", '
"did not check the reduction axis. So if the "
"reduction axis was not 0, you got a wrong answer."
)
return
# We add the new check late to don't add extra warning.
......
......@@ -3567,8 +3567,7 @@ class TestLocalReduce:
assert isinstance(topo[-1].op, Elemwise)
# Test a case that was bugged in a old Aesara bug
with config.change_flags(warn__reduce_join=False):
f = function([], aet_sum(aet.stack([A, A]), axis=1), mode=self.mode)
f = function([], aet_sum(aet.stack([A, A]), axis=1), mode=self.mode)
utt.assert_allclose(f(), [15, 15])
topo = f.maker.fgraph.toposort()
......@@ -3594,9 +3593,8 @@ class TestLocalReduce:
# Test that the optimization does not crash in one case where it
# is not applied. Reported at
# https://groups.google.com/d/topic/theano-users/EDgyCU00fFA/discussion
with config.change_flags(warn__reduce_join=False):
out = aet_sum([vx, vy, vz], axis=None)
f = function([vx, vy, vz], out)
out = aet_sum([vx, vy, vz], axis=None)
f = function([vx, vy, vz], out)
class TestLocalSumProdDimshuffle:
......
Markdown 格式
0%
您即将添加 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论