提交 fd4c5d91 authored 作者: Brandon T. Willard's avatar Brandon T. Willard 提交者: Brandon T. Willard

Refactor get_canonical_form_slice so that it uses as_index_literal

上级 0cbf8557
...@@ -168,7 +168,9 @@ def get_idx_list(inputs, idx_list): ...@@ -168,7 +168,9 @@ def get_idx_list(inputs, idx_list):
return indices_from_subtensor(inputs[1:], idx_list) return indices_from_subtensor(inputs[1:], idx_list)
def get_canonical_form_slice(theslice, length): def get_canonical_form_slice(
theslice: Union[slice, Variable], length: Variable
) -> Tuple[Variable, int]:
"""Convert slices to canonical form. """Convert slices to canonical form.
Given a slice [start:stop:step] transform it into a canonical form Given a slice [start:stop:step] transform it into a canonical form
...@@ -179,16 +181,24 @@ def get_canonical_form_slice(theslice, length): ...@@ -179,16 +181,24 @@ def get_canonical_form_slice(theslice, length):
if the resulting set of numbers needs to be reversed or not. if the resulting set of numbers needs to be reversed or not.
""" """
from aesara.tensor import extract_constant, ge, lt, sgn, switch from aesara.tensor import ge, lt, sgn, switch
if isinstance(theslice, slice): if not isinstance(theslice, slice):
try:
value = as_index_literal(theslice)
except NotScalarConstantError:
value = theslice
value = switch(lt(value, 0), (value + length), value)
return value, 1
def analyze(x): def analyze(x):
try: try:
x_constant = get_scalar_constant_value(x) x_constant = as_index_literal(x)
is_constant = True is_constant = True
except NotScalarConstantError: except NotScalarConstantError:
x_constant = extract_constant(x) x_constant = x
is_constant = False is_constant = False
return x_constant, is_constant return x_constant, is_constant
...@@ -298,9 +308,7 @@ def get_canonical_form_slice(theslice, length): ...@@ -298,9 +308,7 @@ def get_canonical_form_slice(theslice, length):
else: else:
start = switch(lt(start, 0), start + length, start) start = switch(lt(start, 0), start + length, start)
start = switch(lt(start, 0), switch_neg_step(-1, 0), start) start = switch(lt(start, 0), switch_neg_step(-1, 0), start)
start = switch( start = switch(ge(start, length), switch_neg_step(length - 1, length), start)
ge(start, length), switch_neg_step(length - 1, length), start
)
if stop is None or stop == sys.maxsize: if stop is None or stop == sys.maxsize:
# The special "maxsize" case is probably not needed here, # The special "maxsize" case is probably not needed here,
# as slices containing maxsize are not generated by # as slices containing maxsize are not generated by
...@@ -328,11 +336,6 @@ def get_canonical_form_slice(theslice, length): ...@@ -328,11 +336,6 @@ def get_canonical_form_slice(theslice, length):
return slice(nw_start, nw_stop, nw_step), reverse return slice(nw_start, nw_stop, nw_step), reverse
else: else:
return slice(nw_start, nw_stop, nw_step), 1 return slice(nw_start, nw_stop, nw_step), 1
else:
value = extract_constant(theslice)
value = switch(lt(value, 0), (value + length), value)
return value, 1
def range_len(slc): def range_len(slc):
......
...@@ -14,6 +14,7 @@ from aesara.compile.io import In ...@@ -14,6 +14,7 @@ from aesara.compile.io import In
from aesara.configdefaults import config from aesara.configdefaults import config
from aesara.graph.op import get_test_value from aesara.graph.op import get_test_value
from aesara.graph.opt_utils import is_same_graph from aesara.graph.opt_utils import is_same_graph
from aesara.scalar.basic import as_scalar
from aesara.tensor.elemwise import DimShuffle from aesara.tensor.elemwise import DimShuffle
from aesara.tensor.math import exp, isinf from aesara.tensor.math import exp, isinf
from aesara.tensor.math import sum as aet_sum from aesara.tensor.math import sum as aet_sum
...@@ -96,6 +97,186 @@ def test_as_index_literal(): ...@@ -96,6 +97,186 @@ def test_as_index_literal():
assert res is np.newaxis assert res is np.newaxis
class TestGetCanonicalFormSlice:
    """Tests for ``get_canonical_form_slice``.

    Each symbolic-slice case compiles the canonical ``(start, stop, step)``
    slice plus the "reverse" flag returned by ``get_canonical_form_slice``
    and checks that applying them reproduces NumPy basic slicing for a grid
    of concrete start/stop/step values (including out-of-bounds and negative
    values).
    """

    # Concrete values exercised for the start/stop positions and for the step.
    start_stop_values = [-8, -5, -4, -1, 0, 1, 4, 5, 8]
    step_values = [-6, -3, -1, 2, 5]

    def _check_canonical_slice(self, symbolic_args, make_slice, value_grids):
        """Compile the canonical form of ``make_slice(*symbolic_args)`` and
        compare it against NumPy slicing.

        Parameters
        ----------
        symbolic_args
            Tuple of symbolic scalars fed to ``make_slice`` and used as the
            compiled function's inputs (in order, before ``length``).
        make_slice
            Callable building a ``slice`` from its arguments; called once
            with the symbolic scalars and once per grid point with the
            concrete values, so the reference result uses plain NumPy
            indexing.
        value_grids
            One sequence of concrete values per symbolic argument; every
            combination (Cartesian product) is tested.
        """
        import itertools

        length = iscalar("l")
        cnf = get_canonical_form_slice(make_slice(*symbolic_args), length)
        f = aesara.function(
            list(symbolic_args) + [length],
            [
                aet.as_tensor_variable(cnf[0].start),
                aet.as_tensor_variable(cnf[0].stop),
                aet.as_tensor_variable(cnf[0].step),
                aet.as_tensor_variable(cnf[1]),
            ],
        )
        length_val = 5
        a = np.arange(length_val)
        for vals in itertools.product(*value_grids):
            out = f(*(list(vals) + [length_val]))
            # Apply the canonical slice, then the reverse step.
            t_out = a[out[0] : out[1] : out[2]][:: out[3]]
            v_out = a[make_slice(*vals)]
            assert np.all(t_out == v_out)
            assert np.all(t_out.shape == v_out.shape)

    def test_scalar_constant(self):
        # A non-slice (scalar) index is canonicalized to
        # ``switch(lt(v, 0), v + length, v)`` with no reversal.
        a = as_scalar(0)
        length = lscalar()
        res = get_canonical_form_slice(a, length)
        assert res[0].owner.op == aet.switch
        assert res[1] == 1

    def test_all_symbolic(self):
        self._check_canonical_slice(
            (iscalar("b"), iscalar("e"), iscalar("s")),
            lambda b, e, s: slice(b, e, s),
            (self.start_stop_values, self.start_stop_values, self.step_values),
        )

    def test_start_None(self):
        self._check_canonical_slice(
            (iscalar("e"), iscalar("s")),
            lambda e, s: slice(None, e, s),
            (self.start_stop_values, self.step_values),
        )

    def test_stop_None(self):
        self._check_canonical_slice(
            (iscalar("b"), iscalar("s")),
            lambda b, s: slice(b, None, s),
            (self.start_stop_values, self.step_values),
        )

    def test_step_None(self):
        self._check_canonical_slice(
            (iscalar("b"), iscalar("e")),
            lambda b, e: slice(b, e, None),
            (self.start_stop_values, self.start_stop_values),
        )

    def test_start_stop_None(self):
        self._check_canonical_slice(
            (iscalar("s"),),
            lambda s: slice(None, None, s),
            (self.step_values,),
        )

    def test_stop_step_None(self):
        self._check_canonical_slice(
            (iscalar("b"),),
            lambda b: slice(b, None, None),
            (self.start_stop_values,),
        )

    def test_start_step_None(self):
        self._check_canonical_slice(
            (iscalar("e"),),
            lambda e: slice(None, e, None),
            (self.start_stop_values,),
        )
class TestSubtensor(utt.OptimizationTestMixin): class TestSubtensor(utt.OptimizationTestMixin):
""" """
This is designed to be sub-classed (e.g. by the GPU tests). This is designed to be sub-classed (e.g. by the GPU tests).
...@@ -846,191 +1027,6 @@ class TestSubtensor(utt.OptimizationTestMixin): ...@@ -846,191 +1027,6 @@ class TestSubtensor(utt.OptimizationTestMixin):
for step in [-3, -1, 2, 5]: for step in [-3, -1, 2, 5]:
assert np.all(f(start, stop, step) == v_data[start:stop:step].shape) assert np.all(f(start, stop, step) == v_data[start:stop:step].shape)
def test_slice_canonical_form_0(self):
    """Canonical form of a fully symbolic slice matches NumPy slicing."""
    b, e, s, l = iscalar("b"), iscalar("e"), iscalar("s"), iscalar("l")
    canon, flip = get_canonical_form_slice(slice(b, e, s), l)
    f = self.function(
        [b, e, s, l],
        [
            aet.as_tensor_variable(v)
            for v in (canon.start, canon.stop, canon.step, flip)
        ],
        N=0,
        op=subtensor_ops,
    )
    n = 5
    data = np.arange(n)
    for b_v in [-8, -5, -4, -1, 0, 1, 4, 5, 8]:
        for e_v in [-8, -5, -4, -1, 0, 1, 4, 5, 8]:
            for s_v in [-6, -3, -1, 2, 5]:
                r = f(b_v, e_v, s_v, n)
                # Apply canonical slice, then the reverse step.
                actual = data[r[0] : r[1] : r[2]][:: r[3]]
                expected = data[b_v:e_v:s_v]
                assert np.all(actual == expected)
                assert np.all(actual.shape == expected.shape)
def test_slice_canonical_form_1(self):
    """Canonical form of ``slice(None, stop, step)`` matches NumPy slicing."""
    e, s, l = iscalar("e"), iscalar("s"), iscalar("l")
    canon, flip = get_canonical_form_slice(slice(None, e, s), l)
    f = self.function(
        [e, s, l],
        [
            aet.as_tensor_variable(v)
            for v in (canon.start, canon.stop, canon.step, flip)
        ],
        N=0,
        op=subtensor_ops,
    )
    n = 5
    data = np.arange(n)
    for e_v in [-8, -5, -4, -1, 0, 1, 4, 5, 8]:
        for s_v in [-6, -3, -1, 2, 5]:
            r = f(e_v, s_v, n)
            # Apply canonical slice, then the reverse step.
            actual = data[r[0] : r[1] : r[2]][:: r[3]]
            expected = data[:e_v:s_v]
            assert np.all(actual == expected)
            assert np.all(actual.shape == expected.shape)
def test_slice_canonical_form_2(self):
    """Canonical form of ``slice(start, None, step)`` matches NumPy slicing."""
    b, s, l = iscalar("b"), iscalar("s"), iscalar("l")
    canon, flip = get_canonical_form_slice(slice(b, None, s), l)
    f = self.function(
        [b, s, l],
        [
            aet.as_tensor_variable(v)
            for v in (canon.start, canon.stop, canon.step, flip)
        ],
        N=0,
        op=subtensor_ops,
    )
    n = 5
    data = np.arange(n)
    for b_v in [-8, -5, -4, -1, 0, 1, 4, 5, 8]:
        for s_v in [-6, -3, -1, 2, 5]:
            r = f(b_v, s_v, n)
            # Apply canonical slice, then the reverse step.
            actual = data[r[0] : r[1] : r[2]][:: r[3]]
            expected = data[b_v:None:s_v]
            assert np.all(actual == expected)
            assert np.all(actual.shape == expected.shape)
def test_slice_canonical_form_3(self):
    """Canonical form of ``slice(start, stop, None)`` matches NumPy slicing."""
    b, e, l = iscalar("b"), iscalar("e"), iscalar("l")
    canon, flip = get_canonical_form_slice(slice(b, e, None), l)
    f = self.function(
        [b, e, l],
        [
            aet.as_tensor_variable(v)
            for v in (canon.start, canon.stop, canon.step, flip)
        ],
        N=0,
        op=subtensor_ops,
    )
    n = 5
    data = np.arange(n)
    for b_v in [-8, -5, -4, -1, 0, 1, 4, 5, 8]:
        for e_v in [-8, -5, -4, -1, 0, 1, 4, 5, 8]:
            r = f(b_v, e_v, n)
            # Apply canonical slice, then the reverse step.
            actual = data[r[0] : r[1] : r[2]][:: r[3]]
            expected = data[b_v:e_v:None]
            assert np.all(actual == expected)
            assert np.all(actual.shape == expected.shape)
def test_slice_canonical_form_4(self):
    """Canonical form of ``slice(None, None, step)`` matches NumPy slicing."""
    s, l = iscalar("s"), iscalar("l")
    canon, flip = get_canonical_form_slice(slice(None, None, s), l)
    f = self.function(
        [s, l],
        [
            aet.as_tensor_variable(v)
            for v in (canon.start, canon.stop, canon.step, flip)
        ],
        N=0,
        op=subtensor_ops,
    )
    n = 5
    data = np.arange(n)
    for s_v in [-6, -3, -1, 2, 5]:
        r = f(s_v, n)
        # Apply canonical slice, then the reverse step.
        actual = data[r[0] : r[1] : r[2]][:: r[3]]
        expected = data[None:None:s_v]
        assert np.all(actual == expected)
        assert np.all(actual.shape == expected.shape)
def test_slice_canonical_form_5(self):
    """Canonical form of ``slice(start, None, None)`` matches NumPy slicing."""
    b, l = iscalar("b"), iscalar("l")
    canon, flip = get_canonical_form_slice(slice(b, None, None), l)
    f = self.function(
        [b, l],
        [
            aet.as_tensor_variable(v)
            for v in (canon.start, canon.stop, canon.step, flip)
        ],
        N=0,
        op=subtensor_ops,
    )
    n = 5
    data = np.arange(n)
    for b_v in [-8, -5, -4, -1, 0, 1, 4, 5, 8]:
        r = f(b_v, n)
        # Apply canonical slice, then the reverse step.
        actual = data[r[0] : r[1] : r[2]][:: r[3]]
        expected = data[b_v:None:None]
        assert np.all(actual == expected)
        assert np.all(actual.shape == expected.shape)
def test_slice_canonical_form_6(self):
    """Canonical form of ``slice(None, stop, None)`` matches NumPy slicing."""
    e, l = iscalar("e"), iscalar("l")
    canon, flip = get_canonical_form_slice(slice(None, e, None), l)
    f = self.function(
        [e, l],
        [
            aet.as_tensor_variable(v)
            for v in (canon.start, canon.stop, canon.step, flip)
        ],
        N=0,
        op=subtensor_ops,
    )
    n = 5
    data = np.arange(n)
    for e_v in [-8, -5, -4, -1, 0, 1, 4, 5, 8]:
        r = f(e_v, n)
        # Apply canonical slice, then the reverse step.
        actual = data[r[0] : r[1] : r[2]][:: r[3]]
        expected = data[None:e_v:None]
        assert np.all(actual == expected)
        assert np.all(actual.shape == expected.shape)
def grad_list_(self, idxs, data): def grad_list_(self, idxs, data):
n = self.shared(data) n = self.shared(data)
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论