提交 5628ab15 authored 作者: Cove Geary's avatar Cove Geary 提交者: Michael Osthege

Type updates for stack (addresses #193)

上级 e4800634
......@@ -2559,17 +2559,17 @@ def roll(x, shift, axis=None):
)
def stack(tensors: Sequence[TensorVariable], axis: int = 0):
def stack(tensors: Sequence["TensorLike"], axis: int = 0):
"""Stack tensors in sequence on given axis (default is 0).
Take a sequence of tensors and stack them on given axis to make a single
tensor. The size in dimension `axis` of the result will be equal to the number
of tensors passed.
Take a sequence of tensors or tensor-like constant and stack them on
given axis to make a single tensor. The size in dimension `axis` of the
result will be equal to the number of tensors passed.
Parameters
----------
tensors : Sequence[TensorVariable]
A list of tensors to be stacked.
tensors : Sequence[TensorLike]
A list of tensors or tensor-like constants to be stacked.
axis : int
The index of the new axis. Default value is 0.
......@@ -2604,11 +2604,11 @@ def stack(tensors: Sequence[TensorVariable], axis: int = 0):
(2, 2, 2, 3, 2)
"""
if not isinstance(tensors, Sequence):
raise TypeError("First argument should be Sequence[TensorVariable]")
raise TypeError("First argument should be a Sequence.")
elif len(tensors) == 0:
raise ValueError("No tensor arguments provided")
raise ValueError("No tensor arguments provided.")
# If all tensors are scalars of the same type, call make_vector.
# If all tensors are scalars, call make_vector.
# It makes the graph simpler, by not adding DimShuffles and SpecifyShapes
# This should be an optimization!
......@@ -2618,12 +2618,13 @@ def stack(tensors: Sequence[TensorVariable], axis: int = 0):
# optimization.
# See ticket #660
if all(
# In case there are explicit ints in tensors
isinstance(t, (np.number, float, int, builtins.complex))
# In case there are explicit scalars in tensors
isinstance(t, Number)
or (isinstance(t, np.ndarray) and t.ndim == 0)
or (isinstance(t, Variable) and isinstance(t.type, TensorType) and t.ndim == 0)
for t in tensors
):
# in case there is direct int
# In case there is direct scalar
tensors = list(map(as_tensor_variable, tensors))
dtype = aes.upcast(*[i.dtype for i in tensors])
return MakeVector(dtype)(*tensors)
......
......@@ -1280,15 +1280,15 @@ class TestJoinAndSplit:
def test_stack_scalar_make_vector_constant(self):
# Test that calling stack() on scalars instantiates MakeVector,
# event when the scalar are simple int type.
# even when the scalars are non-symbolic ints.
a = iscalar("a")
b = lscalar("b")
# test when the constant is the first element.
# The first element is used in a special way
s = stack([10, a, b, np.int8(3)])
s = stack([10, a, b, np.int8(3), np.array(4, dtype=np.int8)])
f = function([a, b], s, mode=self.mode)
val = f(1, 2)
assert np.all(val == [10, 1, 2, 3])
assert np.all(val == [10, 1, 2, 3, 4])
topo = f.maker.fgraph.toposort()
assert len([n for n in topo if isinstance(n.op, MakeVector)]) > 0
assert len([n for n in topo if isinstance(n, type(self.join_op))]) == 0
......@@ -1333,11 +1333,14 @@ class TestJoinAndSplit:
stack([a, b], -4)
# Testing depreciation warning is now an informative error
with pytest.raises(
TypeError, match=r"First argument should be Sequence\[TensorVariable\]"
):
with pytest.raises(TypeError, match="First argument should be a Sequence"):
s = stack(a, b)
def test_stack_empty(self):
# Do not support stacking an empty sequence
with pytest.raises(ValueError, match="No tensor arguments provided"):
stack([])
def test_stack_hessian(self):
# Test the gradient of stack when used in hessian, see gh-1589
a = dvector("a")
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论