提交 bf9f3dcf authored 作者: Joseph Turian's avatar Joseph Turian

merge

......@@ -809,8 +809,8 @@ class MaxAndArgmax(Op):
tensor(axis.type.dtype, broadcastable)]
return Apply(self, inputs, outputs)
def perform(self, node, inp, out):
    """Compute the max and argmax of `x` along `axis`.

    Writes both results into the output storage cells as ndarrays.
    `inp` is the (x, axis) input pair; `out` is the pair of one-element
    storage lists (max, max_idx) to fill in.
    """
    x, axis = inp
    max_storage, max_idx_storage = out
    # numpy.max/argmax return a scalar when the reduction consumes all
    # axes; asarray guarantees the stored outputs are always ndarrays.
    max_storage[0] = numpy.asarray(numpy.max(x, axis))
    max_idx_storage[0] = numpy.asarray(numpy.argmax(x, axis))
def grad(self, (x, axis), (g_max, g_max_idx)):
# @warning: This only works if axis is 0, else the max is
# broadcasted wrong in the call to eq.
......@@ -859,6 +859,27 @@ def argmax(x, axis=None):
# but when Argmax.c_impl() is in place, it should be fine.
return max_and_argmax(x,axis)[1]
@constructor
def min(x, axis=None):
    """Return the minimum of `x` along `axis`, computed as -max(-x).

    Only floating-point dtypes are supported; the negation trick is not
    safe in general for other dtypes.
    """
    if 'float' not in str(x.dtype):
        # Be careful about unsigned integers, complex
        raise NotImplementedError()
    return -max(-x, axis=axis)
@constructor
def argmin(x, axis=None):
    """Return the index of the minimum of `x` along `axis`.

    Implemented as argmax of the negated input, so only floating-point
    dtypes are supported.
    """
    if 'float' not in str(x.dtype):
        # Be careful about unsigned integers, complex
        raise NotImplementedError()
    return argmax(-x, axis=axis)
@constructor
def smallest(*args):
    """Return the [elementwise] smallest of a variable number of arguments (like python's min)."""
    stacked = stack(*args)
    return min(stacked, axis=0)
##########################
# Comparison
......@@ -1646,28 +1667,26 @@ pprint.assign(lambda pstate, r: r.owner and isinstance(r.owner.op, Join),
@constructor
def shape_padleft(t, n_ones=1):
    """Reshape `t` by left-padding the shape with `n_ones` 1s.

    See also: `shape_padright` and `Dimshuffle`
    """
    # NOTE(review): this span interleaved pre- and post-rename lines of a
    # diff (param `tensor` -> `t`, plus a deleted `rightpad_shape` helper);
    # this is the coherent post-change version.
    _t = as_tensor(t)
    # 'x' entries become new broadcastable (length-1) leading dimensions.
    pattern = ['x'] * n_ones + [i for i in range(_t.type.ndim)]
    return DimShuffle(_t.broadcastable, pattern)(_t)
@constructor
def shape_padright(t, n_ones=1):
    """Reshape `t` by right-padding the shape with `n_ones` 1s.

    See also: `shape_padleft` and `Dimshuffle`
    """
    # NOTE(review): this span interleaved pre- and post-rename lines of a
    # diff (param `tensor` -> `t`); this is the coherent post-change version.
    _t = as_tensor(t)
    # 'x' entries become new broadcastable (length-1) trailing dimensions.
    pattern = [i for i in range(_t.type.ndim)] + ['x'] * n_ones
    return DimShuffle(_t.broadcastable, pattern)(_t)
@constructor
def stack(*tensors):
......
......@@ -875,6 +875,15 @@ class T_Join_and_Split(unittest.TestCase):
return
self.fail()
def test_stack_mixed_type_constants(self):
    """stack() on mixed int/float constants evaluates to the expected vector."""
    expected = numpy.array([1, 2, 3])
    parts = [as_tensor(1), as_tensor(2.0), as_tensor(3.0)]
    stacked = stack(*parts)
    self.failUnless((eval_outputs([stacked]) == expected).all())
def test_stack_scalar(self):
a = as_tensor(1)
b = as_tensor(2)
......@@ -1767,6 +1776,28 @@ class test_tensordot(unittest.TestCase):
f6(bval,aval)))
tensor.verify_grad(None, TensorDot(axes), [bval,aval])
def test_smallest_stack():
    """Stacking two double scalars yields a 1-d ndarray holding both values."""
    a = dscalar()
    b = dscalar()
    f = function([a, b], stack(a, b))
    out = f(-4.0, -2.0)
    assert type(out) == numpy.ndarray
    assert list(out) == [-4, -2]
def test_smallest():
    """smallest() behaves like an elementwise min over its arguments."""
    # Single argument: identity.
    v = dvector()
    f_one = function([v], smallest(v))
    assert numpy.all(f_one([1, 2, 3]) == [1, 2, 3])

    # Three arguments: elementwise minimum across them.
    va, vb, vc = dvector(), dvector(), dvector()
    f_three = function([va, vb, vc], smallest(va, vb, vc))
    assert numpy.all(f_three([1, 3, 9], [7, 7, 7], [8, 2, 3]) == [1, 2, 3])

    # Scalar arguments behave like python's min.
    sa, sb = dscalar(), dscalar()
    assert function([sa, sb], smallest(sa, sb))(-4.0, -2.0) == -4
if __name__ == '__main__':
if len(sys.argv) >= 2 and sys.argv[1] == 'OPT':
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论