Commit 1574ca80 authored by Olivier Delalleau

Merged

@@ -99,7 +99,7 @@ The ``make_node`` method creates a node to be included in the expression graph.
 It runs when we apply our Op (``fibby``) to Variable (``x``), as in ``fibby(tensor.vector())``.
 When an Op has multiple inputs, their order in the inputs argument to ``Apply``
 is important: Theano will call ``make_node(*inputs)`` to copy the graph,
-so it is important to not change the semantics of the expression by doing changing the argument order.
+so it is important not to change the semantics of the expression by changing the argument order.
...
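The hunk above says the input order passed to ``Apply`` must be preserved because Theano re-creates nodes via ``make_node(*inputs)``. A minimal, hypothetical two-input Op sketch against the old Theano API (not part of this commit; ``MyDiff`` and its ``perform`` body are illustrative only) shows why the stored order carries the semantics:

    import theano
    from theano import tensor

    class MyDiff(theano.Op):
        """Hypothetical elementwise a - b, used only to illustrate ordering."""

        def make_node(self, a, b):
            a = tensor.as_tensor_variable(a)
            b = tensor.as_tensor_variable(b)
            # The Apply node stores inputs as [a, b]. Theano may later call
            # make_node(*node.inputs) to copy the graph, so storing [b, a]
            # here would silently turn a - b into b - a in the copy.
            return theano.Apply(self, [a, b], [a.type()])

        def perform(self, node, inputs, output_storage):
            a, b = inputs
            output_storage[0][0] = a - b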
@@ -1680,10 +1680,10 @@ def min(x, axis='DEFAULT'):
         axis = 0
     elif axis=='DEFAULT':
         axis = x.type.ndim - 1
-        warnings.warn("The default axis of min will change! Now we return the min over the last dimensions. It will change to be the same as numpy: the min over all dimensions. To hide this warning and be compatible with the future behavior, set axis to -1 to have the current behavior. To have the futur behavior set axis to range(nb dim), but this don't support the grad. To have the grad, you must flatten the tensor before calling min().")
+        warnings.warn("The default axis of min will change! Now we return the min over the last dimensions. It will change to be the same as numpy: the min over all dimensions. To hide this warning and be compatible with the future behavior, set axis to -1 to have the current behavior. To have the future behavior, set axis to range(x.ndim), but this does not support the grad. To be able to get the grad, you must flatten the tensor before calling min().")
     elif axis is None:
         axis = x.type.ndim - 1
-        warnings.warn("The behavior of min when axis==None will change! Now we return the min over the last dimensions. It will change to the min over all dimensions as numpy. To hide this warning and be compatible with the future behavior, set axis to -1 to have the current behavior. To have the futur behavior set axis to range(nb dim), but this don't support the grad. To have the grad, you must flatten the tensor before calling min().")
+        warnings.warn("The behavior of min when axis is None will change! Now we return the min over the last dimensions. It will change to the min over all dimensions as numpy. To hide this warning and be compatible with the future behavior, set axis to -1 to have the current behavior. To have the future behavior, set axis to range(x.ndim), but this does not support the grad. To be able to get the grad, you must flatten the tensor before calling min().")
     str_x_type = str(x.dtype)
     if str_x_type.startswith('float') or str_x_type.startswith('int'):
         return -max(-x, axis=axis)
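Two things are packed into this hunk: min is derived from max by negation, and the warnings describe a planned change of the default axis. A small numpy sketch (illustrative, not from the commit) of both:

    import numpy as np

    x = np.array([[3., 1., 2.],
                  [0., 5., 4.]])
    # min via negated max: negating the values turns maxima into minima.
    assert np.array_equal(-np.max(-x, axis=-1), np.min(x, axis=-1))
    print(np.min(x, axis=-1))  # current default (trailing axis): [1. 0.]
    print(np.min(x))           # future, numpy-like default (all axes): 0.0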
@@ -2219,9 +2219,10 @@ def mean(input, axis = None, op = False):
 @constructor
 def var(input, axis = None):
-    """Compute the variance along the given axis of a tensor `input`
-    :param axis: compute the variance along this axis of the tensor. None means trailing axis.
+    """Compute the variance along the given axis of a tensor `input`.
+    :param axis: Compute the variance along this axis of the tensor.
+        None means all axes (like numpy).
     :type axis: None or int or (list of int) (see `Sum`)
     """
@@ -2255,6 +2256,16 @@ def var(input, axis = None):
     #return the mean sqr
     return mean(centered_input**2, axis)
+
+
+@constructor
+def std(input, axis=None):
+    """Compute the standard deviation along the given axis of a tensor `input`.
+    :param axis: Compute the standard deviation along this axis of the tensor.
+        None means all axes (like numpy).
+    :type axis: None or int or (list of int) (see `Sum`)
+    """
+    return sqrt(var(input=input, axis=axis))
 if 0:
     ## COMMENTED OUT FEB 17 2010
     ## TODO (DOCUMENT AND WRITE TESTS) OR DELETE
...
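The new std is simply the square root of var, and var itself is the mean of the squared, mean-centered input. A numpy sanity check of that chain (illustrative only):

    import numpy as np

    x = np.random.randn(4, 5)
    centered = x - x.mean(axis=0)             # the centered_input above
    v = (centered ** 2).mean(axis=0)          # var(): mean of the squares
    assert np.allclose(v, x.var(axis=0))      # matches numpy's ddof=0 variance
    assert np.allclose(np.sqrt(v), x.std(axis=0))  # std = sqrt(var)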