Commit a6e461bf authored by Brandon T. Willard, committed by Brandon T. Willard

Add missing hermitian option to MatrixPinv

Parent 40313aac
...@@ -18,26 +18,10 @@ logger = logging.getLogger(__name__) ...@@ -18,26 +18,10 @@ logger = logging.getLogger(__name__)
class MatrixPinv(Op): class MatrixPinv(Op):
"""Computes the pseudo-inverse of a matrix :math:`A`. __props__ = ("hermitian",)
The pseudo-inverse of a matrix :math:`A`, denoted :math:`A^+`, is
defined as: "the matrix that 'solves' [the least-squares problem]
:math:`Ax = b`," i.e., if :math:`\\bar{x}` is said solution, then
:math:`A^+` is that matrix such that :math:`\\bar{x} = A^+b`.
Note that :math:`Ax=AA^+b`, so :math:`AA^+` is close to the identity matrix.
This method is not faster than `matrix_inverse`. Its strength comes from
that it works for non-square matrices.
If you have a square matrix though, `matrix_inverse` can be both more
exact and faster to compute. Also this op does not get optimized into a
solve op.
""" def __init__(self, hermitian):
self.hermitian = hermitian
__props__ = ()
def __init__(self):
pass
def make_node(self, x): def make_node(self, x):
x = as_tensor_variable(x) x = as_tensor_variable(x)
...@@ -47,7 +31,7 @@ class MatrixPinv(Op): ...@@ -47,7 +31,7 @@ class MatrixPinv(Op):
def perform(self, node, inputs, outputs): def perform(self, node, inputs, outputs):
(x,) = inputs (x,) = inputs
(z,) = outputs (z,) = outputs
z[0] = np.linalg.pinv(x).astype(x.dtype) z[0] = np.linalg.pinv(x, hermitian=self.hermitian).astype(x.dtype)
def L_op(self, inputs, outputs, g_outputs): def L_op(self, inputs, outputs, g_outputs):
r"""The gradient function should return r"""The gradient function should return
...@@ -75,8 +59,46 @@ class MatrixPinv(Op): ...@@ -75,8 +59,46 @@ class MatrixPinv(Op):
).T ).T
return [grad] return [grad]
    def infer_shape(self, fgraph, node, shapes):
        # The pseudo-inverse of an (m, n) matrix has shape (n, m), so the
        # output shape is simply the input shape reversed.
        # NOTE(review): assumes the input is a 2-D matrix (rank-2 shape) —
        # consistent with make_node elsewhere in this class; confirm.
        return [list(reversed(shapes[0]))]
def pinv(x, hermitian=False):
    """Computes the pseudo-inverse of a matrix :math:`A`.

    The pseudo-inverse of a matrix :math:`A`, denoted :math:`A^+`, is
    defined as: "the matrix that 'solves' [the least-squares problem]
    :math:`Ax = b`," i.e., if :math:`\\bar{x}` is said solution, then
    :math:`A^+` is that matrix such that :math:`\\bar{x} = A^+b`.

    Note that :math:`Ax=AA^+b`, so :math:`AA^+` is close to the identity
    matrix.

    This method is not faster than `matrix_inverse`; its strength is that
    it also works for non-square matrices.  For a square matrix,
    `matrix_inverse` can be both more exact and faster to compute, and it
    can be optimized into a solve op, which this op cannot.
    """
    # Build the op parameterized by the hermitian flag, then apply it to x.
    op = MatrixPinv(hermitian=hermitian)
    return op(x)
class Inv(Op):
    """Computes the inverse of one or more matrices.

    `perform` delegates to `np.linalg.inv`, which also accepts stacks of
    matrices of shape ``(..., M, M)`` — hence "one or more".
    """

    # This op takes no parameters; declare empty props so that instances
    # compare and hash equal (required for graph merging), consistent with
    # MatrixPinv's ``__props__ = ("hermitian",)``.
    __props__ = ()

    def make_node(self, x):
        """Wrap `x` as a tensor variable and build the Apply node.

        The output has the same type as the input (an inverse preserves
        shape and dtype).
        """
        x = as_tensor_variable(x)
        return Apply(self, [x], [x.type()])

    def perform(self, node, inputs, outputs):
        """Compute the inverse numerically with NumPy.

        The result is cast back to the input dtype because
        ``np.linalg.inv`` may upcast (e.g. float32 -> float64).
        """
        (x,) = inputs
        (z,) = outputs
        z[0] = np.linalg.inv(x).astype(x.dtype)

    def infer_shape(self, fgraph, node, shapes):
        # A square matrix's inverse has exactly the same shape.
        return shapes
pinv = MatrixPinv() inv = Inv()
class MatrixInverse(Op): class MatrixInverse(Op):
......
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Register or sign in to comment