Commit f6bf2943, authored by abergeron

Merge pull request #2018 from Tanjay94/Op

Auto generate Op.{hash,eq,str}
......@@ -575,6 +575,30 @@ class Op(utils.object2, PureOp, CLinkerOp):
def __init__(self, use_c_code=theano.config.cxx):
    """Record whether this op should use C code (defaults to theano's cxx flag)."""
    self._op_use_c_code = use_c_code

def _props(self):
    """Return the values named by ``self.__props__`` as a tuple.

    This must be a tuple, not a generator: ``__hash__`` and ``__eq__``
    below hash/compare the result, and a generator would fall back to
    object identity (every instance would hash differently and never
    compare equal), defeating value-based op equality.
    """
    return tuple(getattr(self, a) for a in self.__props__)

def __hash__(self):
    # Ops declaring __props__ hash by (class, prop values) so that
    # value-equal ops collide; others keep the default identity hash.
    if hasattr(self, '__props__'):
        return hash((type(self), self._props()))
    else:
        return super(Op, self).__hash__()

def __str__(self):
    # Render as "ClassName" or "ClassName{prop=value, ...}" when
    # __props__ is declared; otherwise defer to the default __str__.
    if hasattr(self, '__props__'):
        if len(self.__props__) == 0:
            return "%s" % (self.__class__.__name__,)
        else:
            return "%s{%s}" % (self.__class__.__name__,
                               ", ".join("%s=%r" % (p, getattr(self, p))
                                         for p in self.__props__))
    else:
        return super(Op, self).__str__()

def __eq__(self, other):
    # Value equality for ops declaring __props__: same concrete class
    # and same prop values.  Returning NotImplemented lets `other`
    # attempt its own comparison.
    if hasattr(self, '__props__'):
        return (type(self) == type(other) and
                self._props() == other._props())
    else:
        return NotImplemented
def make_thunk(self, node, storage_map, compute_map, no_recycling):
"""
:param node: something previously returned by self.make_node
......
......@@ -70,16 +70,13 @@ class Hint(Op):
transfer that information out of the graph.
"""
# Identity of a Hint op is fully determined by its hints tuple.
__props__ = ('hints',)

def __init__(self, **kwargs):
    # NOTE(review): tuple(kwargs.items()) depends on keyword-argument
    # iteration order, which is not guaranteed on older Pythons — two
    # Hint ops built with identical hints could hash/compare unequal.
    # TODO confirm whether hint order matters to callers.
    self.hints = tuple(kwargs.items())
    # Output 0 is a view of input 0 (the op only annotates the graph).
    self.view_map = {0: [0]}

def __eq__(self, other):
    # Legacy hand-written equality; equivalent to what the
    # __props__-based machinery generates.
    return type(self) == type(other) and self.hints == other.hints

def __hash__(self):
    return hash((type(self), self.hints))

def make_node(self, x):
    # Output has exactly the input's type; see view_map above.
    return Apply(self, [x], [x.type()])
......
......@@ -31,22 +31,11 @@ class MatrixPinv(Op):
exact and faster to compute. Also this op does not get optimized into a
solve op.
"""
# NOTE(review): this span interleaves removed legacy boilerplate
# (props/__hash__/__eq__) with the added ``__props__ = ()`` — hence
# the duplicated no-op __init__ below.
def __init__(self):
    pass

def props(self):
    """Function exposing different properties of each instance of the
    op.

    For the ``MatrixPinv`` op, there are no properties to be exposed.
    """
    return ()

def __hash__(self):
    # Hash on class only (no properties).
    return hash((type(self), self.props()))

# No instance properties: all MatrixPinv instances are interchangeable.
__props__ = ()

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())

def __init__(self):
    pass
def make_node(self, x):
x = as_tensor_variable(x)
......@@ -56,9 +45,6 @@ class MatrixPinv(Op):
# Python 2 tuple-parameter syntax: (x,) unpacks the single input,
# (z, ) the single output storage cell.
def perform(self, node, (x,), (z, )):
    # Pseudo-inverse via numpy; cast back to the input dtype since
    # numpy.linalg.pinv may upcast.
    z[0] = numpy.linalg.pinv(x).astype(x.dtype)

def __str__(self):
    # Fixed display name; the op has no parameters to show.
    return "MatrixPseudoInverse"
pinv = MatrixPinv()
......@@ -73,23 +59,11 @@ class MatrixInverse(Op):
of ``solve``.
"""
# No instance properties: all MatrixInverse instances are interchangeable.
__props__ = ()

def __init__(self):
    pass

def props(self):
    """Function exposing different properties of each instance of the
    op.

    For the ``MatrixInverse`` op, there are no properties to be exposed.
    """
    return ()

# Legacy hand-written identity; the __props__ declaration above lets
# the base Op generate equivalent __hash__/__eq__.
def __hash__(self):
    return hash((type(self), self.props()))

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())
def make_node(self, x):
x = as_tensor_variable(x)
assert x.ndim == 2
......@@ -137,9 +111,6 @@ class MatrixInverse(Op):
return [None]
return [-matrix_dot(xi, ev, xi)]
def __str__(self):
    # Fixed display name; the op has no parameters to show.
    return "MatrixInverse"
matrix_inverse = MatrixInverse()
......@@ -315,20 +286,7 @@ class Eig(Op):
"""
# The numpy routine this op wraps; staticmethod so attribute access on
# an instance does not bind it as a method.
_numop = staticmethod(numpy.linalg.eig)

def props(self):
    """Function exposing different properties of each instance of the
    op.

    For the ``Eig`` op, there are no properties to be exposed.
    """
    return ()

def __hash__(self):
    # Hash on class only (no properties).
    return hash((type(self), self.props()))

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())

# No instance properties: all Eig instances are interchangeable.
__props__ = ()
def make_node(self, x):
x = as_tensor_variable(x)
......@@ -344,9 +302,6 @@ class Eig(Op):
n = shapes[0][0]
return [(n,), (n, n)]
def __str__(self):
    # Display name derived from the wrapped numpy routine, e.g. "Eig".
    return self._numop.__name__.capitalize()
eig = Eig()
......@@ -356,17 +311,12 @@ class Eigh(Eig):
"""
# The numpy routine this op wraps.
_numop = staticmethod(numpy.linalg.eigh)
# Identity is determined solely by the UPLO triangle selector.
__props__ = ('UPLO',)

def __init__(self, UPLO='L'):
    # UPLO selects which triangle of the (assumed symmetric/Hermitian)
    # input numpy.linalg.eigh reads: 'L' lower, 'U' upper.
    assert UPLO in ['L', 'U']
    self.UPLO = UPLO

def __str__(self):
    return 'Eigh{%s}' % self.UPLO

def props(self):
    # Single-property tuple; note the trailing comma.
    return self.UPLO,
def make_node(self, x):
x = as_tensor_variable(x)
assert x.ndim == 2
......@@ -427,6 +377,8 @@ class EighGrad(Op):
"""Gradient of an eigensystem of a Hermitian matrix.
"""
__props__ = ('UPLO',)
def __init__(self, UPLO='L'):
assert UPLO in ['L', 'U']
self.UPLO = UPLO
......@@ -437,18 +389,6 @@ class EighGrad(Op):
self.tri0 = numpy.triu
self.tri1 = lambda a: numpy.tril(a, -1)
# Legacy hand-written identity boilerplate; superseded by the
# ``__props__ = ('UPLO',)`` declaration on the class.
def props(self):
    return (self.UPLO,)

def __hash__(self):
    return hash((type(self), self.props()))

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())

def __str__(self):
    return 'EighGrad{%s}' % self.UPLO
def make_node(self, x, w, v, gw, gv):
x, w, v, gw, gv = map(as_tensor_variable, (x, w, v, gw, gv))
assert x.ndim == 2
......@@ -507,16 +447,11 @@ class QRFull(Op):
and r is upper-triangular.
"""
# The numpy routine this op wraps.
_numop = staticmethod(numpy.linalg.qr)
# Identity is determined solely by the QR mode string.
__props__ = ('mode',)

def __init__(self, mode):
    # mode is forwarded verbatim to numpy.linalg.qr in perform().
    self.mode = mode

def __hash__(self):
    return hash((type(self), self.props()))

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())
def make_node(self, x):
x = as_tensor_variable(x)
assert x.ndim == 2, "The input of qr function should be a matrix."
......@@ -524,18 +459,12 @@ class QRFull(Op):
r = theano.tensor.matrix(dtype=x.dtype)
return Apply(self, [x], [q, r])
def props(self):
    """Return the identity-defining properties of this op as a tuple.

    Wrapped in a 1-tuple for consistency with every other linalg op's
    ``props()`` (and with ``__props__ = ('mode',)``); the original
    returned the bare ``self.mode``.  The only visible callers,
    ``__hash__`` and ``__eq__``, work identically with either form.
    """
    return (self.mode,)
# Python 2 tuple-parameter syntax: (x,) unpacks the single input,
# (q, r) the two output storage cells.
def perform(self, node, (x,), (q, r)):
    assert x.ndim == 2, "The input of qr function should be a matrix."
    # numpy.linalg.qr returns (q, r) for the "full"/"reduced" modes.
    q[0], r[0] = self._numop(x,
                             self.mode)
def __str__(self):
    """Display name derived from the wrapped numpy routine, e.g. "Qr".

    The previous ``self._numop.__class__.__name__`` evaluated to the
    string "function" for every instance, because ``_numop`` is a plain
    numpy function; use its ``__name__`` (capitalized) as the other
    linalg ops (Eig, SVD) do.
    """
    return self._numop.__name__.capitalize()
class QRIncomplete(Op):
"""
......@@ -544,19 +473,11 @@ class QRIncomplete(Op):
Factor the matrix a as qr and return a single matrix.
"""
# The numpy routine this op wraps.
_numop = staticmethod(numpy.linalg.qr)
# Identity is determined solely by the QR mode string.
__props__ = ('mode',)

def __init__(self, mode):
    # mode is forwarded verbatim to numpy.linalg.qr.
    self.mode = mode

def __hash__(self):
    return hash((type(self), self.props()))

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())
def props(self):
    """Return the identity-defining properties of this op as a tuple.

    Wrapped in a 1-tuple for consistency with every other linalg op's
    ``props()`` (and with ``__props__ = ('mode',)``); the original
    returned the bare ``self.mode``.  The only visible callers,
    ``__hash__`` and ``__eq__``, work identically with either form.
    """
    return (self.mode,)
def make_node(self, x):
x = as_tensor_variable(x)
assert x.ndim == 2, "The input of qr function should be a matrix."
......@@ -568,9 +489,6 @@ class QRIncomplete(Op):
q[0] = self._numop(x,
self.mode)
def __str__(self):
    """Display name derived from the wrapped numpy routine, e.g. "Qr".

    The previous ``self._numop.__class__.__name__`` evaluated to the
    string "function" for every instance, because ``_numop`` is a plain
    numpy function; use its ``__name__`` (capitalized) as the other
    linalg ops (Eig, SVD) do.
    """
    return self._numop.__name__.capitalize()
def qr(a, mode="full"):
"""
......@@ -627,6 +545,7 @@ class SVD(Op):
# See doc in the docstring of the function just after this class.
# The numpy routine this op wraps.
_numop = staticmethod(numpy.linalg.svd)
# Identity is determined by the two numpy.linalg.svd flags.
__props__ = ('full_matrices', 'compute_uv')
def __init__(self, full_matrices=True, compute_uv=True):
"""
......@@ -644,15 +563,6 @@ class SVD(Op):
self.full_matrices = full_matrices
self.compute_uv = compute_uv
# Legacy hand-written identity boilerplate; superseded by the
# __props__ declaration on the class.
def __hash__(self):
    return hash((type(self), self.props()))

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())
def props(self):
    """Return the identity-defining properties of this op, in the same
    order as ``__props__``."""
    return (self.full_matrices, self.compute_uv)
def make_node(self, x):
x = as_tensor_variable(x)
assert x.ndim == 2, "The input of svd function should be a matrix."
......@@ -667,9 +577,6 @@ class SVD(Op):
self.full_matrices,
self.compute_uv)
def __str__(self):
    # Display name derived from the wrapped numpy routine, e.g. "Svd".
    return self._numop.__name__.capitalize()
def svd(a, full_matrices=1, compute_uv=1):
"""
......
......@@ -42,34 +42,16 @@ class Cholesky(Op):
#TODO: inplace
#TODO: for specific dtypes
#TODO: LAPACK wrapper with in-place behavior, for solve also

# Identity is determined by the triangle selector and the (currently
# always-False) destructive flag.
__props__ = ('lower', 'destructive')

def __init__(self, lower=True):
    # lower selects which triangular Cholesky factor is computed.
    self.lower = lower
    # Never True in this code; kept as a property anyway.
    self.destructive = False

def props(self):
    return (self.lower,
            self.destructive)

# Legacy hand-written identity boilerplate; superseded by the
# __props__ declaration above.
def __hash__(self):
    return hash((type(self), self.props()))

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())
def infer_shape(self, node, shapes):
    """The Cholesky factor has the same shape as the (single) input."""
    return shapes[:1]
def __str__(self):
    """Render as ``Cholesky{lower|upper,destructive|non-destructive}``."""
    lu = 'lower' if self.lower else 'upper'
    destr = 'destructive' if self.destructive else 'non-destructive'
    return 'Cholesky{%s,%s}' % (lu, destr)
def make_node(self, x):
assert imported_scipy, (
"Scipy not available. Scipy is needed for the Cholesky op")
......@@ -92,31 +74,13 @@ cholesky = Cholesky()
class CholeskyGrad(Op):
"""
"""
# Identity is determined by the triangle selector and the (currently
# always-False) destructive flag.
__props__ = ('lower', 'destructive')

def __init__(self, lower=True):
    # Mirrors Cholesky's constructor: lower selects the triangle.
    self.lower = lower
    # Never True in this code; kept as a property anyway.
    self.destructive = False

def props(self):
    return (self.lower,
            self.destructive)

# Legacy hand-written identity boilerplate; superseded by the
# __props__ declaration above.
def __hash__(self):
    return hash((type(self), self.props()))

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())
def __str__(self):
    """Render as ``CholeskyGrad{lower|upper,destructive|non-destructive}``."""
    lu = 'lower' if self.lower else 'upper'
    destr = 'destructive' if self.destructive else 'non-destructive'
    return 'CholeskyGrad{%s,%s}' % (lu, destr)
def make_node(self, x, l, dz):
x = as_tensor_variable(x)
l = as_tensor_variable(l)
......@@ -175,6 +139,9 @@ class CholeskyGrad(Op):
class Solve(Op):
"""Solve a system of linear equations"""
__props__ = ('A_structure', 'lower', 'overwrite_A', 'overwrite_b')
def __init__(self,
A_structure='general',
lower=False,
......@@ -187,18 +154,6 @@ class Solve(Op):
self.overwrite_A = overwrite_A
self.overwrite_b = overwrite_b
def props(self):
    """Return the identity-defining properties of this op, in the same
    order as ``__props__``."""
    return (self.A_structure, self.lower,
            self.overwrite_A, self.overwrite_b)
# Legacy hand-written identity boilerplate; superseded by the
# __props__ declaration on the class.
def __hash__(self):
    return hash((type(self), self.props()))

def __eq__(self, other):
    return type(self) == type(other) and self.props() == other.props()

def __repr__(self):
    # e.g. Solve{('general', False, False, False)}
    return 'Solve{%s}' % str(self.props())
......@@ -241,19 +196,12 @@ class Eigvalsh(Op):
"""Generalized eigenvalues of a Hermetian positive definite eigensystem
"""
# Identity is determined solely by the triangle selector.
__props__ = ('lower',)

def __init__(self, lower=True):
    # Require an actual bool (not merely truthy) so props()/__eq__
    # stay well-behaved.
    assert lower in [True, False]
    self.lower = lower

def props(self):
    return (self.lower,)

# Legacy hand-written identity boilerplate; superseded by the
# __props__ declaration above.
def __hash__(self):
    return hash((type(self), self.props()))

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())
def make_node(self, a, b):
assert imported_scipy, (
"Scipy not available. Scipy is needed for the Eigvalsh op")
......@@ -304,6 +252,8 @@ class EigvalshGrad(Op):
# discussion on github at
# https://github.com/Theano/Theano/pull/1846#discussion-diff-12486764
__props__ = ('lower',)
def __init__(self, lower=True):
assert lower in [True, False]
self.lower = lower
......@@ -314,15 +264,6 @@ class EigvalshGrad(Op):
self.tri0 = numpy.triu
self.tri1 = lambda a: numpy.tril(a, -1)
# Legacy hand-written identity boilerplate; superseded by the
# ``__props__ = ('lower',)`` declaration on the class.
def props(self):
    return (self.lower,)

def __hash__(self):
    return hash((type(self), self.props()))

def __eq__(self, other):
    return (type(self) == type(other) and self.props() == other.props())
def make_node(self, a, b, gw):
assert imported_scipy, (
"Scipy not available. Scipy is needed for the GEigvalsh op")
......
Markdown formatting is supported.
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment.