提交 f6bf2943 authored 作者: abergeron's avatar abergeron

Merge pull request #2018 from Tanjay94/Op

Auto generate Op.{hash,eq,str}
...@@ -575,6 +575,30 @@ class Op(utils.object2, PureOp, CLinkerOp): ...@@ -575,6 +575,30 @@ class Op(utils.object2, PureOp, CLinkerOp):
def __init__(self, use_c_code=theano.config.cxx): def __init__(self, use_c_code=theano.config.cxx):
self._op_use_c_code = use_c_code self._op_use_c_code = use_c_code
def _props(self):
    """Return the values of the attributes named in ``__props__``.

    Returns a ``tuple`` — NOT a generator — so the result is hashable
    and compares by value. A generator here would make ``__eq__``
    compare by object identity (always False for two instances) and
    ``__hash__`` non-deterministic across equal Ops.
    """
    return tuple(getattr(self, a) for a in self.__props__)
def __hash__(self):
    """Hash derived from the Op's type and its ``__props__`` values.

    Must stay consistent with ``__eq__``: two Ops of the same type
    with equal props hash identically. Ops without ``__props__`` keep
    the default (identity-based) hash.
    """
    if hasattr(self, '__props__'):
        # tuple(...) materializes the props: hashing a raw generator
        # would hash by identity, giving different values for equal Ops.
        return hash((type(self), tuple(self._props())))
    else:
        return super(Op, self).__hash__()
def __str__(self):
    """Render the Op's class name, with its ``__props__`` values when
    any are declared (e.g. ``MyOp{axis=0}``)."""
    if not hasattr(self, '__props__'):
        return super(Op, self).__str__()
    name = self.__class__.__name__
    if len(self.__props__) == 0:
        return "%s" % (name,)
    rendered = ", ".join("%s=%r" % (p, getattr(self, p))
                         for p in self.__props__)
    return "%s{%s}" % (name, rendered)
def __eq__(self, other):
    """Value equality: same concrete type and equal ``__props__`` values.

    Returns ``NotImplemented`` when this Op declares no ``__props__``,
    letting Python fall back to the reflected comparison / identity.
    """
    if hasattr(self, '__props__'):
        # tuple(...) is essential: comparing two generators compares
        # object identity and would always be False. The type check
        # short-circuits first, so ``other`` is known to have _props().
        return (type(self) == type(other) and
                tuple(self._props()) == tuple(other._props()))
    else:
        return NotImplemented
def make_thunk(self, node, storage_map, compute_map, no_recycling): def make_thunk(self, node, storage_map, compute_map, no_recycling):
""" """
:param node: something previously returned by self.make_node :param node: something previously returned by self.make_node
......
...@@ -70,16 +70,13 @@ class Hint(Op): ...@@ -70,16 +70,13 @@ class Hint(Op):
transfer that information out of the graph. transfer that information out of the graph.
""" """
__props__ = ('hints',)
def __init__(self, **kwargs): def __init__(self, **kwargs):
self.hints = tuple(kwargs.items()) self.hints = tuple(kwargs.items())
self.view_map = {0: [0]} self.view_map = {0: [0]}
def __eq__(self, other):
return type(self) == type(other) and self.hints == other.hints
def __hash__(self):
return hash((type(self), self.hints))
def make_node(self, x): def make_node(self, x):
return Apply(self, [x], [x.type()]) return Apply(self, [x], [x.type()])
......
...@@ -31,22 +31,11 @@ class MatrixPinv(Op): ...@@ -31,22 +31,11 @@ class MatrixPinv(Op):
exact and faster to compute. Also this op does not get optimized into a exact and faster to compute. Also this op does not get optimized into a
solve op. solve op.
""" """
def __init__(self):
pass
def props(self):
"""Function exposing different properties of each instance of the
op.
For the ``MatrixPinv`` op, there are no properties to be exposed.
"""
return ()
def __hash__(self): __props__ = ()
return hash((type(self), self.props()))
def __eq__(self, other): def __init__(self):
return (type(self) == type(other) and self.props() == other.props()) pass
def make_node(self, x): def make_node(self, x):
x = as_tensor_variable(x) x = as_tensor_variable(x)
...@@ -56,9 +45,6 @@ class MatrixPinv(Op): ...@@ -56,9 +45,6 @@ class MatrixPinv(Op):
def perform(self, node, (x,), (z, )): def perform(self, node, (x,), (z, )):
z[0] = numpy.linalg.pinv(x).astype(x.dtype) z[0] = numpy.linalg.pinv(x).astype(x.dtype)
def __str__(self):
return "MatrixPseudoInverse"
pinv = MatrixPinv() pinv = MatrixPinv()
...@@ -73,23 +59,11 @@ class MatrixInverse(Op): ...@@ -73,23 +59,11 @@ class MatrixInverse(Op):
of ``solve``. of ``solve``.
""" """
__props__ = ()
def __init__(self): def __init__(self):
pass pass
def props(self):
"""Function exposing different properties of each instance of the
op.
For the ``MatrixInverse`` op, there are no properties to be exposed.
"""
return ()
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return (type(self) == type(other) and self.props() == other.props())
def make_node(self, x): def make_node(self, x):
x = as_tensor_variable(x) x = as_tensor_variable(x)
assert x.ndim == 2 assert x.ndim == 2
...@@ -137,9 +111,6 @@ class MatrixInverse(Op): ...@@ -137,9 +111,6 @@ class MatrixInverse(Op):
return [None] return [None]
return [-matrix_dot(xi, ev, xi)] return [-matrix_dot(xi, ev, xi)]
def __str__(self):
return "MatrixInverse"
matrix_inverse = MatrixInverse() matrix_inverse = MatrixInverse()
...@@ -315,20 +286,7 @@ class Eig(Op): ...@@ -315,20 +286,7 @@ class Eig(Op):
""" """
_numop = staticmethod(numpy.linalg.eig) _numop = staticmethod(numpy.linalg.eig)
__props__ = ()
def props(self):
"""Function exposing different properties of each instance of the
op.
For the ``Eig`` op, there are no properties to be exposed.
"""
return ()
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return (type(self) == type(other) and self.props() == other.props())
def make_node(self, x): def make_node(self, x):
x = as_tensor_variable(x) x = as_tensor_variable(x)
...@@ -344,9 +302,6 @@ class Eig(Op): ...@@ -344,9 +302,6 @@ class Eig(Op):
n = shapes[0][0] n = shapes[0][0]
return [(n,), (n, n)] return [(n,), (n, n)]
def __str__(self):
return self._numop.__name__.capitalize()
eig = Eig() eig = Eig()
...@@ -356,17 +311,12 @@ class Eigh(Eig): ...@@ -356,17 +311,12 @@ class Eigh(Eig):
""" """
_numop = staticmethod(numpy.linalg.eigh) _numop = staticmethod(numpy.linalg.eigh)
__props__ = ('UPLO',)
def __init__(self, UPLO='L'): def __init__(self, UPLO='L'):
assert UPLO in ['L', 'U'] assert UPLO in ['L', 'U']
self.UPLO = UPLO self.UPLO = UPLO
def __str__(self):
return 'Eigh{%s}' % self.UPLO
def props(self):
return self.UPLO,
def make_node(self, x): def make_node(self, x):
x = as_tensor_variable(x) x = as_tensor_variable(x)
assert x.ndim == 2 assert x.ndim == 2
...@@ -427,6 +377,8 @@ class EighGrad(Op): ...@@ -427,6 +377,8 @@ class EighGrad(Op):
"""Gradient of an eigensystem of a Hermitian matrix. """Gradient of an eigensystem of a Hermitian matrix.
""" """
__props__ = ('UPLO',)
def __init__(self, UPLO='L'): def __init__(self, UPLO='L'):
assert UPLO in ['L', 'U'] assert UPLO in ['L', 'U']
self.UPLO = UPLO self.UPLO = UPLO
...@@ -437,18 +389,6 @@ class EighGrad(Op): ...@@ -437,18 +389,6 @@ class EighGrad(Op):
self.tri0 = numpy.triu self.tri0 = numpy.triu
self.tri1 = lambda a: numpy.tril(a, -1) self.tri1 = lambda a: numpy.tril(a, -1)
def props(self):
return (self.UPLO,)
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return (type(self) == type(other) and self.props() == other.props())
def __str__(self):
return 'EighGrad{%s}' % self.UPLO
def make_node(self, x, w, v, gw, gv): def make_node(self, x, w, v, gw, gv):
x, w, v, gw, gv = map(as_tensor_variable, (x, w, v, gw, gv)) x, w, v, gw, gv = map(as_tensor_variable, (x, w, v, gw, gv))
assert x.ndim == 2 assert x.ndim == 2
...@@ -507,16 +447,11 @@ class QRFull(Op): ...@@ -507,16 +447,11 @@ class QRFull(Op):
and r is upper-triangular. and r is upper-triangular.
""" """
_numop = staticmethod(numpy.linalg.qr) _numop = staticmethod(numpy.linalg.qr)
__props__ = ('mode',)
def __init__(self, mode): def __init__(self, mode):
self.mode = mode self.mode = mode
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return (type(self) == type(other) and self.props() == other.props())
def make_node(self, x): def make_node(self, x):
x = as_tensor_variable(x) x = as_tensor_variable(x)
assert x.ndim == 2, "The input of qr function should be a matrix." assert x.ndim == 2, "The input of qr function should be a matrix."
...@@ -524,18 +459,12 @@ class QRFull(Op): ...@@ -524,18 +459,12 @@ class QRFull(Op):
r = theano.tensor.matrix(dtype=x.dtype) r = theano.tensor.matrix(dtype=x.dtype)
return Apply(self, [x], [q, r]) return Apply(self, [x], [q, r])
def props(self):
return self.mode
def perform(self, node, (x,), (q, r)): def perform(self, node, (x,), (q, r)):
assert x.ndim == 2, "The input of qr function should be a matrix." assert x.ndim == 2, "The input of qr function should be a matrix."
q[0], r[0] = self._numop(x, q[0], r[0] = self._numop(x,
self.mode) self.mode)
def __str__(self):
return self._numop.__class__.__name__
class QRIncomplete(Op): class QRIncomplete(Op):
""" """
...@@ -544,19 +473,11 @@ class QRIncomplete(Op): ...@@ -544,19 +473,11 @@ class QRIncomplete(Op):
Factor the matrix a as qr and return a single matrix. Factor the matrix a as qr and return a single matrix.
""" """
_numop = staticmethod(numpy.linalg.qr) _numop = staticmethod(numpy.linalg.qr)
__props__ = ('mode',)
def __init__(self, mode): def __init__(self, mode):
self.mode = mode self.mode = mode
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return (type(self) == type(other) and self.props() == other.props())
def props(self):
return self.mode
def make_node(self, x): def make_node(self, x):
x = as_tensor_variable(x) x = as_tensor_variable(x)
assert x.ndim == 2, "The input of qr function should be a matrix." assert x.ndim == 2, "The input of qr function should be a matrix."
...@@ -568,9 +489,6 @@ class QRIncomplete(Op): ...@@ -568,9 +489,6 @@ class QRIncomplete(Op):
q[0] = self._numop(x, q[0] = self._numop(x,
self.mode) self.mode)
def __str__(self):
return self._numop.__class__.__name__
def qr(a, mode="full"): def qr(a, mode="full"):
""" """
...@@ -627,6 +545,7 @@ class SVD(Op): ...@@ -627,6 +545,7 @@ class SVD(Op):
# See doc in the docstring of the function just after this class. # See doc in the docstring of the function just after this class.
_numop = staticmethod(numpy.linalg.svd) _numop = staticmethod(numpy.linalg.svd)
__props__ = ('full_matrices', 'compute_uv')
def __init__(self, full_matrices=True, compute_uv=True): def __init__(self, full_matrices=True, compute_uv=True):
""" """
...@@ -644,15 +563,6 @@ class SVD(Op): ...@@ -644,15 +563,6 @@ class SVD(Op):
self.full_matrices = full_matrices self.full_matrices = full_matrices
self.compute_uv = compute_uv self.compute_uv = compute_uv
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return (type(self) == type(other) and self.props() == other.props())
def props(self):
return self.full_matrices, self.compute_uv,
def make_node(self, x): def make_node(self, x):
x = as_tensor_variable(x) x = as_tensor_variable(x)
assert x.ndim == 2, "The input of svd function should be a matrix." assert x.ndim == 2, "The input of svd function should be a matrix."
...@@ -667,9 +577,6 @@ class SVD(Op): ...@@ -667,9 +577,6 @@ class SVD(Op):
self.full_matrices, self.full_matrices,
self.compute_uv) self.compute_uv)
def __str__(self):
return self._numop.__name__.capitalize()
def svd(a, full_matrices=1, compute_uv=1): def svd(a, full_matrices=1, compute_uv=1):
""" """
......
...@@ -42,34 +42,16 @@ class Cholesky(Op): ...@@ -42,34 +42,16 @@ class Cholesky(Op):
#TODO: inplace #TODO: inplace
#TODO: for specific dtypes #TODO: for specific dtypes
#TODO: LAPACK wrapper with in-place behavior, for solve also #TODO: LAPACK wrapper with in-place behavior, for solve also
__props__ = ('lower', 'destructive')
def __init__(self, lower=True): def __init__(self, lower=True):
self.lower = lower self.lower = lower
self.destructive = False self.destructive = False
def props(self):
return (self.lower,
self.destructive)
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return (type(self) == type(other) and self.props() == other.props())
def infer_shape(self, node, shapes): def infer_shape(self, node, shapes):
return [shapes[0]] return [shapes[0]]
def __str__(self):
if self.lower:
lu = 'lower'
else:
lu = 'upper'
if self.destructive:
destr = 'destructive'
else:
destr = 'non-destructive'
return 'Cholesky{%s,%s}' % (lu, destr)
def make_node(self, x): def make_node(self, x):
assert imported_scipy, ( assert imported_scipy, (
"Scipy not available. Scipy is needed for the Cholesky op") "Scipy not available. Scipy is needed for the Cholesky op")
...@@ -92,31 +74,13 @@ cholesky = Cholesky() ...@@ -92,31 +74,13 @@ cholesky = Cholesky()
class CholeskyGrad(Op): class CholeskyGrad(Op):
""" """
""" """
__props__ = ('lower', 'destructive')
def __init__(self, lower=True): def __init__(self, lower=True):
self.lower = lower self.lower = lower
self.destructive = False self.destructive = False
def props(self):
return (self.lower,
self.destructive)
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return (type(self) == type(other) and self.props() == other.props())
def __str__(self):
if self.lower:
lu = 'lower'
else:
lu = 'upper'
if self.destructive:
destr = 'destructive'
else:
destr = 'non-destructive'
return 'CholeskyGrad{%s,%s}' % (lu, destr)
def make_node(self, x, l, dz): def make_node(self, x, l, dz):
x = as_tensor_variable(x) x = as_tensor_variable(x)
l = as_tensor_variable(l) l = as_tensor_variable(l)
...@@ -175,6 +139,9 @@ class CholeskyGrad(Op): ...@@ -175,6 +139,9 @@ class CholeskyGrad(Op):
class Solve(Op): class Solve(Op):
"""Solve a system of linear equations""" """Solve a system of linear equations"""
__props__ = ('A_structure', 'lower', 'overwrite_A', 'overwrite_b')
def __init__(self, def __init__(self,
A_structure='general', A_structure='general',
lower=False, lower=False,
...@@ -187,18 +154,6 @@ class Solve(Op): ...@@ -187,18 +154,6 @@ class Solve(Op):
self.overwrite_A = overwrite_A self.overwrite_A = overwrite_A
self.overwrite_b = overwrite_b self.overwrite_b = overwrite_b
def props(self):
return (self.A_structure,
self.lower,
self.overwrite_A,
self.overwrite_b)
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return type(self) == type(other) and self.props() == other.props()
def __repr__(self): def __repr__(self):
return 'Solve{%s}' % str(self.props()) return 'Solve{%s}' % str(self.props())
...@@ -241,19 +196,12 @@ class Eigvalsh(Op): ...@@ -241,19 +196,12 @@ class Eigvalsh(Op):
"""Generalized eigenvalues of a Hermetian positive definite eigensystem """Generalized eigenvalues of a Hermetian positive definite eigensystem
""" """
__props__ = ('lower',)
def __init__(self, lower=True): def __init__(self, lower=True):
assert lower in [True, False] assert lower in [True, False]
self.lower = lower self.lower = lower
def props(self):
return (self.lower,)
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return (type(self) == type(other) and self.props() == other.props())
def make_node(self, a, b): def make_node(self, a, b):
assert imported_scipy, ( assert imported_scipy, (
"Scipy not available. Scipy is needed for the Eigvalsh op") "Scipy not available. Scipy is needed for the Eigvalsh op")
...@@ -304,6 +252,8 @@ class EigvalshGrad(Op): ...@@ -304,6 +252,8 @@ class EigvalshGrad(Op):
# discussion on github at # discussion on github at
# https://github.com/Theano/Theano/pull/1846#discussion-diff-12486764 # https://github.com/Theano/Theano/pull/1846#discussion-diff-12486764
__props__ = ('lower',)
def __init__(self, lower=True): def __init__(self, lower=True):
assert lower in [True, False] assert lower in [True, False]
self.lower = lower self.lower = lower
...@@ -314,15 +264,6 @@ class EigvalshGrad(Op): ...@@ -314,15 +264,6 @@ class EigvalshGrad(Op):
self.tri0 = numpy.triu self.tri0 = numpy.triu
self.tri1 = lambda a: numpy.tril(a, -1) self.tri1 = lambda a: numpy.tril(a, -1)
def props(self):
return (self.lower,)
def __hash__(self):
return hash((type(self), self.props()))
def __eq__(self, other):
return (type(self) == type(other) and self.props() == other.props())
def make_node(self, a, b, gw): def make_node(self, a, b, gw):
assert imported_scipy, ( assert imported_scipy, (
"Scipy not available. Scipy is needed for the GEigvalsh op") "Scipy not available. Scipy is needed for the GEigvalsh op")
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论