提交 58100e17 authored 作者: sentient07's avatar sentient07

Removed unnecessary __eq__ and __hash__ methods

上级 0da30159
......@@ -71,9 +71,6 @@ class OpFromGraph(gof.Op):
fn = function([x, y, z], [e2])
"""
__props__ = ("inputs", "outputs")
def __init__(self, inputs, outputs, **kwargs):
if not isinstance(outputs, list):
raise TypeError('outputs must be list', outputs)
......
......@@ -518,8 +518,6 @@ class FromFunctionOp(gof.Op):
"""
__props__ = ("fn", "itypes", "otypes", "infer_shape")
def __init__(self, fn, itypes, otypes, infer_shape):
self.__fn = fn
self.itypes = itypes
......
......@@ -782,7 +782,6 @@ class Op(utils.object2, PureOp, CLinkerOp):
"""
def prepare_node(self, node, storage_map, compute_map, impl):
"""
Make any special modifications that the Op needs before doing
......
......@@ -861,7 +861,7 @@ class ScalarOp(Op):
raise TypeError(
"Expected a callable for the 'output_types_preference' argument to %s. (got: %s)" %
(self.__class__, output_types_preference))
self.output_types_preference = output_types_preference
self.output_types_preference = output_types_preference
def make_node(self, *inputs):
if self.nin >= 0:
......@@ -908,16 +908,6 @@ class ScalarOp(Op):
raise utils.MethodNotDefined("grad", type(self),
self.__class__.__name__)
def __eq__(self, other):
test = (type(self) == type(other) and
getattr(self, 'output_types_preference', None) ==
getattr(other, 'output_types_preference', None))
return test
def __hash__(self):
return hash(type(self).__name__) ^ hash(
getattr(self, 'output_types_preference', 0))
def __str__(self):
if hasattr(self, 'name') and self.name:
return self.name
......
......@@ -1426,17 +1426,6 @@ class CAReduce(Op):
self.__dict__.update(d)
self.set_ufunc(self.scalar_op)
def __eq__(self, other):
return (type(self) == type(other) and
self.scalar_op == other.scalar_op and
self.axis == other.axis)
def __hash__(self):
if self.axis is None:
return hash(self.scalar_op)
else:
return hash(self.scalar_op) ^ hash(tuple(self.axis))
def __str__(self):
if self.axis is not None:
return "Reduce{%s}{%s}" % (
......@@ -1724,12 +1713,6 @@ class All(CAReduce):
def _output_dtype(self, idtype):
return "int8"
def __str__(self):
if self.axis is None:
return "All"
else:
return "All{%s}" % ", ".join(map(str, self.axis))
def make_node(self, input):
input = as_tensor_variable(input)
if input.dtype not in ["int8", "uint8"]:
......@@ -1757,12 +1740,6 @@ class Any(CAReduce):
def _output_dtype(self, idtype):
return "int8"
def __str__(self):
if self.axis is None:
return "Any"
else:
return "Any{%s}" % ", ".join(map(str, self.axis))
def make_node(self, input):
input = as_tensor_variable(input)
if input.dtype not in ["int8", "uint8"]:
......@@ -1834,14 +1811,6 @@ class CAReduceDtype(CAReduce):
self.dtype = dtype
self.acc_dtype = acc_dtype
def __eq__(self, other):
return (CAReduce.__eq__(self, other) and
self.dtype == other.dtype and
self.acc_dtype == other.acc_dtype)
def __hash__(self):
return CAReduce.__hash__(self) ^ hash((self.dtype, self.acc_dtype))
def __setstate__(self, d):
super(CAReduceDtype, self).__setstate__(d)
if not hasattr(self, "dtype"):
......@@ -2062,14 +2031,6 @@ class Prod(CAReduceDtype):
if 'no_zeros_in_input' not in dct:
self.no_zeros_in_input = False
def __eq__(self, other):
return (CAReduceDtype.__eq__(self, other) and
self.no_zeros_in_input == other.no_zeros_in_input)
def __hash__(self):
return (CAReduceDtype.__hash__(self) ^
hash(self.no_zeros_in_input))
def grad(self, inp, grads):
"""
The grad of this Op could be very easy, if it is was not for the case
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论