提交 3b819206 authored 作者: sentient07's avatar sentient07

Reverted changes with Scalars, added __props__ to a few more places, fixed Travis

上级 58100e17
......@@ -71,6 +71,7 @@ class OpFromGraph(gof.Op):
fn = function([x, y, z], [e2])
"""
def __init__(self, inputs, outputs, **kwargs):
if not isinstance(outputs, list):
raise TypeError('outputs must be list', outputs)
......
......@@ -781,7 +781,6 @@ class Op(utils.object2, PureOp, CLinkerOp):
Convenience class to bundle `PureOp` and `CLinkerOp`.
"""
def prepare_node(self, node, storage_map, compute_map, impl):
"""
Make any special modifications that the Op needs before doing
......
......@@ -15,6 +15,8 @@ class Minimal(gof.Op):
# If two Apply nodes have the same inputs and the ops compare equal...
# then they will be MERGED so they had better have computed the same thing!
__props__ = ()
def __init__(self):
# If you put things here, think about whether they change the outputs
# computed by # self.perform()
......@@ -25,12 +27,6 @@ class Minimal(gof.Op):
super(Minimal, self).__init__()
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
def make_node(self, *args):
# HERE `args` must be THEANO VARIABLES
return gof.Apply(op=self, inputs=args, outputs=[tensor.lscalar()])
......
......@@ -27,11 +27,7 @@ class Solve(gof.Op):
# and keeps a memory workspace from call to call as a non-default Op
# output
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
__props__ = ()
def make_node(self, A, b):
A_ = tensor.as_tensor_variable(A)
......
......@@ -852,7 +852,6 @@ class ScalarOp(Op):
nin = -1
nout = 1
__props__ = ("output_types_preference", "name")
def __init__(self, output_types_preference=None, name=None):
self.name = name
......@@ -861,7 +860,7 @@ class ScalarOp(Op):
raise TypeError(
"Expected a callable for the 'output_types_preference' argument to %s. (got: %s)" %
(self.__class__, output_types_preference))
self.output_types_preference = output_types_preference
self.output_types_preference = output_types_preference
def make_node(self, *inputs):
if self.nin >= 0:
......@@ -908,6 +907,16 @@ class ScalarOp(Op):
raise utils.MethodNotDefined("grad", type(self),
self.__class__.__name__)
def __eq__(self, other):
test = (type(self) == type(other) and
getattr(self, 'output_types_preference', None) ==
getattr(other, 'output_types_preference', None))
return test
def __hash__(self):
return hash(type(self).__name__) ^ hash(
getattr(self, 'output_types_preference', 0))
def __str__(self):
if hasattr(self, 'name') and self.name:
return self.name
......@@ -1174,7 +1183,6 @@ isinf = IsInf()
class InRange(LogicalComparison):
nin = 3
__props__ = ("openlow", "openhi")
def __init__(self, openlow, openhi):
self.openlow = openlow
......@@ -2026,8 +2034,6 @@ identity = Identity(same_out, name='identity')
# CASTING OPERATIONS
class Cast(UnaryScalarOp):
__props__ = ("o_type", "name")
def __init__(self, o_type, name=None):
if not isinstance(o_type, Scalar):
raise TypeError(o_type)
......@@ -3453,7 +3459,7 @@ class Composite(ScalarOp):
Composite depends on all the Ops in its graph having C code.
"""
__props__ = ('inputs', 'outputs')
init_param = ('inputs', 'outputs')
def __str__(self):
if self.name is None:
......@@ -3467,7 +3473,7 @@ class Composite(ScalarOp):
This fct allow fix patch this.
"""
d = dict([(k, getattr(self, k)) for k in self.__props__])
d = dict([(k, getattr(self, k)) for k in self.init_param])
out = self.__class__(**d)
if name:
out.name = name
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论