提交 77d8c018 authored 作者: Olivier Breuleux's avatar Olivier Breuleux

merge

......@@ -146,9 +146,9 @@ class Container(object):
self.storage = storage
self.readonly = readonly
self.strict = strict
def __get(self):
def __get__(self):
return self.storage[0]
def __set(self, value):
def __set__(self, value):
if self.readonly:
raise Exception("Cannot set readonly storage: %s" % self.name)
try:
......@@ -162,8 +162,8 @@ class Container(object):
except Exception, e:
e.args = e.args + (('Container name "%s"' % self.name),)
raise
data = property(__get, __set)
value = property(__get, __set)
data = property(__get__, __set__)
value = property(__get__, __set__)
def __str__(self):
return "<" + str(self.storage[0]) + ">"
def __repr__(self):
......
......@@ -373,9 +373,11 @@ class TensorType(Type):
def c_extract(self, name, sub):
"""Override `CLinkerOp.c_extract` """
# TODO: make the error message print out the dtype of the
# input received.
return """
%(name)s = NULL;
type_num_%(name)s = %(type_num)s;
type_num_%(name)s = ((PyArrayObject*)py_%(name)s)->descr->type_num; //we expect %(type_num)s
if (py_%(name)s == Py_None) {
// We can either fail here or set %(name)s to NULL and rely on Ops using
// tensors to handle the NULL case, but if they fail to do so they'll end up
......@@ -387,7 +389,7 @@ class TensorType(Type):
PyErr_SetString(PyExc_ValueError, "expected an ndarray");
%(fail)s
}
else if (((PyArrayObject*)py_%(name)s)->descr->type_num != %(type_num)s) {
else if (type_num_%(name)s != %(type_num)s) {
PyErr_SetString(PyExc_ValueError, "expected %(type_num)s");
%(fail)s
}
......@@ -1358,6 +1360,15 @@ class Repeat(gof.Op):
repeat = Repeat()
# NOTE(review): this block was recovered from a diff view with indentation
# stripped; indentation below is reconstructed.  Python 2 only: `perform`
# uses tuple parameter unpacking (removed in Python 3 by PEP 3113).
class SetDefault(gof.Op):
    """Op returning its first input, or (a copy of) `default` when the
    first input is None."""
    # Declares that output 0 may alias input 1 (`default`) — presumably so
    # the framework knows not to assume the output is freshly allocated;
    # verify against gof.Op's view_map contract.
    view_map = {0: [1]}
    def make_node(self, x, default):
        # Both inputs must share the same Type; the output is a fresh
        # variable of that same Type.
        assert x.type == default.type
        return gof.Apply(self, [x, default], [default.type()])
    def perform(self, node, (x, default), (out, )):
        # Pass `x` through unchanged when it is not None; otherwise emit a
        # copy of `default` (copied so the stored default cannot be mutated
        # through the output).
        out[0] = default.copy() if x is None else x
setdefault = SetDefault()
##########################
......@@ -1852,7 +1863,6 @@ class Split(Op):
return [join(axis, *g_outputs), None, None]
class Rebroadcast(Op):
"""
Change the input's broadcastable fields in
......@@ -1892,6 +1902,7 @@ def unbroadcast(x, *axes):
return Rebroadcast(*[(axis, False) for axis in axes])(x)
class Join(Op):
"""
Concatenate multiple `TensorVariable`s along some axis.
......@@ -2523,6 +2534,7 @@ class Outer(Op):
return "outer"
outer = Outer()
#########################
# Gradient
#########################
......
......@@ -394,10 +394,12 @@ def local_softmax_with_bias(node):
vectors = []
non_vectors = []
for x_in in x.owner.inputs:
if list(x_in.type.broadcastable) == [True, False] \
and isinstance(x_in.owner.op, tensor.DimShuffle):
assert len(x_in.owner.inputs)==1
vectors.append(x_in.owner.inputs[0])
if list(x_in.type.broadcastable) == [True, False]:
if x_in.owner and isinstance(x_in.owner.op, tensor.DimShuffle):
assert len(x_in.owner.inputs)==1
vectors.append(x_in.owner.inputs[0])
else:
vectors.append(tensor.DimShuffle((True, False), (1,))(x_in))
else:
non_vectors.append(x_in)
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论