Commit f43227ba, authored by James Bergstra

modified definition of sparse matrix "shape" property

Parent commit: 6026b300
......@@ -248,9 +248,18 @@ class _sparse_py_operators:
def __dot__(left, right):
    """Delegate the dot product to ``structured_dot(left, right)``."""
    return structured_dot(left, right)
def __rdot__(right, left):
    """Reflected dot: swap the operands back and delegate to ``structured_dot``."""
    return structured_dot(left, right)
#N.B. THIS IS COMMENTED OUT ON PURPOSE!!!
# Discussion with Fred & James (at least, and maybe others before)
# we decided that casting from a sparse to dense should be explicit
# because it's usually something you want to be pretty careful about,
# and not to do by accident.
#def _as_TensorVariable(self):
# return dense_from_sparse(self)
# Wrapping the variable in dense_from_sparse lets the ShapeFeature in
# tensor.opt do shape propagation and remove the dense_from_sparse node
# from the graph, so the sparse matrix is *NOT* actually expanded to a
# dense tensor just to read its shape.
# (The earlier `shape = property(lambda self: tensor.shape(self))`
# definition was dead code: it was immediately overwritten below.)
shape = property(lambda self: tensor.shape(dense_from_sparse(self)))
ndim = property(lambda self: self.type.ndim)    # rank, taken from the variable's type
dtype = property(lambda self: self.type.dtype)  # element dtype, taken from the variable's type
......@@ -513,6 +522,8 @@ class DenseFromSparse(gof.op.Op):
return [sp_ones_like(x) * gz]
else:
return [SparseFromDense(x.type.format)(gz)]
def infer_shape(self, node, shapes):
    """Return the output shape: densifying preserves the input's shape.

    :param node: the apply node (unused).
    :param shapes: sequence with exactly one element, the shape of the
        sparse input.
    :returns: one-element list containing that same shape.

    NOTE: rewritten without Python 2 tuple parameter unpacking
    (``(ishape,)``), which was removed in Python 3 (PEP 3113).
    """
    (ishape,) = shapes  # exactly one input
    return [ishape]
# Singleton Op instance: explicit conversion of a sparse variable to a
# dense tensor variable (casting to dense is deliberately explicit here;
# see the commented-out _as_TensorVariable above).
dense_from_sparse = DenseFromSparse()
class SparseFromDense(gof.op.Op):
......@@ -535,6 +546,8 @@ class SparseFromDense(gof.op.Op):
out[0] = SparseType.format_cls[self.format](x)
def grad(self, inputs, grads):
    """Gradient of SparseFromDense: densify the output gradient.

    :param inputs: one-element sequence holding the dense input (unused).
    :param grads: one-element sequence holding the sparse output gradient.
    :returns: one-element tuple with the dense gradient.

    NOTE: rewritten without Python 2 tuple parameter unpacking
    (``(x, ), (gz, )``), which was removed in Python 3 (PEP 3113).
    """
    (x,) = inputs
    (gz,) = grads
    return dense_from_sparse(gz),
def infer_shape(self, node, shapes):
    """Return the output shape: sparsifying preserves the input's shape.

    :param node: the apply node (unused).
    :param shapes: sequence with exactly one element, the shape of the
        dense input.
    :returns: one-element list containing that same shape.

    NOTE: rewritten without Python 2 tuple parameter unpacking
    (``(ishape,)``), which was removed in Python 3 (PEP 3113).
    """
    (ishape,) = shapes  # exactly one input
    return [ishape]
# Op instances converting a dense tensor to CSR / CSC sparse format,
# respectively; the format string is the constructor argument.
csr_from_dense = SparseFromDense('csr')
csc_from_dense = SparseFromDense('csc')
......
......@@ -449,6 +449,8 @@ def test_shape_i():
assert f(sp.csr_matrix(random_lil((100,10), sparse_dtype, 3)))==(10)
def test_shape():
# Test that getting the shape of a sparse variable
# does not actually create a dense tensor in the process.
sparse_dtype = 'float32'
a = SparseType('csr', dtype=sparse_dtype)()
......
......@@ -407,6 +407,9 @@ class Shape_i(T.Op):
((npy_int64*)PyArray_DATA(%(out)s))[0]=CudaNdarray_HOST_DIMS(%(x)s)[%(i)s];
"""%locals()
else:
#TODO: if your type is not listed here, make a damn registry of shape_i ops for
# various types of variables.
# Do not continue this madness.
return super(Shape_i, self).c_code(node, name, (x,), (out,), sub)
def grad(self, inputs, output_gradients):
    """Shape_i has no gradient with respect to its input.

    :param inputs: one-element sequence holding the input (unused).
    :param output_gradients: one-element sequence (unused).
    :returns: ``[None]`` — the op is not differentiable.

    NOTE: rewritten without Python 2 tuple parameter unpacking
    (``(x,), (gz,)``), which was removed in Python 3 (PEP 3113).
    """
    return [None]
......
Markdown formatting supported
0%
You added 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment.