Commit d66d4c92 authored by Frederic

Add method SparseVariable.toarray()

Parent 970c704d
......@@ -123,6 +123,7 @@ List of Implemented Operations
Both grad are implemented. Structured by default.
- :class:`SparseFromDense <theano.sparse.basic.SparseFromDense>` and ``csr_from_dense``, ``csc_from_dense``.
The grad implemented is structured.
- Theano SparseVariable objects have a method ``toarray()`` that is the same as ``dense_from_sparse``.
- Construction of Sparses and their Properties
- :class:`CSM <theano.sparse.basic.CSM>` and ``CSC``, ``CSR`` to construct a matrix.
......
......@@ -300,6 +300,8 @@ class _sparse_py_operators:
# def _as_TensorVariable(self):
# return dense_from_sparse(self)
def toarray(self):
    """Return this sparse variable as a dense variable.

    Convenience wrapper: equivalent to calling ``dense_from_sparse``
    on ``self`` (mirrors ``scipy.sparse`` matrices' ``toarray()``).
    """
    dense = dense_from_sparse(self)
    return dense
shape = property(lambda self: tensor.shape(dense_from_sparse(self)))
# don't worry!
# the plan is that the ShapeFeature in tensor.opt will do shape propagation
......
......@@ -708,18 +708,25 @@ class T_conversion(unittest.TestCase):
self.assertTrue(str(val.dtype) == 'float64')
self.assertTrue(val.format == 'csr')
if 1:
    def test2(self):
        # Exercise dense_from_sparse on a raw scipy sparse matrix (not
        # passed through as_sparse_variable), so the matrix should be
        # wrapped into the graph as a constant.
        #call dense_from_sparse
        for t in _mtypes:
            s = t(scipy.sparse.identity(5))
            d = dense_from_sparse(s)
            # s should be copied into the graph as a constant
            s[0, 0] = 3.0 # changes s, but not the copy
            val = eval_outputs([d])
            # NOTE(review): this early return makes the two assertions
            # below unreachable -- presumably a deliberate disable of the
            # constant-copy check; confirm whether they should be
            # re-enabled or the dead code removed.
            return
            self.assertTrue(str(val.dtype) == s.dtype)
            self.assertTrue(numpy.all(val[0] == [1, 0, 0, 0, 0]))
def test_dense_from_sparse(self):
    """dense_from_sparse on a symbolic sparse variable evaluates to the
    original matrix, with the dtype preserved."""
    for mtype in _mtypes:
        # Build a 5x5 identity as a symbolic sparse variable.
        sp_var = as_sparse_variable(mtype(scipy.sparse.identity(5)))
        result = eval_outputs([dense_from_sparse(sp_var)])
        self.assertTrue(str(result.dtype) == sp_var.dtype)
        self.assertTrue(numpy.all(result[0] == [1, 0, 0, 0, 0]))
def test_todense(self):
    """SparseVariable.toarray() behaves like dense_from_sparse."""
    # call sparse_var.toarray() (the comment previously said todense();
    # the method under test is toarray)
    for t in _mtypes:
        s = t(scipy.sparse.identity(5))
        s = as_sparse_variable(s)
        d = s.toarray()
        val = eval_outputs([d])
        self.assertTrue(str(val.dtype) == s.dtype)
        self.assertTrue(numpy.all(val[0] == [1, 0, 0, 0, 0]))
@staticmethod
def check_format_ndim(format, ndim):
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论