Commit 5410b7ca authored by Nicolas Bouchard

Add test to local_csm_properties_csm optimization

Parent df0a6370
@@ -22,7 +22,7 @@ local_usmm = gof.opt.PatternSub(
    (usmm, (theano.tensor.neg, 'alpha'), 'x', 'y', 'z'))
register_specialize(local_usmm, name="local_usmm")
# This is tested in tests/test_opt.py:test_local_csm_grad_c
@gof.local_optimizer([csm_grad(None)])
def local_csm_grad_c(node):
    """ csm_grad(None) -> csm_grad_c """
...
@@ -13,7 +13,14 @@ from theano.sparse.basic import (
    _is_sparse)
from theano.sparse.sandbox.sp import sp_sum
class Cast(gof.op.Op):
    """Cast sparse variable to the desired dtype.

    This wraps the method astype from scipy.
    """
    # It returns a new matrix, not a view.
    def __init__(self, out_type):
        self.out_type = out_type
...
@@ -12,7 +12,8 @@ from theano.gof.python25 import any
if not enable_sparse:
    raise SkipTest('Optional package sparse disabled')
from theano.sparse import (CSM, CSMProperties, csm_properties, CSC, CSR,
                           DenseFromSparse, CSMGrad)
from theano.sparse.tests.test_basic import random_lil
@@ -32,3 +33,22 @@ def test_local_csm_properties_csm():
    v = cast(random_lil((10, 40),
                        config.floatX, 3))
    f(v.data, v.indices, v.indptr, v.shape)
def test_local_csm_grad_c():
    """Check that the local_csm_grad_c optimization is applied.

    Builds the gradient of a summed dense view of a CSC/CSR matrix,
    compiles it with the "specialize"/"local_csm_grad_c" optimizations
    enabled, and asserts that no generic ``CSMGrad`` op survives in the
    optimized graph (it should have been rewritten to the C version,
    ``CSMGradC``).
    """
    data = tensor.vector()
    indices, indptr, shape = (tensor.ivector(), tensor.ivector(),
                              tensor.ivector())
    mode = theano.compile.mode.get_default_mode()
    mode = mode.including("specialize", "local_csm_grad_c")
    for CS, cast in [(CSC, sp.csc_matrix), (CSR, sp.csr_matrix)]:
        cost = tensor.sum(DenseFromSparse()(CS(data, indices, indptr, shape)))
        # Compile the *gradient* of the cost, not the cost itself:
        # without tensor.grad the graph contains no CSMGrad node at all,
        # so the assertion below would be vacuously true and the
        # optimization under test would never be exercised.
        f = theano.function(
            [data, indices, indptr, shape],
            tensor.grad(cost, data),
            mode=mode)
        # The optimization must have replaced every CSMGrad with its
        # C implementation.
        assert not any(isinstance(node.op, CSMGrad) for node
                       in f.maker.env.toposort())
        # Run once on real sparse data to make sure the optimized
        # function actually executes.
        v = cast(random_lil((10, 40),
                            config.floatX, 3))
        f(v.data, v.indices, v.indptr, v.shape)
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论