提交 91343323 authored 作者: James Bergstra's avatar James Bergstra

Moved the test_dot_vm and test_dot_mv funcs into TestGemv

上级 49587bc3
...@@ -758,40 +758,40 @@ def test_dot_w_self(): ...@@ -758,40 +758,40 @@ def test_dot_w_self():
f(numpy.asarray([[0,1], [2,3]], dtype=config.floatX)) f(numpy.asarray([[0,1], [2,3]], dtype=config.floatX))
class TestGemv(TestCase):
    def test_dot_vm(self):
        """Test that a vector-matrix dot product is optimized to a BLAS op.

        Builds shared float32 variables v (shape (2,)) and m (shape (2, 2)),
        compiles ``theano.dot(v, m)`` under the BLAS-optimizing mode, checks
        the numeric result against ``numpy.dot``, and asserts the generic
        ``T.Dot`` node was replaced by exactly one ``T.blas.Dot22`` node.
        """
        rng = numpy.random.RandomState(unittest_tools.fetch_seed())
        v = theano.shared(numpy.array(rng.uniform(size=(2,)), dtype='float32'))
        m = theano.shared(numpy.array(rng.uniform(size=(2, 2)),
                                      dtype='float32'))
        f = theano.function([], theano.dot(v, m), mode=mode_blas_opt)

        # Assert they produce the same output as the numpy reference.
        assert numpy.allclose(f(), numpy.dot(v.get_value(), m.get_value()))

        # Assert that the dot was optimized somehow: no generic Dot nodes
        # remain, and exactly one BLAS Dot22 node was introduced.
        assert sum([isinstance(node.op, T.Dot) for node in
                    f.maker.env.toposort()]) == 0
        assert sum([isinstance(node.op, T.blas.Dot22) for node in
                    f.maker.env.toposort()]) == 1
def test_dot_mv(): def test_dot_mv(self):
''' Test matrix dot vector ''' ''' Test matrix dot vector '''
rng = numpy.random.RandomState(unittest_tools.fetch_seed()) rng = numpy.random.RandomState(unittest_tools.fetch_seed())
v = theano.shared(numpy.array(rng.uniform(size=(2,)), dtype='float32')) v = theano.shared(numpy.array(rng.uniform(size=(2,)), dtype='float32'))
m = theano.shared(numpy.array(rng.uniform(size=(2,2)), m = theano.shared(numpy.array(rng.uniform(size=(2,2)),
dtype='float32')) dtype='float32'))
f = theano.function([], theano.dot(m,v), mode = mode_blas_opt) f = theano.function([], theano.dot(m,v), mode = mode_blas_opt)
# Assert they produce the same output # Assert they produce the same output
assert numpy.allclose(f(), numpy.dot(m.get_value(), v.get_value())) assert numpy.allclose(f(), numpy.dot(m.get_value(), v.get_value()))
# Assert that the dot was optimized somehow # Assert that the dot was optimized somehow
assert sum([isinstance(node.op, T.Dot) for node in assert sum([isinstance(node.op, T.Dot) for node in
f.maker.env.toposort() ]) == 0 f.maker.env.toposort() ]) == 0
assert sum([isinstance(node.op, T.blas.Dot22) for node in assert sum([isinstance(node.op, T.blas.Dot22) for node in
f.maker.env.toposort() ]) == 1 f.maker.env.toposort() ]) == 1
class TestGemv(TestCase):
def test_gemv1(self): def test_gemv1(self):
''' test vector1+dot(matrix,vector2) ''' ''' test vector1+dot(matrix,vector2) '''
rng = numpy.random.RandomState(unittest_tools.fetch_seed()) rng = numpy.random.RandomState(unittest_tools.fetch_seed())
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论