提交 ddd1dc03 authored 作者: nouiz's avatar nouiz

Merge pull request #235 from delallea/minor

Minor
...@@ -991,11 +991,11 @@ class FunctionMaker(object): ...@@ -991,11 +991,11 @@ class FunctionMaker(object):
# optimize the env # optimize the env
compute_test_value_orig = theano.config.compute_test_value compute_test_value_orig = theano.config.compute_test_value
add_stack_trace_on_call = gof.Op.add_stack_trace_on_call
try: try:
theano.config.compute_test_value = "off" theano.config.compute_test_value = "off"
start_optimizer = time.time()
add_stack_trace_on_call = gof.Op.add_stack_trace_on_call
gof.Op.add_stack_trace_on_call = False gof.Op.add_stack_trace_on_call = False
start_optimizer = time.time()
optimizer(env) optimizer(env)
end_optimizer = time.time() end_optimizer = time.time()
......
...@@ -5,12 +5,14 @@ The elemwise fct are also used with scalar operation! So it can happen that ndim ...@@ -5,12 +5,14 @@ The elemwise fct are also used with scalar operation! So it can happen that ndim
""" """
import StringIO, sys import copy, logging, StringIO, sys
import numpy import numpy
from theano import Op, Type, Apply, Variable, Constant
from theano import tensor, scalar, gof
import logging, copy from theano import Apply, Constant, Op, Type, Variable
from theano import gof, scalar, tensor
_logger_name = 'theano.sandbox.cuda.elemwise' _logger_name = 'theano.sandbox.cuda.elemwise'
_logger = logging.getLogger(_logger_name) _logger = logging.getLogger(_logger_name)
_logger.setLevel(logging.INFO) _logger.setLevel(logging.INFO)
......
...@@ -696,9 +696,11 @@ def spectral_radius_bound(X, log2_exponent): ...@@ -696,9 +696,11 @@ def spectral_radius_bound(X, log2_exponent):
if X.type.ndim != 2: if X.type.ndim != 2:
raise TypeError('spectral_radius_bound requires a matrix argument', X) raise TypeError('spectral_radius_bound requires a matrix argument', X)
if not isinstance(log2_exponent, int): if not isinstance(log2_exponent, int):
raise TypeError('spectral_radius_bound requires a integer exponent', log2_exponent) raise TypeError('spectral_radius_bound requires an integer exponent',
log2_exponent)
if log2_exponent <= 0: if log2_exponent <= 0:
raise ValueError('spectral_radius_bound requires a strictly positive exponent', log2_exponent) raise ValueError('spectral_radius_bound requires a strictly positive '
'exponent', log2_exponent)
XX = X XX = X
for i in xrange(log2_exponent): for i in xrange(log2_exponent):
XX = tensor.dot(XX, XX) XX = tensor.dot(XX, XX)
......
...@@ -11,8 +11,10 @@ if cuda_available: ...@@ -11,8 +11,10 @@ if cuda_available:
class BadOldCode(Exception): class BadOldCode(Exception):
""" We create a specific Exception to be sure it don't get caught """
by mistake""" We create a specific Exception to be sure it does not get caught by
mistake.
"""
pass pass
......
...@@ -2600,7 +2600,8 @@ class T_Scan(unittest.TestCase): ...@@ -2600,7 +2600,8 @@ class T_Scan(unittest.TestCase):
assert numpy.allclose(theano_y , v_y[-4:]) assert numpy.allclose(theano_y , v_y[-4:])
def test_opt_order(self): def test_opt_order(self):
""" Verify that scan optimizations are applied before blas """
Verify that scan optimizations are applied before blas
optimizations. optimizations.
This is needed as otherwise, the dot won't become a dot22 This is needed as otherwise, the dot won't become a dot22
so it will be slower and won't get transferred to the gpu. so it will be slower and won't get transferred to the gpu.
......
...@@ -182,7 +182,7 @@ def Lop(f, wrt, eval_points, consider_constant=None, warn_type=False, ...@@ -182,7 +182,7 @@ def Lop(f, wrt, eval_points, consider_constant=None, warn_type=False,
inputs = gof.graph.inputs(f) inputs = gof.graph.inputs(f)
gmap = gradient.grad_sources_inputs( gmap = gradient.grad_sources_inputs(
zip(f,eval_points), zip(f, eval_points),
list(inputs) + list(consider_constant), list(inputs) + list(consider_constant),
warn_type=warn_type) warn_type=warn_type)
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论