提交 31dbab74 authored 作者: James Bergstra's avatar James Bergstra

changing default blas.ldflags to copy numpy configuration

上级 eaeccea0
...@@ -25,10 +25,6 @@ AddConfigVar('nocleanup', ...@@ -25,10 +25,6 @@ AddConfigVar('nocleanup',
"suppress the deletion of code files that did not compile cleanly", "suppress the deletion of code files that did not compile cleanly",
BoolParam(False)) BoolParam(False))
AddConfigVar('blas.ldflags',
"lib[s] to include for [Fortran] level-3 blas implementation",
StrParam("-lblas"))
AddConfigVar('tensor.cmp_sloppy', AddConfigVar('tensor.cmp_sloppy',
"Relax tensor._allclose (0) not at all, (1) a bit, (2) more", "Relax tensor._allclose (0) not at all, (1) a bit, (2) more",
IntParam(0, lambda i: i in (0,1,2))) IntParam(0, lambda i: i in (0,1,2)))
......
...@@ -2,7 +2,8 @@ ...@@ -2,7 +2,8 @@
import sys, traceback, logging import sys, traceback, logging
import numpy import numpy
from ..configparser import config import numpy.distutils
from ..configparser import config, AddConfigVar, StrParam
from theano.gof import (utils, Op, Apply, view_roots, PatternSub, DestroyHandler, from theano.gof import (utils, Op, Apply, view_roots, PatternSub, DestroyHandler,
SeqOptimizer, local_optimizer, Optimizer, LocalOptimizer, OpKeyOptimizer, SeqOptimizer, local_optimizer, Optimizer, LocalOptimizer, OpKeyOptimizer,
InconsistencyError, toolbox) InconsistencyError, toolbox)
...@@ -17,6 +18,18 @@ from theano import compile #to register the optimizer built by this file ...@@ -17,6 +18,18 @@ from theano import compile #to register the optimizer built by this file
from theano.tensor.blas_headers import cblas_header_text, blas_header_text from theano.tensor.blas_headers import cblas_header_text, blas_header_text
def default_blas_ldflags():
    """Return linker flags for the BLAS libraries numpy was built against.

    Reads the library names recorded in numpy's build-time configuration
    (``numpy.distutils.__config__.blas_opt_info``) and turns each into a
    ``-l<name>`` flag, so theano links against the same BLAS as numpy.

    Returns
    -------
    str
        Space-separated ``-l`` flags, or the generic ``"-lblas"`` when
        numpy's configuration does not expose a BLAS library list.
    """
    try:
        # __config__ is generated when numpy is built; 'libraries' may be
        # absent (or numpy.distutils missing entirely on newer numpy), so
        # catch only the failures that lookup can actually raise instead
        # of a bare `except:` that would also swallow KeyboardInterrupt.
        libs = numpy.distutils.__config__.blas_opt_info['libraries']
        return ' '.join('-l%s' % lib for lib in libs)
    except (ImportError, AttributeError, KeyError):
        # No usable build-time BLAS info: fall back to the generic library.
        return "-lblas"
# Register the blas.ldflags config option; its default mirrors whatever
# BLAS numpy itself was linked against (see default_blas_ldflags above).
AddConfigVar(
    'blas.ldflags',
    "lib[s] to include for [Fortran] level-3 blas implementation",
    StrParam(default_blas_ldflags()),
)
_logger = logging.getLogger('theano.tensor.blas') _logger = logging.getLogger('theano.tensor.blas')
_logger.setLevel(logging.WARN) _logger.setLevel(logging.WARN)
def debug(*msg): _logger.debug(' '.join(str(m) for m in msg)) def debug(*msg): _logger.debug(' '.join(str(m) for m in msg))
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论