提交 da95bf92 authored 作者: abergeron's avatar abergeron 提交者: GitHub

Merge pull request #4984 from nouiz/mixed6

Mixed6
If you have any questions on Theano, please ask your questions in either [theano-user mailing list](https://groups.google.com/forum/#!forum/theano-users) or [stackoverflow](http://stackoverflow.com/) with the "theano" tag.
Use issues only to report bugs or to ask for new features in Theano.
Before reporting a bug, update to Theano development version. Maybe it
is already fixed. If not, tell us the Theano flags and Theano version
that generate the problem. If it is a regression, you can do a "git
bisect" to identify when the problem appeared. This really helps fix it
rapidly.
If you add a feature request, describe in which case it will be useful.
\ No newline at end of file
...@@ -55,7 +55,7 @@ Requirements for Quality Contributions ...@@ -55,7 +55,7 @@ Requirements for Quality Contributions
* All the code should be properly tested. * All the code should be properly tested.
* The code should be compatible with Python 2.6 and above, as well as Python * The code should be compatible with Python 2.7 and above, as well as Python
3.3 and above (using `six` if needed). 3.3 and above (using `six` if needed).
* All the code should respect the * All the code should respect the
......
...@@ -442,12 +442,21 @@ if param != "": ...@@ -442,12 +442,21 @@ if param != "":
# to support path that includes spaces, we need to wrap it with double quotes on Windows # to support path that includes spaces, we need to wrap it with double quotes on Windows
if param and os.name == 'nt': if param and os.name == 'nt':
param = '"%s"' % param param = '"%s"' % param
def warn_cxx(val):
    """We only support clang++ as otherwise we hit strange g++/OSX bugs."""
    # On OS X, anything other than clang++ triggers known g++/OSX bugs,
    # so emit a warning; the value is still accepted (always valid).
    on_mac = sys.platform == 'darwin'
    if on_mac and val != 'clang++':
        _logger.warning("Only clang++ is supported. With g++,"
                        " we end up with strange g++/OSX bugs.")
    return True
AddConfigVar('cxx', AddConfigVar('cxx',
"The C++ compiler to use. Currently only g++ is" "The C++ compiler to use. Currently only g++ is"
" supported, but supporting additional compilers should not be " " supported, but supporting additional compilers should not be "
"too difficult. " "too difficult. "
"If it is empty, no C++ code is compiled.", "If it is empty, no C++ code is compiled.",
StrParam(param), StrParam(param, is_valid=warn_cxx),
in_c_key=False) in_c_key=False)
del param del param
......
...@@ -36,6 +36,7 @@ if [ "$1" == "--buildbot" ]; then ...@@ -36,6 +36,7 @@ if [ "$1" == "--buildbot" ]; then
NOSETESTS=${ROOT_CWD}/Theano/bin/theano-nose NOSETESTS=${ROOT_CWD}/Theano/bin/theano-nose
export PYTHONPATH=${ROOT_CWD}:$PYTHONPATH export PYTHONPATH=${ROOT_CWD}:$PYTHONPATH
else else
ROOT_CWD=.
COMPILEDIR=`python -c "from __future__ import print_function; import theano; print(theano.config.compiledir)"|tail -1` COMPILEDIR=`python -c "from __future__ import print_function; import theano; print(theano.config.compiledir)"|tail -1`
NOSETESTS=`python -c "from __future__ import print_function; import theano; print(theano.__path__[0])"|tail -1`/../bin/theano-nose NOSETESTS=`python -c "from __future__ import print_function; import theano; print(theano.__path__[0])"|tail -1`/../bin/theano-nose
fi fi
......
from __future__ import absolute_import, print_function, division from __future__ import absolute_import, print_function, division
import string
import numpy as np import numpy as np
import theano import theano
...@@ -247,8 +246,8 @@ def sc_complex_dot_batched(bx_gpu, by_gpu, bc_gpu, transa='N', transb='N', ...@@ -247,8 +246,8 @@ def sc_complex_dot_batched(bx_gpu, by_gpu, bc_gpu, transa='N', transb='N',
alpha = np.complex64(1.0) alpha = np.complex64(1.0)
beta = np.complex64(0.0) beta = np.complex64(0.0)
transa = string.lower(transa) transa = transa.lower()
transb = string.lower(transb) transb = transb.lower()
if transb in ['t', 'c']: if transb in ['t', 'c']:
N, m, k = by_shape N, m, k = by_shape
......
...@@ -1672,7 +1672,7 @@ conv_groupopt.register('local_conv_dnn', dnn.local_conv_dnn, ...@@ -1672,7 +1672,7 @@ conv_groupopt.register('local_conv_dnn', dnn.local_conv_dnn,
# It can be disabled by excluding 'conv_gemm'. # It can be disabled by excluding 'conv_gemm'.
conv_groupopt.register('local_conv_gemm', local_conv_gemm, conv_groupopt.register('local_conv_gemm', local_conv_gemm,
'conv_gemm', 'conv_gemm',
'fast_compile', 'fast_run', positin=30) 'fast_compile', 'fast_run', position=30)
class LocalCudaMetaOptimizer(LocalMetaOptimizer): class LocalCudaMetaOptimizer(LocalMetaOptimizer):
......
...@@ -3069,14 +3069,10 @@ def local_subtensor_of_alloc(node): ...@@ -3069,14 +3069,10 @@ def local_subtensor_of_alloc(node):
if type(rval) not in (list, tuple): if type(rval) not in (list, tuple):
rval = [rval] rval = [rval]
if rval[0].type != node.outputs[0].type: if rval[0].type != node.outputs[0].type:
# It happen that the make_node() isn't able to infer that some # It happen that the make_node() isn't able to infer the same pattern.
# dimensions are broadcastable, but that now we can infer # We know it is safe, so fix that.
# that. So we need to remove that information here. rval[0] = T.patternbroadcast(rval[0], node.outputs[0].broadcastable)
rval[0] = theano.tensor.unbroadcast(
rval[0],
*[i for i, (b1, b2) in enumerate(zip(rval[0].broadcastable,
node.outputs[0].broadcastable))
if b1 and not b2])
return rval return rval
...@@ -4855,6 +4851,13 @@ class Canonizer(gof.LocalOptimizer): ...@@ -4855,6 +4851,13 @@ class Canonizer(gof.LocalOptimizer):
assert len(node.outputs) == 1 assert len(node.outputs) == 1
out = node.outputs[0] out = node.outputs[0]
# out won't have a clients field when we didn't commit a
# started change in the graph. We can't do the check if we
# want to skip it, so we force the skip it. It should be
# reapplied later.
if not hasattr(out, 'clients'):
return
# check if any of the clients of this node would be part of # check if any of the clients of this node would be part of
# this canonized graph... if so, we do nothing and wait for # this canonized graph... if so, we do nothing and wait for
# them to be transformed. # them to be transformed.
...@@ -5942,7 +5945,7 @@ def local_add_specialize(node): ...@@ -5942,7 +5945,7 @@ def local_add_specialize(node):
register_specialize(local_add_specialize) register_specialize(local_add_specialize)
mul_canonizer = in2out(gof.LocalOptGroup(local_mul_canonizer, local_fill_cut, mul_canonizer = in2out(gof.LocalOptGroup(local_mul_canonizer, local_fill_cut,
local_fill_sink), local_fill_sink, apply_all_opts=True),
name='mul_canonizer_groups') name='mul_canonizer_groups')
...@@ -6101,7 +6104,7 @@ def add_calculate(num, denum, aslist=False, out_type=None): ...@@ -6101,7 +6104,7 @@ def add_calculate(num, denum, aslist=False, out_type=None):
local_add_canonizer = Canonizer(T.add, T.sub, T.neg, add_calculate) local_add_canonizer = Canonizer(T.add, T.sub, T.neg, add_calculate)
add_canonizer = in2out(gof.LocalOptGroup(local_add_canonizer, local_fill_cut, add_canonizer = in2out(gof.LocalOptGroup(local_add_canonizer, local_fill_cut,
local_fill_sink), local_fill_sink, apply_all_opts=True),
name='add_canonizer_group') name='add_canonizer_group')
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论