提交 9266fb0e authored 作者: Frédéric Bastien's avatar Frédéric Bastien 提交者: GitHub

Merge pull request #5634 from bordesf/master

Warn if taking softmax over broadcastable dimension
...@@ -14,13 +14,14 @@ revisited later when all the intermediate part are on the GPU.
"""
from __future__ import absolute_import, print_function, division
import logging
import warnings
import numpy
from six.moves import xrange
import theano
from theano import gof
from theano import scalar
from theano.tensor import extra_ops, as_tensor_variable
from theano.gof.opt import copy_stack_trace
from theano.tensor import basic as tensor, subtensor, opt, elemwise
from theano.tensor.type import (values_eq_approx_remove_inf,
...@@ -802,6 +803,9 @@ def softmax_graph(c):
def softmax(c):
    """Apply the softmax op along the last dimension of `c`.

    Parameters
    ----------
    c : tensor-like
        Converted with ``as_tensor_variable``; softmax is computed over
        the last axis.

    Returns
    -------
    TensorVariable
        The symbolic result of ``softmax_op(c)``.

    Notes
    -----
    Emits a ``UserWarning`` when the last dimension is broadcastable
    (length 1): softmax over a single element is constant 1.0 and is
    almost certainly not what the caller intended.
    """
    c = as_tensor_variable(c)
    if c.broadcastable[-1]:
        # stacklevel=2 so the warning is attributed to the caller's
        # line, not to this library-internal frame.
        warnings.warn(
            "The softmax is applied on a dimension of shape 1, which does not have a semantic meaning.",
            stacklevel=2)
    return softmax_op(c)
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论