提交 a0a43388，作者：James Bergstra

code change in softmax: removed the isinf and 0.0 checks because they would fail incorrectly for NaN inputs.
上级 b7ef62dc
...@@ -142,9 +142,6 @@ class SoftmaxWithBias(gof.Op): ...@@ -142,9 +142,6 @@ class SoftmaxWithBias(gof.Op):
return ['<iostream>','<cmath>'] return ['<iostream>','<cmath>']
@staticmethod @staticmethod
def c_code_cache_version():
    # Version tag for the compiled-C-code cache — presumably bumped whenever
    # the generated C source changes so stale compiled code is not reused.
    # TODO(review): confirm against the Op framework's caching contract.
    return (4,)
@staticmethod
def c_code_template(): def c_code_template():
# this implementation was lifted from # this implementation was lifted from
# /u/bergstrj/cvs/bergstrj/src/feb07/nn.cxx # /u/bergstrj/cvs/bergstrj/src/feb07/nn.cxx
...@@ -180,7 +177,7 @@ class SoftmaxWithBias(gof.Op): ...@@ -180,7 +177,7 @@ class SoftmaxWithBias(gof.Op):
} }
if ((%(x)s->dimensions[1] != %(b)s->dimensions[0])) if ((%(x)s->dimensions[1] != %(b)s->dimensions[0]))
{ {
PyErr_Format(PyExc_ValueError, "number of columns in x (%%i) does not match length of b (%%i)", PyErr_Format(PyExc_ValueError, "number of columns in x (%%zi) does not match length of b (%%zi)",
%(x)s->dimensions[1], %(b)s->dimensions[0]); %(x)s->dimensions[1], %(b)s->dimensions[0]);
%(fail)s; %(fail)s;
} }
...@@ -236,20 +233,6 @@ class SoftmaxWithBias(gof.Op): ...@@ -236,20 +233,6 @@ class SoftmaxWithBias(gof.Op):
sum += sm_ij; sum += sm_ij;
sm_i[j * Ssm] = sm_ij; sm_i[j * Ssm] = sm_ij;
} }
//std::cout << "\\n";
if (std::isinf(sum))
{
//that was our best...
PyErr_SetString(PyExc_ValueError, "softmax is impossible (inf)!");
%(fail)s;
}
if (0.0 == sum)
{
//that was our best...
PyErr_SetString(PyExc_ValueError, "softmax is impossible (zero)!");
%(fail)s;
}
//cblas_dscal(x.N, 1.0 / sum, &mat_at(s,i,0), s.n); //cblas_dscal(x.N, 1.0 / sum, &mat_at(s,i,0), s.n);
double sum_inv = 1.0 / sum; double sum_inv = 1.0 / sum;
...@@ -271,6 +254,10 @@ class SoftmaxWithBias(gof.Op): ...@@ -271,6 +254,10 @@ class SoftmaxWithBias(gof.Op):
code_template = ''.join(self.c_code_template()) code_template = ''.join(self.c_code_template())
return code_template % dict(locals(), **sub) return code_template % dict(locals(), **sub)
@staticmethod
def c_code_cache_version():
    """Return the version tag used to cache/invalidate the compiled C code.

    The diff in this commit bumps the tag from (4,) to (5,) because the
    generated C source changed (the isinf/0.0 checks were dropped).
    """
    cache_tag = (5,)
    return cache_tag
# Module-level instance of the Op for callers to reuse.
# (The scraped two-column diff duplicated this statement on one line;
# a single assignment is the valid form.)
softmax_with_bias = SoftmaxWithBias()
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论