Commit 8d9da724 authored by James Bergstra

Uncommenting softmax optimization

Parent 22226ae5
@@ -397,8 +397,7 @@ def local_softmax_with_bias(node):
             return
     return [sm_bias]
 
-if 0:
-    def softmax_simplifier(numerators, denominators):
+def softmax_simplifier(numerators, denominators):
     for numerator in list(numerators):
         #TODO: a single softmax'd vector??
         if not numerator.type.dtype.startswith('float'):
@@ -430,8 +429,9 @@ if 0:
             denominators.remove(matching_denom)
             numerators.append(softmax(x))
     return numerators, denominators
 opt.local_mul_canonizer.add_simplifier(softmax_simplifier, 'softmax_simplifier')
 
+if 0:
     def softmax_grad_simplifier(numerators, denominators):
         print "mul simplify numerators"
         printing.debugprint(numerators)
...
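The change moves softmax_simplifier out of a dead `if 0:` block and registers it with Theano's multiplication canonicalizer, while softmax_grad_simplifier stays disabled behind the guard. Conceptually, the enabled simplifier scans a product's numerator list for an exp(x) term, looks for a matching sum(exp(x)) over the same x in the denominator list, and replaces the pair with a single softmax(x) node, which is cheaper and numerically more stable than computing the quotient directly. Below is a minimal sketch of that rewrite; it uses a hypothetical tuple-based expression representation and a toy_softmax_simplifier name, not Theano's actual graph API, purely to illustrate the pattern the real simplifier targets.

# Toy model of the rewrite: expressions are nested tuples, e.g.
# ('exp', 'x') stands for exp(x) and ('sum', ('exp', 'x')) for sum(exp(x)).
def toy_softmax_simplifier(numerators, denominators):
    # Iterate over a copy so we can mutate the list while scanning it,
    # mirroring the `for numerator in list(numerators)` loop in the diff.
    for numerator in list(numerators):
        if not (isinstance(numerator, tuple) and numerator[0] == 'exp'):
            continue                        # only exp(x) terms can match
        x = numerator[1]
        matching_denom = ('sum', ('exp', x))
        if matching_denom in denominators:
            # Remove the exp(x) / sum(exp(x)) pair and substitute softmax(x).
            numerators.remove(numerator)
            denominators.remove(matching_denom)
            numerators.append(('softmax', x))
    return numerators, denominators

# Example: exp(x) / sum(exp(x))  ->  softmax(x)
nums, dens = toy_softmax_simplifier([('exp', 'x')], [('sum', ('exp', 'x'))])
assert nums == [('softmax', 'x')] and dens == []

Registering the function via opt.local_mul_canonizer.add_simplifier, as the diff does, lets the canonicalizer apply this rewrite to every multiplication/division it normalizes, so the softmax pattern is recognized wherever it appears in a graph rather than only at a fixed location.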