Commit 32eea512 authored by Pascal Lamblin

Remove unused implementation start

Parent 5f504d11
@@ -432,48 +432,6 @@ def softmax_simplifier(numerators, denominators):
opt.local_mul_canonizer.add_simplifier(softmax_simplifier, 'softmax_simplifier')
if 0:
def softmax_grad_simplifier(numerators, denominators):
print "mul simplify numerators"
printing.debugprint(numerators)
print "mul simplify denominators"
printing.debugprint(denominators)
for numerator in list(numerators):
#TODO: a single softmax'd vector??
if not numerator.type.dtype.startswith('float'):
continue
if not numerator.type.broadcastable == (False, False):
continue
if numerator.owner and numerator.owner.op == tensor.exp:
x = numerator.owner.inputs[0]
else:
continue
print "A", denominators
matching_denom = None
for denominator in denominators:
if denominator.owner and denominator.owner.op == tensor.add:
if len(denominator.owner.inputs)==2:
dl,dr = denominator.owner.inputs
# check to see if either dl or dr is softmax(x)
# if yes, we are probably dealing with the gradient of softmax
other=None
if dl.owner and dl.owner.op == softmax and dl.owner.inputs[0]==x:
other=dr
if dr.owner and dr.owner.op == softmax and dr.owner.inputs[0]==x:
other=dl
if other:
print "OTHER", other
if matching_denom:
numerators.remove(numerator)
denominators.remove(matching_denom)
numerators.append(softmax(x))
return numerators, denominators
#opt.local_mul_canonizer.add_simplifier(softmax_grad_simplifier, 'softmax_grad_simplifier')
@opt.register_specialize
@gof.local_optimizer([])
def local_softmax_grad(node):
...
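
Note on the removed code: `softmax_grad_simplifier` was an unfinished attempt to teach the mul canonizer to recognize `exp(x) / (softmax(x) + other)`, a pattern that arises in the gradient of softmax. As written it could never fire: the inner loop locates a denominator of the form `softmax(x) + other` and binds the non-softmax addend to `other`, but never assigns `matching_denom`, so the rewrite branch is dead. Below is a minimal sketch of what the finished control flow might have looked like, keeping the original's intended rewrite (whose mathematical soundness was evidently never settled, which is presumably why this commit drops it as an "unused implementation start"). It assumes this module's names `tensor`, `softmax`, and `opt`, and drops the Python 2 debug prints.

```python
# Hypothetical completion of the removed simplifier -- illustration only.
# Assumes this module's names: `tensor` (theano.tensor), `softmax` (the
# softmax Op defined in this file), and `opt` (the optimizer module).
def softmax_grad_simplifier(numerators, denominators):
    for numerator in list(numerators):
        # Only float matrices of the form exp(x) are candidates.
        if not numerator.type.dtype.startswith('float'):
            continue
        if numerator.type.broadcastable != (False, False):
            continue
        if not (numerator.owner and numerator.owner.op == tensor.exp):
            continue
        x = numerator.owner.inputs[0]

        # Look for a denominator of the form softmax(x) + other, which
        # suggests this multiplication came from the gradient of softmax.
        matching_denom = None
        for denominator in denominators:
            if (denominator.owner and denominator.owner.op == tensor.add
                    and len(denominator.owner.inputs) == 2):
                dl, dr = denominator.owner.inputs
                if ((dl.owner and dl.owner.op == softmax
                        and dl.owner.inputs[0] == x)
                        or (dr.owner and dr.owner.op == softmax
                            and dr.owner.inputs[0] == x)):
                    # The assignment the original code never made:
                    matching_denom = denominator
                    break

        if matching_denom:
            # Mirror the original's intended rewrite: cancel exp(x) against
            # the matched denominator and contribute softmax(x) instead.
            numerators.remove(numerator)
            denominators.remove(matching_denom)
            numerators.append(softmax(x))
    return numerators, denominators

# Registration was commented out in the original as well:
# opt.local_mul_canonizer.add_simplifier(softmax_grad_simplifier,
#                                        'softmax_grad_simplifier')
```

The surviving `local_softmax_grad` context above shows the mechanism Theano actually uses for this kind of rewrite: a local optimizer, registered via `@opt.register_specialize` and `@gof.local_optimizer([])`, inspects a single `node` and returns a replacement subgraph.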