提交 e913f99d authored 作者: Brandon T. Willard's avatar Brandon T. Willard 提交者: Brandon T. Willard

Rename aesara.tensor.nnet.opt to aesara.tensor.nnet.rewriting

上级 b40be14a
import warnings
import aesara.tensor.nnet.opt
import aesara.tensor.nnet.rewriting
from aesara.tensor.nnet.abstract_conv import (
abstract_conv2d,
conv2d,
......
......@@ -58,7 +58,7 @@ compile.optdb.register(
"fast_run",
"inplace",
position=60,
) # DEBUG
)
@node_rewriter([SparseBlockOuter], inplace=True)
......@@ -82,7 +82,7 @@ compile.optdb.register(
"fast_run",
"inplace",
position=60,
) # DEBUG
)
# Conv opts
......
"""
Ops and optimizations: sigmoid, softplus.
These functions implement special cases of exp and log to improve numerical
stability.
......@@ -98,7 +96,6 @@ ultra_fast_sigmoid_inplace = Elemwise(
pprint.assign(ultra_fast_sigmoid, printing.FunctionPrinter(["ultra_fast_sigmoid"]))
# @opt.register_uncanonicalize
@node_rewriter(None)
def local_ultra_fast_sigmoid(fgraph, node):
"""
......@@ -158,7 +155,6 @@ def hard_sigmoid(x):
return x
# @opt.register_uncanonicalize
@node_rewriter([sigmoid])
def local_hard_sigmoid(fgraph, node):
if isinstance(node.op, Elemwise) and node.op.scalar_op == scalar_sigmoid:
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论