提交 55f54243 authored 作者: Brandon T. Willard's avatar Brandon T. Willard 提交者: Brandon T. Willard

Remove logger in aesara.tensor.math_opt

上级 9beb6c2d
""" Tensor optimizations addressing the ops in math.py.""" """ Tensor optimizations addressing the ops in math.py."""
import itertools import itertools
import logging
import operator import operator
from functools import partial, reduce from functools import partial, reduce
...@@ -93,11 +92,6 @@ from aesara.tensor.type import ( ...@@ -93,11 +92,6 @@ from aesara.tensor.type import (
values_eq_approx_remove_nan, values_eq_approx_remove_nan,
) )
from aesara.tensor.var import TensorConstant, get_unique_value from aesara.tensor.var import TensorConstant, get_unique_value
from aesara.utils import NoDuplicateOptWarningFilter
_logger = logging.getLogger("aesara.tensor.math_opt")
_logger.addFilter(NoDuplicateOptWarningFilter())
def scalarconsts_rest(inputs, elemwise=True, only_process_constants=False):
@@ -186,14 +180,6 @@ def local_0_dot_x(fgraph, node):
        elif x.ndim == 1 and y.ndim == 1:
            constant_zero = assert_op(constant_zero, eq(x.shape[0], y.shape[0]))
            return [constant_zero]
else:
_logger.warning(
"Optimization Warning: "
"Optimization aesara/opt.py:local_0_dot_x Found "
"that it could apply, but was not implemented "
"for dot product with these input types:\n"
f"({x.type}, {y.type})"
)
@register_canonicalize
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论