Commit a6b9585e authored by Virgile Andreani, committed by Virgile Andreani

Use implicit string concatenation instead of explicit

Parent f75434f3
...@@ -125,8 +125,8 @@ line-length = 88 ...@@ -125,8 +125,8 @@ line-length = 88
exclude = ["doc/", "pytensor/_version.py"] exclude = ["doc/", "pytensor/_version.py"]
[tool.ruff.lint] [tool.ruff.lint]
select = ["C", "E", "F", "I", "UP", "W", "RUF", "PERF", "PTH"] select = ["C", "E", "F", "I", "UP", "W", "RUF", "PERF", "PTH", "ISC"]
ignore = ["C408", "C901", "E501", "E741", "RUF012", "PERF203"] ignore = ["C408", "C901", "E501", "E741", "RUF012", "PERF203", "ISC001"]
[tool.ruff.lint.isort] [tool.ruff.lint.isort]
......
...@@ -30,9 +30,7 @@ def print_help(exit_status): ...@@ -30,9 +30,7 @@ def print_help(exit_status):
print('Type "pytensor-cache clear" to erase the cache') print('Type "pytensor-cache clear" to erase the cache')
print('Type "pytensor-cache list" to print the cache content') print('Type "pytensor-cache list" to print the cache content')
print('Type "pytensor-cache unlock" to unlock the cache directory') print('Type "pytensor-cache unlock" to unlock the cache directory')
print( print('Type "pytensor-cache cleanup" to delete keys in the old format/code version')
'Type "pytensor-cache cleanup" to delete keys in the old ' "format/code version"
)
print('Type "pytensor-cache purge" to force deletion of the cache directory') print('Type "pytensor-cache purge" to force deletion of the cache directory')
print( print(
'Type "pytensor-cache basecompiledir" ' 'Type "pytensor-cache basecompiledir" '
......
...@@ -1614,14 +1614,10 @@ class _Linker(LocalLinker): ...@@ -1614,14 +1614,10 @@ class _Linker(LocalLinker):
opt = str(reason[0][0]) opt = str(reason[0][0])
msg = ( msg = (
f"An optimization (probably {opt}) inserted an " f"An optimization (probably {opt}) inserted an "
"apply node that raise an error." "apply node that raise an error.\n"
+ "\nThe information we have about this " "The information we have about this optimization is:"
"optimizations is:" f"{reason[0][1]}\n{reason[0][2]}\n"
+ str(reason[0][1]) f"\nThe original exception: \n{e}"
+ "\n"
+ reason[0][2]
+ "\n\nThe original exception: \n"
+ str(e)
) )
new_e = e.__class__(msg) new_e = e.__class__(msg)
exc_type, exc_value, exc_trace = sys.exc_info() exc_type, exc_value, exc_trace = sys.exc_info()
...@@ -1725,15 +1721,11 @@ class _Linker(LocalLinker): ...@@ -1725,15 +1721,11 @@ class _Linker(LocalLinker):
raise raise
opt = str(reason[0][0]) opt = str(reason[0][0])
msg = ( msg = (
f"An optimization (probably {opt}) inserted " f"An optimization (probably {opt}) inserted an "
"an apply node that raise an error." "apply node that raise an error.\n"
+ "\nThe information we have about this " "The information we have about this optimization is:"
"optimizations is:" f"{reason[0][1]}\n{reason[0][2]}\n"
+ str(reason[0][1]) f"\nThe original exception: \n{e}"
+ "\n"
+ reason[0][2]
+ "\n\nThe original exception: \n"
+ str(e)
) )
new_e = e.__class__(msg) new_e = e.__class__(msg)
exc_type, exc_value, exc_trace = sys.exc_info() exc_type, exc_value, exc_trace = sys.exc_info()
......
...@@ -802,20 +802,20 @@ def _node_to_pattern(node): ...@@ -802,20 +802,20 @@ def _node_to_pattern(node):
if not isinstance(connection_pattern, list): if not isinstance(connection_pattern, list):
raise TypeError( raise TypeError(
"Op.connection_pattern should return " "Op.connection_pattern should return "
+ f"list of list of bool, but for Op={node.op}" f"list of list of bool, but for Op={node.op}"
+ f"got {connection_pattern} with type {type(connection_pattern)}." f"got {connection_pattern} with type {type(connection_pattern)}."
) )
if len(connection_pattern) != len(node.inputs): if len(connection_pattern) != len(node.inputs):
raise ValueError( raise ValueError(
f"{node.op}.connection_pattern should have {len(node.inputs)}" f"{node.op}.connection_pattern should have {len(node.inputs)}"
+ f" rows but has {len(connection_pattern)}." f" rows but has {len(connection_pattern)}."
) )
for ii, output_pattern in enumerate(connection_pattern): for ii, output_pattern in enumerate(connection_pattern):
if not isinstance(output_pattern, list): if not isinstance(output_pattern, list):
raise TypeError( raise TypeError(
f"{node.op}.connection_pattern should return" f"{node.op}.connection_pattern should return"
+ f" a list of lists, but element {int(ii)}" f" a list of lists, but element {int(ii)}"
+ f"is {output_pattern} of type {type(output_pattern)}." f"is {output_pattern} of type {type(output_pattern)}."
) )
else: else:
connection_pattern = [[True for output in node.outputs] for ipt in node.inputs] connection_pattern = [[True for output in node.outputs] for ipt in node.inputs]
......
...@@ -229,10 +229,10 @@ class RewriteDatabaseQuery: ...@@ -229,10 +229,10 @@ class RewriteDatabaseQuery:
def __str__(self): def __str__(self):
return ( return (
"RewriteDatabaseQuery(" "RewriteDatabaseQuery("
+ f"inc={self.include},ex={self.exclude}," f"inc={self.include},ex={self.exclude},"
+ f"require={self.require},subquery={self.subquery}," f"require={self.require},subquery={self.subquery},"
+ f"position_cutoff={self.position_cutoff}," f"position_cutoff={self.position_cutoff},"
+ f"extra_rewrites={self.extra_rewrites})" f"extra_rewrites={self.extra_rewrites})"
) )
def __setstate__(self, state): def __setstate__(self, state):
......
...@@ -1744,9 +1744,7 @@ def std_lib_dirs_and_libs() -> tuple[list[str], ...] | None: ...@@ -1744,9 +1744,7 @@ def std_lib_dirs_and_libs() -> tuple[list[str], ...] | None:
if not os.path.exists(os.path.join(libdir, f)): if not os.path.exists(os.path.join(libdir, f)):
print( print(
"Your Python version is from Canopy. " "Your Python version is from Canopy. "
+ "You need to install the package '" f"You need to install the package '{lib}' from Canopy package manager."
+ lib
+ "' from Canopy package manager."
) )
libdirs = [ libdirs = [
# Used in older Canopy # Used in older Canopy
...@@ -1763,9 +1761,7 @@ def std_lib_dirs_and_libs() -> tuple[list[str], ...] | None: ...@@ -1763,9 +1761,7 @@ def std_lib_dirs_and_libs() -> tuple[list[str], ...] | None:
): ):
print( print(
"Your Python version is from Canopy. " "Your Python version is from Canopy. "
+ "You need to install the package '" f"You need to install the package '{lib}' from Canopy package manager."
+ lib
+ "' from Canopy package manager."
) )
python_lib_dirs.insert(0, libdir) python_lib_dirs.insert(0, libdir)
std_lib_dirs_and_libs.data = [libname], python_lib_dirs std_lib_dirs_and_libs.data = [libname], python_lib_dirs
......
...@@ -351,8 +351,8 @@ def raise_with_op( ...@@ -351,8 +351,8 @@ def raise_with_op(
clients = [[c[0] for c in fgraph.clients[var]] for var in node.outputs] clients = [[c[0] for c in fgraph.clients[var]] for var in node.outputs]
detailed_err_msg += ( detailed_err_msg += (
f"Inputs shapes: {shapes}" f"Inputs shapes: {shapes}"
+ f"\nInputs strides: {strides}" f"\nInputs strides: {strides}"
+ f"\nInputs values: {scalar_values}" f"\nInputs values: {scalar_values}"
) )
if verbosity == "high": if verbosity == "high":
inpts = [ inpts = [
......
...@@ -1753,7 +1753,7 @@ class ScalarMaximum(BinaryScalarOp): ...@@ -1753,7 +1753,7 @@ class ScalarMaximum(BinaryScalarOp):
if any(i.type in complex_types for i in node.inputs): if any(i.type in complex_types for i in node.inputs):
raise NotImplementedError() raise NotImplementedError()
# Test for both y>x and x>=y to detect NaN # Test for both y>x and x>=y to detect NaN
return f"{z} = (({y})>({x})? ({y}): " f'(({x})>=({y})? ({x}): nan("")));' return f'{z} = (({y})>({x})? ({y}): (({x})>=({y})? ({x}): nan("")));'
def L_op(self, inputs, outputs, gout): def L_op(self, inputs, outputs, gout):
(x, y) = inputs (x, y) = inputs
...@@ -1795,7 +1795,7 @@ class ScalarMinimum(BinaryScalarOp): ...@@ -1795,7 +1795,7 @@ class ScalarMinimum(BinaryScalarOp):
(z,) = outputs (z,) = outputs
if any(i.type in complex_types for i in node.inputs): if any(i.type in complex_types for i in node.inputs):
raise NotImplementedError() raise NotImplementedError()
return f"{z} = (({y})<({x})? ({y}): " f'(({x})<=({y})? ({x}): nan("")));' return f'{z} = (({y})<({x})? ({y}): (({x})<=({y})? ({x}): nan("")));'
def L_op(self, inputs, outputs, gout): def L_op(self, inputs, outputs, gout):
(x, y) = inputs (x, y) = inputs
......
...@@ -1382,8 +1382,8 @@ class GetItem2d(Op): ...@@ -1382,8 +1382,8 @@ class GetItem2d(Op):
isinstance(ind, Variable) and getattr(ind, "ndim", -1) == 0 isinstance(ind, Variable) and getattr(ind, "ndim", -1) == 0
) or np.isscalar(ind): ) or np.isscalar(ind):
raise NotImplementedError( raise NotImplementedError(
"PyTensor has no sparse vector" "PyTensor has no sparse vector. "
+ "Use X[a:b, c:d], X[a:b, c:c+1] or X[a:b] instead." "Use X[a:b, c:d], X[a:b, c:c+1] or X[a:b] instead."
) )
else: else:
raise ValueError( raise ValueError(
......
...@@ -2284,7 +2284,7 @@ class BaseAbstractConv(Op): ...@@ -2284,7 +2284,7 @@ class BaseAbstractConv(Op):
""" """
if mode not in ("valid", "full"): if mode not in ("valid", "full"):
raise ValueError( raise ValueError(
f"invalid mode {mode}, which must be either " '"valid" or "full"' f'invalid mode {mode}, which must be either "valid" or "full"'
) )
if isinstance(dilation, int): if isinstance(dilation, int):
dilation = (dilation,) * self.convdim dilation = (dilation,) * self.convdim
......
...@@ -80,7 +80,7 @@ class IRFFTOp(Op): ...@@ -80,7 +80,7 @@ class IRFFTOp(Op):
if a.ndim < 3: if a.ndim < 3:
raise TypeError( raise TypeError(
f"{self.__class__.__name__}: input must have dimension >= 3, with " f"{self.__class__.__name__}: input must have dimension >= 3, with "
+ "first dimension batches and last real/imag parts" "first dimension batches and last real/imag parts"
) )
if s is None: if s is None:
......
...@@ -196,8 +196,8 @@ class TestCGemv(OptimizationTestMixin): ...@@ -196,8 +196,8 @@ class TestCGemv(OptimizationTestMixin):
if check_force_gemv_init(): if check_force_gemv_init():
warn( warn(
"WARNING: The current BLAS requires PyTensor to initialize" "WARNING: The current BLAS requires PyTensor to initialize"
+ " memory for some GEMV calls which will result in a minor" " memory for some GEMV calls which will result in a minor"
+ " degradation in performance for such calls." " degradation in performance for such calls."
) )
def t_gemv1(self, m_shp): def t_gemv1(self, m_shp):
......
Markdown is supported
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to comment