提交 b142cb51 authored 作者: Virgile Andreani's avatar Virgile Andreani 提交者: Ricardo Vieira

RUF safe autofixes

上级 9d7c2ec9
......@@ -728,7 +728,7 @@ class NominalVariable(AtomicVariable[_TypeType]):
return hash((type(self), self.id, self.type))
def __repr__(self):
return f"{type(self).__name__}({repr(self.id)}, {repr(self.type)})"
return f"{type(self).__name__}({self.id!r}, {self.type!r})"
def signature(self) -> tuple[_TypeType, _IdType]:
return (self.type, self.id)
......@@ -774,7 +774,7 @@ class Constant(AtomicVariable[_TypeType]):
data_str = repr(self.data)
if len(data_str) > 20:
data_str = data_str[:10].strip() + " ... " + data_str[-10:].strip()
return f"{type(self).__name__}({repr(self.type)}, data={data_str})"
return f"{type(self).__name__}({self.type!r}, data={data_str})"
def clone(self, **kwargs):
return self
......
......@@ -1091,7 +1091,7 @@ class FromFunctionNodeRewriter(NodeRewriter):
return getattr(self, "__name__", repr(self))
def __repr__(self):
return f"FromFunctionNodeRewriter({repr(self.fn)}, {repr(self._tracks)}, {repr(self.requirements)})"
return f"FromFunctionNodeRewriter({self.fn!r}, {self._tracks!r}, {self.requirements!r})"
def print_summary(self, stream=sys.stdout, level=0, depth=-1):
print(f"{' ' * level}{self.transform} id={id(self)}", file=stream)
......
......@@ -69,7 +69,7 @@ class ConstrainedVar(Var):
return f"~{self.token} [{self.constraint}]"
def __repr__(self):
return f"{type(self).__name__}({repr(self.constraint)}, {self.token})"
return f"{type(self).__name__}({self.constraint!r}, {self.token})"
def car_Variable(x):
......
......@@ -607,7 +607,7 @@ class EnumType(CType, dict):
self.pyint_compat_code
+ "".join(
f"""
#define {k} {str(self[k])}
#define {k} {self[k]!s}
"""
for k in sorted(self.keys())
)
......
......@@ -137,7 +137,7 @@ except ImportError as e:
"PyTensor flag blas__ldflags is empty. "
"Falling back on slower implementations for "
"dot(matrix, vector), dot(vector, matrix) and "
f"dot(vector, vector) ({str(e)})"
f"dot(vector, vector) ({e!s})"
)
......
......@@ -602,7 +602,7 @@ class Elemwise(OpenMPOp):
if not isinstance(scalar_igrads, (list, tuple)):
raise TypeError(
f"{str(self.scalar_op)}.grad returned {str(type(scalar_igrads))} instead of list or tuple"
f"{self.scalar_op!s}.grad returned {type(scalar_igrads)!s} instead of list or tuple"
)
nd = inputs[0].type.ndim # this is the same for everyone
......
......@@ -8,12 +8,12 @@ from pytensor.tensor.random.type import random_generator_type, random_state_type
class RandomStateSharedVariable(SharedVariable):
def __str__(self):
return self.name or f"RandomStateSharedVariable({repr(self.container)})"
return self.name or f"RandomStateSharedVariable({self.container!r})"
class RandomGeneratorSharedVariable(SharedVariable):
def __str__(self):
return self.name or f"RandomGeneratorSharedVariable({repr(self.container)})"
return self.name or f"RandomGeneratorSharedVariable({self.container!r})"
@shared_constructor.register(np.random.RandomState)
......
......@@ -150,7 +150,7 @@ class ShapeFeature(Feature):
msg = (
f"Failed to infer_shape from Op {node.op}.\nInput shapes: "
f"{[self.shape_of[r] for r in node.inputs]}\nException encountered during infer_shape: "
f"{type(e)}\nException message: {str(e)}\nTraceback: {traceback.format_exc()}"
f"{type(e)}\nException message: {e!s}\nTraceback: {traceback.format_exc()}"
)
if config.on_shape_error == "raise":
raise Exception(msg).with_traceback(e.__traceback__)
......
......@@ -199,7 +199,7 @@ class TensorType(CType[np.ndarray], HasDataType, HasShape):
"this loss, you can: "
f"1) explicitly cast your data to {self.dtype}, or "
'2) set "allow_input_downcast=True" when calling '
f'"function". Value: "{repr(data)}"'
f'"function". Value: "{data!r}"'
)
raise TypeError(err_msg)
elif (
......
......@@ -683,7 +683,7 @@ def test_NominalVariable():
assert not nv4.equals(nv5)
assert hash(nv4) != hash(nv5)
assert repr(nv5) == f"NominalVariable(2, {repr(type3)})"
assert repr(nv5) == f"NominalVariable(2, {type3!r})"
assert nv5.signature() == (type3, 2)
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论