提交 fa74d7e3 authored 作者: Brandon T. Willard's avatar Brandon T. Willard 提交者: Brandon T. Willard

Rename aesara.graph.toolbox to aesara.graph.features

上级 4403e131
......@@ -30,9 +30,9 @@ from aesara.compile.ops import OutputGuard, _output_guard
from aesara.configdefaults import config
from aesara.graph.basic import Variable, graph_inputs, io_toposort
from aesara.graph.destroyhandler import DestroyHandler
from aesara.graph.features import BadOptimization
from aesara.graph.fg import InconsistencyError
from aesara.graph.op import COp, Op, ops_with_inner_function
from aesara.graph.toolbox import BadOptimization
from aesara.graph.utils import MethodNotDefined
from aesara.link.basic import Container, LocalLinker
from aesara.link.utils import map_storage, raise_with_op
......
......@@ -30,9 +30,9 @@ from aesara.graph.basic import (
vars_between,
)
from aesara.graph.destroyhandler import DestroyHandler
from aesara.graph.features import PreserveVariableAttributes, is_same_graph
from aesara.graph.fg import FunctionGraph, InconsistencyError
from aesara.graph.op import ops_with_inner_function
from aesara.graph.toolbox import PreserveVariableAttributes, is_same_graph
from aesara.graph.utils import get_variable_trace_string
from aesara.link.basic import Container
from aesara.link.utils import raise_with_op
......
......@@ -149,7 +149,7 @@ from aesara.gpuarray.type import (
get_context,
move_to_gpu,
)
from aesara.graph import toolbox
from aesara.graph import features
from aesara.graph.basic import Constant, Variable, applys_between, clone_replace
from aesara.graph.fg import FunctionGraph
from aesara.graph.opt import (
......@@ -244,7 +244,7 @@ class InputToGpuOptimizer(GlobalOptimizer):
"""
def add_requirements(self, fgraph):
fgraph.attach_feature(toolbox.ReplaceValidate())
fgraph.attach_feature(features.ReplaceValidate())
def apply(self, fgraph):
for input in fgraph.inputs:
......@@ -305,7 +305,7 @@ class GraphToGPU(GlobalOptimizer):
self.local_optimizers_map = local_optimizers_map
def add_requirements(self, fgraph):
fgraph.attach_feature(toolbox.ReplaceValidate())
fgraph.attach_feature(features.ReplaceValidate())
def apply(self, fgraph):
mapping = {}
......
......@@ -9,8 +9,8 @@ from collections import OrderedDict, deque
import aesara
from aesara.configdefaults import config
from aesara.graph.basic import Constant
from aesara.graph.features import AlreadyThere, Bookkeeper
from aesara.graph.fg import InconsistencyError
from aesara.graph.toolbox import AlreadyThere, Bookkeeper
from aesara.misc.ordered_set import OrderedSet
......
import copy
import inspect
import sys
import time
from collections import OrderedDict
from functools import partial
from io import StringIO
import numpy as np
import aesara
from aesara.configdefaults import config
from aesara.graph.basic import (
Variable,
equal_computations,
graph_inputs,
io_toposort,
vars_between,
)
class AlreadyThere(Exception):
    """Signal that an equivalent feature is already attached.

    A `Feature.on_attach` callback raises this exception to cancel the
    attachment when the target ``FunctionGraph`` already carries a
    functionally identical feature.
    """
class ReplacementDidNotRemoveError(Exception):
    """Raised by ``replace_all_validate_remove``.

    Signals that an optimization asked for a Variable or an Apply node to be
    removed from the graph, but the replacement it supplied did not actually
    remove it.
    """
class BadOptimization(Exception):
    """
    Exception: some variable and its substitute take different runtime values.
    Note: If there is only 1 parameter and it is a string, we will use
    it as the error message. This is needed when we catch, extend and
    reraise an error.
    """
    new_r = None
    """
    A `Variable` instance that took a different value from `old_r`,
    but which replaced `old_r`.
    """
    old_r = None
    """
    A `Variable` instance that was replaced by `new_r`.
    """
    old_r_val = None
    """
    The value computed for `old_r`.
    """
    new_r_val = None
    """
    The value computed for `new_r`.
    """
    reason = None
    """
    An object that indicates why old_r was turned into new_r.
    Convention is that this is the name of the optimization that
    requested the replacement.
    """
    old_graph = ""
    """
    A multiline string representation of the graph leading to
    old_r, at the time of the replacement.
    """
    new_graph = ""
    """
    A multiline string representation of the graph leading to
    new_r, at the time of the replacement.
    """
    def __init__(
        self,
        old_r,
        new_r=None,
        old_r_val=None,
        new_r_val=None,
        reason=None,
        old_graph=None,
        new_graph=None,
    ):
        super().__init__()
        self.old_r = old_r
        self.new_r = new_r
        self.old_r_val = old_r_val
        self.new_r_val = new_r_val
        self.reason = reason
        # `done` and `used_ids` are shared by both debugprint calls below so
        # that node identifiers line up between the old- and new-graph dumps.
        done = dict()
        used_ids = dict()
        if isinstance(old_r, Variable):
            # Render up to 6 levels of the graph that produced `old_r`.
            self.old_graph = aesara.compile.debugmode.debugprint(
                old_r,
                prefix=" ",
                depth=6,
                file=StringIO(),
                done=done,
                print_type=True,
                used_ids=used_ids,
            ).getvalue()
        else:
            self.old_graph = None
        if isinstance(new_r, Variable):
            self.new_graph = aesara.compile.debugmode.debugprint(
                new_r,
                prefix=" ",
                depth=6,
                file=StringIO(),
                done=done,
                print_type=True,
                used_ids=used_ids,
            ).getvalue()
        else:
            self.new_graph = None
        # To allow extending the error message of an existing error.
        self.full_err = None
        if isinstance(old_r, str):
            # Single-string form: the argument is a pre-built error message,
            # so no other constructor argument may be supplied.
            assert (
                new_r is None
                and old_r_val is None
                and new_r_val is None
                and reason is None
                and old_graph is None
                and new_graph is None
            )
            self.full_err = old_r
    def __str__(self):
        return self.str_diagnostic()
    def str_diagnostic(self):
        """
        Return a pretty multiline string representing the cause
        of the exception.
        """
        # We have a pre-made message
        if getattr(self, "full_err", None) is not None:
            return self.full_err
        sio = StringIO()
        # Cap how much of each value's repr is printed.
        val_str_len_limit = 800
        print("BadOptimization Error", super().__str__(), file=sio)
        print(" Variable: id", id(self.new_r), self.new_r, file=sio)
        print(" Op", self.new_r.owner, file=sio)
        print(" Value Type:", type(self.new_r_val), file=sio)
        try:
            # Write to a scratch buffer first: nothing is emitted unless all
            # three attribute accesses (shape/dtype/strides) succeed.
            ssio = StringIO()
            print(" Old Value shape, dtype, strides:", end=" ", file=ssio)
            print(self.old_r_val.shape, end=" ", file=ssio)
            print(self.old_r_val.dtype, end=" ", file=ssio)
            print(self.old_r_val.strides, file=ssio)
            # only if all succeeds to we add anything to sio
            print(ssio.getvalue(), file=sio)
        except Exception:
            pass
        str_old_r_val = str(self.old_r_val)
        if len(str_old_r_val) > val_str_len_limit:
            print(
                " Old Value: ",
                str(self.old_r_val)[:val_str_len_limit],
                "...",
                file=sio,
            )
        else:
            print(" Old Value: ", str(self.old_r_val), file=sio)
        try:
            ssio = StringIO()
            print(" New Value shape, dtype, strides:", end=" ", file=ssio)
            print(self.new_r_val.shape, end=" ", file=ssio)
            print(self.new_r_val.dtype, end=" ", file=ssio)
            print(self.new_r_val.strides, file=ssio)
            # only if all succeeds to we add anything to sio
            print(ssio.getvalue(), file=sio)
        except Exception:
            pass
        str_new_r_val = str(self.new_r_val)
        if len(str_new_r_val) > val_str_len_limit:
            print(
                " New Value: ",
                str(self.new_r_val)[:val_str_len_limit],
                "...",
                file=sio,
            )
        else:
            print(" New Value: ", str(self.new_r_val), file=sio)
        try:
            # Numeric difference statistics; the whole section is silently
            # skipped when the values are not array-like.
            ov = np.asarray(self.old_r_val)
            nv = np.asarray(self.new_r_val)
            ssio = StringIO()
            abs_diff = np.absolute(nv - ov)
            print(" Max Abs Diff: ", np.max(abs_diff), file=ssio)
            print(" Mean Abs Diff: ", np.mean(abs_diff), file=ssio)
            print(" Median Abs Diff: ", np.median(abs_diff), file=ssio)
            print(" Std Abs Diff: ", np.std(abs_diff), file=ssio)
            arg_max_val = np.argmax(abs_diff)
            values_at_max = (nv.flatten()[arg_max_val], ov.flatten()[arg_max_val])
            print(" Value at Max Diff: ", values_at_max, file=ssio)
            # N.B. the maximum(..., 1e-8) protects against div by 0 when
            # nv == ov == 0
            reldiff = abs_diff / np.maximum(np.absolute(nv) + np.absolute(ov), 1e-8)
            print(" Max Rel Diff: ", np.max(reldiff), file=ssio)
            print(" Mean Rel Diff: ", np.mean(reldiff), file=ssio)
            print(" Median Rel Diff: ", np.median(reldiff), file=ssio)
            print(" Std Rel Diff: ", np.std(reldiff), file=ssio)
            arg_max_val = np.argmax(reldiff)
            values_at_max = (nv.flatten()[arg_max_val], ov.flatten()[arg_max_val])
            print(" Value at Max Diff: ", values_at_max, file=ssio)
            # only if all succeeds to we add anything to sio
            print(ssio.getvalue(), file=sio)
        except Exception:
            pass
        print(" Reason: ", str(self.reason), file=sio)
        print(" Old Graph:", file=sio)
        print(self.old_graph, file=sio)
        print(" New Graph:", file=sio)
        print(self.new_graph, file=sio)
        print("", file=sio)
        print("Hint: relax the tolerance by setting tensor__cmp_sloppy=1", file=sio)
        print(" or even tensor__cmp_sloppy=2 for less-strict comparison", file=sio)
        return sio.getvalue()
class Feature:
    """
    Base class for FunctionGraph extensions.
    A Feature is an object with several callbacks that are triggered
    by various operations on FunctionGraphs. It can be used to enforce
    graph properties at all stages of graph optimization.
    See Also
    --------
    aesara.graph.features : for common extensions.
    """
    # Every callback below is a no-op by default; subclasses override only
    # the events they care about.
    def on_attach(self, fgraph):
        """
        Called by `FunctionGraph.attach_feature`, the method that attaches the
        feature to the `FunctionGraph`. Since this is called after the
        `FunctionGraph` is initially populated, this is where you should run
        checks on the initial contents of the `FunctionGraph`.
        The on_attach method may raise the `AlreadyThere` exception to cancel
        the attach operation if it detects that another Feature instance
        implementing the same functionality is already attached to the
        `FunctionGraph`.
        The feature has great freedom in what it can do with the `fgraph`: it
        may, for example, add methods to it dynamically.
        """
    def on_detach(self, fgraph):
        """
        Called by `FunctionGraph.remove_feature`. Should remove any
        dynamically-added functionality that it installed into the fgraph.
        """
    def on_import(self, fgraph, node, reason):
        """
        Called whenever a node is imported into `fgraph`, which is just before
        the node is actually connected to the graph.
        Note: this is not called when the graph is created. If you want to
        detect the first nodes to be imported into the graph, you should do
        this by implementing `on_attach`.
        """
    def on_change_input(self, fgraph, node, i, var, new_var, reason=None):
        """
        Called whenever ``node.inputs[i]`` is changed from `var` to `new_var`.
        At the moment the callback is done, the change has already taken place.
        If you raise an exception in this function, the state of the graph
        might be broken for all intents and purposes.
        """
    def orderings(self, fgraph):
        """
        Called by `FunctionGraph.toposort`. It should return a dictionary of
        ``{node: predecessors}`` where ``predecessors`` is a list of
        nodes that should be computed before the key node.
        If you raise an exception in this function, the state of the graph
        might be broken for all intents and purposes.
        """
        # Default: no extra ordering constraints.
        return OrderedDict()
class Bookkeeper(Feature):
    """Replay import/prune callbacks for the graph's existing nodes.

    On attachment, every node already in the graph is fed (in topological
    order) to `on_import`; on detachment, every node is fed to `on_prune`.
    """
    def on_attach(self, fgraph):
        """Simulate the import of every node already present in `fgraph`.

        Called by `FunctionGraph.attach_feature` after the graph has been
        populated, so initial-content checks belong here.
        """
        for apply_node in io_toposort(fgraph.inputs, fgraph.outputs):
            self.on_import(fgraph, apply_node, "on_attach")
    def on_detach(self, fgraph):
        """Simulate the pruning of every node still present in `fgraph`.

        Undoes any dynamically-installed functionality on detachment.
        """
        for apply_node in io_toposort(fgraph.inputs, fgraph.outputs):
            self.on_prune(fgraph, apply_node, "Bookkeeper.detach")
class GetCheckpoint:
    """Callable installed as ``fgraph.checkpoint`` by the `History` feature.

    Each call discards the history recorded so far for the associated
    graph and returns a new, strictly increasing checkpoint number.
    """
    def __init__(self, history, fgraph):
        self.h = history
        self.fgraph = fgraph
        self.nb = 0
    def __call__(self):
        # Reset the recorded undo list and hand out the next checkpoint id.
        self.h.history[self.fgraph] = []
        self.nb = self.nb + 1
        return self.nb
class LambdaExtract:
    """Deferred undo for a single input replacement.

    Records the arguments of one ``change_input`` event; calling the
    instance puts the original variable `r` back at position `i` of `node`,
    tagging the change with a ``("Revert", reason)`` marker.
    """
    def __init__(self, fgraph, node, i, r, reason=None):
        self.fgraph = fgraph
        self.node = node
        self.i = i
        self.r = r
        self.reason = reason
    def __call__(self):
        revert_tag = ("Revert", self.reason)
        return self.fgraph.change_input(self.node, self.i, self.r, reason=revert_tag)
class History(Feature):
    """Keep a history of changes to a `FunctionGraph`.

    The history can be reverted up to the last checkpoint.  Only one point
    in the past can be reverted to; this limit was added to lower memory
    usage.
    """
    # fgraph attributes stripped before pickling and re-installed by
    # `unpickle`.
    pickle_rm_attr = ["checkpoint", "revert"]
    def __init__(self):
        # Maps each attached fgraph to a list of `LambdaExtract` undo
        # callables, or to None while a revert is in progress.
        self.history = {}
    def on_attach(self, fgraph):
        if hasattr(fgraph, "checkpoint") or hasattr(fgraph, "revert"):
            raise AlreadyThere(
                "History feature is already present or in"
                " conflict with another plugin."
            )
        self.history[fgraph] = []
        # Don't call unpickle here, as ReplaceValidate.on_attach()
        # call to History.on_attach() will call the
        # ReplaceValidate.unpickle and not History.unpickle
        fgraph.checkpoint = GetCheckpoint(self, fgraph)
        fgraph.revert = partial(self.revert, fgraph)
    def unpickle(self, fgraph):
        # Re-install the dynamically-added fgraph methods after unpickling.
        fgraph.checkpoint = GetCheckpoint(self, fgraph)
        fgraph.revert = partial(self.revert, fgraph)
    def on_detach(self, fgraph):
        """
        Should remove any dynamically added functionality
        that it installed into the function_graph
        """
        del fgraph.checkpoint
        del fgraph.revert
        del self.history[fgraph]
    def on_change_input(self, fgraph, node, i, r, new_r, reason=None):
        # While a revert is in progress, history[fgraph] is None so that the
        # undo operations are not themselves recorded.
        if self.history[fgraph] is None:
            return
        h = self.history[fgraph]
        h.append(LambdaExtract(fgraph, node, i, r, reason))
    def revert(self, fgraph, checkpoint):
        """
        Reverts the graph to whatever it was at the provided
        checkpoint (undoes all replacements). A checkpoint at any
        given time can be obtained using self.checkpoint().
        """
        h = self.history[fgraph]
        # Disable recording so the undo calls below are not logged as new
        # changes; restored (as the emptied list) at the end.
        self.history[fgraph] = None
        assert fgraph.checkpoint.nb == checkpoint
        while h:
            f = h.pop()
            f()
        self.history[fgraph] = h
class Validator(Feature):
    """Attach ``validate``/``consistent`` methods to the `FunctionGraph`.

    ``fgraph.validate()`` runs every attached feature's ``validate``
    callback and raises on inconsistency; ``fgraph.consistent()`` is the
    non-raising boolean form.
    """
    # fgraph attributes stripped before pickling and re-installed by
    # `unpickle`.
    pickle_rm_attr = ["validate", "consistent"]
    def on_attach(self, fgraph):
        for attr in ("validate", "validate_time"):
            if hasattr(fgraph, attr):
                raise AlreadyThere(
                    "Validator feature is already present or in"
                    " conflict with another plugin."
                )
        # Don't call unpickle here, as ReplaceValidate.on_attach()
        # call to History.on_attach() will call the
        # ReplaceValidate.unpickle and not History.unpickle
        fgraph.validate = partial(self.validate_, fgraph)
        fgraph.consistent = partial(self.consistent_, fgraph)
    def unpickle(self, fgraph):
        # Re-install the dynamically-added fgraph methods after unpickling.
        fgraph.validate = partial(self.validate_, fgraph)
        fgraph.consistent = partial(self.consistent_, fgraph)
    def on_detach(self, fgraph):
        """
        Should remove any dynamically added functionality
        that it installed into the function_graph
        """
        del fgraph.validate
        del fgraph.consistent
    def validate_(self, fgraph):
        """
        If the caller is replace_all_validate, just raise the
        exception. replace_all_validate will print out the
        verbose output. Or it has to be done here before raise.
        """
        t0 = time.time()
        try:
            ret = fgraph.execute_callbacks("validate")
        except Exception as e:
            # Inspect the calling frame to decide who is responsible for
            # printing the verbose failure message.
            cf = inspect.currentframe()
            uf = cf.f_back
            uf_info = inspect.getframeinfo(uf)
            # If the caller is replace_all_validate, just raise the
            # exception. replace_all_validate will print out the
            # verbose output.
            # Or it has to be done here before raise.
            if uf_info.function == "replace_all_validate":
                raise
            else:
                # Best effort: read `verbose`/`r` from the caller's locals.
                verbose = uf.f_locals.get("verbose", False)
                if verbose:
                    r = uf.f_locals.get("r", "")
                    reason = uf_info.function
                    print(f"validate failed on node {r}.\n Reason: {reason}, {e}")
                raise
        t1 = time.time()
        # Accumulate validation time into the profile, when profiling.
        if fgraph.profile:
            fgraph.profile.validate_time += t1 - t0
        return ret
    def consistent_(self, fgraph):
        try:
            fgraph.validate()
            return True
        except Exception:
            return False
class ReplaceValidate(History, Validator):
    """Validated, revertible graph replacements.

    Combines `History` (checkpoint/revert) and `Validator`
    (validate/consistent), and attaches ``replace_validate``,
    ``replace_all_validate`` and ``replace_all_validate_remove`` methods to
    the `FunctionGraph`.
    """
    pickle_rm_attr = (
        ["replace_validate", "replace_all_validate", "replace_all_validate_remove"]
        + History.pickle_rm_attr
        + Validator.pickle_rm_attr
    )
    def on_attach(self, fgraph):
        for attr in (
            "replace_validate",
            "replace_all_validate",
            "replace_all_validate_remove",
        ):
            if hasattr(fgraph, attr):
                raise AlreadyThere(
                    "ReplaceValidate feature is already present"
                    " or in conflict with another plugin."
                )
        # Nodes removed via `replace_all_validate_remove`; re-importing any
        # of them makes the next validation fail (see on_import/validate).
        self._nodes_removed = set()
        self.fail_validate = False
        History.on_attach(self, fgraph)
        Validator.on_attach(self, fgraph)
        self.unpickle(fgraph)
    def unpickle(self, fgraph):
        # Re-install the dynamically-added fgraph methods after unpickling.
        History.unpickle(self, fgraph)
        Validator.unpickle(self, fgraph)
        fgraph.replace_validate = partial(self.replace_validate, fgraph)
        fgraph.replace_all_validate = partial(self.replace_all_validate, fgraph)
        fgraph.replace_all_validate_remove = partial(
            self.replace_all_validate_remove, fgraph
        )
    def on_detach(self, fgraph):
        """
        Should remove any dynamically added functionality
        that it installed into the function_graph
        """
        History.on_detach(self, fgraph)
        Validator.on_detach(self, fgraph)
        del self._nodes_removed
        del fgraph.replace_validate
        del fgraph.replace_all_validate
        del fgraph.replace_all_validate_remove
    def replace_validate(self, fgraph, r, new_r, reason=None, **kwargs):
        """Replace `r` by `new_r`, validating and reverting on failure."""
        self.replace_all_validate(fgraph, [(r, new_r)], reason=reason, **kwargs)
    def replace_all_validate(
        self, fgraph, replacements, reason=None, verbose=None, **kwargs
    ):
        """Apply all ``(r, new_r)`` replacements, then validate the graph.

        On failure the graph is reverted to the checkpoint taken on entry
        before the exception is re-raised.  Returns that checkpoint (needed
        by `replace_all_validate_remove`).
        """
        chk = fgraph.checkpoint()
        if verbose is None:
            verbose = config.optimizer_verbose
        if config.scan__debug:
            from aesara.scan.op import Scan
            scans = [n for n in fgraph.apply_nodes if isinstance(n.op, Scan)]
        for r, new_r in replacements:
            try:
                fgraph.replace(r, new_r, reason=reason, verbose=False, **kwargs)
            except Exception as e:
                msg = str(e)
                s1 = "The type of the replacement must be the same"
                s2 = "does not belong to this FunctionGraph"
                s3 = "maximum recursion depth exceeded"
                if s3 in msg:
                    # There is nothing safe we can do to recover from this.
                    # So don't revert as this raise a different error
                    # that isn't helpful.
                    e.args += (
                        " As a temporary work around, you can raise Python"
                        " stack limit with:"
                        " import sys; sys.setrecursionlimit(10000)",
                    )
                    raise
                elif s1 not in msg and s2 not in msg:
                    # Unexpected failure: report it as an internal bug.
                    out = sys.stderr
                    print(
                        "<<!! BUG IN FGRAPH.REPLACE OR A LISTENER !!>>",
                        type(e),
                        e,
                        reason,
                        file=out,
                    )
                # this might fail if the error is in a listener:
                # (fgraph.replace kinda needs better internal error handling)
                fgraph.revert(chk)
                raise
        try:
            fgraph.validate()
        except Exception as e:
            fgraph.revert(chk)
            if verbose:
                print(f"validate failed on node {r}.\n Reason: {reason}, {e}")
            raise
        if config.scan__debug:
            from aesara.scan.op import Scan
            scans2 = [n for n in fgraph.apply_nodes if isinstance(n.op, Scan)]
            nb = len(scans)
            nb2 = len(scans2)
            # NOTE(review): `r`/`new_r` below are the last pair seen in the
            # loop; an empty `replacements` list would raise NameError here.
            if nb2 > nb:
                print(
                    "Extra scan introduced",
                    nb,
                    nb2,
                    getattr(reason, "name", reason),
                    r,
                    new_r,
                )
            elif nb2 < nb:
                print(
                    "Scan removed", nb, nb2, getattr(reason, "name", reason), r, new_r
                )
        if verbose:
            print(reason, r, new_r)
        # The return is needed by replace_all_validate_remove
        return chk
    def replace_all_validate_remove(
        self, fgraph, replacements, remove, reason=None, warn=True, **kwargs
    ):
        """
        As replace_all_validate, revert the replacement if the ops
        in the list remove are still in the graph. Also print a warning.
        """
        chk = fgraph.replace_all_validate(replacements, reason=reason, **kwargs)
        self._nodes_removed.update(remove)
        for rm in remove:
            if rm in fgraph.apply_nodes or rm in fgraph.variables:
                fgraph.revert(chk)
                if warn:
                    # BUG FIX: `warn` is a boolean flag, but it used to be
                    # *called* here (shadowing `warnings.warn`), which raised
                    # `TypeError` instead of emitting the intended warning.
                    import warnings
                    warnings.warn(
                        "An optimization wanted to replace a Variable"
                        " in the graph, but the replacement for it doesn't"
                        " remove it. We disabled the optimization. "
                        f"{reason}: {replacements}",
                    )
                raise ReplacementDidNotRemoveError()
    def __getstate__(self):
        d = self.__dict__.copy()
        # `history` holds per-fgraph undo closures and must not be pickled.
        if "history" in d:
            del d["history"]
        return d
    def on_import(self, fgraph, node, reason):
        # Flag the re-introduction of a node previously removed through
        # `replace_all_validate_remove`; `validate` turns it into an error.
        if node in self._nodes_removed:
            self.fail_validate = True
    def validate(self, fgraph):
        if self.fail_validate:
            self.fail_validate = False
            raise aesara.graph.fg.InconsistencyError(
                "Trying to reintroduce a removed node"
            )
class NodeFinder(Bookkeeper):
    """Index the graph's Apply nodes by their `Op`.

    Attaches a ``get_nodes(op)`` method to the `FunctionGraph` returning the
    nodes whose op is `op`.  Unhashable ops are ignored on import/prune and
    raise `TypeError` when queried.
    """
    def __init__(self):
        self.fgraph = None
        # op -> list of Apply nodes currently in the graph
        self.d = {}
    def on_attach(self, fgraph):
        if self.fgraph is not None:
            raise Exception(
                "A NodeFinder instance can only serve one " "FunctionGraph."
            )
        if hasattr(fgraph, "get_nodes"):
            raise AlreadyThere(
                "NodeFinder is already present or in conflict" " with another plugin."
            )
        self.fgraph = fgraph
        fgraph.get_nodes = partial(self.query, fgraph)
        # Bookkeeper replays `on_import` for every node already in the graph.
        Bookkeeper.on_attach(self, fgraph)
    def on_detach(self, fgraph):
        """
        Should remove any dynamically added functionality
        that it installed into the function_graph
        """
        if self.fgraph is not fgraph:
            raise Exception(
                "This NodeFinder instance was not attached to the" " provided fgraph."
            )
        self.fgraph = None
        del fgraph.get_nodes
        Bookkeeper.on_detach(self, fgraph)
    def on_import(self, fgraph, node, reason):
        try:
            self.d.setdefault(node.op, []).append(node)
        except TypeError:  # node.op is unhashable
            return
        except Exception as e:
            # Anything else is unexpected; dump diagnostics before re-raising.
            print("OFFENDING node", type(node), type(node.op), file=sys.stderr)
            try:
                print("OFFENDING node hash", hash(node.op), file=sys.stderr)
            except Exception:
                print("OFFENDING node not hashable", file=sys.stderr)
            raise e
    def on_prune(self, fgraph, node, reason):
        try:
            nodes = self.d[node.op]
        except TypeError:  # node.op is unhashable
            return
        nodes.remove(node)
        # Drop empty entries so `d` only tracks ops still in the graph.
        if not nodes:
            del self.d[node.op]
    def query(self, fgraph, op):
        """Return (a copy of) the list of nodes in `fgraph` whose op is `op`."""
        try:
            # Renamed from `all`, which shadowed the builtin.
            found = self.d.get(op, [])
        except TypeError:
            # BUG FIX: message used to read "{op} in unhashable".
            raise TypeError(
                f"{op} is unhashable and cannot be queried by the optimizer"
            )
        return list(found)
class PrintListener(Feature):
    """Debugging feature that prints a line for every fgraph event.

    Pass ``active=False`` to silence all output while keeping the feature
    attached.
    """
    def __init__(self, active=True):
        self.active = active
    def on_attach(self, fgraph):
        if not self.active:
            return
        print("-- attaching to: ", fgraph)
    def on_detach(self, fgraph):
        """
        Should remove any dynamically added functionality
        that it installed into the function_graph
        """
        if not self.active:
            return
        print("-- detaching from: ", fgraph)
    def on_import(self, fgraph, node, reason):
        if not self.active:
            return
        print(f"-- importing: {node}, reason: {reason}")
    def on_prune(self, fgraph, node, reason):
        if not self.active:
            return
        print(f"-- pruning: {node}, reason: {reason}")
    def on_change_input(self, fgraph, node, i, r, new_r, reason=None):
        if not self.active:
            return
        print(f"-- changing ({node}.inputs[{i}]) from {r} to {new_r}")
class PreserveNames(Feature):
    """Copy a replaced variable's name onto its unnamed replacement.

    Deprecated; kept only so that old pickles can still be loaded.
    """
    def on_change_input(self, fgraph, node, i, r, new_r, reason=None):
        if new_r.name is None and r.name is not None:
            new_r.name = r.name
class PreserveVariableAttributes(Feature):
    """Carry selected attributes from a replaced variable to its replacement.

    When ``new_r`` replaces ``r`` the name (if the replacement has none) and
    the ``tag.nan_guard_mode_check`` flag are propagated.
    """
    def on_change_input(self, fgraph, node, i, r, new_r, reason=None):
        if new_r.name is None and r.name is not None:
            new_r.name = r.name
        guard_flag = getattr(r.tag, "nan_guard_mode_check", False)
        if guard_flag and getattr(new_r.tag, "nan_guard_mode_check", False) is False:
            new_r.tag.nan_guard_mode_check = guard_flag
class NoOutputFromInplace(Feature):
    """Validation feature forbidding in-place computation of chosen outputs.

    Outputs with indices in ``[first_output_idx, last_output_idx)`` may not
    be produced by an `Op` that computes them by destroying another variable
    in place.
    """
    def __init__(self, first_output_idx=0, last_output_idx=None):
        self.first_idx = first_output_idx
        self.last_idx = last_output_idx
    def validate(self, fgraph):
        # Nothing to check unless a destroy handler tracks in-place ops.
        if not hasattr(fgraph, "destroyers"):
            return True
        for out in list(fgraph.outputs)[self.first_idx : self.last_idx]:
            owner = out.owner
            # Graph inputs are never computed in place.
            if owner is None:
                continue
            out_idx = owner.outputs.index(out)
            # An output listed in its op's destroy_map is produced by
            # overwriting one of the op's inputs in place.
            if owner.op.destroy_map and out_idx in owner.op.destroy_map:
                raise aesara.graph.fg.InconsistencyError(
                    "A function graph Feature has requested that outputs of the graph "
                    "be prevented from being the result of in-place "
                    f"operations. This has prevented the output {out} from "
                    "being computed by modifying another variable in-place."
                )
def is_same_graph_with_merge(var1, var2, givens=None):
    """
    Merge-based implementation of `aesara.graph.basic.is_same_graph`.
    See help on `aesara.graph.basic.is_same_graph` for additional documentation.
    """
    from aesara.graph.opt import MergeOptimizer
    if givens is None:
        givens = {}
    # Copy variables since the MergeOptimizer will modify them.
    copied = copy.deepcopy([var1, var2, givens])
    # NOTE: `vars` shadows the builtin, but only locally.
    vars = copied[0:2]
    givens = copied[2]
    # Create FunctionGraph.
    inputs = list(graph_inputs(vars))
    # The clone isn't needed as we did a deepcopy, and cloning would
    # break the mapping in givens.
    fgraph = aesara.graph.fg.FunctionGraph(inputs, vars, clone=False)
    # Perform Variable substitution.
    for to_replace, replace_by in givens.items():
        fgraph.replace(to_replace, replace_by)
    # Perform merge optimization.
    MergeOptimizer().optimize(fgraph)
    # When two variables perform the same computations, they will have the same
    # owner in the optimized graph.
    # We need to be careful with the special case where the owner is None,
    # which happens when the graph is made of a single Variable.
    # We also need to make sure we replace a Variable if it is present in
    # `givens`.
    vars_replaced = [givens.get(v, v) for v in fgraph.outputs]
    o1, o2 = [v.owner for v in vars_replaced]
    if o1 is None and o2 is None:
        # Comparing two single-Variable graphs: they are equal if they are
        # the same Variable.
        return vars_replaced[0] == vars_replaced[1]
    else:
        return o1 is o2
def is_same_graph(var1, var2, givens=None):
    """
    Return True iff Variables `var1` and `var2` perform the same computation.
    By 'performing the same computation', we mean that they must share the same
    graph, so that for instance this function will return False when comparing
    (x * (y * z)) with ((x * y) * z).
    The current implementation is not efficient since, when possible, it
    verifies equality by calling two different functions that are expected to
    return the same output. The goal is to verify this assumption, to
    eventually get rid of one of them in the future.
    Parameters
    ----------
    var1
        The first Variable to compare.
    var2
        The second Variable to compare.
    givens
        Similar to the `givens` argument of `aesara.function`, it can be used
        to perform substitutions in the computational graph of `var1` and
        `var2`. This argument is associated to neither `var1` nor `var2`:
        substitutions may affect both graphs if the substituted variable
        is present in both.
    Examples
    --------
    ====== ====== ====== ======
    var1   var2   givens output
    ====== ====== ====== ======
    x + 1  x + 1  {}     True
    x + 1  y + 1  {}     False
    x + 1  y + 1  {x: y} True
    ====== ====== ====== ======
    """
    # Becomes False when `equal_computations` cannot handle the requested
    # substitutions; the merge-based result is then used alone.
    use_equal_computations = True
    if givens is None:
        givens = {}
    if not isinstance(givens, dict):
        givens = dict(givens)
    # Get result from the merge-based function.
    rval1 = is_same_graph_with_merge(var1=var1, var2=var2, givens=givens)
    if givens:
        # We need to build the `in_xs` and `in_ys` lists. To do this, we need
        # to be able to tell whether a variable belongs to the computational
        # graph of `var1` or `var2`.
        # The typical case we want to handle is when `to_replace` belongs to
        # one of these graphs, and `replace_by` belongs to the other one. In
        # other situations, the current implementation of `equal_computations`
        # is probably not appropriate, so we do not call it.
        ok = True
        in_xs = []
        in_ys = []
        # Compute the sets of all variables found in each computational graph.
        inputs_var = list(map(graph_inputs, ([var1], [var2])))
        all_vars = [
            set(vars_between(v_i, v_o))
            for v_i, v_o in ((inputs_var[0], [var1]), (inputs_var[1], [var2]))
        ]
        def in_var(x, k):
            # Return True iff `x` is in computation graph of variable `vark`.
            return x in all_vars[k - 1]
        for to_replace, replace_by in givens.items():
            # Map a substitution variable to the computational graphs it
            # belongs to.
            inside = {
                v: [in_var(v, k) for k in (1, 2)] for v in (to_replace, replace_by)
            }
            if (
                inside[to_replace][0]
                and not inside[to_replace][1]
                and inside[replace_by][1]
                and not inside[replace_by][0]
            ):
                # Substitute variable in `var1` by one from `var2`.
                in_xs.append(to_replace)
                in_ys.append(replace_by)
            elif (
                inside[to_replace][1]
                and not inside[to_replace][0]
                and inside[replace_by][0]
                and not inside[replace_by][1]
            ):
                # Substitute variable in `var2` by one from `var1`.
                in_xs.append(replace_by)
                in_ys.append(to_replace)
            else:
                ok = False
                break
        if not ok:
            # We cannot directly use `equal_computations`.
            use_equal_computations = False
    else:
        in_xs = None
        in_ys = None
    if use_equal_computations:
        rval2 = equal_computations(xs=[var1], ys=[var2], in_xs=in_xs, in_ys=in_ys)
        # The two independent implementations are expected to agree.
        assert rval2 == rval1
    return rval1
......@@ -8,7 +8,7 @@ from aesara.configdefaults import config
from aesara.graph.basic import Apply, Constant, Variable, applys_between
from aesara.graph.basic import as_string as graph_as_string
from aesara.graph.basic import clone_get_equiv, graph_inputs, io_toposort, vars_between
from aesara.graph.toolbox import AlreadyThere, Feature, ReplaceValidate
from aesara.graph.features import AlreadyThere, Feature, ReplaceValidate
from aesara.graph.utils import MetaObject, TestValueError, get_variable_trace_string
from aesara.misc.ordered_set import OrderedSet
......@@ -548,16 +548,12 @@ class FunctionGraph(MetaObject):
)
def replace_all(self, pairs: List[Tuple[Variable, Variable]], **kwargs) -> NoReturn:
"""Replace variables in the `FunctionGraph` according to `(var, new_var)` pairs in a list."""
"""Replace variables in the ``FunctionGraph`` according to ``(var, new_var)`` pairs in a list."""
for var, new_var in pairs:
self.replace(var, new_var, **kwargs)
def attach_feature(self, feature: Feature) -> NoReturn:
"""
Adds a graph.toolbox.Feature to this function_graph and triggers its
on_attach callback.
"""
"""Add a ``graph.features.Feature`` to this function graph and trigger its on_attach callback."""
# Filter out literally identical `Feature`s
if feature in self._features:
return # the feature is already present
......
......@@ -31,9 +31,9 @@ from aesara.graph.basic import (
io_toposort,
nodes_constructed,
)
from aesara.graph.features import Feature, NodeFinder
from aesara.graph.fg import InconsistencyError
from aesara.graph.op import Op
from aesara.graph.toolbox import Feature, NodeFinder
from aesara.graph.utils import AssocList
from aesara.misc.ordered_set import OrderedSet
from aesara.utils import flatten
......
import copy
import inspect
import sys
import time
from collections import OrderedDict
from functools import partial
from io import StringIO
import warnings
import numpy as np
import aesara
from aesara.configdefaults import config
from aesara.graph.basic import (
Variable,
equal_computations,
graph_inputs,
io_toposort,
vars_between,
# Module-level deprecation notice: importing `aesara.graph.toolbox` warns
# that the module has been renamed to `aesara.graph.features`.
warnings.warn(
    "The module `aesara.graph.toolbox` is deprecated "
    "and has been renamed to `aesara.graph.features`",
    DeprecationWarning,
    stacklevel=2,
)
class AlreadyThere(Exception):
    """Cancel a feature attachment that would be redundant.

    Raised from a `Feature.on_attach` callback when the target
    ``FunctionGraph`` already has a functionally identical feature attached.
    """
class ReplacementDidNotRemoveError(Exception):
    """Raised when a validated removal-replacement leaves its target behind.

    Thrown by ``replace_all_validate_remove`` when an optimization wanted a
    Variable or node removed from the graph, but the supplied replacement
    failed to remove it.
    """
class BadOptimization(Exception):
    """
    Exception: some variable and its substitute take different runtime values.

    Raised (typically by `DebugMode`) when an optimization replaced a
    variable with another that does not compute the same value at runtime.

    Note: If there is only 1 parameter and it is a string, we will use
    it as the error message. This is needed when we catch, extend and
    reraise an error.
    """

    new_r = None
    """
    A `Variable` instance that took a different value from `old_r`,
    but which replaced `old_r`.
    """
    old_r = None
    """
    A `Variable` instance that was replaced by `new_r`.
    """
    old_r_val = None
    """
    The value computed for `old_r`.
    """
    new_r_val = None
    """
    The value computed for `new_r`.
    """
    reason = None
    """
    An object that indicates why old_r was turned into new_r.
    Convention is that this is the name of the optimization that
    requested the replacement.
    """
    old_graph = ""
    """
    A multiline string representation of the graph leading to
    old_r, at the time of the replacement.
    """
    new_graph = ""
    """
    A multiline string representation of the graph leading to
    new_r, at the time of the replacement.
    """

    def __init__(
        self,
        old_r,
        new_r=None,
        old_r_val=None,
        new_r_val=None,
        reason=None,
        old_graph=None,
        new_graph=None,
    ):
        super().__init__()
        self.old_r = old_r
        self.new_r = new_r
        self.old_r_val = old_r_val
        self.new_r_val = new_r_val
        self.reason = reason
        # Shared between the two debugprint calls so identifiers are
        # consistent across the old- and new-graph dumps.
        done = dict()
        used_ids = dict()
        # Render a short (depth-limited) textual dump of each graph, when the
        # corresponding argument really is a Variable.
        if isinstance(old_r, Variable):
            self.old_graph = aesara.compile.debugmode.debugprint(
                old_r,
                prefix=" ",
                depth=6,
                file=StringIO(),
                done=done,
                print_type=True,
                used_ids=used_ids,
            ).getvalue()
        else:
            self.old_graph = None
        if isinstance(new_r, Variable):
            self.new_graph = aesara.compile.debugmode.debugprint(
                new_r,
                prefix=" ",
                depth=6,
                file=StringIO(),
                done=done,
                print_type=True,
                used_ids=used_ids,
            ).getvalue()
        else:
            self.new_graph = None
        # To allow extending the error message of an existing error.
        self.full_err = None
        if isinstance(old_r, str):
            # Single-string form: `old_r` is a pre-made error message and no
            # other argument may be supplied (see class docstring).
            assert (
                new_r is None
                and old_r_val is None
                and new_r_val is None
                and reason is None
                and old_graph is None
                and new_graph is None
            )
            self.full_err = old_r

    def __str__(self):
        return self.str_diagnostic()

    def str_diagnostic(self):
        """
        Return a pretty multiline string representating the cause
        of the exception.
        """
        # We have a pre-made message
        if getattr(self, "full_err", None) is not None:
            return self.full_err
        sio = StringIO()
        # Truncation threshold for the printed old/new values.
        val_str_len_limit = 800
        print("BadOptimization Error", super().__str__(), file=sio)
        print(" Variable: id", id(self.new_r), self.new_r, file=sio)
        print(" Op", self.new_r.owner, file=sio)
        print(" Value Type:", type(self.new_r_val), file=sio)
        # Shape/dtype/strides are printed best-effort: values may not be
        # ndarray-like, in which case the whole section is skipped.
        try:
            ssio = StringIO()
            print(" Old Value shape, dtype, strides:", end=" ", file=ssio)
            print(self.old_r_val.shape, end=" ", file=ssio)
            print(self.old_r_val.dtype, end=" ", file=ssio)
            print(self.old_r_val.strides, file=ssio)
            # only if all succeeds to we add anything to sio
            print(ssio.getvalue(), file=sio)
        except Exception:
            pass
        str_old_r_val = str(self.old_r_val)
        if len(str_old_r_val) > val_str_len_limit:
            print(
                " Old Value: ",
                str(self.old_r_val)[:val_str_len_limit],
                "...",
                file=sio,
            )
        else:
            print(" Old Value: ", str(self.old_r_val), file=sio)
        try:
            ssio = StringIO()
            print(" New Value shape, dtype, strides:", end=" ", file=ssio)
            print(self.new_r_val.shape, end=" ", file=ssio)
            print(self.new_r_val.dtype, end=" ", file=ssio)
            print(self.new_r_val.strides, file=ssio)
            # only if all succeeds to we add anything to sio
            print(ssio.getvalue(), file=sio)
        except Exception:
            pass
        str_new_r_val = str(self.new_r_val)
        if len(str_new_r_val) > val_str_len_limit:
            print(
                " New Value: ",
                str(self.new_r_val)[:val_str_len_limit],
                "...",
                file=sio,
            )
        else:
            print(" New Value: ", str(self.new_r_val), file=sio)
        # Numeric comparison section, also best-effort (values must be
        # convertible to arrays for this to work).
        try:
            ov = np.asarray(self.old_r_val)
            nv = np.asarray(self.new_r_val)
            ssio = StringIO()
            abs_diff = np.absolute(nv - ov)
            print(" Max Abs Diff: ", np.max(abs_diff), file=ssio)
            print(" Mean Abs Diff: ", np.mean(abs_diff), file=ssio)
            print(" Median Abs Diff: ", np.median(abs_diff), file=ssio)
            print(" Std Abs Diff: ", np.std(abs_diff), file=ssio)
            arg_max_val = np.argmax(abs_diff)
            values_at_max = (nv.flatten()[arg_max_val], ov.flatten()[arg_max_val])
            print(" Value at Max Diff: ", values_at_max, file=ssio)
            # N.B. the maximum(..., 1e-8) protects against div by 0 when
            # nv == ov == 0
            reldiff = abs_diff / np.maximum(np.absolute(nv) + np.absolute(ov), 1e-8)
            print(" Max Rel Diff: ", np.max(reldiff), file=ssio)
            print(" Mean Rel Diff: ", np.mean(reldiff), file=ssio)
            print(" Median Rel Diff: ", np.median(reldiff), file=ssio)
            print(" Std Rel Diff: ", np.std(reldiff), file=ssio)
            arg_max_val = np.argmax(reldiff)
            values_at_max = (nv.flatten()[arg_max_val], ov.flatten()[arg_max_val])
            print(" Value at Max Diff: ", values_at_max, file=ssio)
            # only if all succeeds to we add anything to sio
            print(ssio.getvalue(), file=sio)
        except Exception:
            pass
        print(" Reason: ", str(self.reason), file=sio)
        print(" Old Graph:", file=sio)
        print(self.old_graph, file=sio)
        print(" New Graph:", file=sio)
        print(self.new_graph, file=sio)
        print("", file=sio)
        print("Hint: relax the tolerance by setting tensor__cmp_sloppy=1", file=sio)
        print(" or even tensor__cmp_sloppy=2 for less-strict comparison", file=sio)
        return sio.getvalue()
class Feature:
    """
    Base class for FunctionGraph extensions.

    A Feature is an object with several callbacks that are triggered
    by various operations on FunctionGraphs. It can be used to enforce
    graph properties at all stages of graph optimization.

    See Also
    --------
    aesara.graph.features : for common extensions.
    """

    def on_attach(self, fgraph):
        """
        Called by `FunctionGraph.attach_feature`, the method that attaches the
        feature to the `FunctionGraph`. Since this is called after the
        `FunctionGraph` is initially populated, this is where you should run
        checks on the initial contents of the `FunctionGraph`.

        The on_attach method may raise the `AlreadyThere` exception to cancel
        the attach operation if it detects that another Feature instance
        implementing the same functionality is already attached to the
        `FunctionGraph`.

        The feature has great freedom in what it can do with the `fgraph`: it
        may, for example, add methods to it dynamically.
        """

    def on_detach(self, fgraph):
        """
        Called by `FunctionGraph.remove_feature`. Should remove any
        dynamically-added functionality that it installed into the fgraph.
        """

    def on_import(self, fgraph, node, reason):
        """
        Called whenever a node is imported into `fgraph`, which is just before
        the node is actually connected to the graph.

        Note: this is not called when the graph is created. If you want to
        detect the first nodes to be implemented to the graph, you should do
        this by implementing `on_attach`.
        """

    def on_change_input(self, fgraph, node, i, var, new_var, reason=None):
        """
        Called whenever ``node.inputs[i]`` is changed from `var` to `new_var`.
        At the moment the callback is done, the change has already taken place.

        If you raise an exception in this function, the state of the graph
        might be broken for all intents and purposes.
        """

    def on_prune(self, fgraph, node, reason):
        """
        Called whenever a node is pruned (removed) from the `fgraph`, after it
        is disconnected from the graph.
        """

    def orderings(self, fgraph):
        """
        Called by `FunctionGraph.toposort`. It should return a dictionary of
        ``{node: predecessors}`` where ``predecessors`` is a list of
        nodes that should be computed before the key node.

        If you raise an exception in this function, the state of the graph
        might be broken for all intents and purposes.
        """
        # Default: no extra ordering constraints.
        return OrderedDict()
class Bookkeeper(Feature):
    """Feature that replays import/prune events on attach and detach.

    When attached, every node already present in the graph is announced
    through `on_import`; when detached, every node is announced through
    `on_prune`. Subclasses override those callbacks to build and tear down
    derived per-node state.
    """

    def on_attach(self, fgraph):
        """Announce every pre-existing node of `fgraph` via `on_import`."""
        for apply_node in io_toposort(fgraph.inputs, fgraph.outputs):
            self.on_import(fgraph, apply_node, "on_attach")

    def on_detach(self, fgraph):
        """Announce every node of `fgraph` via `on_prune` before detaching."""
        for apply_node in io_toposort(fgraph.inputs, fgraph.outputs):
            self.on_prune(fgraph, apply_node, "Bookkeeper.detach")
class GetCheckpoint:
    """Callable installed as ``fgraph.checkpoint`` by the `History` feature.

    Each call starts a fresh (empty) change log for the associated
    `FunctionGraph` and returns a monotonically increasing checkpoint id.
    The ``nb`` attribute holds the id of the most recent checkpoint and is
    read back by `History.revert`.
    """

    def __init__(self, history, fgraph):
        self.h = history
        self.fgraph = fgraph
        self.nb = 0

    def __call__(self):
        # Bump the checkpoint counter and wipe the recorded changes; anything
        # that happens from now on belongs to the new checkpoint.
        self.nb += 1
        self.h.history[self.fgraph] = []
        return self.nb
class LambdaExtract:
    """Pickle-friendly closure that undoes a single ``change_input`` call.

    `History` records one of these per input change; calling the instance
    puts the original variable back into the node's input slot.
    """

    def __init__(self, fgraph, node, i, r, reason=None):
        self.fgraph = fgraph
        self.node = node
        self.i = i
        self.r = r
        self.reason = reason

    def __call__(self):
        # Restore input slot ``i`` of ``node`` to the old variable ``r``,
        # tagging the change so listeners can recognize it as a revert.
        revert_reason = ("Revert", self.reason)
        return self.fgraph.change_input(self.node, self.i, self.r, reason=revert_reason)
class History(Feature):
    """Keep a history of changes to a FunctionGraph.

    This history can be reverted up to the last checkpoint. We can
    revert to only 1 point in the past. This limit was added to lower
    the memory usage.
    """

    # Dynamically-added fgraph attributes that cannot be pickled; they are
    # restored by `unpickle`.
    pickle_rm_attr = ["checkpoint", "revert"]

    def __init__(self):
        # Maps each attached fgraph to its list of pending revert callables
        # (`LambdaExtract` instances), or None while a revert is in progress.
        self.history = {}

    def on_attach(self, fgraph):
        if hasattr(fgraph, "checkpoint") or hasattr(fgraph, "revert"):
            raise AlreadyThere(
                "History feature is already present or in"
                " conflict with another plugin."
            )
        self.history[fgraph] = []
        # Don't call unpickle here, as ReplaceValidate.on_attach()
        # call to History.on_attach() will call the
        # ReplaceValidate.unpickle and not History.unpickle
        fgraph.checkpoint = GetCheckpoint(self, fgraph)
        fgraph.revert = partial(self.revert, fgraph)

    def unpickle(self, fgraph):
        # Re-create the dynamically-added fgraph methods after unpickling.
        fgraph.checkpoint = GetCheckpoint(self, fgraph)
        fgraph.revert = partial(self.revert, fgraph)

    def on_detach(self, fgraph):
        """
        Should remove any dynamically added functionality
        that it installed into the function_graph
        """
        del fgraph.checkpoint
        del fgraph.revert
        del self.history[fgraph]

    def on_change_input(self, fgraph, node, i, r, new_r, reason=None):
        # While a revert is in progress the history is None: do not record
        # the changes made by the revert itself.
        if self.history[fgraph] is None:
            return
        h = self.history[fgraph]
        # Record how to undo this change.
        h.append(LambdaExtract(fgraph, node, i, r, reason))

    def revert(self, fgraph, checkpoint):
        """
        Reverts the graph to whatever it was at the provided
        checkpoint (undoes all replacements). A checkpoint at any
        given time can be obtained using self.checkpoint().
        """
        h = self.history[fgraph]
        # Disable recording so that the revert operations are not themselves
        # appended to the history.
        self.history[fgraph] = None
        # Only reverting to the most recent checkpoint is supported.
        assert fgraph.checkpoint.nb == checkpoint
        while h:
            f = h.pop()
            f()
        self.history[fgraph] = h
class Validator(Feature):
    """Feature adding ``validate``/``consistent`` methods to a `FunctionGraph`.

    ``fgraph.validate()`` runs every attached feature's ``validate`` callback
    (via ``execute_callbacks``) and accumulates the time spent in the graph's
    profile; ``fgraph.consistent()`` is a boolean wrapper around it.
    """

    # Dynamically-added fgraph attributes that cannot be pickled; they are
    # restored by `unpickle`.
    pickle_rm_attr = ["validate", "consistent"]

    def on_attach(self, fgraph):
        for attr in ("validate", "validate_time"):
            if hasattr(fgraph, attr):
                raise AlreadyThere(
                    "Validator feature is already present or in"
                    " conflict with another plugin."
                )
        # Don't call unpickle here, as ReplaceValidate.on_attach()
        # call to History.on_attach() will call the
        # ReplaceValidate.unpickle and not History.unpickle
        fgraph.validate = partial(self.validate_, fgraph)
        fgraph.consistent = partial(self.consistent_, fgraph)

    def unpickle(self, fgraph):
        # Re-create the dynamically-added fgraph methods after unpickling.
        fgraph.validate = partial(self.validate_, fgraph)
        fgraph.consistent = partial(self.consistent_, fgraph)

    def on_detach(self, fgraph):
        """
        Should remove any dynamically added functionality
        that it installed into the function_graph
        """
        del fgraph.validate
        del fgraph.consistent

    def validate_(self, fgraph):
        """
        If the caller is replace_all_validate, just raise the
        exception. replace_all_validate will print out the
        verbose output. Or it has to be done here before raise.
        """
        t0 = time.time()
        try:
            ret = fgraph.execute_callbacks("validate")
        except Exception as e:
            # Inspect the calling frame to decide who is responsible for
            # reporting the failure (see docstring). NOTE(review): this
            # relies on the caller's function name and locals — fragile by
            # design; keep the call structure unchanged.
            cf = inspect.currentframe()
            uf = cf.f_back
            uf_info = inspect.getframeinfo(uf)
            # If the caller is replace_all_validate, just raise the
            # exception. replace_all_validate will print out the
            # verbose output.
            # Or it has to be done here before raise.
            if uf_info.function == "replace_all_validate":
                raise
            else:
                verbose = uf.f_locals.get("verbose", False)
                if verbose:
                    r = uf.f_locals.get("r", "")
                    reason = uf_info.function
                    print(f"validate failed on node {r}.\n Reason: {reason}, {e}")
                raise
        t1 = time.time()
        # Accumulate validation time in the profiler, when one is attached.
        if fgraph.profile:
            fgraph.profile.validate_time += t1 - t0
        return ret

    def consistent_(self, fgraph):
        # True iff validation passes; never propagates the exception.
        try:
            fgraph.validate()
            return True
        except Exception:
            return False
class ReplaceValidate(History, Validator):
    """Compose `History` and `Validator`: validated, revertible replacements.

    Attaching this feature adds ``replace_validate``,
    ``replace_all_validate`` and ``replace_all_validate_remove`` methods to
    the `FunctionGraph`. Each performs replacements, validates the resulting
    graph and reverts the replacements when validation fails.
    """

    pickle_rm_attr = (
        ["replace_validate", "replace_all_validate", "replace_all_validate_remove"]
        + History.pickle_rm_attr
        + Validator.pickle_rm_attr
    )

    def on_attach(self, fgraph):
        for attr in (
            "replace_validate",
            "replace_all_validate",
            "replace_all_validate_remove",
        ):
            if hasattr(fgraph, attr):
                raise AlreadyThere(
                    "ReplaceValidate feature is already present"
                    " or in conflict with another plugin."
                )
        # Nodes removed through `replace_all_validate_remove`; importing one
        # of them again makes the next `validate` call fail (see `on_import`
        # and `validate` below).
        self._nodes_removed = set()
        self.fail_validate = False
        History.on_attach(self, fgraph)
        Validator.on_attach(self, fgraph)
        self.unpickle(fgraph)

    def unpickle(self, fgraph):
        """Re-create the dynamically-added fgraph methods after unpickling."""
        History.unpickle(self, fgraph)
        Validator.unpickle(self, fgraph)
        fgraph.replace_validate = partial(self.replace_validate, fgraph)
        fgraph.replace_all_validate = partial(self.replace_all_validate, fgraph)
        fgraph.replace_all_validate_remove = partial(
            self.replace_all_validate_remove, fgraph
        )

    def on_detach(self, fgraph):
        """
        Should remove any dynamically added functionality
        that it installed into the function_graph
        """
        History.on_detach(self, fgraph)
        Validator.on_detach(self, fgraph)
        del self._nodes_removed
        del fgraph.replace_validate
        del fgraph.replace_all_validate
        del fgraph.replace_all_validate_remove

    def replace_validate(self, fgraph, r, new_r, reason=None, **kwargs):
        """Replace `r` by `new_r`, validating and reverting on failure."""
        self.replace_all_validate(fgraph, [(r, new_r)], reason=reason, **kwargs)

    def replace_all_validate(
        self, fgraph, replacements, reason=None, verbose=None, **kwargs
    ):
        """Apply all `replacements`, validate, and revert on failure.

        Returns the checkpoint taken before the replacements were applied
        (needed by `replace_all_validate_remove`).
        """
        chk = fgraph.checkpoint()
        if verbose is None:
            verbose = config.optimizer_verbose
        if config.scan__debug:
            from aesara.scan.op import Scan

            # Snapshot the Scan nodes so we can report any change in their
            # count after the replacements.
            scans = [n for n in fgraph.apply_nodes if isinstance(n.op, Scan)]
        for r, new_r in replacements:
            try:
                fgraph.replace(r, new_r, reason=reason, verbose=False, **kwargs)
            except Exception as e:
                msg = str(e)
                s1 = "The type of the replacement must be the same"
                s2 = "does not belong to this FunctionGraph"
                s3 = "maximum recursion depth exceeded"
                if s3 in msg:
                    # There is nothing safe we can do to recover from this.
                    # So don't revert as this raise a different error
                    # that isn't helpful.
                    e.args += (
                        " As a temporary work around, you can raise Python"
                        " stack limit with:"
                        " import sys; sys.setrecursionlimit(10000)",
                    )
                    raise
                elif s1 not in msg and s2 not in msg:
                    # An unexpected failure mode: report it loudly before
                    # reverting, since it indicates a bug rather than a
                    # legitimately rejected replacement.
                    out = sys.stderr
                    print(
                        "<<!! BUG IN FGRAPH.REPLACE OR A LISTENER !!>>",
                        type(e),
                        e,
                        reason,
                        file=out,
                    )
                # this might fail if the error is in a listener:
                # (fgraph.replace kinda needs better internal error handling)
                fgraph.revert(chk)
                raise
        try:
            fgraph.validate()
        except Exception as e:
            fgraph.revert(chk)
            if verbose:
                print(f"validate failed on node {r}.\n Reason: {reason}, {e}")
            raise
        if config.scan__debug:
            from aesara.scan.op import Scan

            # Report a change in the number of Scan nodes (debug aid).
            scans2 = [n for n in fgraph.apply_nodes if isinstance(n.op, Scan)]
            nb = len(scans)
            nb2 = len(scans2)
            if nb2 > nb:
                print(
                    "Extra scan introduced",
                    nb,
                    nb2,
                    getattr(reason, "name", reason),
                    r,
                    new_r,
                )
            elif nb2 < nb:
                print(
                    "Scan removed", nb, nb2, getattr(reason, "name", reason), r, new_r
                )
        if verbose:
            print(reason, r, new_r)
        # The return is needed by replace_all_validate_remove
        return chk

    def replace_all_validate_remove(
        self, fgraph, replacements, remove, reason=None, warn=True, **kwargs
    ):
        """
        As replace_all_validate, revert the replacement if the ops
        in the list remove are still in the graph. Also print a warning.
        """
        chk = fgraph.replace_all_validate(replacements, reason=reason, **kwargs)
        self._nodes_removed.update(remove)
        for rm in remove:
            if rm in fgraph.apply_nodes or rm in fgraph.variables:
                fgraph.revert(chk)
                if warn:
                    # BUG FIX: `warn` is a boolean flag; the previous code
                    # called it as if it were a function (`warn(...)`), which
                    # raised a TypeError instead of emitting the warning.
                    # Local import: the module header is outside this view.
                    import warnings

                    warnings.warn(
                        "An optimization wanted to replace a Variable"
                        " in the graph, but the replacement for it doesn't"
                        " remove it. We disabled the optimization."
                        f"{reason}: {replacements}",
                    )
                raise ReplacementDidNotRemoveError()

    def __getstate__(self):
        # The per-fgraph history holds graph objects and bound callables;
        # drop it when pickling (it is rebuilt on attach/unpickle).
        d = self.__dict__.copy()
        if "history" in d:
            del d["history"]
        return d

    def on_import(self, fgraph, node, reason):
        # Flag the reintroduction of a node that a previous
        # `replace_all_validate_remove` call promised to remove.
        if node in self._nodes_removed:
            self.fail_validate = True

    def validate(self, fgraph):
        if self.fail_validate:
            # Reset the flag so a later, legitimate validation can pass.
            self.fail_validate = False
            raise aesara.graph.fg.InconsistencyError(
                "Trying to reintroduce a removed node"
            )
class NodeFinder(Bookkeeper):
    """Feature that indexes the nodes of a `FunctionGraph` by their `Op`.

    Attaching this feature adds a ``get_nodes(op)`` method to the graph,
    returning the list of nodes whose op equals ``op``. A single instance
    can only ever serve one `FunctionGraph`.
    """

    def __init__(self):
        self.fgraph = None
        # Maps each (hashable) op to the list of nodes currently using it.
        self.d = {}

    def on_attach(self, fgraph):
        if self.fgraph is not None:
            raise Exception(
                "A NodeFinder instance can only serve one " "FunctionGraph."
            )
        if hasattr(fgraph, "get_nodes"):
            raise AlreadyThere(
                "NodeFinder is already present or in conflict" " with another plugin."
            )
        self.fgraph = fgraph
        fgraph.get_nodes = partial(self.query, fgraph)
        Bookkeeper.on_attach(self, fgraph)

    def on_detach(self, fgraph):
        """
        Should remove any dynamically added functionality
        that it installed into the function_graph
        """
        if self.fgraph is not fgraph:
            raise Exception(
                "This NodeFinder instance was not attached to the" " provided fgraph."
            )
        self.fgraph = None
        del fgraph.get_nodes
        Bookkeeper.on_detach(self, fgraph)

    def on_import(self, fgraph, node, reason):
        try:
            self.d.setdefault(node.op, []).append(node)
        except TypeError:  # node.op is unhashable
            # Unhashable ops simply cannot be indexed; skip them silently.
            return
        except Exception:
            # Any other failure is unexpected: dump diagnostics before
            # re-raising with the original traceback.
            print("OFFENDING node", type(node), type(node.op), file=sys.stderr)
            try:
                print("OFFENDING node hash", hash(node.op), file=sys.stderr)
            except Exception:
                print("OFFENDING node not hashable", file=sys.stderr)
            raise

    def on_prune(self, fgraph, node, reason):
        try:
            nodes = self.d[node.op]
        except TypeError:  # node.op is unhashable
            return
        nodes.remove(node)
        # Drop the key entirely once no node uses this op anymore.
        if not nodes:
            del self.d[node.op]

    def query(self, fgraph, op):
        """Return (a copy of) the list of nodes in the graph using `op`."""
        try:
            # FIX: do not shadow the builtin `all`; FIX: message said "in
            # unhashable" instead of "is unhashable".
            matches = self.d.get(op, [])
        except TypeError:
            raise TypeError(
                f"{op} is unhashable and cannot be queried by the optimizer"
            )
        # Copy so callers cannot mutate the internal index.
        return list(matches)
class PrintListener(Feature):
    """Debugging feature that echoes every graph event to stdout."""

    def __init__(self, active=True):
        # When False, every callback below is a silent no-op.
        self.active = active

    def on_attach(self, fgraph):
        if not self.active:
            return
        print("-- attaching to: ", fgraph)

    def on_detach(self, fgraph):
        """
        Should remove any dynamically added functionality
        that it installed into the function_graph
        """
        if not self.active:
            return
        print("-- detaching from: ", fgraph)

    def on_import(self, fgraph, node, reason):
        if not self.active:
            return
        print(f"-- importing: {node}, reason: {reason}")

    def on_prune(self, fgraph, node, reason):
        if not self.active:
            return
        print(f"-- pruning: {node}, reason: {reason}")

    def on_change_input(self, fgraph, node, i, r, new_r, reason=None):
        if not self.active:
            return
        print(f"-- changing ({node}.inputs[{i}]) from {r} to {new_r}")
class PreserveNames(Feature):
    """
    This preserve some variables names during optimization.

    Deprecated. We need to keep it to allow unpickling.
    """

    def on_change_input(self, fgraph, node, i, r, new_r, reason=None):
        # Carry the old variable's name over when the replacement is unnamed.
        if new_r.name is None and r.name is not None:
            new_r.name = r.name
class PreserveVariableAttributes(Feature):
    """
    This preserve some variables attributes and tag during optimization.
    """

    def on_change_input(self, fgraph, node, i, r, new_r, reason=None):
        # Keep the human-readable name when the replacement has none.
        if new_r.name is None and r.name is not None:
            new_r.name = r.name
        # Propagate the NanGuardMode bookkeeping flag from the old tag when
        # the new tag does not carry one.
        old_flag = getattr(r.tag, "nan_guard_mode_check", False)
        new_flag = getattr(new_r.tag, "nan_guard_mode_check", False)
        if old_flag and new_flag is False:
            new_r.tag.nan_guard_mode_check = old_flag
class NoOutputFromInplace(Feature):
    """Forbid a range of graph outputs from being computed in-place.

    Parameters
    ----------
    first_output_idx : int
        Index of the first protected output.
    last_output_idx : int or None
        One past the last protected output, or None to protect through the
        last output.
    """

    def __init__(self, first_output_idx=0, last_output_idx=None):
        self.first_idx = first_output_idx
        self.last_idx = last_output_idx

    def validate(self, fgraph):
        # Without a destroy-handler-like feature there is nothing to check.
        if not hasattr(fgraph, "destroyers"):
            return True
        protected = list(fgraph.outputs)[self.first_idx : self.last_idx]
        for out in protected:
            owner = out.owner
            if owner is None:
                continue
            # An output is produced in-place when its index appears in the
            # destroy map of the node that computes it.
            out_idx = owner.outputs.index(out)
            if owner.op.destroy_map and out_idx in owner.op.destroy_map:
                raise aesara.graph.fg.InconsistencyError(
                    "A function graph Feature has requested that outputs of the graph "
                    "be prevented from being the result of in-place "
                    f"operations. This has prevented the output {out} from "
                    "being computed by modifying another variable in-place."
                )
def is_same_graph_with_merge(var1, var2, givens=None):
    """
    Merge-based implementation of `aesara.graph.basic.is_same_graph`.

    See help on `aesara.graph.basic.is_same_graph` for additional documentation.
    """
    from aesara.graph.opt import MergeOptimizer

    if givens is None:
        givens = {}
    # Work on deep copies: the MergeOptimizer mutates the graphs it visits.
    copied = copy.deepcopy([var1, var2, givens])
    outputs = copied[0:2]
    givens = copied[2]
    # Build a FunctionGraph over both outputs at once. Cloning is disabled:
    # we already deep-copied, and cloning would break the `givens` mapping.
    fgraph = aesara.graph.fg.FunctionGraph(
        list(graph_inputs(outputs)), outputs, clone=False
    )
    # Apply the requested substitutions, then merge identical computations.
    for to_replace, replace_by in givens.items():
        fgraph.replace(to_replace, replace_by)
    MergeOptimizer().optimize(fgraph)
    # After merging, two variables that perform the same computation share a
    # single owner. Substituted variables must be looked up through `givens`
    # first; an owner of None means the graph is a lone Variable.
    first, second = (givens.get(v, v) for v in fgraph.outputs)
    if first.owner is None and second.owner is None:
        # Two single-Variable graphs are equal iff they are the same Variable.
        return first == second
    return first.owner is second.owner
def is_same_graph(var1, var2, givens=None):
    """
    Return True iff Variables `var1` and `var2` perform the same computation.

    By 'performing the same computation', we mean that they must share the same
    graph, so that for instance this function will return False when comparing
    (x * (y * z)) with ((x * y) * z).

    The current implementation is not efficient since, when possible, it
    verifies equality by calling two different functions that are expected to
    return the same output. The goal is to verify this assumption, to
    eventually get rid of one of them in the future.

    Parameters
    ----------
    var1
        The first Variable to compare.
    var2
        The second Variable to compare.
    givens
        Similar to the `givens` argument of `aesara.function`, it can be used
        to perform substitutions in the computational graph of `var1` and
        `var2`. This argument is associated to neither `var1` nor `var2`:
        substitutions may affect both graphs if the substituted variable
        is present in both.

    Examples
    --------

    ====== ====== ====== ======
    var1   var2   givens output
    ====== ====== ====== ======
    x + 1  x + 1  {}     True
    x + 1  y + 1  {}     False
    x + 1  y + 1  {x: y} True
    ====== ====== ====== ======

    """
    use_equal_computations = True
    if givens is None:
        givens = {}
    if not isinstance(givens, dict):
        givens = dict(givens)
    # Get result from the merge-based function.
    rval1 = is_same_graph_with_merge(var1=var1, var2=var2, givens=givens)
    if givens:
        # We need to build the `in_xs` and `in_ys` lists. To do this, we need
        # to be able to tell whether a variable belongs to the computational
        # graph of `var1` or `var2`.
        # The typical case we want to handle is when `to_replace` belongs to
        # one of these graphs, and `replace_by` belongs to the other one. In
        # other situations, the current implementation of `equal_computations`
        # is probably not appropriate, so we do not call it.
        ok = True
        in_xs = []
        in_ys = []
        # Compute the sets of all variables found in each computational graph.
        inputs_var = list(map(graph_inputs, ([var1], [var2])))
        all_vars = [
            set(vars_between(v_i, v_o))
            for v_i, v_o in ((inputs_var[0], [var1]), (inputs_var[1], [var2]))
        ]

        def in_var(x, k):
            # Return True iff `x` is in computation graph of variable `vark`.
            return x in all_vars[k - 1]

        for to_replace, replace_by in givens.items():
            # Map a substitution variable to the computational graphs it
            # belongs to.
            inside = {
                v: [in_var(v, k) for k in (1, 2)] for v in (to_replace, replace_by)
            }
            if (
                inside[to_replace][0]
                and not inside[to_replace][1]
                and inside[replace_by][1]
                and not inside[replace_by][0]
            ):
                # Substitute variable in `var1` by one from `var2`.
                in_xs.append(to_replace)
                in_ys.append(replace_by)
            elif (
                inside[to_replace][1]
                and not inside[to_replace][0]
                and inside[replace_by][0]
                and not inside[replace_by][1]
            ):
                # Substitute variable in `var2` by one from `var1`.
                in_xs.append(replace_by)
                in_ys.append(to_replace)
            else:
                ok = False
                break
        if not ok:
            # We cannot directly use `equal_computations`.
            use_equal_computations = False
    else:
        in_xs = None
        in_ys = None
    if use_equal_computations:
        # Cross-check the merge-based result against `equal_computations`;
        # the two implementations are expected to always agree.
        rval2 = equal_computations(xs=[var1], ys=[var2], in_xs=in_xs, in_ys=in_ys)
        assert rval2 == rval1
    return rval1
from aesara.graph.toolbox import *
......@@ -600,7 +600,7 @@ class CondMerge(GlobalOptimizer):
""" Graph Optimizer that merges different cond ops """
def add_requirements(self, fgraph):
from aesara.graph.toolbox import ReplaceValidate
from aesara.graph.features import ReplaceValidate
fgraph.add_feature(ReplaceValidate())
......
......@@ -70,9 +70,9 @@ from aesara.graph.basic import (
graph_inputs,
io_connection_pattern,
)
from aesara.graph.features import NoOutputFromInplace
from aesara.graph.fg import MissingInputError
from aesara.graph.op import Op, ops_with_inner_function
from aesara.graph.toolbox import NoOutputFromInplace
from aesara.link.c.basic import CLinker
from aesara.link.c.exceptions import MissingGXX
from aesara.link.utils import raise_with_op
......
......@@ -73,11 +73,11 @@ from aesara.graph.basic import (
is_in_ancestors,
)
from aesara.graph.destroyhandler import DestroyHandler
from aesara.graph.features import ReplaceValidate
from aesara.graph.fg import InconsistencyError
from aesara.graph.op import compute_test_value
from aesara.graph.opt import GlobalOptimizer, in2out, local_optimizer
from aesara.graph.optdb import EquilibriumDB, SequenceDB
from aesara.graph.toolbox import ReplaceValidate
from aesara.scan.op import Scan
from aesara.scan.utils import (
compress_outs,
......
......@@ -18,7 +18,7 @@ from aesara import compile
from aesara.assert_op import Assert, assert_op
from aesara.compile.ops import ViewOp
from aesara.configdefaults import config
from aesara.graph import toolbox
from aesara.graph import features
from aesara.graph.basic import (
Constant,
Variable,
......@@ -797,7 +797,7 @@ class MakeVectorPrinter:
pprint.assign(MakeVector, MakeVectorPrinter())
class ShapeFeature(toolbox.Feature):
class ShapeFeature(features.Feature):
"""Graph optimizer for removing all calls to shape().
This optimizer replaces all Shapes and Subtensors of Shapes with
......@@ -4674,7 +4674,7 @@ class FusionOptimizer(GlobalOptimizer):
self.optimizer = local_optimizer
def add_requirements(self, fgraph):
fgraph.attach_feature(toolbox.ReplaceValidate())
fgraph.attach_feature(features.ReplaceValidate())
def apply(self, fgraph):
did_something = True
......
......@@ -146,6 +146,7 @@ import aesara.scalar
from aesara.compile.mode import optdb
from aesara.configdefaults import config
from aesara.graph.basic import Apply, view_roots
from aesara.graph.features import ReplacementDidNotRemoveError, ReplaceValidate
from aesara.graph.fg import InconsistencyError
from aesara.graph.op import COp, Op
from aesara.graph.opt import (
......@@ -157,7 +158,6 @@ from aesara.graph.opt import (
)
from aesara.graph.optdb import SequenceDB
from aesara.graph.params_type import ParamsType
from aesara.graph.toolbox import ReplacementDidNotRemoveError, ReplaceValidate
from aesara.graph.utils import MethodNotDefined, TestValueError
from aesara.printing import FunctionPrinter, debugprint, pprint
from aesara.scalar import bool as bool_t
......
......@@ -123,7 +123,7 @@ simplification described above:
import aesara
from aesara.graph.opt import GlobalOptimizer
from aesara.graph.toolbox import ReplaceValidate
from aesara.graph.features import ReplaceValidate
class Simplify(GlobalOptimizer):
def add_requirements(self, fgraph):
......@@ -149,12 +149,12 @@ simplification described above:
Here's how it works: first, in ``add_requirements``, we add the
``ReplaceValidate`` :ref:`libdoc_graph_fgraphfeature` located in
:ref:`libdoc_graph_toolbox`. This feature adds the ``replace_validate``
:ref:`libdoc_graph_features`. This feature adds the ``replace_validate``
method to ``fgraph``, which is an enhanced version of ``replace`` that
does additional checks to ensure that we are not messing up the
computation graph (note: if ``ReplaceValidate`` was already added by
another optimizer, ``extend`` will do nothing). In a nutshell,
``toolbox.ReplaceValidate`` grants access to ``fgraph.replace_validate``,
``features.ReplaceValidate`` grants access to ``fgraph.replace_validate``,
and ``fgraph.replace_validate`` allows us to replace a Variable with
another while respecting certain validation constraints. You can
browse the list of :ref:`libdoc_graph_fgraphfeaturelist` and see if some of
......
.. _libdoc_graph_toolbox:
.. _libdoc_graph_features:
================================================
:mod:`toolbox` -- [doc TODO]
:mod:`features` -- [doc TODO]
================================================
.. module:: aesara.graph.toolbox
.. module:: aesara.graph.features
:platform: Unix, Windows
:synopsis: Aesara Internals
.. moduleauthor:: LISA
......
......@@ -33,7 +33,7 @@ FunctionGraph
FunctionGraph Features
----------------------
.. autoclass:: aesara.graph.toolbox.Feature
.. autoclass:: aesara.graph.features.Feature
:members:
.. _libdoc_graph_fgraphfeaturelist:
......
......@@ -15,7 +15,7 @@
graph
fgraph
toolbox
features
op
type
params_type
......
......@@ -4,6 +4,7 @@ ignore = E203,E231,E501,E741,W503,W504,C901
max-line-length = 88
per-file-ignores =
**/__init__.py:F401,E402,F403
aesara/graph/toolbox.py:E402,F403,F401
aesara/link/jax/jax_dispatch.py:E402,F403,F401
aesara/link/jax/jax_linker.py:E402,F403,F401
aesara/sparse/sandbox/sp2.py:F401
......
......@@ -15,10 +15,10 @@ from aesara.compile.debugmode import (
)
from aesara.configdefaults import config
from aesara.graph.basic import Apply, Variable
from aesara.graph.features import BadOptimization
from aesara.graph.op import COp, Op
from aesara.graph.opt import local_optimizer
from aesara.graph.optdb import EquilibriumDB
from aesara.graph.toolbox import BadOptimization
from aesara.tensor.math import add, dot, log
from aesara.tensor.type import TensorType, dvector, fmatrix, fvector, vector
from tests import unittest_tools as utt
......
......@@ -2,7 +2,7 @@ import pytest
import aesara
from aesara.compile.mode import AddFeatureOptimizer, Mode
from aesara.graph.toolbox import NoOutputFromInplace
from aesara.graph.features import NoOutputFromInplace
from aesara.tensor.math import dot, tanh
from aesara.tensor.type import matrix
......
......@@ -5,6 +5,7 @@ import pytest
from aesara.configdefaults import config
from aesara.graph.basic import Apply, Constant, Variable, clone
from aesara.graph.destroyhandler import DestroyHandler
from aesara.graph.features import ReplaceValidate
from aesara.graph.fg import FunctionGraph, InconsistencyError
from aesara.graph.op import Op
from aesara.graph.opt import (
......@@ -14,7 +15,6 @@ from aesara.graph.opt import (
PatternSub,
TopoOptimizer,
)
from aesara.graph.toolbox import ReplaceValidate
from aesara.graph.type import Type
from tests.unittest_tools import assertFailure_fast
......
from aesara.graph.basic import Apply, Variable
from aesara.graph.features import NodeFinder, is_same_graph
from aesara.graph.fg import FunctionGraph
from aesara.graph.op import Op
from aesara.graph.toolbox import NodeFinder, is_same_graph
from aesara.graph.type import Type
from aesara.tensor.math import neg
from aesara.tensor.type import vectors
......
......@@ -17,10 +17,10 @@ from aesara.compile.mode import Mode, get_default_mode, get_mode
from aesara.compile.ops import DeepCopyOp, deep_copy_op
from aesara.configdefaults import config
from aesara.graph.basic import Constant
from aesara.graph.features import is_same_graph
from aesara.graph.fg import FunctionGraph
from aesara.graph.opt import LocalOptGroup, TopoOptimizer, check_stack_trace, out2in
from aesara.graph.optdb import Query
from aesara.graph.toolbox import is_same_graph
from aesara.misc.safe_asarray import _asarray
from aesara.tensor import inplace
from aesara.tensor.basic import Alloc, join, switch
......
......@@ -12,8 +12,8 @@ import aesara.tensor.basic as aet
from aesara.compile import DeepCopyOp, shared
from aesara.compile.io import In
from aesara.configdefaults import config
from aesara.graph.features import is_same_graph
from aesara.graph.op import get_test_value
from aesara.graph.toolbox import is_same_graph
from aesara.tensor.elemwise import DimShuffle
from aesara.tensor.math import exp, isinf
from aesara.tensor.math import sum as aet_sum
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论