提交 e6ecae1c authored 作者: Iban Harlouchet's avatar Iban Harlouchet

numpydoc for theano/compile/debugmode.py

上级 d10856f6
"""Provides `DebugMode`, an evaluation mode for debugging theano internals. """
Provides `DebugMode`, an evaluation mode for debugging theano internals.
:TODO: add support for IfElse Op, LazyLinker, PureOp, etc. TODO: add support for IfElse Op, LazyLinker, PureOp, etc.
""" """
from __future__ import print_function from __future__ import print_function
...@@ -123,7 +124,11 @@ _logger.addFilter(NoDuplicateOptWarningFilter()) ...@@ -123,7 +124,11 @@ _logger.addFilter(NoDuplicateOptWarningFilter())
# #
######################## ########################
class DebugModeError(Exception): class DebugModeError(Exception):
"""Generic Exception raised to indicate an internal theano problem""" """
Generic Exception raised to indicate an internal theano problem.
"""
pass pass
...@@ -135,21 +140,30 @@ class BadThunkOutput(DebugModeError): ...@@ -135,21 +140,30 @@ class BadThunkOutput(DebugModeError):
do not agree, or if one of these methods do not give the same result do not agree, or if one of these methods do not give the same result
when called twice with the same inputs (but different memory layouts when called twice with the same inputs (but different memory layouts
for the output). for the output).
""" """
r = None r = None
"""The `Variable` instance for which conflicting values were computed""" """
The `Variable` instance for which conflicting values were computed.
"""
thunk1 = '' thunk1 = ''
val1 = None val1 = None
"""The value computed by `thunk1`""" """
The value computed by `thunk1`.
"""
thunk2 = '' thunk2 = ''
val2 = None val2 = None
"""The value computed by `thunk2`""" """
The value computed by `thunk2`.
"""
def __init__(self, r, thunk1, val1, thunk2, val2, inputs_val=()): def __init__(self, r, thunk1, val1, thunk2, val2, inputs_val=()):
"""Initialize members"""
super(BadThunkOutput, self).__init__() super(BadThunkOutput, self).__init__()
self.r = r self.r = r
self.thunk1 = thunk1 self.thunk1 = thunk1
...@@ -159,16 +173,22 @@ class BadThunkOutput(DebugModeError): ...@@ -159,16 +173,22 @@ class BadThunkOutput(DebugModeError):
self.inputs_val = inputs_val self.inputs_val = inputs_val
def offending_op(self): def offending_op(self):
"""Return the Op class whose c_code and perform """
implementations didn't match""" Return the Op class whose c_code and perform implementations
didn't match.
"""
return type(self.r.owner.op) return type(self.r.owner.op)
def __str__(self): def __str__(self):
return self.str_diagnostic() return self.str_diagnostic()
def str_diagnostic(self): def str_diagnostic(self):
"""Return a pretty multiline string representating the cause """
of the exception""" Return a pretty multiline string representing the cause of
the exception.
"""
sio = StringIO() sio = StringIO()
print("BadThunkOutput", file=sio) print("BadThunkOutput", file=sio)
print(" Apply :", self.r.owner, file=sio) print(" Apply :", self.r.owner, file=sio)
...@@ -202,41 +222,61 @@ class BadThunkOutput(DebugModeError): ...@@ -202,41 +222,61 @@ class BadThunkOutput(DebugModeError):
class BadOptimization(DebugModeError): class BadOptimization(DebugModeError):
"""Exception: some variable and its substitute take different """
runtime values. Exception: some variable and its substitute take different runtime values.
""" """
new_r = None new_r = None
"""A `Variable` instance that took a different value from `old_r`, """
but which replaced `old_r`.""" A `Variable` instance that took a different value from `old_r`,
but which replaced `old_r`.
"""
old_r = None old_r = None
"""A `Variable` instance that was replaced by `new_r`.""" """
A `Variable` instance that was replaced by `new_r`.
"""
old_r_val = None old_r_val = None
"""The value computed for `old_r`.""" """
The value computed for `old_r`.
"""
new_r_val = None new_r_val = None
"""The value computed for `new_r`.""" """
The value computed for `new_r`.
"""
reason = None reason = None
"""An object that indicates why old_r was turned into new_r. """
An object that indicates why old_r was turned into new_r.
Convention is that this is the name of the optimization that Convention is that this is the name of the optimization that
requested the replacement. requested the replacement.
""" """
old_graph = "" old_graph = ""
"""A multiline string representation of the graph leading to """
old_r, at the time of the replacement.""" A multiline string representation of the graph leading to
old_r, at the time of the replacement.
"""
new_graph = "" new_graph = ""
"""A multiline string representation of the graph leading to """
new_r, at the time of the replacement.""" A multiline string representation of the graph leading to
new_r, at the time of the replacement.
"""
def __init__(self, old_r, new_r, old_r_val, new_r_val, reason, def __init__(self, old_r, new_r, old_r_val, new_r_val, reason,
old_graph, new_graph): old_graph, new_graph):
"""Initialize members"""
super(BadOptimization, self).__init__() super(BadOptimization, self).__init__()
self.old_r = old_r self.old_r = old_r
self.new_r = new_r self.new_r = new_r
...@@ -250,8 +290,11 @@ class BadOptimization(DebugModeError): ...@@ -250,8 +290,11 @@ class BadOptimization(DebugModeError):
return self.str_diagnostic() return self.str_diagnostic()
def str_diagnostic(self): def str_diagnostic(self):
"""Return a pretty multiline string representating the cause """
of the exception""" Return a pretty multiline string representing
of the exception.
"""
sio = StringIO() sio = StringIO()
val_str_len_limit = 800 val_str_len_limit = 800
print("BadOptimization Error", super(BadOptimization, print("BadOptimization Error", super(BadOptimization,
...@@ -340,8 +383,11 @@ class BadOptimization(DebugModeError): ...@@ -340,8 +383,11 @@ class BadOptimization(DebugModeError):
class BadDestroyMap(DebugModeError): class BadDestroyMap(DebugModeError):
"""Exception: Some perform() or c_code() modified an input that """
wasn't in the destroy_map""" Exception: Some perform() or c_code() modified an input that
wasn't in the destroy_map.
"""
def __init__(self, node, idx, old_val, new_val, perform): def __init__(self, node, idx, old_val, new_val, perform):
super(BadDestroyMap, self).__init__() super(BadDestroyMap, self).__init__()
self.node = node self.node = node
...@@ -395,8 +441,12 @@ class BadDestroyMap(DebugModeError): ...@@ -395,8 +441,12 @@ class BadDestroyMap(DebugModeError):
class BadViewMap(DebugModeError): class BadViewMap(DebugModeError):
"""Exception: Some perform() or c_code() created a memory alias """
that wasn't in the view_map""" Exception: Some perform() or c_code() created a memory alias
that wasn't in the view_map.
"""
def __init__(self, node, output_idx, out_storage, def __init__(self, node, output_idx, out_storage,
in_alias_idx=None, out_alias_idx=None): in_alias_idx=None, out_alias_idx=None):
super(BadViewMap, self).__init__() super(BadViewMap, self).__init__()
...@@ -426,7 +476,8 @@ class BadViewMap(DebugModeError): ...@@ -426,7 +476,8 @@ class BadViewMap(DebugModeError):
class StochasticOrder(DebugModeError): class StochasticOrder(DebugModeError):
"""Exception: Repeated Optimizations of the same graph do not give """
Exception: Repeated Optimizations of the same graph do not give
identical results. identical results.
The most common cause is that an Optimization iterates over some The most common cause is that an Optimization iterates over some
...@@ -440,8 +491,12 @@ class StochasticOrder(DebugModeError): ...@@ -440,8 +491,12 @@ class StochasticOrder(DebugModeError):
class InvalidValueError(DebugModeError): class InvalidValueError(DebugModeError):
"""Exception: some Op an output value that is inconsistent with """
the Type of that output""" Exception: some Op produced an output value that is inconsistent with
the Type of that output.
"""
def __init__(self, r, v, client_node=None, hint='none', def __init__(self, r, v, client_node=None, hint='none',
specific_hint='none'): specific_hint='none'):
super(InvalidValueError, self).__init__() super(InvalidValueError, self).__init__()
...@@ -498,8 +553,11 @@ class InvalidValueError(DebugModeError): ...@@ -498,8 +553,11 @@ class InvalidValueError(DebugModeError):
def char_from_number(number): def char_from_number(number):
""" Converts number to string by rendering it in base 26 using """
capital letters as digits """ Converts number to string by rendering it in base 26 using
capital letters as digits.
"""
base = 26 base = 26
...@@ -523,31 +581,45 @@ def debugprint(r, prefix='', depth=-1, done=None, print_type=False, ...@@ -523,31 +581,45 @@ def debugprint(r, prefix='', depth=-1, done=None, print_type=False,
stop_on_name=False, prefix_child=None, stop_on_name=False, prefix_child=None,
scan_ops=None, profile=None, scan_ops=None, profile=None,
scan_inner_to_outer_inputs=None): scan_inner_to_outer_inputs=None):
"""Print the graph leading to `r` to given depth. """
Print the graph leading to `r` to given depth.
:param r: Variable instance
:param prefix: prefix to each line (typically some number of spaces) Parameters
:param depth: maximum recursion depth (Default -1 for unlimited). ----------
:param done: dict of Apply instances that have already been printed r
and their associated printed ids Variable instance.
:param print_type: whether to print the Variable type after the other infos prefix
:param file: file-like object to which to print Prefix to each line (typically some number of spaces).
:param print_destroy_map: whether to print the op destroy_map after depth
other info Maximum recursion depth (Default -1 for unlimited).
:param print_view_map: whether to print the op view_map after other info done
:param order: If not empty will print the index in the toposort. dict of Apply instances that have already been printed and their
:param ids: How do we print the identifier of the variable associated printed ids.
id - print the python id value print_type
int - print integer character Whether to print the Variable type after the other infos.
CHAR - print capital character file
"" - don't print an identifier File-like object to which to print.
:param stop_on_name: When True, if a node in the graph has a name, print_destroy_map
we don't print anything below it. Whether to print the op destroy_map after other info.
:param scan_ops: Scan ops in the graph will be added inside this list print_view_map
for later printing purposes. Whether to print the op view_map after other info.
:param scan_inner_to_outer_inputs: a dictionary mapping a scan ops order
inner function inputs to the scan op inputs (outer inputs) for If not empty will print the index in the toposort.
printing purposes. ids
How do we print the identifier of the variable :
id - print the python id value,
int - print integer character,
CHAR - print capital character,
"" - don't print an identifier.
stop_on_name
When True, if a node in the graph has a name, we don't print anything
below it.
scan_ops
Scan ops in the graph will be added inside this list for later printing
purposes.
scan_inner_to_outer_inputs
A dictionary mapping a scan op's inner function inputs to the scan op
inputs (outer inputs) for printing purposes.
""" """
if depth == 0: if depth == 0:
...@@ -712,17 +784,24 @@ def debugprint(r, prefix='', depth=-1, done=None, print_type=False, ...@@ -712,17 +784,24 @@ def debugprint(r, prefix='', depth=-1, done=None, print_type=False,
def _optcheck_fgraph(input_specs, output_specs, accept_inplace=False): def _optcheck_fgraph(input_specs, output_specs, accept_inplace=False):
"""Create an FunctionGraph for debugging. """
Create a FunctionGraph for debugging.
:param input_specs: fgraph inputs
:type input_specs: WRITEME Parameters
:param output_specs: fgraph outputs ----------
:type output_specs: WRITEME input_specs: WRITEME
:param accept_inplace: are inplace ops permitted in the original graph? fgraph inputs.
:type accept_inplace: Bool output_specs: WRITEME
:rtype: `FunctionGraph` fgraph outputs.
:returns: a new FunctionGraph with a cloned graph, with debugging accept_inplace : bool
`Feature` instances already installed. Are inplace ops permitted in the original graph?
Returns
-------
FunctionGraph
A new FunctionGraph with a cloned graph, with debugging `Feature`
instances already installed.
""" """
orig_inputs = [spec.variable for spec in input_specs] orig_inputs = [spec.variable for spec in input_specs]
updates = [spec.update for spec in input_specs if spec.update] updates = [spec.update for spec in input_specs if spec.update]
...@@ -784,7 +863,8 @@ def check_eq(var, val1, val2): ...@@ -784,7 +863,8 @@ def check_eq(var, val1, val2):
def _check_inputs(node, storage_map, r_vals, dr_vals, active_nodes, def _check_inputs(node, storage_map, r_vals, dr_vals, active_nodes,
clobber_dr_vals=True, clobber_dr_vals=True,
perform=None, warn_input_not_reused=True): perform=None, warn_input_not_reused=True):
"""Raise BadDestroyMap if necessary, update dr_vals """
Raise BadDestroyMap if necessary, update dr_vals.
Returns a list of output variables that actually worked inplace Returns a list of output variables that actually worked inplace
(their value is aliased to the value of at least one input). (their value is aliased to the value of at least one input).
...@@ -871,10 +951,11 @@ def _check_viewmap(node, storage_map): ...@@ -871,10 +951,11 @@ def _check_viewmap(node, storage_map):
""" """
This function raises a BadViewMap exception when it detects the This function raises a BadViewMap exception when it detects the
following: following:
- output node storages aliased to input storage, with no declaration - Output node storages aliased to input storage, with no declaration
in view_map in view_map.
- if not aliased to an input, check if two outputs are aliased together - If not aliased to an input, check if two outputs are aliased together
and used subsequently in the graph and used subsequently in the graph.
""" """
for oi, onode in enumerate(node.outputs): for oi, onode in enumerate(node.outputs):
...@@ -937,14 +1018,24 @@ def _check_viewmap(node, storage_map): ...@@ -937,14 +1018,24 @@ def _check_viewmap(node, storage_map):
def _is_used_in_graph(var): def _is_used_in_graph(var):
""" """
Returns True if `var` is used by another node in the graph
Returns
-------
bool
True if `var` is used by another node in the graph.
""" """
return not(var.clients == [('output', 1)] or var.clients == []) return not(var.clients == [('output', 1)] or var.clients == [])
def _check_strides_match(a, b, warn_err, op): def _check_strides_match(a, b, warn_err, op):
""" """
param: warn_err: if 0, no warning, if 1 warning, if 2 error
Parameters
----------
warn_err
If 0, no warning, if 1 warning, if 2 error.
""" """
if warn_err == 0: if warn_err == 0:
return return
...@@ -965,12 +1056,20 @@ def _check_strides_match(a, b, warn_err, op): ...@@ -965,12 +1056,20 @@ def _check_strides_match(a, b, warn_err, op):
def _lessbroken_deepcopy(a): def _lessbroken_deepcopy(a):
""" """
:param a: any object
Returns a copy of `a` that shares no internal storage with the original Parameters
(a deep copy). ----------
This function handles numpy arrays specially, because copy.deepcopy() a
called on a 0-d array will return a numpy scalar, not an array. Any object
Returns
-------
object
A copy of `a` that shares no internal storage with the original
(a deep copy). This function handles numpy arrays specially, because
copy.deepcopy() called on a 0-d array will return a numpy scalar,
not an array.
""" """
# this exists because copy.deepcopy on numpy arrays is broken # this exists because copy.deepcopy on numpy arrays is broken
# This logic is also in link.py # This logic is also in link.py
...@@ -990,13 +1089,15 @@ def _lessbroken_deepcopy(a): ...@@ -990,13 +1089,15 @@ def _lessbroken_deepcopy(a):
def _find_bad_optimizations0(order, reasons, r_vals): def _find_bad_optimizations0(order, reasons, r_vals):
"""Use a simple algorithm to find broken optimizations. """
Use a simple algorithm to find broken optimizations.
This algorithm is simple to understand, but sometimes when there's This algorithm is simple to understand, but sometimes when there's
a problem it identifies the wrong optimization as the culprit. a problem it identifies the wrong optimization as the culprit.
The problem stems from the fact that results are not evaluated in The problem stems from the fact that results are not evaluated in
chronological order (looking at when they were introduced to the chronological order (looking at when they were introduced to the
graph). graph).
""" """
# iterate over variables looking for values that don't match the # iterate over variables looking for values that don't match the
# values of the variables they replaced. This is the sign of a # values of the variables they replaced. This is the sign of a
...@@ -1078,19 +1179,24 @@ def _find_bad_optimizations1(order, reasons, r_vals): ...@@ -1078,19 +1179,24 @@ def _find_bad_optimizations1(order, reasons, r_vals):
def _find_bad_optimizations2(order, reasons, r_vals): def _find_bad_optimizations2(order, reasons, r_vals):
"""Use a simple algorithm to find broken optimizations. """
Use a simple algorithm to find broken optimizations.
This algorithm is simple to understand, but sometimes when there's This algorithm is simple to understand, but sometimes when there's
a problem it identifies the wrong optimization as the culprit. a problem it identifies the wrong optimization as the culprit.
The problem stems from the fact that results are not evaluated in The problem stems from the fact that results are not evaluated in
chronological order (looking at when they were introduced to the chronological order (looking at when they were introduced to the
graph). graph).
""" """
checked_variables = set() checked_variables = set()
def check_variable_norec(new_r): def check_variable_norec(new_r):
"""Verify that `r` has the same value as the results it replaces """ """
Verify that `r` has the same value as the results it replaces.
"""
for reason, r, old_graph_str, new_graph_str in reasons[new_r]: for reason, r, old_graph_str, new_graph_str in reasons[new_r]:
new_r_val = r_vals[new_r] new_r_val = r_vals[new_r]
r_val = r_vals[r] r_val = r_vals[r]
...@@ -1134,7 +1240,10 @@ _find_bad_optimizations = _find_bad_optimizations0 ...@@ -1134,7 +1240,10 @@ _find_bad_optimizations = _find_bad_optimizations0
def _get_preallocated_maps(node, thunk, prealloc_modes, def_val, def _get_preallocated_maps(node, thunk, prealloc_modes, def_val,
storage_map, r_vals, dr_vals, perform, storage_map, r_vals, dr_vals, perform,
active_order_set, inplace_outs, init_outputs): active_order_set, inplace_outs, init_outputs):
'''Preallocate outputs in different memory layouts''' """
Preallocate outputs in different memory layouts.
"""
# To avoid circular imports # To avoid circular imports
from theano.tensor import TensorType from theano.tensor import TensorType
...@@ -1357,7 +1466,10 @@ def _get_preallocated_maps(node, thunk, prealloc_modes, def_val, ...@@ -1357,7 +1466,10 @@ def _get_preallocated_maps(node, thunk, prealloc_modes, def_val,
def _check_preallocated_output(node, thunk, prealloc_modes, def_val, def _check_preallocated_output(node, thunk, prealloc_modes, def_val,
storage_map, r_vals, dr_vals, perform, storage_map, r_vals, dr_vals, perform,
active_order_set, inplace_outs, init_outputs): active_order_set, inplace_outs, init_outputs):
'''Try to apply thunk() on different output storages''' """
Try to apply thunk() on different output storages.
"""
# If node has an inner compiled Theano function with mode DebugMode, # If node has an inner compiled Theano function with mode DebugMode,
# disable memory checks in that mode, since they were already run. # disable memory checks in that mode, since they were already run.
...@@ -1460,26 +1572,40 @@ def _check_preallocated_output(node, thunk, prealloc_modes, def_val, ...@@ -1460,26 +1572,40 @@ def _check_preallocated_output(node, thunk, prealloc_modes, def_val,
class _FunctionGraphEvent(object): class _FunctionGraphEvent(object):
"""A record of an event in the life of an FunctionGraph. """
A record of an event in the life of an FunctionGraph.
The __eq__ function is important here, as it is the basis for The __eq__ function is important here, as it is the basis for
comparing optimization runs. comparing optimization runs.
""" """
kind = "" kind = ""
"""One of 'import', 'change', 'prune'""" """
One of 'import', 'change', 'prune'.
"""
node = None node = None
"""Either 'output' or an Apply instance""" """
Either 'output' or an Apply instance.
"""
op = None op = None
"""Either 'output' or an Op instance""" """Either 'output' or an Op instance"""
idx = None idx = None
"""change events involve an position index of the input variable""" """
Change events involve a position index of the input variable.
"""
reason = None reason = None
"""change events sometimes have a reason""" """
Change events sometimes have a reason.
"""
def __init__(self, kind, node, idx=None, reason=None): def __init__(self, kind, node, idx=None, reason=None):
self.kind = kind self.kind = kind
...@@ -1522,8 +1648,11 @@ class _FunctionGraphEvent(object): ...@@ -1522,8 +1648,11 @@ class _FunctionGraphEvent(object):
class _VariableEquivalenceTracker(object): class _VariableEquivalenceTracker(object):
"""A FunctionGraph Feature that keeps tabs on an FunctionGraph and """
tries to detect problems.""" A FunctionGraph Feature that keeps tabs on an FunctionGraph and
tries to detect problems.
"""
fgraph = None fgraph = None
"""WRITEME""" """WRITEME"""
...@@ -1675,7 +1804,11 @@ class _DummyLinker(object): ...@@ -1675,7 +1804,11 @@ class _DummyLinker(object):
class _Linker(gof.link.LocalLinker): class _Linker(gof.link.LocalLinker):
"""Special debugging linker""" """
Special debugging linker.
"""
def __init__(self, maker, schedule=None): def __init__(self, maker, schedule=None):
super(gof.LocalLinker, self).__init__() super(gof.LocalLinker, self).__init__()
self.fgraph = None self.fgraph = None
...@@ -2236,11 +2369,39 @@ _NODEFAULT = ['NODEFAULT'] ...@@ -2236,11 +2369,39 @@ _NODEFAULT = ['NODEFAULT']
class _Maker(FunctionMaker): # inheritance buys a few helper functions class _Maker(FunctionMaker): # inheritance buys a few helper functions
"""Special debugging FunctionMaker
""" """
Special debugging FunctionMaker.
Parameters
----------
inputs : list of SymbolicInput instances
outputs : list of SymbolicOutput instances
Outputs may also be a single Variable (not a list), in which case
the functions produced by FunctionMaker will return their output
value directly.
accept_inplace
True iff it is acceptable to have inplace operations in the graph from
the inputs to the outputs.
on_unused_input
What to do if a variable in the 'inputs' list is not used in the
graph. Possible values are 'raise', 'warn' and 'ignore'.
output_keys
If the outputs argument for theano.function was a list, then
output_keys is None. If the outputs argument was a dict, then
output_keys is a sorted list of the keys from that dict.
Notes
-----
The constructor sets TensorType.filter_checks_isfinite when
`mode.check_isfinite` is True.
"""
verbose = 0 verbose = 0
"""Verbosity level of compile-time and run-time checks. (Default """
0: silent)""" Verbosity level of compile-time and run-time checks. (Default 0: silent).
"""
def __init__(self, inputs, outputs, optimizer, mode, def __init__(self, inputs, outputs, optimizer, mode,
accept_inplace=False, accept_inplace=False,
...@@ -2248,33 +2409,6 @@ class _Maker(FunctionMaker): # inheritance buys a few helper functions ...@@ -2248,33 +2409,6 @@ class _Maker(FunctionMaker): # inheritance buys a few helper functions
profile=None, profile=None,
on_unused_input=None, on_unused_input=None,
output_keys=None): output_keys=None):
"""
:type inputs: a list of SymbolicInput instances
:type outputs: a list of SymbolicOutput instances outputs may
also be a single Variable (not a list), in
which case the functions produced by
FunctionMaker will return their output value
directly
:param accept_inplace: True iff it is acceptable to have
inplace operations in the graph from the inputs to
the outputs
:param on_unused_input: What to do if a variable in the
'inputs' list is not used in the
graph. Possible values are 'raise',
'warn', and 'ignore'.
:param output_keys: If the outputs argument for
theano.function was a list, then
output_keys is None. If the outputs
argument was a dict, then output_keys is a
sorted list of the keys from that dict.
:note: this function sets TensorType.filter_checks_isfinite
when `mode.check_isfinite` is True
"""
self.profile = profile self.profile = profile
# Handle the case where inputs and/or outputs is a single # Handle the case where inputs and/or outputs is a single
# Variable (not in a list) # Variable (not in a list)
...@@ -2395,11 +2529,15 @@ class _Maker(FunctionMaker): # inheritance buys a few helper functions ...@@ -2395,11 +2529,15 @@ class _Maker(FunctionMaker): # inheritance buys a few helper functions
""" """
Create a function. Create a function.
defaults -> a list matching the inputs list and providing default Parameters
values if the default for an input is None, then that input ----------
is a required input. For an input with an update, the defaults
default acts as initialization. A list matching the inputs list and providing default values if the
trustme -> disables some exceptions, used internally default for an input is None, then that input is a required input.
For an input with an update, the default acts as initialization.
trustme
Disables some exceptions, used internally.
""" """
if defaults is None: if defaults is None:
defaults = [None] * len(self.inputs) defaults = [None] * len(self.inputs)
...@@ -2514,35 +2652,40 @@ copyreg.pickle(_Maker, _pickle_DebugMode_Maker) ...@@ -2514,35 +2652,40 @@ copyreg.pickle(_Maker, _pickle_DebugMode_Maker)
class DebugMode(Mode): class DebugMode(Mode):
"""Evaluation Mode that detects internal theano errors. """
Evaluation Mode that detects internal theano errors.
This mode catches several kinds of internal error: This mode catches several kinds of internal error:
- inconsistent outputs when calling the same Op twice with the same - Inconsistent outputs when calling the same Op twice with the same
inputs, for instance if c_code and perform implementations, are inputs, for instance if c_code and perform implementations, are
inconsistent, or in case of incorrect handling of output memory inconsistent, or in case of incorrect handling of output memory
(see `BadThunkOutput`), (see `BadThunkOutput`).
- a variable replacing another when their runtime values don't - A variable replacing another when their runtime values don't
match. This is a symptom of an incorrect optimization step, or match. This is a symptom of an incorrect optimization step, or
faulty Op implementation (raises `BadOptimization`) faulty Op implementation (raises `BadOptimization`).
- stochastic optimization ordering (raises `StochasticOrder`) - Stochastic optimization ordering (raises `StochasticOrder`).
- incomplete `destroy_map` specification (raises `BadDestroyMap`) - Incomplete `destroy_map` specification (raises `BadDestroyMap`).
- an op that returns an illegal value not matching the output - An op that returns an illegal value not matching the output
Variable Type (raises InvalidValueError) Variable Type (raises InvalidValueError).
Each of these exceptions inherits from the more generic `DebugModeError`. Each of these exceptions inherits from the more generic `DebugModeError`.
If there are no internal errors, this mode behaves like FAST_RUN If there are no internal errors, this mode behaves like FAST_RUN
or FAST_COMPILE, but takes a little longer and uses more memory. or FAST_COMPILE, but takes a little longer and uses more memory.
If there are internal errors, this mode will raise an Raises
`DebugModeError` exception. ------
DebugModeError
If there are internal errors.
:remark: The work of debugging is implemented by the `_Maker`, `_Linker`, Notes
-----
The work of debugging is implemented by the `_Maker`, `_Linker`,
and `_VariableEquivalenceTracker` classes. and `_VariableEquivalenceTracker` classes.
""" """
...@@ -2551,22 +2694,26 @@ class DebugMode(Mode): ...@@ -2551,22 +2694,26 @@ class DebugMode(Mode):
""" """
When checking for the stability of optimization, recompile the When checking for the stability of optimization, recompile the
graph this many times. graph this many times.
""" """
check_c_code = config.DebugMode.check_c check_c_code = config.DebugMode.check_c
""" """
Should we evaluate (and check) the `c_code` implementations? Should we evaluate (and check) the `c_code` implementations?
""" """
check_py_code = config.DebugMode.check_py check_py_code = config.DebugMode.check_py
""" """
Should we evaluate (and check) the `perform` implementations? Should we evaluate (and check) the `perform` implementations?
Always checked if no `c_code`. Always checked if no `c_code`.
""" """
check_isfinite = config.DebugMode.check_finite check_isfinite = config.DebugMode.check_finite
""" """
Should we check for (and complain about) NaN/Inf ndarray elements? Should we check for (and complain about) NaN/Inf ndarray elements?
""" """
require_matching_strides = config.DebugMode.check_strides require_matching_strides = config.DebugMode.check_strides
...@@ -2574,6 +2721,7 @@ class DebugMode(Mode): ...@@ -2574,6 +2721,7 @@ class DebugMode(Mode):
Should we check for (and complain about) Ops whose python and C Should we check for (and complain about) Ops whose python and C
outputs are ndarrays with different strides? (This can catch bugs, outputs are ndarrays with different strides? (This can catch bugs,
but is generally overly strict.) 0 no check, 1 warn, 2 err. but is generally overly strict.) 0 no check, 1 warn, 2 err.
""" """
check_preallocated_output = config.DebugMode.check_preallocated_output check_preallocated_output = config.DebugMode.check_preallocated_output
...@@ -2584,13 +2732,15 @@ class DebugMode(Mode): ...@@ -2584,13 +2732,15 @@ class DebugMode(Mode):
"c_contiguous", "f_contiguous", "strided" (positive and negative "c_contiguous", "f_contiguous", "strided" (positive and negative
strides), "wrong_size" (larger and smaller dimensions), and "ALL" strides), "wrong_size" (larger and smaller dimensions), and "ALL"
(all of the above). (all of the above).
""" """
# This function will be used to create a FunctionMaker in # This function will be used to create a FunctionMaker in
# function_module.function # function_module.function
def function_maker(self, i, o, m, *args, **kwargs): def function_maker(self, i, o, m, *args, **kwargs):
""" """
Return an instance of `_Maker` which handles much of the debugging work Return an instance of `_Maker` which handles much of the debugging work.
""" """
assert m is self assert m is self
return _Maker(i, o, self.optimizer, self, *args, **kwargs) return _Maker(i, o, self.optimizer, self, *args, **kwargs)
...@@ -2604,12 +2754,11 @@ class DebugMode(Mode): ...@@ -2604,12 +2754,11 @@ class DebugMode(Mode):
check_preallocated_output=None, check_preallocated_output=None,
require_matching_strides=None, require_matching_strides=None,
linker=_DummyLinker()): linker=_DummyLinker()):
"""Initialize member variables. """
If any of these arguments (except optimizer) is not None, it overrides If any of these arguments (except optimizer) is not None, it overrides
the class default. the class default. The linker argument is not used. It is set there to
The linker argument is not used. It is set there to allow allow Mode.requiring() and some other fct to work with DebugMode too.
Mode.requiring() and some other fct to work with DebugMode too.
""" """
if not isinstance(linker, _DummyLinker): if not isinstance(linker, _DummyLinker):
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论