提交 5c44a014 authored 作者: Pascal Lamblin's avatar Pascal Lamblin

Merge pull request #2978 from nouiz/small

Small doc and crash fix.
......@@ -4,7 +4,7 @@
:mod:`toolbox` -- [doc TODO]
================================================
.. module:: toolbox
.. module:: theano.gof.toolbox
:platform: Unix, Windows
:synopsis: Theano Internals
.. moduleauthor:: LISA
......
.. _fgraph:
===
=============
FunctionGraph
===
=============
TODO: clean up/update the doc/check if complete
WRITEME
.. autoclass:: theano.gof.fg.FunctionGraph
.. _fgraphfeature:
Feature
=======
WRITEME
.. autoclass:: theano.gof.toolbox.Feature
:members:
......@@ -136,9 +136,7 @@ might not have Theano installed, who are using a different Python version, or if
you are planning to save your model for a long time (in which case version
mismatches might make it difficult to unpickle objects).
.. autofunction:: theano.misc.pkl_utils.dump
.. autofunction:: theano.misc.pkl_utils.load
See :func:`theano.misc.pkl_utils.dump` and :func:`theano.misc.pkl_utils.load`.
Long-Term Serialization
......
......@@ -1753,6 +1753,14 @@ class _Linker(gof.link.LocalLinker):
else:
thunks_py.append(None)
if not self.maker.mode.check_c_code and thunks_py[-1] is None:
_logger.warn(
"Op %s don't have a perform, forcing check of the c code" %
node.op)
thunk = node.op.make_c_thunk(node, storage_map, compute_map,
no_recycling)
thunks_c[-1] = thunk
# If the op defined its own make_thunk, use the generated thunk
if thunk_other is not None:
if thunks_py[-1] is None:
......@@ -2090,6 +2098,12 @@ class _Linker(gof.link.LocalLinker):
for r in node.inputs:
storage_map[r][0] = None
_logger.debug("%i - done with node", i)
for r in node.outputs:
if r not in r_vals:
idx = order.index(node)
assert thunks_py[idx] is None
assert thunks_c[idx] is None
raise Exception("No code run for %s" % node)
if False:
# This could be useful to help finding refcount problem.
......
......@@ -175,72 +175,110 @@ def raise_with_op(node, thunk=None, exc_info=None, storage_map=None):
detailed_err_msg += "\nDebugprint of the apply node: \n"
detailed_err_msg += f.getvalue()
# Prints output_map
if storage_map is not None:
detailed_err_msg += "\nStorage map footprint:\n"
shared_input_list = [
item for item in node.fgraph.inputs
if isinstance(item, theano.compile.SharedVariable)]
nonshared_input_list = [
item for item in node.fgraph.inputs
if not isinstance(item, theano.compile.SharedVariable)]
storage_map_list = []
for k in storage_map.keys():
storage_map_item = []
# storage_map_item[0]
storage_map_item.append(str(k))
# storage_map_item[1]
shapeinfo = None
if hasattr(storage_map[k][0], 'shape'):
shapeinfo = storage_map[k][0].shape
if len(shapeinfo) != 0:
storage_map_item.append(shapeinfo)
else:
storage_map_item.append(tuple())
else:
storage_map_item.append(None)
# storage_map_item[2]
# storage_map_item[3]
if hasattr(storage_map[k][0], 'dtype'):
dtype = storage_map[k][0].dtype
storage_map_item.append(numpy.dtype(dtype).itemsize)
if shapeinfo is None:
storage_map_item.append(None)
else:
storage_map_item.append(numpy.dtype(dtype).itemsize * numpy.prod(shapeinfo))
else:
bytes = getsizeof(storage_map[k][0])
storage_map_item.append(bytes)
storage_map_item.append(None)
# Flag of shared val
# storage_map_item[4]
if k in shared_input_list:
storage_map_item.append(True)
elif k in nonshared_input_list:
storage_map_item.append(False)
# Prints output_map
if theano.config.exception_verbosity == 'high' and storage_map is not None:
detailed_err_msg += "\nStorage map footprint:\n"
shared_input_list = [
item for item in node.fgraph.inputs
if isinstance(item, theano.compile.SharedVariable)]
nonshared_input_list = [
item for item in node.fgraph.inputs
if not isinstance(item, theano.compile.SharedVariable)]
storage_map_list = []
total_size = 0
total_size_inputs = 0
for k in storage_map.keys():
storage_map_item = []
# storage_map_item[0]: the variable
storage_map_item.append(str(k))
# storage_map_item[1]: the shape
shapeinfo = None
if hasattr(storage_map[k][0], 'shape'):
shapeinfo = storage_map[k][0].shape
if len(shapeinfo) != 0:
storage_map_item.append(shapeinfo)
else:
storage_map_item.append(tuple())
else:
storage_map_item.append(None)
# storage_map_item[2]: itemsize
# storage_map_item[3]: bytes
if hasattr(storage_map[k][0], 'dtype'):
dtype = storage_map[k][0].dtype
storage_map_item.append(numpy.dtype(dtype).itemsize)
if shapeinfo is None:
storage_map_item.append(None)
storage_map_list.append(storage_map_item)
from operator import itemgetter
storage_map_list.sort(key=itemgetter(3), reverse=True)
for storage_map_item in storage_map_list:
detailed_err_msg += " - " + storage_map_item[0] + ", "
if storage_map_item[4] is True:
detailed_err_msg += "Shared Input, "
elif storage_map_item[4] is False:
detailed_err_msg += "Input, "
if storage_map_item[1] is not None:
detailed_err_msg += "Shape: %s, " % str(storage_map_item[1])
detailed_err_msg += "ElemSize: %s Byte(s)" % storage_map_item[2]
if storage_map_item[3] is not None:
detailed_err_msg += ", TotalSize: %s Byte(s)\n" % storage_map_item[3]
else:
detailed_err_msg += "\n"
sz = numpy.dtype(dtype).itemsize * numpy.prod(shapeinfo)
storage_map_item.append(sz)
total_size += sz
if not k.owner:
total_size_inputs += sz
else:
# If it is a view, don't count it twice.
if getattr(k.owner.op, 'view_map', None):
vmap = k.owner.op.view_map
out_idx = k.owner.outputs.index(k)
data = storage_map[k][0]
if out_idx in vmap:
assert len(vmap[out_idx]) == 1
input_data = storage_map[
k.owner.inputs[vmap[out_idx][0]]][0]
if k.type.may_share_memory(data, input_data):
total_size -= sz
# If it is a destroyed input, the input
# shouldn't be in the storage_map anymore
# except if there is a special flag used. So
# we still must check it.
if getattr(k.owner.op, 'destroy_map', None):
vmap = k.owner.op.destroy_map
out_idx = k.owner.outputs.index(k)
data = storage_map[k][0]
if out_idx in vmap:
assert len(vmap[out_idx]) == 1
input_data = storage_map[
k.owner.inputs[vmap[out_idx][0]]][0]
if k.type.may_share_memory(data, input_data):
total_size -= sz
else:
bytes = getsizeof(storage_map[k][0])
storage_map_item.append(bytes)
storage_map_item.append(None)
# Flag of shared val
# storage_map_item[4]
if k in shared_input_list:
storage_map_item.append(True)
elif k in nonshared_input_list:
storage_map_item.append(False)
else:
storage_map_item.append(None)
storage_map_list.append(storage_map_item)
from operator import itemgetter
storage_map_list.sort(key=itemgetter(3), reverse=True)
for item in storage_map_list:
if item[3] is None:
continue
detailed_err_msg += " - " + item[0] + ", "
if item[4] is True:
detailed_err_msg += "Shared Input, "
elif item[4] is False:
detailed_err_msg += "Input, "
if item[1] is not None:
detailed_err_msg += "Shape: %s, " % str(item[1])
detailed_err_msg += "ElemSize: %s Byte(s)" % item[2]
if item[3] is not None:
detailed_err_msg += ", TotalSize: %s Byte(s)\n" % item[3]
else:
detailed_err_msg += "\n"
detailed_err_msg += " TotalSize: %s Byte(s) %.3f GB\n" % (
total_size, total_size/1024./1024/1024)
detailed_err_msg += "  TotalSize inputs: %s Byte(s) %.3f GB\n" % (
total_size_inputs, total_size_inputs/1024./1024/1024)
else:
hints.append(
......
......@@ -32,7 +32,7 @@ class Feature(object):
by various operations on FunctionGraphs. It can be used to enforce
graph properties at all stages of graph optimization.
See toolbox and ext modules for common extensions.
See :func:`theano.gof.toolbox` for common extensions.
"""
def on_attach(self, function_graph):
......
......@@ -548,7 +548,7 @@ class CDataType(Type):
def __eq__(self, other):
return (type(self) == type(other) and
self.ctype == other.ctype,
self.ctype == other.ctype and
self.freefunc == other.freefunc)
def __hash__(self):
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论