提交 dd580bba authored 作者: Cesar Laurent's avatar Cesar Laurent

Fixed pygpu import, docstring, and unified names.

上级 4969ddfe
......@@ -532,7 +532,7 @@ by the inner function, otherwise an error will be raised.
Reducing Scan's memory usage
----------------------------
This section presents the ``scan_with_checkpoints`` function. In short, this
This section presents the ``scan_checkpoints`` function. In short, this
function reduces the memory usage of scan (at the cost of more computation
time) by not keeping in memory all the intermediate time steps of the loop,
and recomputing them when computing the gradients. This function is therefore
......@@ -553,7 +553,7 @@ Before going more into the details, here are its current limitations:
Often, in order to be able to compute the gradients through scan operations,
Theano needs to keep in memory some intermediate computations of scan. This
can sometimes use a prohibitively large amount of memory.
``scan_with_checkpoints`` allows to discard some of those intermediate steps and
``scan_checkpoints`` allows discarding some of those intermediate steps and
recompute them again when computing the gradients. Its ``save_every_N`` argument
specifies the number of time steps to do without storing the intermediate results.
For example, ``save_every_N = 4`` will reduce the memory usage by 4, while having
......@@ -646,4 +646,4 @@ reference
.. autofunction:: theano.foldl
.. autofunction:: theano.foldr
.. autofunction:: theano.scan
.. autofunction:: theano.scan_with_checkpoints
.. autofunction:: theano.scan_checkpoints
......@@ -81,7 +81,7 @@ from theano.misc.safe_asarray import _asarray
from theano.printing import pprint, pp
from theano.scan_module import (scan, map, reduce, foldl, foldr, clone,
scan_with_checkpoints)
scan_checkpoints)
from theano.updates import OrderedUpdates
......
......@@ -40,6 +40,6 @@ __contact__ = "Razvan Pascanu <r.pascanu@gmail>"
from theano.scan_module import scan_opt
from theano.scan_module.scan import scan
from theano.scan_module.scan_checkpoint import scan_with_checkpoints
from theano.scan_module.scan_checkpoints import scan_checkpoints
from theano.scan_module.scan_views import map, reduce, foldl, foldr
from theano.scan_module.scan_utils import clone, until
......@@ -3,34 +3,36 @@ from __future__ import absolute_import, print_function, division
import theano
def scan_with_checkpoints(fn, sequences=[], outputs_info=None,
non_sequences=[], name="checkpointscan_fn",
n_steps=None, save_every_N=10):
def scan_checkpoints(fn, sequences=[], outputs_info=None, non_sequences=[],
name="checkpointscan_fn", n_steps=None, save_every_N=10):
"""Scan function that uses less memory, but is more restrictive.
In ``scan``, if you compute the gradient of the output with respect
to the input, you will have to store the intermediate results at
each time step, which can be prohibitively huge. This function allows
to do several steps of forward computations without storing the
intermediate results, and to recompute them during the gradient
computation.
Current assumptions :
- Every sequence has the same length.
- If n_steps is specified, it has the same value as the length of any
sequence.
- The value of "save_every_N" divides the number of steps the Scan will
run without remainder.
- Only singly-recurrent and non-recurrent outputs are used.
No multiple recurrences.
- Only the last timestep of any output will ever be used.
In :func:`~theano.scan`, if you compute the gradient of the output
with respect to the input, you will have to store the intermediate
results at each time step, which can be prohibitively huge. This
function allows doing ``save_every_N`` steps of forward computations
without storing the intermediate results, and to recompute them during
the gradient computation.
Notes
-----
Current assumptions:
* Every sequence has the same length.
* If ``n_steps`` is specified, it has the same value as the length of
any sequence.
* The value of ``save_every_N`` divides the number of steps the scan
will run without remainder.
* Only singly-recurrent and non-recurrent outputs are used.
No multiple recurrences.
* Only the last timestep of any output will ever be used.
Parameters
----------
fn
``fn`` is a function that describes the operations involved in one
step of ``scan``. See the documentation of ``scan`` for more
information.
step of ``scan``. See the documentation of :func:`~theano.scan`
for more information.
sequences
``sequences`` is the list of Theano variables or dictionaries
......@@ -51,36 +53,30 @@ def scan_with_checkpoints(fn, sequences=[], outputs_info=None,
n_steps
``n_steps`` is the number of steps to iterate given as an int
or Theano scalar. If any of the input sequences do not have
enough elements, scan will raise an error. If the *value is 0* the
outputs will have *0 rows*. If the value is negative, ``scan``
will run backwards in time. If the ``go_backwards`` flag is already
set and also ``n_steps`` is negative, ``scan`` will run forward
in time. If n_steps is not provided, ``scan`` will figure
enough elements, scan will raise an error. If the **value is 0**
the outputs will have **0 rows**. If the value is negative,
``scan`` will run backwards in time. If the ``go_backwards`` flag
is already set and also ``n_steps`` is negative, ``scan`` will run
forward in time. If ``n_steps`` is not provided, ``scan`` will figure
out the amount of steps it should run given its input sequences.
save_every_N
``save_every_N`` is the number of steps to go without storing
the computations of scan (ie they will have to be recomputed
the computations of ``scan`` (i.e. they will have to be recomputed
during the gradient computation).
Returns
-------
tuple
Tuple of the form (outputs, updates); ``outputs`` is either a
Theano variable or a list of Theano variables representing the
outputs of ``scan`` (in the same order as in ``outputs_info``).
``updates`` is a subclass of dictionary specifying the update rules for
all shared variables used in scan.
This dictionary should be passed to ``theano.function`` when you compile
your function. The change compared to a normal dictionary is that we
validate that keys are SharedVariable and addition of those dictionary
are validated to be consistent.
Note that only the last time step of ``outputs`` can be used with this
type of scan.
Tuple of the form ``(outputs, updates)`` as in :func:`~theano.scan`, but
with a small change: it only contains the output at each
``save_every_N`` step. The time steps that are not returned by
this function will be recomputed during the gradient computation
(if any).
See Also
--------
scan : Looping in Theano.
:func:`~theano.scan`: Looping in Theano.
"""
# Standardize the format of input arguments
......@@ -94,7 +90,7 @@ def scan_with_checkpoints(fn, sequences=[], outputs_info=None,
# Check that outputs_info has no taps:
for element in outputs_info:
if isinstance(element, dict) and 'taps' in element:
raise RuntimeError("scan_with_checkpoints doesn't work with taps.")
raise RuntimeError("scan_checkpoints doesn't work with taps.")
# Determine how many steps the original scan would run
if n_steps is None:
......
......@@ -6,7 +6,11 @@ import unittest
import theano
import theano.tensor as T
from pygpu.gpuarray import GpuArrayException
try:
from pygpu.gpuarray import GpuArrayException
PYGPU_AVAILABLE = True
except ImportError:
PYGPU_AVAILABLE = False
class TestScanCheckpoint(unittest.TestCase):
......@@ -19,7 +23,7 @@ class TestScanCheckpoint(unittest.TestCase):
outputs_info=T.ones_like(self.A),
non_sequences=self.A,
n_steps=self.k)
result_check, _ = theano.scan_with_checkpoints(
result_check, _ = theano.scan_checkpoints(
fn=lambda prior_result, A: prior_result * A,
outputs_info=T.ones_like(self.A),
non_sequences=self.A,
......@@ -44,6 +48,7 @@ class TestScanCheckpoint(unittest.TestCase):
out, out_check = f(range(10), 100)
assert numpy.allclose(out, out_check)
@unittest.skipUnless(PYGPU_AVAILABLE, 'Requires pygpu.')
def test_memory(self):
"""Test that scan_checkpoint reduces memory usage."""
if None not in theano.gpuarray.type.list_contexts():
......@@ -61,5 +66,5 @@ class TestScanCheckpoint(unittest.TestCase):
def test_taps_error(self):
"""Test that an error rises if we use taps in outputs_info."""
self.assertRaises(RuntimeError, theano.scan_with_checkpoints,
self.assertRaises(RuntimeError, theano.scan_checkpoints,
lambda: None, [], {'initial': self.A, 'taps': [-2]})
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论