提交 63d4beb3 authored 作者: AndreiCostinescu's avatar AndreiCostinescu

Changed comments to docstrings where necessary in theano/tests

上级 f3d712ef
......@@ -107,8 +107,9 @@ whitelist_flake8 = [
def list_files(dir_path=theano.__path__[0], pattern='*.py', no_match=".#"):
# List all files under theano's path.
"""
List all files under theano's path.
"""
files_list = []
for (dir, _, files) in os.walk(dir_path):
for f in files:
......@@ -121,7 +122,6 @@ def list_files(dir_path=theano.__path__[0], pattern='*.py', no_match=".#"):
def test_format_flake8():
# Test if flake8 is respected.
if not flake8_available:
raise SkipTest("flake8 is not installed")
total_errors = 0
......@@ -137,10 +137,11 @@ def test_format_flake8():
def print_files_information_flake8():
# Print the list of files which can be removed from the whitelist and the
# list of files which do not respect FLAKE8 formatting that aren't in the
# whitelist.
"""
Print the list of files which can be removed from the whitelist and the
list of files which do not respect FLAKE8 formatting that aren't in the
whitelist.
"""
infracting_files = []
non_infracting_files = []
for path in list_files():
......@@ -162,10 +163,12 @@ def print_files_information_flake8():
def check_all_files(dir_path=theano.__path__[0], pattern='*.py'):
# List all .py files under dir_path (theano path), check if they follow
# flake8 format, save all the error-formatted files into
# theano_filelist.txt. This function is used for generating
# the "whitelist_flake8" in this file.
"""
List all .py files under dir_path (theano path), check if they follow
flake8 format, save all the error-formatted files into
theano_filelist.txt. This function is used for generating
the "whitelist_flake8" in this file.
"""
with open('theano_filelist.txt', 'a') as f_txt:
for (dir, _, files) in os.walk(dir_path):
......
......@@ -21,9 +21,10 @@ one = theano.tensor.as_tensor_variable(1.)
def grad_sources_inputs(sources, inputs):
# This implements the old grad_sources_inputs function in terms of
# the new interface so the tests don't need to be rewritten.
"""
This implements the old grad_sources_inputs function in terms of
the new interface so the tests don't need to be rewritten.
"""
if inputs is None:
inputs = theano.gof.graph.inputs([source[0] for source in sources])
return dict(izip(inputs, theano.gradient.grad(cost=None, known_grads=dict(sources),
......
"""
WRITE ME
WRITE ME
Tests for the R operator / L operator
Tests for the R operator / L operator
For the list of op with r op defined, with or without missing test
see this file: doc/library/tensor/basic.txt
For function to automatically test your Rop implementation, look at
the docstring of the functions: check_mat_rop_lop, check_rop_lop,
check_nondiff_rop,
For the list of op with r op defined, with or without missing test
see this file: doc/library/tensor/basic.txt
For function to automatically test your Rop implementation, look at
the docstring of the functions: check_mat_rop_lop, check_rop_lop,
check_nondiff_rop,
"""
from __future__ import absolute_import, print_function, division
import unittest
......@@ -30,9 +29,10 @@ Special Op created to test what happens when you have one op that is not
differentiable in the computational graph
'''
class BreakRop(Op):
# @note: Non-differentiable.
"""
@note: Non-differentiable.
"""
__props__ = ()
def make_node(self, x):
......@@ -53,9 +53,10 @@ break_op = BreakRop()
class RopLop_checker(unittest.TestCase):
# Don't perform any test, but provide the function to test the
# Rop to class that inherit from it.
"""
Don't perform any test, but provide the function to test the
Rop to class that inherit from it.
"""
def setUp(self):
utt.seed_rng()
# Using vectors make things a lot simpler for generating the same
......@@ -70,8 +71,10 @@ class RopLop_checker(unittest.TestCase):
5 + self.rng.randint(3))
def check_nondiff_rop(self, y):
# If your op is not differentiable (so you can't define Rop)
# test that an error is raised.
"""
If your op is not differentiable (so you can't define Rop)
test that an error is raised.
"""
raised = False
try:
tensor.Rop(y, self.x, self.v)
......@@ -83,24 +86,25 @@ class RopLop_checker(unittest.TestCase):
' is not differentiable'))
def check_mat_rop_lop(self, y, out_shape):
# Test the Rop/Lop when input is a matrix and the output is a vector
#
# :param y: the output variable of the op applied to self.mx
# :param out_shape: Used to generate a random tensor
# corresponding to the evaluation point of the Rop
# (i.e. the tensor with which you multiply the
# Jacobian). It should be a tuple of ints.
#
# If the Op has more than 1 input, one of them must be mx, while
# others must be shared variables / constants. We will test only
# against the input self.mx, so you must call
# check_mat_rop_lop/check_rop_lop for the other inputs.
#
# We expect all inputs/outputs have dtype floatX.
#
# If you want to test an Op with an output matrix, add a sum
# after the Op you want to test.
"""
Test the Rop/Lop when input is a matrix and the output is a vector
:param y: the output variable of the op applied to self.mx
:param out_shape: Used to generate a random tensor
corresponding to the evaluation point of the Rop
(i.e. the tensor with which you multiply the
Jacobian). It should be a tuple of ints.
If the Op has more than 1 input, one of them must be mx, while
others must be shared variables / constants. We will test only
against the input self.mx, so you must call
check_mat_rop_lop/check_rop_lop for the other inputs.
We expect all inputs/outputs have dtype floatX.
If you want to test an Op with an output matrix, add a sum
after the Op you want to test.
"""
vx = np.asarray(self.rng.uniform(size=self.mat_in_shape),
theano.config.floatX)
vv = np.asarray(self.rng.uniform(size=self.mat_in_shape),
......@@ -132,9 +136,10 @@ class RopLop_checker(unittest.TestCase):
assert np.allclose(v1, v2), ('LOP mismatch: %s %s' % (v1, v2))
def check_rop_lop(self, y, out_shape):
# As check_mat_rop_lop, except the input is self.x which is a
# vector. The output is still a vector.
"""
As check_mat_rop_lop, except the input is self.x which is a
vector. The output is still a vector.
"""
# TEST ROP
vx = np.asarray(self.rng.uniform(size=self.in_shape),
theano.config.floatX)
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论