提交 b06dcd20 authored 作者: bergstrj@iro.umontreal.ca's avatar bergstrj@iro.umontreal.ca

merged

...@@ -7,6 +7,7 @@ cutils_ext.cpp ...@@ -7,6 +7,7 @@ cutils_ext.cpp
html html
*.o *.o
pdf pdf
pull.sh
*.pyc *.pyc
*.so *.so
*.sw? *.sw?
...@@ -463,6 +463,16 @@ class T_log2(unittest.TestCase): ...@@ -463,6 +463,16 @@ class T_log2(unittest.TestCase):
def test0(self): def test0(self):
verify_grad(self, Log2, [numpy.random.rand(3,1)+0.0001]) verify_grad(self, Log2, [numpy.random.rand(3,1)+0.0001])
class T_log(unittest.TestCase):
    # Unit tests for the elementwise natural-logarithm Op (`Log` / `log`).
    def test0(self):
        # Gradient check via finite differences on strictly positive input;
        # the +0.0001 offset keeps samples away from log's singularity at 0.
        verify_grad(self, Log, [numpy.random.rand(3,1)+0.0001])
    def test1(self):
        # Forward-value check: log applied to a symbolic tensor must match
        # numpy.log applied elementwise to the same concrete values.
        a = astensor(numpy.ones(2))
        b = astensor(numpy.ones(2))
        aa = numpy.asarray([0.5, 4.0])
        bb = numpy.asarray([0.5, 2.0])
        # NOTE(review): `b` and `bb` are built but never used below —
        # presumably leftover from a copied two-input test (cf. T_pow);
        # confirm and remove.
        check_eq2(self, [a], log(a), [aa], numpy.log(numpy.asarray(aa)))
class T_pow(unittest.TestCase): class T_pow(unittest.TestCase):
def setUp(self): def setUp(self):
numpy.random.seed(9999) numpy.random.seed(9999)
...@@ -493,10 +503,7 @@ class _testCase_matinv(unittest.TestCase): ...@@ -493,10 +503,7 @@ class _testCase_matinv(unittest.TestCase):
# Sum of squared errors # Sum of squared errors
ssdiff = sum((diff**2.0)) ssdiff = sum((diff**2.0))
# May be able to abbreviate this by assuming default parameter g_b = gradient.grad(ssdiff, b)
# TODO: Test that default works
g_b = gradient.grad(ssdiff, b, astensor(numpy.ones(1),name='g_cost'))
#g_b = gradient.grad(ssdiff, b) # This should be the abbreviated version
# compilation to function # compilation to function
# [a,b] are the inputs, [ssdiff,g_b] are the outputs # [a,b] are the inputs, [ssdiff,g_b] are the outputs
......
...@@ -81,7 +81,10 @@ def grad_sources_inputs(sources, graph_inputs): ...@@ -81,7 +81,10 @@ def grad_sources_inputs(sources, graph_inputs):
op_grad = op.grad(input_arg, output_arg) op_grad = op.grad(input_arg, output_arg)
if op_grad is None: if op_grad is None:
raise ValueError(_msg_retNone, op.__class__) raise ValueError(_msg_retNone, op.__class__)
if isinstance(op_grad, float):
raise TypeError('wtf!!!!!!!!', op)
g_inputs = _pack_result(op_grad) g_inputs = _pack_result(op_grad)
assert isinstance(g_inputs, (list, tuple))
if len(g_inputs) != len(op.inputs): if len(g_inputs) != len(op.inputs):
raise ValueError(_msg_badlen, raise ValueError(_msg_badlen,
op.__class__, op.__class__,
......
...@@ -107,7 +107,7 @@ def _assert_same_shapes(x, *rest): ...@@ -107,7 +107,7 @@ def _assert_same_shapes(x, *rest):
shape = x.shape shape = x.shape
for other in rest: for other in rest:
if other.shape != shape: if other.shape != shape:
raise ValueError(_assert_same_shapes.E_shape) raise ValueError(_assert_same_shapes.E_shape, shape, other.shape)
_assert_same_shapes.E_shape = "The dimensions of the inputs do not match." _assert_same_shapes.E_shape = "The dimensions of the inputs do not match."
def _assert_tensor_scalar(x, a): def _assert_tensor_scalar(x, a):
...@@ -470,7 +470,11 @@ subtensor = _constructor(Subtensor) ...@@ -470,7 +470,11 @@ subtensor = _constructor(Subtensor)
# Elemwise # # Elemwise #
class AddElemwise(_Elemwise): class AddElemwise(_Elemwise):
def impl(self, x, y): def impl(self, x, y):
try:
_assert_same_shapes(x, y) _assert_same_shapes(x, y)
except Exception, e:
print '------ ERROR HERE'
raise
return x + y return x + y
def grad(self, (x, y), gz): def grad(self, (x, y), gz):
return gz, gz return gz, gz
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论