@@ -2023,6 +2023,10 @@ class T_max_and_argmax(unittest.TestCase):
because there is no differentiable path from cost to the input and
not because of an error of the grad method of the op
"""
raise KnownFailureTest("The desired behavior of the grad method in this case is currently under debate. In any case, the result should be to return NaN or 0, not to report a disconnected input.")
x = matrix()
cost = argmax(x, axis=0).sum()
value_error_raised = False
...
...
@@ -2220,6 +2224,7 @@ class T_argmin_argmax(unittest.TestCase):
def test_grad_argmin(self):
    data = rand(2, 3)
    n = as_tensor_variable(data)
    n.name = 'n'
    # test grad of argmin
    utt.verify_grad(lambda v: argmin(v, axis=-1), [data])
...
...
@@ -2231,7 +2236,9 @@ class T_argmin_argmax(unittest.TestCase):