Commit fe14a910, authored by lamblin

Merge pull request #1284 from nouiz/assert

Add assert in make_node
@@ -382,6 +382,7 @@ class Cholesky(Op):
        assert imported_scipy, (
            "Scipy not available. Scipy is needed for the Cholesky op")
        x = as_tensor_variable(x)
        assert x.ndim == 2
        return Apply(self, [x], [x.type()])

    def perform(self, node, inputs, outputs):
@@ -428,6 +429,9 @@ class CholeskyGrad(Op):
        x = as_tensor_variable(x)
        l = as_tensor_variable(l)
        dz = as_tensor_variable(dz)
        assert x.ndim == 2
        assert l.ndim == 2
        assert dz.ndim == 2
        assert l.owner.op.lower == self.lower, (
            "lower/upper mismatch between Cholesky op and CholeskyGrad op"
        )
@@ -511,6 +515,7 @@ class MatrixPinv(Op):
    def make_node(self, x):
        x = as_tensor_variable(x)
        assert x.ndim == 2
        return Apply(self, [x], [x.type()])

    def perform(self, node, (x,), (z, )):
@@ -559,6 +564,7 @@ class MatrixInverse(Op):
    def make_node(self, x):
        x = as_tensor_variable(x)
        assert x.ndim == 2
        return Apply(self, [x], [x.type()])

    def perform(self, node, (x,), (z, )):
@@ -647,6 +653,8 @@ class Solve(Op):
            "Scipy not available. Scipy is needed for the Solve op")
        A = as_tensor_variable(A)
        b = as_tensor_variable(b)
        assert A.ndim == 2
        assert b.ndim in [1, 2]
        otype = tensor.tensor(
            broadcastable=b.broadcastable,
            dtype=(A * b).dtype)
@@ -789,6 +797,7 @@ class Det(Op):
    """
    def make_node(self, x):
        x = as_tensor_variable(x)
        assert x.ndim == 2
        o = theano.tensor.scalar(dtype=x.dtype)
        return Apply(self, [x], [o])
@@ -853,8 +862,11 @@ class A_Xinv_b(Op):
        assert imported_scipy, (
            "Scipy not available. Scipy is needed for the A_Xinv_b op")
        a = as_tensor_variable(a)
        X = as_tensor_variable(X)
        b = as_tensor_variable(b)
        assert a.ndim == 2
        assert X.ndim == 2
        assert b.ndim == 2
        o = theano.tensor.matrix(dtype=x.dtype)
        return Apply(self, [a, X, b], [o])
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论