Commit e096f644 authored by Frederic Bastien

Update import and indent.

Parent ecc69291
import numpy
-from theano.gof import Variable, Op, utils, Type, Constant, Value, Apply
+from theano.gof import Op, Apply
from theano.tensor import as_tensor_variable, dot, DimShuffle
from theano import tensor
@@ -174,7 +174,7 @@ def is_positive(v):
    print 'is_positive', v
    if v.owner and v.owner.op == tensor.pow:
        print 'try for pow', v, v.owner.inputs
        try:
            exponent = tensor.get_constant_value(v.owner.inputs[1])
        except TypeError:
            return False
@@ -530,5 +530,3 @@ class A_Xinv_b(Op):
        gX = -matrix_dot(iX.T, a, gz, b.T, iX.T)
        gb = matrix_dot(ix.T, a.T, gz)
        return [ga, gX, gb]
@@ -5,7 +5,18 @@ import theano.scipy  # To know if scipy is available.
from theano import tensor, function
from theano.tensor.basic import _allclose
-from theano.sandbox.linalg.ops import *
+# The one in comment are not tested...
+from theano.sandbox.linalg.ops import (cholesky,
+                                        matrix_inverse,
+                                        #solve,
+                                        #diag,
+                                        #extract_diag,
+                                        #alloc_diag,
+                                        det,
+                                        #PSD_hint,
+                                        #trace,
+                                        #spectral_radius_bound
+                                        )
from nose.plugins.skip import SkipTest
@@ -21,7 +32,7 @@ if 0:
    pd = numpy.dot(r, r.T)
    x = tensor.matrix()
-   chol = Cholesky()(x)
+   chol = cholesky(x)
    f = function([x], tensor.dot(chol, chol.T))  # an optimization could remove this
    ch_f = function([x], chol)
...
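For context, here is a hedged sketch, not part of this commit, of how the explicitly imported helpers could be exercised along the lines of the test shown above. It assumes Theano and SciPy are installed; the positive-definite matrix `pd` is a hypothetical fixture, and `dmatrix` is used to avoid any floatX dtype mismatch with the float64 NumPy input:

import numpy
from theano import tensor, function
from theano.sandbox.linalg.ops import cholesky, matrix_inverse, det

x = tensor.dmatrix('x')
# cholesky(x) is the new spelling that replaces Cholesky()(x) in the diff above.
f = function([x], [cholesky(x), matrix_inverse(x), det(x)])

rng = numpy.random.RandomState(42)
r = rng.rand(4, 4)
pd = numpy.dot(r, r.T) + numpy.eye(4)  # symmetric positive-definite input
chol_val, inv_val, det_val = f(pd)
# The (lower-triangular) Cholesky factor should reconstruct the input.
assert numpy.allclose(numpy.dot(chol_val, chol_val.T), pd)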