提交 f1467cf0，作者: Frederic

Add import.

上级 a6f8aa1a
...
@@ -55,3 +55,5 @@ from theano.gradient import Rop, Lop, grad, numeric_grad, verify_grad, \
     jacobian, hessian
 from theano.tensor.sort import sort
from extra_ops import (DiffOp, bincount, squeeze,
repeat, bartlett, fill_diagonal)
...
@@ -3,8 +3,8 @@ import numpy
 import theano
 import basic
-from theano import gof, tensor, scalar
+from theano import gof, scalar
-from theano.sandbox.linalg.ops import diag
+import basic as tensor

 class DiffOp(theano.Op):
...
@@ -538,7 +538,9 @@ class FillDiagonal(gof.Op):
             raise NotImplementedError('%s: gradient is currently implemented'
                             ' for matrices only' % self.__class__.__name__)
         wr_a = fill_diagonal(grad, 0)  # valid for any number of dimensions
-        wr_val = diag(grad).sum()  # diag is only valid for matrices
+        # diag is only valid for matrices
+        import theano.sandbox.linalg
+        wr_val = theano.sandbox.linalg.ops.diag(grad).sum()
         return [wr_a, wr_val]
...
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论