提交 aee5674a 作者: abalkin

Convert relative imports to absolute

上级 47b91264
......@@ -3,34 +3,34 @@ __docformat__ = "restructuredtext en"
import warnings
from basic import *
import opt
import opt_uncanonicalize
import blas
import blas_scipy
import blas_c
import xlogx
import raw_random
import randomstreams
import shared_randomstreams
from randomstreams import \
from theano.tensor.basic import *
import theano.tensor.opt
import theano.tensor.opt_uncanonicalize
import theano.tensor.blas
import theano.tensor.blas_scipy
import theano.tensor.blas_c
import theano.tensor.xlogx
import theano.tensor.raw_random
import theano.tensor.randomstreams
import theano.tensor.shared_randomstreams
from theano.tensor.randomstreams import \
RandomStreams
random = RandomStreams(seed=0xBAD5EED, no_warn = True)
"""Imitate the numpy.random symbol with a tensor.random one"""
from elemwise import \
from theano.tensor.elemwise import \
DimShuffle, Elemwise, CAReduce
import sharedvar # adds shared-variable constructors
import theano.tensor.sharedvar # adds shared-variable constructors
# We import as `_shared` instead of `shared` to avoid confusion between
# `theano.shared` and `tensor._shared`.
from sharedvar import tensor_constructor as _shared
from theano.tensor.sharedvar import tensor_constructor as _shared
from io import *
from theano.tensor.io import *
def shared(*args, **kw):
"""
......@@ -49,11 +49,11 @@ def shared(*args, **kw):
return _shared(*args, **kw)
import nnet # used for softmax, sigmoid, etc.
import theano.tensor.nnet # used for softmax, sigmoid, etc.
from theano.gradient import Rop, Lop, grad, numeric_grad, verify_grad, \
jacobian, hessian
from theano.tensor.sort import sort, argsort
from extra_ops import (DiffOp, bincount, squeeze,
from theano.tensor.extra_ops import (DiffOp, bincount, squeeze,
repeat, bartlett, fill_diagonal)
......@@ -14,7 +14,7 @@ from theano.configparser import config
from theano import gof
from theano.gof import Apply, Constant, Op, Type, Variable
import elemwise
from theano.tensor import elemwise
from theano import scalar as scal
from theano.gof.python25 import partial, any, all, maxsize
from theano import compile, printing
......@@ -29,7 +29,7 @@ from theano.gradient import grad_not_implemented
from theano.gradient import DisconnectedType
### set up the external interface
from elemwise import Elemwise, DimShuffle, CAReduce, Sum
from theano.tensor.elemwise import Elemwise, DimShuffle, CAReduce, Sum
import logging
_logger = logging.getLogger("theano.tensor.basic")
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论