Commit d218ee92 authored by Benjamin Scellier

file theano/gpuarray/dnn.py

Parent 2b9d83ae
@@ -4,7 +4,7 @@ import os
 import sys
 import warnings
-import numpy
+import numpy as np
 from six import integer_types
 import theano
@@ -456,8 +456,8 @@ gpu_dnn_conv_desc.cache = {}
 # scalar constants
-_zero = constant(numpy.asarray(0.0, dtype='float64'))
-_one = constant(numpy.asarray(1.0, dtype='float64'))
+_zero = constant(np.asarray(0.0, dtype='float64'))
+_one = constant(np.asarray(1.0, dtype='float64'))

 def ensure_dt(val, default, name, dtype):
@@ -2388,8 +2388,8 @@ class RNNBlock(object):
         bytesize = _get_param_size(self.desc, input_size, self.dtype,
                                    self.context_name)
         bytesize = int(bytesize)
-        assert bytesize % numpy.dtype(self.dtype).itemsize == 0
-        return bytesize // numpy.dtype(self.dtype).itemsize
+        assert bytesize % np.dtype(self.dtype).itemsize == 0
+        return bytesize // np.dtype(self.dtype).itemsize

     def split_params(self, w, layer, input_size):
         if not isinstance(w, GpuArraySharedVariable):
...
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment