提交 a44e2e50 authored 作者: Frederic Bastien's avatar Frederic Bastien

Add a script to help test GPU power consumption.

上级 8aaf55bf
"""This script trigger convolution operation. We think it cause more
GPU power consumption then gemm call.
"""
import numpy as np
import theano
import theano.tensor as T
from theano.gpuarray import dnn
from theano.tensor.nnet.abstract_conv import get_conv_output_shape
def burn(sz=128, iterations=10000):
    """Repeatedly run a cuDNN convolution to stress the GPU.

    Helper for measuring GPU power consumption: convolution is believed
    to draw more power than a gemm call.

    Parameters
    ----------
    sz : int
        Base size used to build the image/kernel shapes
        (image is (sz, sz, sz, sz); kernel is (sz//2, sz, 3, 3)).
    iterations : int
        Number of times the compiled convolution is executed.

    Returns
    -------
    None
        Runs for its side effect (GPU load); prints progress markers.
    """
    # This tests that inplace works correctly even when
    # GpuAllocEmpty nodes get merged together.
    img_shp = [sz, sz, sz, sz]
    kern_shp = [sz // 2, sz, 3, 3]
    out_shp = get_conv_output_shape(img_shp, kern_shp, 'valid', (1, 1))

    def rand(shp):
        # Random data cast to the configured Theano float dtype.
        return np.random.rand(*shp).astype(theano.config.floatX)

    img = theano.shared(rand(img_shp))
    kern = theano.shared(rand(kern_shp))
    out = theano.shared(rand(out_shp))

    # beta=1 is needed to force the reuse of `out`; otherwise it is
    # replaced by a GpuAllocEmpty.
    o1 = dnn._dnn_conv(img, kern, conv_mode='conv', out=out, beta=1.)

    mode = theano.compile.get_default_mode().including(
        "local_remove_all_assert")
    f = theano.function([], [o1], mode=mode)
    theano.printing.debugprint(f)

    print("Start computation")
    for _ in range(iterations):
        # Call f.fn() directly to skip the Python-level overhead of f().
        f.fn()
    print("Computation stopped")
if __name__ == "__main__":
burn()
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论