提交 aeac604e authored 作者: Arnaud Bergeron's avatar Arnaud Bergeron

Add a config variable to force ops to work synchronously.

上级 d2d012e2
import logging
import theano
from theano.configparser import config
from theano.configparser import config, AddConfigVar, BoolParam
from theano.compile import optdb
_logger_name = 'theano.sandbox.gpuarray'
......@@ -18,12 +18,18 @@ try:
except ImportError:
pygpu = None
# Register the boolean config flag ``gpuarray.sync`` (default: False).
# When enabled, every gpuarray op waits for its GPU work to finish before
# returning (see the ``config.gpuarray.sync`` check in GpuElemwise below,
# which calls ``.sync()`` on the output storage).  This slows execution
# down but makes per-op profiling timings accurate.
# ``in_c_key=True``: the flag participates in the C-cache key, so toggling
# it forces recompilation of the affected C code.
AddConfigVar('gpuarray.sync',
"""If True, every op will make sure its work is done before
returning. Setting this to True will slow down execution,
but give much more accurate results in profiling.""",
BoolParam(False),
in_c_key=True)
# This is for documentation not to depend on the availability of pygpu
from type import (GpuArrayType, GpuArrayVariable, GpuArrayConstant,
GpuArraySharedVariable, gpuarray_shared_constructor)
import opt
def init_dev(dev):
global pygpu_activated
context = pygpu.init(dev)
......
......@@ -2,7 +2,7 @@ import copy
from itertools import izip
import numpy
from theano import Op, Apply, scalar
from theano import Op, Apply, scalar, config
from theano.tensor.elemwise import Elemwise, DimShuffle
try:
......@@ -163,6 +163,8 @@ class GpuElemwise(HideC, Elemwise):
# the dict call is there to avoid a syntax error in python < 2.6
node._cache_elemwise_k(*args, **dict(broadcast=True))
if config.gpuarray.sync:
output_storage[0][0].sync()
class SupportCodeError(Exception):
......
Markdown 格式
0%
您将添加 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论