logging.getLogger('theano_cuda_ndarray').warning("WARNING: Won't use the GPU as the initialisation of device %i failed. %s"%(device,e))
raise
elifuse.device_number!=device:
logging.getLogger('theano_cuda_ndarray').warning("WARNING: ignoring call to use(%s), GPU number %i is already in use."%(str(device),use.device_number))
# Module-level state stored as a function attribute: records which GPU device
# number use() has already claimed. None means no device is in use yet; the
# `elif use.device_number != device` branch above warns and ignores a second
# use() call targeting a different GPU. NOTE(review): `use` itself is defined
# earlier in the file, outside this chunk.
use.device_number=None
defhandle_shared_float32(tf):
defhandle_shared_float32(tf):
"""Set the CudaNdarrayType as the default handler for shared float32 arrays
"""Set the CudaNdarrayType as the default handler for shared float32 arrays.
Use use(tf) instead as this is a bad name.
This function is intended to be called from use(gpu_index), not directly.