Commit 88782a76 authored by Pascal Lamblin

Skip check in VM_Linker

We already encountered them in ProfileStats.__init__
Parent 9093449e
...@@ -757,21 +757,6 @@ class VM_Linker(link.LocalLinker): ...@@ -757,21 +757,6 @@ class VM_Linker(link.LocalLinker):
associated to self, else, a new VM_Linker associated to fgraph. associated to self, else, a new VM_Linker associated to fgraph.
""" """
if ((config.profile or config.print_global_stats) and
((hasattr(theano, 'sandbox') and
hasattr(theano.sandbox, 'cuda') and
theano.sandbox.cuda.cuda_enabled) or
(hasattr(theano, 'gpuarray') and
theano.gpuarray.pygpu_activated))):
if os.environ.get('CUDA_LAUNCH_BLOCKING', '0') != '1':
raise Exception(
"You are running the Theano profiler with CUDA enabled."
" Theano GPU ops execution is asynchronous by default."
" So by default, the profile is useless."
" You must set the environment variable"
" CUDA_LAUNCH_BLOCKING to 1 to tell the CUDA driver to"
" synchronize the execution to get a meaningful profile.")
if no_recycling is None: if no_recycling is None:
no_recycling = [] no_recycling = []
if self.fgraph is not None and self.fgraph is not fgraph: if self.fgraph is not None and self.fgraph is not fgraph:
......
...@@ -91,7 +91,7 @@ def test_pydotprint_profile(): ...@@ -91,7 +91,7 @@ def test_pydotprint_profile():
raise SkipTest('pydot not available') raise SkipTest('pydot not available')
A = tensor.matrix() A = tensor.matrix()
prof = theano.compile.ProfileStats(atexit_print=False) prof = theano.compile.ProfileStats(atexit_print=False, gpu_checks=False)
f = theano.function([A], A + 1, profile=prof) f = theano.function([A], A + 1, profile=prof)
theano.printing.pydotprint(f, print_output_file=False) theano.printing.pydotprint(f, print_output_file=False)
f([[1]]) f([[1]])
......
Markdown is supported
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment