Commit 93879ae4, authored by Arnaud Bergeron

Don't try to use opt when cuda is not available.

Parent commit: 1c3afdf6
sparse_block_outer_ss = SparseBlockOuterSS(False)
sparse_block_outer_ss_inplace = SparseBlockOuterSS(True)


# Only register the in-place rewrites when CUDA is available: without a
# working CUDA backend these ops can never appear in a graph, and the
# registration itself is what this commit guards (per the commit message:
# "Don't try to use opt when cuda is not available").
if cuda_available:
    @opt.register_opt()
    @opt.local_optimizer([sparse_block_gemv_ss], inplace=True)
    def local_inplace_blocksparse_gemv(node):
        """Swap a SparseBlockGemvSS node for its in-place variant.

        Returns a one-element replacement list when `node` is the
        non-inplace op, otherwise returns None (no rewrite).
        """
        if node.op == sparse_block_gemv_ss:
            return [sparse_block_gemv_ss_inplace(*node.inputs)]

    @opt.register_opt()
    @opt.local_optimizer([sparse_block_outer_ss], inplace=True)
    def local_inplace_blocksparse_outer(node):
        """Swap a SparseBlockOuterSS node for its in-place variant.

        Returns a one-element replacement list when `node` is the
        non-inplace op, otherwise returns None (no rewrite).
        """
        if node.op == sparse_block_outer_ss:
            return [sparse_block_outer_ss_inplace(*node.inputs)]
def sparse_block_dot_SS(W, h, inputIdx, b, outputIdx): def sparse_block_dot_SS(W, h, inputIdx, b, outputIdx):
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论