提交 0b5aa217 authored 作者: Frederic's avatar Frederic 提交者: Arnaud Bergeron

Add config var NanGuardMode.{nan,inf,big}_is_error

上级 6c4738d6
......@@ -705,6 +705,24 @@ import theano and print the config variable, as in:
Generate a warning when the destroy_map or view_map tell that an op work
inplace, but the op did not reuse the input for its output.
.. attribute:: config.NanGuardMode.nan_is_error
Bool value, default: True
Controls whether NanGuardMode generates an error when it sees a NaN.
.. attribute:: config.NanGuardMode.inf_is_error
Bool value, default: True
Controls whether NanGuardMode generates an error when it sees an inf.
.. attribute:: config.NanGuardMode.big_is_error
Bool value, default: True
Controls whether NanGuardMode generates an error when it sees a big value (>1e10).
.. attribute:: numpy
This section contains different attributes for configuring numpy's
......
# Standard-library imports.
import collections
import logging

# Third-party imports.
import numpy as np

# Theano imports.  Relative order is preserved from the original file
# because some theano submodule imports have import-time side effects.
import theano
from theano.configparser import config, AddConfigVar, BoolParam
import theano.tensor as T
import theano.sandbox.cuda as cuda
from theano.compile import Mode
# Register the three configuration flags consumed by NanGuardMode.  Each
# flag enables one category of check, defaults to True, and is excluded
# from the C cache key (in_c_key=False) because it only affects runtime
# monitoring, not generated code.
for _flag_name in ('nan_is_error', 'inf_is_error', 'big_is_error'):
    AddConfigVar('NanGuardMode.' + _flag_name,
                 "Default value for " + _flag_name,
                 BoolParam(True),
                 in_c_key=False)
del _flag_name

# Module-level logger for NanGuardMode diagnostics.
logger = logging.getLogger("theano.compile.nanguardmode")
......@@ -116,10 +134,19 @@ class NanGuardMode(Mode):
"""
# We currently lose the first 3 params frequently, when calling
# mode.including() and variants.
def __init__(self, nan_is_error=True, inf_is_error=True, big_is_error=True,
def __init__(self, nan_is_error=None, inf_is_error=None, big_is_error=None,
optimizer=None, linker=None):
self.provided_optimizer = optimizer
cuda_compile_failed = False
if nan_is_error is None:
nan_is_error = config.NanGuardMode.nan_is_error
if inf_is_error is None:
inf_is_error = config.NanGuardMode.inf_is_error
if big_is_error is None:
big_is_error = config.NanGuardMode.big_is_error
assert nan_is_error or inf_is_error or big_is_error
if cuda.cuda_available:
self.guard_input = cuda.fvector('nan_guard')
if nan_is_error or inf_is_error:
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论