Commit 4572ae48 authored by Brandon T. Willard, committed by Brandon T. Willard

Remove aesara.tensor.random.op.Observed

Parent adb0558a
@@ -19,7 +19,6 @@ from aesara.tensor.exceptions import NotScalarConstantError
 from aesara.tensor.random.type import RandomStateType
 from aesara.tensor.random.utils import normalize_size_param, params_broadcast_shapes
 from aesara.tensor.type import TensorType, all_dtypes
-from aesara.tensor.type_other import NoneConst
 
 
 def default_shape_from_params(
@@ -422,55 +421,3 @@ class RandomVariable(Op):
 
     def R_op(self, inputs, eval_points):
         return [None for i in eval_points]
-
-
-class Observed(Op):
-    """An `Op` that represents an observed random variable.
-
-    This `Op` establishes an observation relationship between a random
-    variable and a specific value.
-    """
-
-    default_output = 0
-    view_map = {0: [1]}
-
-    def make_node(self, rv, val):
-        """Make an `Observed` random variable.
-
-        Parameters
-        ----------
-        rv: RandomVariable
-            The distribution from which `val` is assumed to be a sample value.
-        val: Variable
-            The observed value.
-        """
-        val = as_tensor_variable(val)
-
-        if rv is not None:
-            if not hasattr(rv, "type") or rv.type.convert_variable(val) is None:
-                raise TypeError(
-                    (
-                        "`rv` and `val` do not have compatible types:"
-                        f" rv={rv}, val={val}"
-                    )
-                )
-        else:
-            rv = NoneConst.clone()
-
-        inputs = [rv, val]
-
-        return Apply(self, inputs, [val.type()])
-
-    def perform(self, node, inputs, out):
-        out[0][0] = inputs[1]
-
-    def grad(self, inputs, outputs):
-        return [
-            aesara.gradient.grad_undefined(
-                self, k, inp, "No gradient defined for random variables"
-            )
-            for k, inp in enumerate(inputs)
-        ]
-
-
-observed = Observed()
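
For context, a minimal sketch of how the removed `observed` helper was used (pre-removal API; the names and values mirror the deleted test further below):

import numpy as np

from aesara import config
from aesara.tensor.random.basic import normal
from aesara.tensor.random.op import observed  # removed by this commit

# `observed(rv, val)` returned a variable equal to `val` (a view of it,
# per `view_map = {0: [1]}`) while recording `rv` in the graph as the
# distribution `val` is assumed to be sampled from.
rv_var = normal(0, 1, size=3)
data = np.array([0.2, 0.1, -2.4], dtype=config.floatX)
obs_var = observed(rv_var, data)
assert obs_var.owner.inputs[0] is rv_var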
@@ -6,10 +6,8 @@ from aesara import config
 from aesara.assert_op import Assert
 from aesara.gradient import NullTypeGradError, grad
 from aesara.tensor.math import eq
-from aesara.tensor.random.basic import normal
-from aesara.tensor.random.op import RandomVariable, default_shape_from_params, observed
-from aesara.tensor.type import all_dtypes, iscalar, tensor, vector
-from aesara.tensor.type_other import NoneTypeT
+from aesara.tensor.random.op import RandomVariable, default_shape_from_params
+from aesara.tensor.type import all_dtypes, iscalar, tensor
 
 
 @fixture(scope="module", autouse=True)
@@ -149,28 +147,3 @@ def test_RandomVariable_floatX():
 
     with config.change_flags(floatX=new_floatX):
         assert test_rv_op(0, 1).dtype == new_floatX
-
-
-def test_observed():
-    rv_var = normal(0, 1, size=3)
-
-    obs_var = observed(rv_var, np.array([0.2, 0.1, -2.4], dtype=config.floatX))
-
-    assert obs_var.owner.inputs[0] is rv_var
-
-    with raises(TypeError):
-        observed(rv_var, np.array([1, 2], dtype=int))
-
-    with raises(TypeError):
-        observed(rv_var, np.array([[1.0, 2.0]], dtype=rv_var.dtype))
-
-    obs_rv = observed(None, np.array([0.2, 0.1, -2.4], dtype=config.floatX))
-
-    assert isinstance(obs_rv.owner.inputs[0].type, NoneTypeT)
-
-    rv_val = vector()
-    rv_val.tag.test_value = np.array([0.2, 0.1, -2.4], dtype=config.floatX)
-    obs_var = observed(rv_var, rv_val)
-
-    with raises(NullTypeGradError):
-        grad(obs_var.sum(), [rv_val])
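
A related sketch of the gradient behavior exercised by the deleted test above (again pre-removal API): `Observed.grad` returns `grad_undefined` for every input, so differentiating through an observation raises `NullTypeGradError`.

import numpy as np

from aesara import config
from aesara.gradient import NullTypeGradError, grad
from aesara.tensor.random.basic import normal
from aesara.tensor.random.op import observed  # removed by this commit
from aesara.tensor.type import vector

rv_var = normal(0, 1, size=3)
rv_val = vector()
rv_val.tag.test_value = np.array([0.2, 0.1, -2.4], dtype=config.floatX)

try:
    grad(observed(rv_var, rv_val).sum(), [rv_val])
except NullTypeGradError:
    pass  # expected: no gradient is defined through an observation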