提交 15637d23 authored 作者: Maxim Kochurov's avatar Maxim Kochurov 提交者: Maxim Kochurov

remove deprecated MRG_RandomStream

上级 cc364dd9
...@@ -15,4 +15,3 @@ ...@@ -15,4 +15,3 @@
linalg linalg
neighbours neighbours
rng_mrg
.. _libdoc_rng_mrg:
===================================================================
:mod:`sandbox.rng_mrg` -- MRG random number generator
===================================================================
.. module:: sandbox.rng_mrg
:platform: Unix, Windows
:synopsis: MRG random number generator
.. moduleauthor:: LISA
API
===
.. automodule:: pytensor.sandbox.rng_mrg
:members:
...@@ -1485,7 +1485,6 @@ class ProfileStats: ...@@ -1485,7 +1485,6 @@ class ProfileStats:
from pytensor import scalar as aes from pytensor import scalar as aes
from pytensor.tensor.elemwise import Elemwise from pytensor.tensor.elemwise import Elemwise
from pytensor.tensor.math import Dot from pytensor.tensor.math import Dot
from pytensor.tensor.random.op import RandomVariable
scalar_op_amdlibm_no_speed_up = [ scalar_op_amdlibm_no_speed_up = [
aes.LT, aes.LT,
...@@ -1628,18 +1627,7 @@ class ProfileStats: ...@@ -1628,18 +1627,7 @@ class ProfileStats:
printed_tip = True printed_tip = True
# tip 5 # tip 5
for (fgraph, a) in self.apply_time: # The tip was about MRG_RandomStream which is removed
node = a
if isinstance(node.op, RandomVariable):
printed_tip = True
print(
" - Replace the default random number generator by "
"'from pytensor.sandbox.rng_mrg import MRG_RandomStream "
"as RandomStream', as this is is faster. It is still "
"experimental, but seems to work correctly.",
file=file,
)
break
# tip 6 # tip 6
for (fgraph, a) in self.apply_time: for (fgraph, a) in self.apply_time:
......
差异被折叠。
差异被折叠。
0.7353244530968368
0.6142074400559068
0.11007806099951267
0.6487741703167558
0.36619443260133266
0.2585685825906694
0.9489980279468
0.4309556516818702
0.12257590936496854
0.9760319022461772
0.6940806899219751
0.18046841165050864
0.003993193618953228
0.5351603352464736
0.02472442388534546
0.7705746139399707
0.8138928869739175
0.9650539481081069
0.24507411010563374
0.35767574002966285
0.4939101580530405
0.9027785388752818
0.27498403564095497
0.03848231676965952
0.3081609820947051
0.9062023567967117
0.009030417073518038
0.7953705741092563
0.5061718439683318
0.5975547162815928
0.5435514179989696
0.330895590595901
0.49919482320547104
0.9409166998229921
0.8276205519214272
0.5180770065635443
0.2319392478093505
0.36197659047320485
0.11120751267299056
0.5018561617471278
0.47852187464013696
0.7188052111305296
0.3030327311716974
0.6756376498378813
0.03624899685382843
0.34987151669338346
0.031225718092173338
0.06772322440519929
0.06820952938869596
0.9987128847278655
0.08330700965598226
0.9731874465942383
0.6345655219629407
0.7169904578477144
0.5793502484448254
0.7396790678612888
0.9926023166626692
0.7522463691420853
0.6768838302232325
0.3253784184344113
0.05375300580635667
0.4912636987864971
0.6485021142289042
0.3043024237267673
0.24868384934961796
0.8166692252270877
0.5274319797754288
0.31434731651097536
0.9961257497780025
0.3549888739362359
0.8423425843939185
0.21591948671266437
0.8698299624957144
0.17033040337264538
0.22816143138334155
0.11795765580609441
0.7024209997616708
0.15607220400124788
0.5493582566268742
0.5827712984755635
0.8592293248511851
0.785309090744704
0.6115233600139618
0.019046304281800985
0.2573754615150392
0.03130705002695322
0.6572857238352299
0.2033171127550304
0.5058645992539823
0.15793190989643335
0.6273676953278482
0.7285307059064507
0.265245848800987
0.6073522809892893
0.3896624594926834
0.27189663611352444
0.705508322454989
0.12823439668864012
0.39648046158254147
0.6584051586687565
0.07818163838237524
0.33628708589822054
0.20613654889166355
0.4277639244683087
0.5401185592636466
0.07513022050261497
0.4920963351614773
0.18214095244184136
0.3235122123733163
0.29958881670609117
0.7304665613919497
0.05146520072594285
0.2471711952239275
0.8797005712985992
0.5029069227166474
0.526974250562489
0.15968210343271494
0.4696163134649396
0.17607332626357675
0.362843859475106
0.7626461815088987
0.960180682130158
0.2536660563200712
0.710880630183965
0.28728525526821613
0.78940424695611
0.5242114691063762
0.8314367309212685
0.5898511232808232
0.015212591737508774
0.4944482510909438
0.06396882887929678
0.519745257217437
0.3558214954100549
0.04566589882597327
0.8368005948141217
0.979805170558393
0.7622401369735599
0.2578657674603164
0.5378834479488432
0.9926298237405717
0.4013678622432053
0.510077933780849
0.018817965406924486
0.21481098141521215
0.5357040031813085
0.8512061606161296
0.009026535786688328
0.27302876580506563
0.21162108704447746
0.5273029855452478
0.1086404686793685
0.14079083362594247
0.14331109775230289
0.8190496540628374
0.3947252375073731
0.28109811525791883
0.4066850380040705
0.9154577874578536
0.8929708409123123
0.13500721845775843
0.6328344400972128
0.5668322211131454
0.5448646773584187
0.5418433886952698
0.1141617177054286
0.15885689994320273
0.3867143443785608
0.5574855520389974
0.9173167692497373
0.22908265376463532
0.2047420055605471
0.05979115655645728
0.44121386017650366
0.9507057839073241
0.15352962678298354
0.23290937673300505
0.46427791472524405
8.519855327904224E-4
0.7947354763746262
0.6385304923169315
0.8696001935750246
0.6022149357013404
0.02299323584884405
0.5036068987101316
0.7541037476621568
0.9995524706318974
0.5888469088822603
0.3318097642622888
0.32492663664743304
0.6643895329907537
0.3656829949468374
0.4912424306385219
0.1900841724127531
0.5945985522121191
0.5709856003522873
0.35780346347019076
0.388774358201772
0.9446004652418196
0.14594348100945354
0.6250799335539341
0.5504232128150761
0.16380576323717833
0.7428167965263128
0.5522975320927799
0.655389194842428
0.47579632699489594
0.29743909696117043
0.6319712968543172
0.8178138644434512
0.2785301594994962
0.46813122322782874
0.2898342702537775
0.3287009159103036
0.12909299414604902
0.5859099281951785
0.1891166502609849
0.14497734932228923
0.5543341124430299
0.11846801871433854
0.8499364419840276
0.6603211951442063
0.35630465345457196
0.9680569358170033
0.6639338186942041
0.24408268369734287
0.030771974939852953
0.17226932244375348
0.7909302446059883
0.4327161009423435
0.6732332338578999
0.0849734228104353
0.7278832173906267
0.5536605608649552
0.7091806619428098
0.01754110073670745
0.8406045655719936
0.4815619965083897
0.0535086034797132
0.9874794147908688
0.07097038673236966
0.023544831201434135
0.42413365049287677
0.2970325672067702
0.48028060607612133
0.1990663455799222
0.6099434774369001
0.5050413520075381
0.7814605687744915
0.2650358658283949
0.5148864723742008
0.7807142282836139
0.0976667134091258
0.1516015767119825
0.6566055505536497
0.3946392172947526
0.8052488421089947
0.2964451564475894
0.07394864456728101
0.6961450576782227
0.01576960226520896
0.3434433783404529
0.08799878368154168
0.785557022318244
0.7494717631489038
0.45548726338893175
0.7672475459985435
0.5134695749729872
0.7000438082031906
0.49818582693114877
0.4293400440365076
0.9961911663413048
0.016769078094512224
0.013044610153883696
0.8661804771982133
0.7819683295674622
0.33438047766685486
0.966121535282582
0.7259743176400661
0.9887824659235775
0.9494950002990663
0.037431647535413504
0.8268285538069904
0.7355263698846102
0.3120658891275525
0.3588241692632437
0.471130283549428
0.7047113911248744
0.980073744431138
0.6762627908028662
0.869295812677592
0.9070576094090939
0.7852784115821123
0.16342713963240385
0.06330870278179646
0.6165989111177623
0.342802997212857
0.8414176292717457
0.6921333004720509
0.2594374935142696
0.4386491202749312
0.555369642097503
0.3660965468734503
0.6484139142557979
0.9005299550481141
0.25335891311988235
0.23852926725521684
0.9044205779209733
0.8694673446007073
0.46783560374751687
0.34727911837399006
0.19556640228256583
0.8798208390362561
0.3131108647212386
0.6312824171036482
0.5722001581452787
0.9441223978064954
0.7707183314487338
0.17464511329308152
0.08897313429042697
0.5044040409848094
0.5735817537643015
0.4467783076688647
0.19051036844030023
0.4578995378687978
0.6395204453729093
0.460110604763031
0.576092894654721
0.7038368303328753
0.5555814192630351
0.4171535111963749
0.8905360852368176
0.12811446748673916
0.6814800254069269
0.8502416326664388
0.12028768053278327
0.16715052351355553
0.3563938206061721
0.049810963682830334
0.27328392397612333
0.2407418810762465
0.6631906591355801
0.674483266659081
0.10489491606131196
0.04698043642565608
0.0812066881917417
0.312124056275934
0.6798701109364629
0.7286937129683793
0.9784366562962532
0.5650205011479557
0.833059043623507
0.8976074242964387
0.9441233519464731
0.6146679543890059
0.9019614770077169
0.5529476394876838
0.7665416682139039
0.39598167687654495
0.26307358546182513
0.14862705068662763
0.9521124185994267
0.17644333699718118
0.7684473628178239
0.4274347145110369
0.6102834036573768
0.9328651092946529
0.058630190789699554
0.04729347629472613
0.9597438890486956
0.6761234584264457
0.21832499839365482
0.20707347383722663
0.7274158899672329
0.9477886455133557
0.7821800266392529
0.07305240212008357
0.40399201214313507
0.22684293938800693
0.053185423370450735
0.330069282092154
0.6862794999033213
0.7821815954521298
0.22617859859019518
0.8118352359160781
0.015444065444171429
0.6732339109294116
0.9980663135647774
0.8833195753395557
0.21191661106422544
0.32638366147875786
0.5747208022512496
0.07515769777819514
0.02952938713133335
0.4980746121145785
0.8762881984002888
0.17386484891176224
0.10696181375533342
0.5474299816414714
0.016154434997588396
0.6960771018639207
0.47133891424164176
0.9015861176885664
0.782880718819797
0.6602211343124509
0.6578835439868271
0.6049443730153143
0.17169494135305285
0.9915955001488328
0.10519243823364377
0.37815978936851025
0.20879409136250615
0.45666090911254287
0.6456936108879745
0.684759714640677
0.8762755445204675
0.8020628895610571
0.1663151141256094
0.31246642768383026
0.18852565623819828
...@@ -6,7 +6,6 @@ import numpy as np ...@@ -6,7 +6,6 @@ import numpy as np
import pytensor import pytensor
from pytensor.misc.pkl_utils import StripPickler, dump, load from pytensor.misc.pkl_utils import StripPickler, dump, load
from pytensor.sandbox.rng_mrg import MRG_RandomStream
from pytensor.tensor.type import matrix from pytensor.tensor.type import matrix
...@@ -23,17 +22,6 @@ class TestDumpLoad: ...@@ -23,17 +22,6 @@ class TestDumpLoad:
if self.tmpdir is not None: if self.tmpdir is not None:
shutil.rmtree(self.tmpdir) shutil.rmtree(self.tmpdir)
def test_dump_load_mrg(self):
rng = MRG_RandomStream()
with open("test", "wb") as f:
dump(rng, f)
with open("test", "rb") as f:
rng = load(f)
assert type(rng) == MRG_RandomStream
def test_dump_zip_names(self): def test_dump_zip_names(self):
foo_1 = pytensor.shared(0, name="foo") foo_1 = pytensor.shared(0, name="foo")
foo_2 = pytensor.shared(1, name="foo") foo_2 = pytensor.shared(1, name="foo")
......
import numpy as np
import tests.unittest_tools as utt
from pytensor import function
from pytensor.configdefaults import config
from pytensor.sandbox.multinomial import MultinomialFromUniform
from pytensor.tensor.type import dmatrix, dvector, fmatrix, fvector, iscalar
def test_n_samples_1():
    """With degenerate (one-hot) probabilities, all ``n`` draws land on the certain class."""
    p = fmatrix()
    u = fvector()
    n = iscalar()
    m = MultinomialFromUniform("auto")(p, u, n)
    f = function([p, u, n], m, allow_input_downcast=True)

    rng = np.random.default_rng(12345)
    for n_samples in (1, 5, 10, 100, 1000, 10000):
        uniform_draws = rng.random(2 * n_samples).astype(config.floatX)
        counts = f([[1.0, 0.0], [0.0, 1.0]], uniform_draws, n_samples)
        # Every draw must fall on the class with probability 1.
        utt.assert_allclose(counts, [[n_samples * 1.0, 0.0], [0.0, n_samples * 1.0]])
def test_n_samples_2():
    """The counts returned by MultinomialFromUniform must sum to exactly ``n``.

    Runs the op over random probability vectors at two different support
    sizes and checks the per-call total.  The two support sizes were
    previously two copy-pasted loops; they are folded into one parameterized
    loop with the same RNG call order, so the drawn values are unchanged.
    """
    p = fmatrix()
    u = fvector()
    n = iscalar()
    m = MultinomialFromUniform("auto")(p, u, n)
    f = function([p, u, n], m, allow_input_downcast=True)

    rng = np.random.default_rng(12345)
    for n_outcomes in (1000, 1000000):
        for i in (1, 5, 10, 100, 1000):
            uni = rng.random(i).astype(config.floatX)
            pvals = rng.integers(1, n_outcomes, (1, n_outcomes)).astype(config.floatX)
            pvals /= pvals.sum(1)
            res = f(pvals, uni, i)
            # Multinomial counts always total the number of samples drawn.
            assert res.sum() == i
def test_multinomial_0():
    """Exercise the MultinomialFromUniform op directly.

    This bypasses the ``multinomial()`` call used in GPU random generation
    and checks specific one-sample draws for hand-picked uniform values.
    """
    p = fmatrix()
    u = fvector()
    m = MultinomialFromUniform("auto")(p, u)

    # The m * 2 allows the multinomial to reuse its output buffer.
    f = function([p, u], m * 2, allow_input_downcast=True)

    # Both first and second samples can be drawn.
    utt.assert_allclose(f([[1, 0], [0, 1]], [0.1, 0.1]), [[2, 0], [0, 2]])

    # Both second labels can be drawn.
    out = f([[0.2, 0.8], [0.3, 0.7]], [0.31, 0.31])
    utt.assert_allclose(out, [[0, 2], [0, 2]])

    # Both first labels can be drawn.
    out = f([[0.2, 0.8], [0.3, 0.7]], [0.21, 0.21])
    utt.assert_allclose(out, [[0, 2], [2, 0]])

    # Change the input size to make sure the output gets reallocated ok,
    # and that the GPU version doesn't screw up the transposed-ness.
    out = f([[0.2, 0.8]], [0.25])
    utt.assert_allclose(out, [[0, 2]])

    # TODO: check a bigger example (make sure blocking on GPU is handled correctly)
def test_multinomial_large():
    """Large batch: output keeps the input's shape, the expected dtype, and correct row totals."""
    p = fmatrix()
    u = fvector()
    m = MultinomialFromUniform("auto")(p, u)
    f = function([p, u], m * 2, allow_input_downcast=True)

    probs = np.arange(10000 * 4, dtype="float32").reshape((10000, 4)) + 0.1
    probs = probs / probs.sum(axis=1)[:, None]
    uniforms = np.ones_like(probs[:, 0]) * 0.5
    counts = f(probs, uniforms)

    assert counts.shape == probs.shape
    # The expected output dtype depends on the active cast policy.
    if config.cast_policy == "custom":
        assert counts.dtype == probs.dtype
    elif config.cast_policy == "numpy+floatX":
        assert counts.dtype == config.floatX
    elif config.cast_policy == "numpy":
        assert counts.dtype == "float64"
    else:
        raise NotImplementedError(config.cast_policy)

    utt.assert_allclose(counts.sum(axis=1), 2)
    # Every row should have picked index 2; broadcast the pattern over all rows.
    expected_rows = np.asarray([0, 0, 2, 0]) + 0 * probs
    utt.assert_allclose(counts, expected_rows)
def test_multinomial_dtypes():
    """Output dtype follows the inputs for ``"auto"``, or the explicitly requested dtype."""
    cases = (
        # (odtype, matrix ctor, vector ctor, expected output dtype)
        ("auto", dmatrix, dvector, "float64"),
        ("auto", fmatrix, fvector, "float32"),
        ("float64", fmatrix, fvector, "float64"),
    )
    for odtype, make_matrix, make_vector, expected_dtype in cases:
        p = make_matrix()
        u = make_vector()
        m = MultinomialFromUniform(odtype)(p, u)
        assert m.dtype == expected_dtype, m.dtype
import numpy as np
import pytest
from pytensor import function
from pytensor.configdefaults import config
from pytensor.sandbox import multinomial
from pytensor.sandbox.rng_mrg import MRG_RandomStream as RandomStream
from pytensor.tensor.type import fmatrix, fvector, iscalar
class TestOP:
    """Direct tests of the ``ChoiceFromUniform`` op (sampling without replacement)."""

    @pytest.mark.xfail(
        reason="This test is designed around very specific random draws from the old NumPy API"
    )
    def test_select_distinct(self):
        """ChoiceFromUniform must always select distinct elements."""
        p = fmatrix()
        u = fvector()
        n = iscalar()
        m = multinomial.ChoiceFromUniform(odtype="auto")(p, u, n)
        f = function([p, u, n], m, allow_input_downcast=True)

        n_elements = 1000
        all_indices = range(n_elements)
        rng = np.random.default_rng(12345)
        # Draws recorded from the legacy NumPy random API; only compared when
        # shapes match, and the whole test is xfailed (see decorator).
        expected = [
            np.asarray([[931, 318, 185, 209, 559]]),
            np.asarray([[477, 887, 2, 717, 333, 665, 159, 559, 348, 136]]),
            np.asarray(
                [
                    [
                        546, 28, 79, 665, 295, 779, 433, 531, 411, 716,
                        244, 234, 70, 88, 612, 639, 383, 335, 451, 100,
                        175, 492, 848, 771, 559, 214, 568, 596, 370, 486,
                        855, 925, 138, 300, 528, 507, 730, 199, 882, 357,
                        58, 195, 705, 900, 66, 468, 513, 410, 816, 672,
                    ]
                ]
            ),
        ]
        for i in [5, 10, 50, 100, 500, n_elements]:
            uni = rng.random(i).astype(config.floatX)
            pvals = rng.integers(1, 100, (1, n_elements)).astype(config.floatX)
            pvals /= pvals.sum(1)
            res = f(pvals, uni, i)
            for ii in range(len(expected)):
                if expected[ii].shape == res.shape:
                    assert (expected[ii] == res).all()
            res = np.squeeze(res)
            assert len(res) == i
            # np.in1d is deprecated since NumPy 1.25; np.isin is the drop-in
            # replacement with identical semantics here.
            assert np.all(np.isin(np.unique(res), all_indices)), res

    def test_fail_select_alot(self):
        """ChoiceFromUniform must fail when asked for more samples than there are elements."""
        p = fmatrix()
        u = fvector()
        n = iscalar()
        m = multinomial.ChoiceFromUniform(odtype="auto")(p, u, n)
        f = function([p, u, n], m, allow_input_downcast=True)

        n_elements = 100
        n_selected = 200  # deliberately larger than n_elements
        rng = np.random.default_rng(12345)
        uni = rng.random(n_selected).astype(config.floatX)
        pvals = rng.integers(1, 100, (1, n_elements)).astype(config.floatX)
        pvals /= pvals.sum(1)
        with pytest.raises(ValueError):
            f(pvals, uni, n_selected)

    def test_select_proportional_to_weight(self):
        """ChoiceFromUniform selects elements, on average, proportional to their probabilities."""
        p = fmatrix()
        u = fvector()
        n = iscalar()
        m = multinomial.ChoiceFromUniform(odtype="auto")(p, u, n)
        f = function([p, u, n], m, allow_input_downcast=True)

        n_elements = 100
        n_selected = 10
        mean_rtol = 0.0005
        rng = np.random.default_rng(12345)
        pvals = rng.integers(1, 100, (1, n_elements)).astype(config.floatX)
        pvals /= pvals.sum(1)
        # Accumulate selection frequencies over many repetitions and compare
        # the empirical distribution against pvals.
        avg_pvals = np.zeros((n_elements,), dtype=config.floatX)
        for rep in range(10000):
            uni = rng.random(n_selected).astype(config.floatX)
            res = f(pvals, uni, n_selected)
            res = np.squeeze(res)
            avg_pvals[res] += 1
        avg_pvals /= avg_pvals.sum()
        avg_diff = np.mean(abs(avg_pvals - pvals))
        assert avg_diff < mean_rtol, avg_diff
class TestFunction:
    """Tests of sampling without replacement through the RandomStream interface."""

    def test_select_distinct(self):
        """multinomial_wo_replacement must always select distinct elements."""
        th_rng = RandomStream(12345)
        p = fmatrix()
        n = iscalar()
        # multinomial_wo_replacement is deprecated; the test asserts the warning.
        with pytest.deprecated_call():
            m = th_rng.multinomial_wo_replacement(pvals=p, n=n)
        f = function([p, n], m, allow_input_downcast=True)

        n_elements = 1000
        all_indices = range(n_elements)
        rng = np.random.default_rng(12345)
        for i in [5, 10, 50, 100, 500, n_elements]:
            pvals = rng.integers(1, 100, (1, n_elements)).astype(config.floatX)
            pvals /= pvals.sum(1)
            res = f(pvals, i)
            res = np.squeeze(res)
            assert len(res) == i
            # np.in1d is deprecated since NumPy 1.25; np.isin is the drop-in
            # replacement with identical semantics here.
            assert np.all(np.isin(np.unique(res), all_indices)), res

    def test_fail_select_alot(self):
        """multinomial_wo_replacement must fail when asked for more samples than elements."""
        th_rng = RandomStream(12345)
        p = fmatrix()
        n = iscalar()
        with pytest.deprecated_call():
            m = th_rng.multinomial_wo_replacement(pvals=p, n=n)
        f = function([p, n], m, allow_input_downcast=True)

        n_elements = 100
        n_selected = 200  # deliberately larger than n_elements
        rng = np.random.default_rng(12345)
        pvals = rng.integers(1, 100, (1, n_elements)).astype(config.floatX)
        pvals /= pvals.sum(1)
        with pytest.raises(ValueError):
            f(pvals, n_selected)

    def test_select_proportional_to_weight(self):
        """choice(replace=False) selects elements, on average, proportional to their probabilities."""
        th_rng = RandomStream(12345)
        p = fmatrix()
        n = iscalar()
        m = th_rng.choice(size=n, p=p, replace=False)
        f = function([p, n], m, allow_input_downcast=True)

        n_elements = 100
        n_selected = 10
        mean_rtol = 0.0005
        rng = np.random.default_rng(12345)
        pvals = rng.integers(1, 100, (1, n_elements)).astype(config.floatX)
        pvals /= pvals.sum(1)
        # Accumulate selection frequencies over many repetitions and compare
        # the empirical distribution against pvals.
        avg_pvals = np.zeros((n_elements,), dtype=config.floatX)
        for rep in range(10000):
            res = f(pvals, n_selected)
            res = np.squeeze(res)
            avg_pvals[res] += 1
        avg_pvals /= avg_pvals.sum()
        avg_diff = np.mean(abs(avg_pvals - pvals))
        # Assert message added for parity with TestOP's version of this test.
        assert avg_diff < mean_rtol, avg_diff
差异被折叠。
...@@ -30,10 +30,10 @@ from pytensor.gradient import ( ...@@ -30,10 +30,10 @@ from pytensor.gradient import (
from pytensor.graph.basic import Apply, graph_inputs from pytensor.graph.basic import Apply, graph_inputs
from pytensor.graph.null_type import NullType from pytensor.graph.null_type import NullType
from pytensor.graph.op import Op from pytensor.graph.op import Op
from pytensor.sandbox.rng_mrg import MRG_RandomStream
from pytensor.tensor.math import add, dot, exp, sigmoid, sqr from pytensor.tensor.math import add, dot, exp, sigmoid, sqr
from pytensor.tensor.math import sum as at_sum from pytensor.tensor.math import sum as at_sum
from pytensor.tensor.math import tanh from pytensor.tensor.math import tanh
from pytensor.tensor.random import RandomStream
from pytensor.tensor.type import ( from pytensor.tensor.type import (
discrete_dtypes, discrete_dtypes,
dmatrix, dmatrix,
...@@ -956,13 +956,13 @@ def test_grad_scale(): ...@@ -956,13 +956,13 @@ def test_grad_scale():
@config.change_flags(compute_test_value="off") @config.change_flags(compute_test_value="off")
def test_undefined_grad_opt(): def test_undefined_grad_opt():
# Make sure that undefined grad get removed in optimized graph. # Make sure that undefined grad get removed in optimized graph.
random = MRG_RandomStream(np.random.default_rng().integers(1, 2147462579)) random = RandomStream(np.random.default_rng().integers(1, 2147462579))
pvals = pytensor.shared(np.random.random((10, 20)).astype(config.floatX)) pvals = pytensor.shared(np.random.random((10, 20)).astype(config.floatX))
pvals = pvals / pvals.sum(axis=1) pvals = pvals / pvals.sum(axis=1)
pvals = zero_grad(pvals) pvals = zero_grad(pvals)
samples = random.multinomial(pvals=pvals, n=1) samples = random.multinomial(p=pvals, n=1)
samples = at.cast(samples, pvals.dtype) samples = at.cast(samples, pvals.dtype)
samples = zero_grad(samples) samples = zero_grad(samples)
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论