Use the modern numpy RNG interface
Rather than using np.random.seed and APIs like
np.random.rand, create a Generator and use
methods on it.

This changes the RNG state, so some test code
has to have its seed adjusted.
timj committed Feb 5, 2025
1 parent 9cea5d0 commit f40c6f6
Showing 18 changed files with 140 additions and 135 deletions.
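The change applied across these files follows one pattern. As a rough sketch (the seed and variable names below are illustrative, not taken from any particular file), the legacy module-level calls map onto Generator methods like this:

import numpy as np

# Legacy global-state style that the tests used before:
#   np.random.seed(12345)
#   values = np.random.rand(10)            # uniform samples in [0, 1)
#   counts = np.random.randint(5, size=10)
#   gauss = np.random.randn(3, 3)

# Generator style used in this commit: seed a local Generator and call
# methods on it instead of the np.random module-level functions.
rng = np.random.Generator(np.random.MT19937(12345))
values = rng.random(10)                # replaces np.random.rand / np.random.random
counts = rng.integers(5, size=10)      # replaces np.random.randint
gauss = rng.standard_normal((3, 3))    # replaces np.random.randn
noise = rng.normal(0.0, 1.0, size=10)  # normal, uniform, poisson, etc. keep their names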
5 changes: 3 additions & 2 deletions python/lsst/meas/algorithms/testUtils.py
@@ -85,9 +85,10 @@ def plantSources(bbox, kwid, sky, coordList, addPoissonNoise=True):

     # add Poisson noise
     if (addPoissonNoise):
-        np.random.seed(seed=1)  # make results reproducible
+        # Make results predictable over different numpy versions.
+        rng = np.random.Generator(np.random.MT19937(5))
         imgArr = img.getArray()
-        imgArr[:] = np.random.poisson(imgArr)
+        imgArr[:] = rng.poisson(imgArr)

     # bundle into a maskedimage and an exposure
     mask = afwImage.Mask(bbox)
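The hunk above pins the MT19937 bit generator rather than calling np.random.default_rng. A minimal sketch of the distinction, assuming current numpy behaviour (default_rng is presently backed by PCG64); the seed and array here are placeholders:

import numpy as np

# Pinned bit generator: the stream depends only on MT19937 plus the seed,
# which is what makes the results "predictable over different numpy versions".
rng_pinned = np.random.Generator(np.random.MT19937(5))

# Convenience constructor: currently PCG64, chosen by numpy rather than the caller.
rng_default = np.random.default_rng(5)

image = np.full((4, 4), 100.0)
noisy = rng_pinned.poisson(image)  # reproducible Poisson noise, as in plantSources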
48 changes: 24 additions & 24 deletions tests/convertReferenceCatalogTestBase.py
@@ -129,22 +129,22 @@ def makeSkyCatalog(cls, outPath, size=1000, idStart=1, seed=123):
         refCatData : `np.ndarray`
             The data contained in the on-sky catalog files.
         """
-        np.random.seed(seed)
+        rng = np.random.Generator(np.random.MT19937(seed))
         ident = np.arange(idStart, size + idStart, dtype=int)
-        ra = np.random.random(size)*360.
-        dec = np.degrees(np.arccos(2.*np.random.random(size) - 1.))
+        ra = rng.random(size)*360.
+        dec = np.degrees(np.arccos(2.*rng.random(size) - 1.))
         dec -= 90.
         ra_err = np.ones(size)*0.1  # arcsec
         dec_err = np.ones(size)*0.1  # arcsec
-        a_mag = 16. + np.random.random(size)*4.
-        a_mag_err = 0.01 + np.random.random(size)*0.2
-        b_mag = 17. + np.random.random(size)*5.
-        b_mag_err = 0.02 + np.random.random(size)*0.3
-        is_photometric = np.random.randint(2, size=size)
-        is_resolved = np.random.randint(2, size=size)
-        is_variable = np.random.randint(2, size=size)
-        extra_col1 = np.random.normal(size=size)
-        extra_col2 = np.random.normal(1000., 100., size=size)
+        a_mag = 16. + rng.random(size)*4.
+        a_mag_err = 0.01 + rng.random(size)*0.2
+        b_mag = 17. + rng.random(size)*5.
+        b_mag_err = 0.02 + rng.random(size)*0.3
+        is_photometric = rng.integers(2, size=size)
+        is_resolved = rng.integers(2, size=size)
+        is_variable = rng.integers(2, size=size)
+        extra_col1 = rng.normal(size=size)
+        extra_col2 = rng.normal(1000., 100., size=size)
         # compute proper motion and PM error in arcseconds/year
         # and let the convert task scale them to radians
         pm_amt_arcsec = cls.properMotionAmt.asArcseconds()
@@ -155,21 +155,21 @@ def makeSkyCatalog(cls, outPath, size=1000, idStart=1, seed=123):
         pm_dec_err = np.ones(size)*cls.properMotionErr.asArcseconds()*abs(math.sin(pm_dir_rad))
         parallax = np.ones(size)*0.1  # arcseconds
         parallax_error = np.ones(size)*0.003  # arcseconds
-        ra_dec_corr = 2 * np.random.random(size) - 1
-        ra_parallax_corr = 2 * np.random.random(size) - 1
-        ra_pmra_corr = 2 * np.random.random(size) - 1
-        ra_pmdec_corr = 2 * np.random.random(size) - 1
-        dec_parallax_corr = 2 * np.random.random(size) - 1
-        dec_pmra_corr = 2 * np.random.random(size) - 1
-        dec_pmdec_corr = 2 * np.random.random(size) - 1
-        parallax_pmra_corr = 2 * np.random.random(size) - 1
-        parallax_pmdec_corr = 2 * np.random.random(size) - 1
-        pmra_pmdec_corr = 2 * np.random.random(size) - 1
+        ra_dec_corr = 2 * rng.random(size) - 1
+        ra_parallax_corr = 2 * rng.random(size) - 1
+        ra_pmra_corr = 2 * rng.random(size) - 1
+        ra_pmdec_corr = 2 * rng.random(size) - 1
+        dec_parallax_corr = 2 * rng.random(size) - 1
+        dec_pmra_corr = 2 * rng.random(size) - 1
+        dec_pmdec_corr = 2 * rng.random(size) - 1
+        parallax_pmra_corr = 2 * rng.random(size) - 1
+        parallax_pmdec_corr = 2 * rng.random(size) - 1
+        pmra_pmdec_corr = 2 * rng.random(size) - 1
         unixtime = np.ones(size)*cls.epoch.unix

         def get_word(word_len):
-            return "".join(np.random.choice([s for s in string.ascii_letters], word_len))
-        extra_col3 = np.array([get_word(num) for num in np.random.randint(11, size=size)])
+            return "".join(rng.choice([s for s in string.ascii_letters], word_len))
+        extra_col3 = np.array([get_word(num) for num in rng.integers(11, size=size)])

         dtype = np.dtype([('id', float), ('ra', float), ('dec', float),
                           ('ra_error', float), ('dec_error', float), ('a', float),
4 changes: 2 additions & 2 deletions tests/nopytest_convertReferenceCatalog.py
@@ -124,7 +124,7 @@ class TestConvertRefcatManager(convertReferenceCatalogTestBase.ConvertReferenceC
     Uses mocks to force particular behavior regarding e.g. catalogs.
     """
     def setUp(self):
-        np.random.seed(10)
+        self.rng = np.random.Generator(np.random.MT19937(5))

         self.tempDir = tempfile.TemporaryDirectory()
         tempPath = self.tempDir.name
@@ -176,7 +176,7 @@ def _createFakeCatalog(self, nOld=5, nNew=0, idStart=42):
         catalog = lsst.afw.table.SimpleCatalog(self.schema)
         catalog.resize(nOld)
         for x in self.schema:
-            catalog[x.key] = np.random.random(nOld)
+            catalog[x.key] = self.rng.random(nOld)
         # do the ids separately, so there are no duplicates
         catalog['id'] = np.arange(idStart, idStart + nOld)
         catalog.resize(nOld + nNew)  # make space for the elements we will add
20 changes: 10 additions & 10 deletions tests/test_accumulator_mean_stack.py
@@ -12,7 +12,7 @@
 class AccumulatorMeanStackTestCase(lsst.utils.tests.TestCase):
     def make_test_images_to_coadd(self):
         """Make test images to coadd."""
-        np.random.seed(12345)
+        rng = np.random.Generator(np.random.MT19937(5))

         # Choose an arbitrary number of images and image size.
         # Below we set bad pixels for either 1 or half these images,
@@ -25,18 +25,18 @@ def make_test_images_to_coadd(self):
         # Create some bad pixels at random positions, and some others at
         # fixed positions (but not in all images).
         exposures = []
-        variances = np.random.uniform(size=n_image, low=5.0, high=10.0)
+        variances = rng.uniform(size=n_image, low=5.0, high=10.0)

         for i in range(n_image):
             exposure = afwImage.ExposureF(width=image_size[0], height=image_size[1])
             exposure.variance.array[:, :] = variances[i]
-            exposure.image.array[:, :] = np.random.normal(loc=0.0,
-                                                          scale=np.sqrt(variances[i]),
-                                                          size=image_size)
-            exposure.image.array[50, 50] = np.random.normal(loc=100.0,
-                                                            scale=np.sqrt(100.0))
-            exposure.image.array[100, 100] = np.random.normal(loc=200.0,
-                                                              scale=np.sqrt(200.0))
+            exposure.image.array[:, :] = rng.normal(loc=0.0,
+                                                    scale=np.sqrt(variances[i]),
+                                                    size=image_size)
+            exposure.image.array[50, 50] = rng.normal(loc=100.0,
+                                                      scale=np.sqrt(100.0))
+            exposure.image.array[100, 100] = rng.normal(loc=200.0,
+                                                        scale=np.sqrt(200.0))
             if (i == 2):
                 # Create one outlier pixel
                 exposure.image.array[150, 125] = 1000.0
@@ -54,7 +54,7 @@ def make_test_images_to_coadd(self):

             exposures.append(exposure)

-        weights = np.random.uniform(size=n_image, low=0.9, high=1.1)
+        weights = rng.uniform(size=n_image, low=0.9, high=1.1)

         return exposures, weights

10 changes: 5 additions & 5 deletions tests/test_brightStarStamps.py
@@ -34,23 +34,23 @@ class BrightStarStampsTestCase(lsst.utils.tests.TestCase):
"""Test BrightStarStamps.
"""
def setUp(self):
np.random.seed(12)
rng = np.random.Generator(np.random.MT19937(5))
stampSize = (25, 25)
# create dummy star stamps
starImages = [afwImage.MaskedImageF(*stampSize)
for _ in range(3)]
for starIm in starImages:
starImArray = starIm.image.array
starImArray += np.random.rand(*stampSize)
starImArray += rng.random(stampSize)
ids = ["111", "aaa", "bbb"]
positions = [Point2I(x0, y0)
for x0, y0 in zip(np.random.randint(11, size=3), np.random.randint(11, size=3))]
mags = np.random.rand(3)
for x0, y0 in zip(rng.integers(11, size=3), rng.integers(11, size=3))]
mags = rng.random(3)
# faint object to test magnitude cuts
self.faintObjIdx = 1
mags[self.faintObjIdx] = 18.
ids[self.faintObjIdx] = "faint"
fluxes = np.random.rand(3)
fluxes = rng.random(3)
self.starStamps = [brightStarStamps.BrightStarStamp(stamp_im=starIm,
gaiaGMag=mag,
position=pos,
4 changes: 2 additions & 2 deletions tests/test_cloughTocher2DInterpolate.py
@@ -90,8 +90,8 @@ def setUp(self):

         # Create a noise image
         self.noise = self.maskedimage.clone()
-        np.random.seed(12345)
-        self.noise.image.array[:, :] = np.random.normal(size=self.noise.image.array.shape)
+        rng = np.random.Generator(np.random.MT19937(5))
+        self.noise.image.array[:, :] = rng.normal(size=self.noise.image.array.shape)

     @lsst.utils.tests.methodParametersProduct(n_runs=(1, 2), flipXY=(False, True))
     def test_interpolation(self, n_runs: int, flipXY: bool):
13 changes: 7 additions & 6 deletions tests/test_coaddBoundedField.py
@@ -42,23 +42,23 @@ def makeRandomWcs(self, crval, maxOffset=10.0):
         warped images (which implicitly include the Jacobian) against the behavior of CoaddBoundedField
         (which intentionally does not).
         """
-        crpix = lsst.geom.Point2D(*np.random.uniform(low=-maxOffset, high=maxOffset, size=2))
+        crpix = lsst.geom.Point2D(*self.rng.uniform(low=-maxOffset, high=maxOffset, size=2))
         scale = 0.01*lsst.geom.arcseconds
-        orientation = np.pi*np.random.rand()*lsst.geom.radians
+        orientation = np.pi*self.rng.random()*lsst.geom.radians
         cdMatrix = lsst.afw.geom.makeCdMatrix(scale=scale, orientation=orientation)
         return lsst.afw.geom.makeSkyWcs(crpix=crpix, crval=crval, cdMatrix=cdMatrix)

     def makeRandomField(self, bbox):
         """Create a random ChebyshevBoundedField"""
-        coefficients = np.random.randn(3, 3)
+        coefficients = self.rng.standard_normal((3, 3))
         # We add a positive DC offset to make sure our fields more likely to combine constructively;
         # this makes the test more stringent, because we get less accidental small numbers.
-        coefficients[0, 0] = np.random.uniform(low=4, high=6)
+        coefficients[0, 0] = self.rng.uniform(low=4, high=6)
         return lsst.afw.math.ChebyshevBoundedField(bbox, coefficients)

     def constructElements(self, validBox):
         """Construct the elements of a CoaddBoundedField."""
-        np.random.seed(50)
+        rng = np.random.Generator(np.random.MT19937(50))
         validPolygon = lsst.afw.geom.Polygon(lsst.geom.Box2D(validBox)) if validBox else None
         elements = []
         validBoxes = []
@@ -69,13 +69,14 @@ def constructElements(self, validBox):
                     self.makeRandomField(self.elementBBox),
                     self.makeRandomWcs(self.crval),
                     validPolygon,
-                    np.random.uniform(low=0.5, high=1.5)
+                    rng.uniform(low=0.5, high=1.5)
                 )
             )
             validBoxes.append(validBox)
         return elements, validBoxes

     def setUp(self):
+        self.rng = np.random.Generator(np.random.MT19937(50))
         self.crval = lsst.geom.SpherePoint(45.0, 45.0, lsst.geom.degrees)
         self.elementBBox = lsst.geom.Box2I(lsst.geom.Point2I(-50, -50), lsst.geom.Point2I(50, 50))
         self.coaddWcs = self.makeRandomWcs(self.crval, maxOffset=0.0)
17 changes: 9 additions & 8 deletions tests/test_computeExPsf.py
@@ -98,7 +98,7 @@ def generate_gaussian_random_field(
         Default: ``10000``
     nmax: `int`
         Max number of data points generated using
-        `np.random.multivariate_normal`. If
+        `np.random.Generator.multivariate_normal`. If
         npoints>nmax, a GP will be used in addition.
         Default: ``1000``
     std: `float`
@@ -126,7 +126,8 @@ def generate_gaussian_random_field(
     ``z``: `np.ndarray`
         Scalar value of the gaussian random field
     """
-    np.random.seed(seed)
+    # Choose explicit RNG.
+    rng = np.random.Generator(np.random.MT19937(seed))

     if input_coord is not None:
         npoints = len(input_coord[:, 0])
@@ -137,24 +138,24 @@ def generate_gaussian_random_field(
         ngenerated = npoints

     if input_coord is None or npoints > nmax:
-        x1 = np.random.uniform(xmin, xmax, ngenerated)
-        x2 = np.random.uniform(ymin, ymax, ngenerated)
+        x1 = rng.uniform(xmin, xmax, ngenerated)
+        x2 = rng.uniform(ymin, ymax, ngenerated)
         coord1 = np.array([x1, x2]).T
     else:
         coord1 = input_coord
     kernel = rbf_kernel(coord1, coord1, std, correlation_length)
     kernel += np.eye(ngenerated) * white_noise**2

-    z1 = np.random.multivariate_normal(np.zeros(ngenerated), kernel)
+    z1 = rng.multivariate_normal(np.zeros(ngenerated), kernel)

     # Data augmentation. Create a gaussian random field
     # with npoints>nmax is to slow. So generate nmax points
     # and then interpolate it with a GP to do data augmentation.

     if npoints > nmax:
         if input_coord is None:
-            x1 = np.random.uniform(xmin, xmax, npoints)
-            x2 = np.random.uniform(ymin, ymax, npoints)
+            x1 = rng.uniform(xmin, xmax, npoints)
+            x2 = rng.uniform(ymin, ymax, npoints)
             coord = np.array([x1, x2]).T
         else:
             coord = input_coord
@@ -190,7 +191,7 @@ def setUp(self) -> None:
             std=1.0,
             correlation_length=200.0,
             white_noise=0.01,
-            seed=42,
+            seed=1,
             input_coord=None,
         )

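The test above builds a Gaussian random field the same way in several places: assemble an RBF covariance matrix, add white-noise jitter, and draw one sample with multivariate_normal. A self-contained sketch of that pattern (the rbf_kernel helper here is a stand-in for the one the test imports, and the sizes and seed are placeholders):

import numpy as np

def rbf_kernel(x1, x2, std, correlation_length):
    # Squared-exponential covariance between two sets of 2-D coordinates.
    dists = np.sum((x1[:, None, :] - x2[None, :, :]) ** 2, axis=-1)
    return std**2 * np.exp(-0.5 * dists / correlation_length**2)

rng = np.random.Generator(np.random.MT19937(1))
coords = rng.uniform(0.0, 100.0, size=(200, 2))

kernel = rbf_kernel(coords, coords, 1.0, 10.0)
kernel += np.eye(len(coords)) * 1e-5**2  # white-noise term keeps the matrix well conditioned

# One realisation of the correlated field at the sampled coordinates.
z = rng.multivariate_normal(np.zeros(len(coords)), kernel)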
3 changes: 0 additions & 3 deletions tests/test_convertReferenceCatalog.py
@@ -281,9 +281,6 @@ class ConvertGaiaManagerTests(convertReferenceCatalogTestBase.ConvertReferenceCa
"""Unittests specific to the Gaia catalog.
"""
def setUp(self):

np.random.seed(10)

self.tempDir = tempfile.TemporaryDirectory()
tempPath = self.tempDir.name
self.log = logging.getLogger("lsst.TestConvertRefcatManager")
6 changes: 3 additions & 3 deletions tests/test_dynamicDetection.py
@@ -27,10 +27,10 @@ def setUp(self):
         starBox.grow(-int(buffer*sigma))
         scale = 1.0e-5*degrees  # Pixel scale

-        np.random.seed(12345)
+        rng = np.random.Generator(np.random.MT19937(12345))
         stars = [(xx, yy, ff, sigma) for xx, yy, ff in
-                 zip(np.random.uniform(starBox.getMinX(), starBox.getMaxX(), numStars),
-                     np.random.uniform(starBox.getMinY(), starBox.getMaxY(), numStars),
+                 zip(rng.uniform(starBox.getMinX(), starBox.getMaxX(), numStars),
+                     rng.uniform(starBox.getMinY(), starBox.getMaxY(), numStars),
                      np.linspace(faint, bright, numStars))]
         self.exposure = plantSources(box, 2*int(nSigmaForKernel*sigma) + 1, sky, stars, True)
         self.exposure.setWcs(makeSkyWcs(crpix=Point2D(0, 0),
12 changes: 6 additions & 6 deletions tests/test_gp_interp.py
@@ -69,10 +69,10 @@ def setUp(self):
         self.correlation_length = 10.0
         self.white_noise = 1e-5

-        np.random.seed(42)
+        rng = np.random.Generator(np.random.MT19937(5))

-        x1 = np.random.uniform(0, 99, npoints)
-        x2 = np.random.uniform(0, 120, npoints)
+        x1 = rng.uniform(0, 99, npoints)
+        x2 = rng.uniform(0, 120, npoints)
         coord1 = np.array([x1, x2]).T

         kernel = rbf_kernel(coord1, coord1, self.std, self.correlation_length)
@@ -82,7 +82,7 @@ def setUp(self):
         # on a 100 * 100 is to slow. So generate 1e3 points
         # and then interpolate it with a GP to do data augmentation.

-        z1 = np.random.multivariate_normal(np.zeros(npoints), kernel)
+        z1 = rng.multivariate_normal(np.zeros(npoints), kernel)

         x1 = np.linspace(0, 99, 100)
         x2 = np.linspace(0, 120, 121)
@@ -140,8 +140,8 @@ def setUp(self):

         # Create a noise image
         # self.noise = self.maskedimage.clone()
-        # np.random.seed(12345)
-        # self.noise.image.array[:, :] = np.random.normal(size=self.noise.image.array.shape)
+        # rng = np.random.Generator(np.random.MT19937(5))
+        # self.noise.image.array[:, :] = rng.normal(size=self.noise.image.array.shape)

     def test_interpolation(self):
         """Test that the interpolation is done correctly.
4 changes: 2 additions & 2 deletions tests/test_interp.py
@@ -152,8 +152,8 @@ def testEdge(self):
         for nBadCol in range(0, 20):
             mi.set((0, 0x0, 0))

-            np.random.seed(666)
-            ima[:] = np.random.uniform(-1, 1, ima.shape)
+            rng = np.random.Generator(np.random.MT19937(5))
+            ima[:] = rng.uniform(-1, 1, ima.shape)

             defects = []

5 changes: 3 additions & 2 deletions tests/test_measureApCorr.py
@@ -50,8 +50,8 @@ def makeCatalog(self, apCorrScale=1.0, numSources=5):
         sourceCat = afwTable.SourceCatalog(self.schema)

         centroidKey = afwTable.Point2DKey(self.schema["slot_Centroid"])
-        x = np.random.rand(numSources)*self.exposure.getWidth() + self.exposure.getX0()
-        y = np.random.rand(numSources)*self.exposure.getHeight() + self.exposure.getY0()
+        x = self.rng.random(numSources)*self.exposure.getWidth() + self.exposure.getX0()
+        y = self.rng.random(numSources)*self.exposure.getHeight() + self.exposure.getY0()
         for _i in range(numSources):
             source_test_centroid = lsst.geom.Point2D(x[_i], y[_i])
             source = sourceCat.addNew()
@@ -73,6 +73,7 @@ def makeCatalog(self, apCorrScale=1.0, numSources=5):
         return sourceCat

     def setUp(self):
+        self.rng = np.random.Generator(np.random.MT19937(1))
         schema = afwTable.SourceTable.makeMinimalSchema()
         apNameStr = "Ap"
         config = measureApCorr.MeasureApCorrTask.ConfigClass()