diff --git a/src/toast/data.py b/src/toast/data.py index 4b8c0790d..9df94ce56 100644 --- a/src/toast/data.py +++ b/src/toast/data.py @@ -51,6 +51,10 @@ def __repr__(self): val += "\n>" return val + def __del__(self): + if hasattr(self, "obs"): + self.clear() + @property def comm(self): """The toast.Comm over which the data is distributed.""" diff --git a/src/toast/future_ops/sim_tod_noise.py b/src/toast/future_ops/sim_tod_noise.py index b213f1742..40e7c516b 100644 --- a/src/toast/future_ops/sim_tod_noise.py +++ b/src/toast/future_ops/sim_tod_noise.py @@ -6,6 +6,10 @@ import numpy as np +from scipy import interpolate + +from .. import rng + from ..timing import function_timer from ..traits import trait_docs, Int, Unicode @@ -21,17 +25,17 @@ @function_timer def sim_noise_timestream( - realization, - telescope, - component, - obsindx, - detindx, - rate, - firstsamp, - samples, - oversample, - freq, - psd, + realization=0, + telescope=0, + component=0, + obsindx=0, + detindx=0, + rate=1.0, + firstsamp=0, + samples=0, + oversample=2, + freq=None, + psd=None, py=False, ): """Generate a noise timestream, given a starting RNG state. @@ -124,7 +128,9 @@ def sim_noise_timestream( logfreq = np.log10(freq + freqshift) logpsd = np.log10(psd + psdshift) - interp = si.interp1d(logfreq, logpsd, kind="linear", fill_value="extrapolate") + interp = interpolate.interp1d( + logfreq, logpsd, kind="linear", fill_value="extrapolate" + ) loginterp_psd = interp(loginterp_freq) interp_psd = np.power(10.0, loginterp_psd) - psdshift @@ -292,17 +298,18 @@ def _exec(self, data, detectors=None, **kwargs): # Simulate the noise matching this key nsedata = sim_noise_timestream( - self.realization, - telescope, - self.component, - obsindx, - nse.index(key), - rate, - ob.local_index_offset + global_offset, - ob.n_local_samples, - self._oversample, - nse.freq(key), - nse.psd(key), + realization=self.realization, + telescope=telescope, + component=self.component, + obsindx=obsindx, + detindx=nse.index(key), + rate=rate, + firstsamp=ob.local_index_offset + global_offset, + samples=ob.n_local_samples, + oversample=self._oversample, + freq=nse.freq(key), + psd=nse.psd(key), + py=False, ) # Add the noise to all detectors that have nonzero weights diff --git a/src/toast/observation_data.py b/src/toast/observation_data.py index d6e08adb3..decd7355a 100644 --- a/src/toast/observation_data.py +++ b/src/toast/observation_data.py @@ -377,8 +377,9 @@ def __getitem__(self, key): return self._internal[key] def __delitem__(self, key): - self._internal[key].clear() - del self._internal[key] + if key in self._internal: + self._internal[key].clear() + del self._internal[key] def __setitem__(self, key, value): if isinstance(value, DetectorData): @@ -626,8 +627,9 @@ def __getitem__(self, key): return self._internal[key] def __delitem__(self, key): - self._internal[key].close() - del self._internal[key] + if key in self._internal: + self._internal[key].close() + del self._internal[key] def __setitem__(self, key, value): if isinstance(value, MPIShared): diff --git a/src/toast/tests/_helpers.py b/src/toast/tests/_helpers.py index 6afe2f61a..23d20c038 100644 --- a/src/toast/tests/_helpers.py +++ b/src/toast/tests/_helpers.py @@ -87,7 +87,12 @@ def create_telescope(group_size, sample_rate=10.0 * u.Hz): while 2 * npix < group_size: npix += 6 * ring ring += 1 - fp = fake_hexagon_focalplane(n_pix=npix) + fp = fake_hexagon_focalplane( + n_pix=npix, + sample_rate=sample_rate, + f_min=1.0e-5 * u.Hz, + f_knee=(sample_rate / 2000.0), + ) return 
Telescope("test", focalplane=fp) @@ -147,7 +152,7 @@ def create_satellite_data( sim_sat = ops.SimSatellite( name="sim_sat", - n_observation=(toastcomm.ngroups * obs_per_group), + num_observations=(toastcomm.ngroups * obs_per_group), telescope=tele, hwp_rpm=10.0, observation_time=obs_time, diff --git a/src/toast/tests/mpi.py b/src/toast/tests/mpi.py index a77bbfba4..6e9ba89c1 100644 --- a/src/toast/tests/mpi.py +++ b/src/toast/tests/mpi.py @@ -2,26 +2,27 @@ # All rights reserved. Use of this source code is governed by # a BSD-style license that can be found in the LICENSE file. +from ..mpi import MPI, use_mpi + import sys import time import warnings from unittest.signals import registerResult + from unittest import TestCase from unittest import TestResult class MPITestCase(TestCase): - """A simple wrapper around the standard TestCase which provides - one extra method to set the communicator. - """ + """A simple wrapper around the standard TestCase which stores the communicator.""" def __init__(self, *args, **kwargs): - super(MPITestCase, self).__init__(*args, **kwargs) - - def setComm(self, comm): - self.comm = comm + super().__init__(*args, **kwargs) + self.comm = None + if use_mpi: + self.comm = MPI.COMM_WORLD class MPITestResult(TestResult): @@ -29,17 +30,18 @@ class MPITestResult(TestResult): The actions needed are coordinated across all processes. - Used by MPITestRunner. """ separator1 = "=" * 70 separator2 = "-" * 70 - def __init__(self, comm, stream=None, descriptions=None, verbosity=None, **kwargs): - super(MPITestResult, self).__init__( + def __init__(self, stream=None, descriptions=None, verbosity=None, **kwargs): + super().__init__( stream=stream, descriptions=descriptions, verbosity=verbosity, **kwargs ) - self.comm = comm + self.comm = None + if use_mpi: + self.comm = MPI.COMM_WORLD self.stream = stream self.descriptions = descriptions self.buffer = False @@ -53,8 +55,7 @@ def getDescription(self, test): return str(test) def startTest(self, test): - if isinstance(test, MPITestCase): - test.setComm(self.comm) + super().startTest(test) self.stream.flush() if self.comm is not None: self.comm.barrier() @@ -65,11 +66,10 @@ def startTest(self, test): self.stream.flush() if self.comm is not None: self.comm.barrier() - super(MPITestResult, self).startTest(test) return def addSuccess(self, test): - super(MPITestResult, self).addSuccess(test) + super().addSuccess(test) if self.comm is None: self.stream.write("ok ") else: @@ -78,7 +78,7 @@ def addSuccess(self, test): return def addError(self, test, err): - super(MPITestResult, self).addError(test, err) + super().addError(test, err) if self.comm is None: self.stream.write("error ") else: @@ -87,7 +87,7 @@ def addError(self, test, err): return def addFailure(self, test, err): - super(MPITestResult, self).addFailure(test, err) + super().addFailure(test, err) if self.comm is None: self.stream.write("fail ") else: @@ -96,7 +96,7 @@ def addFailure(self, test, err): return def addSkip(self, test, reason): - super(MPITestResult, self).addSkip(test, reason) + super().addSkip(test, reason) if self.comm is None: self.stream.write("skipped({}) ".format(reason)) else: @@ -105,7 +105,7 @@ def addSkip(self, test, reason): return def addExpectedFailure(self, test, err): - super(MPITestResult, self).addExpectedFailure(test, err) + super().addExpectedFailure(test, err) if self.comm is None: self.stream.write("expected-fail ") else: @@ -114,11 +114,11 @@ def addExpectedFailure(self, test, err): return def addUnexpectedSuccess(self, test): - 
super(MPITestResult, self).addUnexpectedSuccess(test) + super().addUnexpectedSuccess(test) if self.comm is None: - self.stream.writeln("unexpected-success ") + self.stream.write("unexpected-success ") else: - self.stream.writeln("[{}]unexpected-success ".format(self.comm.rank)) + self.stream.write("[{}]unexpected-success ".format(self.comm.rank)) return def printErrorList(self, flavour, errors): @@ -142,7 +142,6 @@ def printErrorList(self, flavour, errors): def printErrors(self): if self.comm is None: self.stream.writeln() - self.stream.flush() self.printErrorList("ERROR", self.errors) self.printErrorList("FAIL", self.failures) self.stream.flush() @@ -150,7 +149,6 @@ def printErrors(self): self.comm.barrier() if self.comm.rank == 0: self.stream.writeln() - self.stream.flush() for p in range(self.comm.size): if p == self.comm.rank: self.printErrorList("ERROR", self.errors) @@ -203,15 +201,15 @@ class MPITestRunner(object): resultclass = MPITestResult - def __init__( - self, comm, stream=None, descriptions=True, verbosity=2, warnings=None - ): + def __init__(self, stream=None, descriptions=True, verbosity=2, warnings=None): """Construct a MPITestRunner. Subclasses should accept **kwargs to ensure compatibility as the interface changes. """ - self.comm = comm + self.comm = None + if use_mpi: + self.comm = MPI.COMM_WORLD if stream is None: stream = sys.stderr self.stream = _WritelnDecorator(stream) @@ -221,9 +219,7 @@ def __init__( def run(self, test): "Run the given test case or test suite." - result = MPITestResult( - self.comm, self.stream, self.descriptions, self.verbosity - ) + result = MPITestResult(self.stream, self.descriptions, self.verbosity) registerResult(result) with warnings.catch_warnings(): if self.warnings: diff --git a/src/toast/tests/ops_sim_tod_noise.py b/src/toast/tests/ops_sim_tod_noise.py index c74ebf4f2..6d3ce3064 100644 --- a/src/toast/tests/ops_sim_tod_noise.py +++ b/src/toast/tests/ops_sim_tod_noise.py @@ -6,130 +6,35 @@ import numpy as np +from astropy import units as u + from .mpi import MPITestCase -from ..tod import Noise, sim_noise_timestream, AnalyticNoise, OpSimNoise -from ..todmap import TODHpixSpiral +from ..vis import set_matplotlib_backend from .. import rng as rng -from ._helpers import ( - create_outdir, - create_distdata, - boresight_focalplane, - uniform_chunks, -) +from ..noise import Noise +from .. import future_ops as ops -class OpSimNoiseTest(MPITestCase): - def setUp(self): - fixture_name = os.path.splitext(os.path.basename(__file__))[0] - self.outdir = create_outdir(self.comm, fixture_name) +from ..future_ops.sim_tod_noise import sim_noise_timestream - # Create one observation per group, and each observation will have - # a fixed number of detectors and one chunk per process. - - # We create two data sets- one for testing uncorrelated noise and - # one for testing correlated noise. - - self.data = create_distdata(self.comm, obs_per_group=1) - self.data_corr = create_distdata(self.comm, obs_per_group=1) - - self.ndet = 4 - self.rate = 20.0 - - # Create detectors with a range of knee frequencies. 
- ( - dnames, - dquat, - depsilon, - drate, - dnet, - dfmin, - dfknee, - dalpha, - ) = boresight_focalplane( - self.ndet, - samplerate=self.rate, - net=10.0, - fmin=1.0e-5, - fknee=np.linspace(0.0, 0.1, num=self.ndet), - ) - - # Total samples per observation - self.totsamp = 200000 +from ._helpers import create_outdir, create_satellite_data - # Chunks - chunks = uniform_chunks(self.totsamp, nchunk=self.data.comm.group_size) - # Noise sim oversampling +class SimNoiseTest(MPITestCase): + def setUp(self): + fixture_name = os.path.splitext(os.path.basename(__file__))[0] + self.outdir = create_outdir(self.comm, fixture_name) self.oversample = 2 - - # MCs for testing statistics of simulated noise self.nmc = 100 - # Populate the observations (one per group) - - tod = TODHpixSpiral( - self.data.comm.comm_group, - dquat, - self.totsamp, - detranks=1, - firsttime=0.0, - rate=self.rate, - nside=512, - sampsizes=chunks, - ) - - # Construct an uncorrelated analytic noise model for the detectors - - nse = AnalyticNoise( - rate=drate, - fmin=dfmin, - detectors=dnames, - fknee=dfknee, - alpha=dalpha, - NET=dnet, - ) - - self.data.obs[0]["tod"] = tod - self.data.obs[0]["noise"] = nse - - # Construct a correlated analytic noise model for the detectors - - corr_freqs = { - "noise_{}".format(x): nse.freq(dnames[x]) for x in range(self.ndet) - } - - corr_psds = {"noise_{}".format(x): nse.psd(dnames[x]) for x in range(self.ndet)} - - corr_indices = {"noise_{}".format(x): 100 + x for x in range(self.ndet)} - - corr_mix = dict() - for x in range(self.ndet): - dmix = np.random.uniform(low=-1.0, high=1.0, size=self.ndet) - corr_mix[dnames[x]] = { - "noise_{}".format(y): dmix[y] for y in range(self.ndet) - } - - nse_corr = Noise( - detectors=dnames, - freqs=corr_freqs, - psds=corr_psds, - mixmatrix=corr_mix, - indices=corr_indices, - ) - - self.data_corr.obs[0]["tod"] = tod - self.data_corr.obs[0]["noise"] = nse_corr - - return - def test_gauss(self): # Test that the same samples from different calls are reproducible. # All processes run this identical test. - detindx = self.ndet - 1 + detindx = 99 telescope = 5 realization = 1000 component = 3 @@ -170,42 +75,50 @@ def test_gauss(self): ) return - def test_sim(self): + def test_sim_once(self): # Test the uncorrelated noise generation. - # Verify that the white noise part of the spectrum is normalized # correctly. - # We have purposely distributed the TOD data so that every process has - # a single stationary interval for all detectors. + # Create a fake satellite data set for testing + data = create_satellite_data(self.comm) + + # This is a simulation with the same focalplane for every obs... + sample_rate = data.obs[0].telescope.focalplane.sample_rate - rank = 0 - if self.comm is not None: - rank = self.comm.rank + # Create a noise model from focalplane detector properties + noise_model = ops.DefaultNoiseModel() + noise_model.apply(data) - for ob in self.data.obs: - tod = ob["tod"] - nse = ob["noise"] + # Simulate noise using this model + sim_noise = ops.SimNoise() + sim_noise.apply(data) - for det in tod.local_dets: + wrank = data.comm.world_rank + grank = data.comm.group_rank + + for ob in data.obs: + nse = ob[noise_model.noise_model] + for det in ob.local_detectors: + # Verify that the white noise level of the PSD is correctly normalized. + # Only check the high frequency part of the spectrum to avoid 1/f. 
fsamp = nse.rate(det) cutoff = 0.95 * (fsamp / 2.0) indx = np.where(nse.freq(det) > cutoff) - net = nse.NET(det) avg = np.mean(nse.psd(det)[indx]) netsq = net * net - # print("det {} NETsq = {}, average white noise level = {}" - # "".format(det, netsq, avg)) self.assertTrue((np.absolute(avg - netsq) / netsq) < 0.02) - if rank == 0: - # One process dumps debugging info + if wrank == 0: + set_matplotlib_backend() import matplotlib.pyplot as plt - for det in tod.local_dets: + # Just one process dumps out local noise model for debugging + + for det in ob.local_detectors: savefile = os.path.join( - self.outdir, "out_test_simnoise_rawpsd_{}.txt".format(det) + self.outdir, "out_{}_rawpsd_{}.txt".format(ob.name, det) ) np.savetxt( savefile, @@ -236,105 +149,50 @@ def test_sim(self): plt.title("Simulated PSD from toast.AnalyticNoise") savefile = os.path.join( - self.outdir, "out_test_simnoise_rawpsd_{}.png".format(det) + self.outdir, "out_{}_rawpsd_{}.pdf".format(ob.name, det) ) + plt.savefig(savefile) plt.close() - ntod = tod.local_samples[1] + # Now generate noise timestreams in python and compare to the results of + # running the operator. - # this replicates the calculation in sim_noise_timestream() + freqs = dict() + psds = dict() fftlen = 2 - while fftlen <= (self.oversample * ntod): + while fftlen <= (self.oversample * ob.n_local_samples): fftlen *= 2 - freqs = {} - psds = {} - psdnorm = {} - todvar = {} - - cfftlen = 2 - while cfftlen <= ntod: - cfftlen *= 2 - - # print("fftlen = ", fftlen) - # print("cfftlen = ", cfftlen) - - checkpsd = {} - binsamps = cfftlen // 4096 - nbins = binsamps - 1 - bstart = (self.rate / 2) / nbins - bins = np.linspace(bstart, self.rate / 2, num=(nbins - 1), endpoint=True) - # print("nbins = ",nbins) - # print(bins) - - checkfreq = np.fft.rfftfreq(cfftlen, d=1 / self.rate) - # print("checkfreq len = ",len(checkfreq)) - # print(checkfreq[:10]) - # print(checkfreq[-10:]) - checkbinmap = np.searchsorted(bins, checkfreq, side="left") - # print("checkbinmap len = ",len(checkbinmap)) - # print(checkbinmap[:10]) - # print(checkbinmap[-10:]) - bcount = np.bincount(checkbinmap) - # print("bcount len = ",len(bcount)) - # print(bcount) - - bintruth = {} - - idet = 0 - for det in tod.local_dets: - + for idet, det in enumerate(ob.local_detectors): dfreq = nse.rate(det) / float(fftlen) - (pytod, freqs[det], psds[det]) = sim_noise_timestream( - 0, - 0, - 0, - 0, - idet, - nse.rate(det), - 0, - ntod, - self.oversample, - nse.freq(det), - nse.psd(det), + realization=0, + telescope=ob.telescope.id, + component=0, + obsindx=ob.UID, + detindx=idet, + rate=nse.rate(det), + firstsamp=ob.local_index_offset, + samples=ob.n_local_samples, + oversample=self.oversample, + freq=nse.freq(det), + psd=nse.psd(det), py=True, ) - libtod = sim_noise_timestream( - 0, - 0, - 0, - 0, - idet, - nse.rate(det), - 0, - ntod, - self.oversample, - nse.freq(det), - nse.psd(det), - py=False, + np.testing.assert_array_almost_equal( + pytod, ob.detdata[sim_noise.out][det], decimal=2 ) - np.testing.assert_array_almost_equal(pytod, libtod, decimal=2) - - # Factor of 2 comes from the negative frequency values. 
- psdnorm[det] = 2.0 * np.sum(psds[det] * dfreq) - # print("psd[{}] integral = {}".format(det, psdnorm[det])) - - todvar[det] = np.zeros(self.nmc, dtype=np.float64) - checkpsd[det] = np.zeros((nbins - 1, self.nmc), dtype=np.float64) - - idet += 1 - - if rank == 0: + if wrank == 0: + # One process dumps out interpolated PSD for debugging import matplotlib.pyplot as plt - for det in tod.local_dets: + for det in ob.local_detectors: savefile = os.path.join( - self.outdir, "out_test_simnoise_psd_{}.txt".format(det) + self.outdir, "out_{}_interppsd_{}.txt".format(ob.name, det) ) np.savetxt( savefile, np.transpose([freqs[det], psds[det]]), delimiter=" " @@ -362,69 +220,129 @@ def test_sim(self): ax.legend(loc=1) plt.title( "Interpolated PSD with High-pass from {:0.1f} " - "second Simulation Interval".format((float(ntod) / self.rate)) + "second Simulation Interval".format( + (float(ob.n_local_samples) / sample_rate) + ) ) savefile = os.path.join( - self.outdir, "out_test_simnoise_psd_{}.png".format(det) + self.outdir, "out_{}_interppsd_{}.pdf".format(ob.name, det) ) plt.savefig(savefile) plt.close() - tmap = np.searchsorted(bins, freqs[det], side="left") - tcount = np.bincount(tmap) - tpsd = np.bincount(tmap, weights=psds[det]) - good = tcount > 0 - tpsd[good] /= tcount[good] - bintruth[det] = tpsd - - hpy = None - if rank == 0: - if "TOAST_TEST_BIGTOD" in os.environ.keys(): - try: - import h5py as hpy - except ImportError: - # just write the first realization as usual - hpy = None - - # if we have the h5py module and a special environment variable is set, - # then process zero will dump out its full timestream data for more - # extensive sharing / tests. Just dump a few detectors to keep the - # file size reasonable. - - hfile = None - dset = {} - if hpy is not None: - hfile = hpy.File( - os.path.join(self.outdir, "out_test_simnoise_tod.hdf5"), "w" - ) - for det in tod.detectors: - dset[det] = hfile.create_dataset( - det, (self.nmc, ntod), dtype="float64" - ) + if ob.comm is not None: + ob.comm.barrier() + + # For some reason not deleting here (and relying on garbage collection) causes + # a hang in the case of multiple groups. Removed after this is understood. + del data + + def test_sim_mc(self): + # Create a fake satellite data set for testing. We explicitly generate + # only one observation per group. + data = create_satellite_data( + self.comm, + obs_per_group=1, + sample_rate=100.0 * u.Hz, + obs_time=10.0 * u.minute, + ) - # Run both the numpy FFT case and the toast FFT case. + # This is a simulation with the same focalplane for every obs... + sample_rate = data.obs[0].telescope.focalplane.sample_rate + + # Create a noise model from focalplane detector properties + noise_model = ops.DefaultNoiseModel() + noise_model.apply(data) + + wrank = data.comm.world_rank + + # First we make one pass through the data and examine the noise model. + # We interpolate the PSD using pure python code and compute some normalization + # factors and also bin the true PSD to the final binning we will use for the + # PSDs made from the timestreams. 
+ + todvar = dict() + ntod_var = None + psd_norm = dict() + checkpsd = dict() + freqs = dict() + psds = dict() + + cfftlen = 2 + while cfftlen <= data.obs[0].n_local_samples: + cfftlen *= 2 + binsamps = cfftlen // 2048 + nbins = binsamps - 1 + bstart = (sample_rate / 2) / nbins + bins = np.linspace(bstart, sample_rate / 2, num=(nbins - 1), endpoint=True) + checkfreq = np.fft.rfftfreq(cfftlen, d=(1 / sample_rate)) + checkbinmap = np.searchsorted(bins, checkfreq, side="left") + bcount = np.bincount(checkbinmap) + bintruth = dict() + tpsd = None + good = None + + for ob in data.obs[:1]: + ntod_var = ob.n_local_samples + nse = ob[noise_model.noise_model] + fftlen = 2 + while fftlen <= (self.oversample * ob.n_local_samples): + fftlen *= 2 + for idet, det in enumerate(ob.local_detectors): + dfreq = nse.rate(det) / float(fftlen) + (pytod, freqs[det], psds[det]) = sim_noise_timestream( + realization=0, + telescope=ob.telescope.id, + component=0, + obsindx=ob.UID, + detindx=idet, + rate=nse.rate(det), + firstsamp=ob.local_index_offset, + samples=ob.n_local_samples, + oversample=self.oversample, + freq=nse.freq(det), + psd=nse.psd(det), + py=True, + ) + # Factor of 2 comes from the negative frequency values. + psd_norm[det] = 2.0 * np.sum(psds[det] * dfreq) - for realization in range(self.nmc): + # Allocate buffers for MC loop + todvar[det] = np.zeros(self.nmc, dtype=np.float64) + checkpsd[det] = np.zeros((nbins - 1, self.nmc), dtype=np.float64) - # generate timestreams + # Bin the true high-resolution PSD. + tmap = np.searchsorted(bins, freqs[det], side="left") + tcount = np.bincount(tmap) + tpsd = np.bincount(tmap, weights=psds[det]) + good = tcount > 0 + tpsd[good] /= tcount[good] + bintruth[det] = tpsd - opnoise = OpSimNoise(realization=realization) - opnoise.exec(self.data) + # Perform noise realizations and accumulation statistics. 
- if realization == 0: - # write timestreams to disk for debugging + for realization in range(self.nmc): + # Clear any previously generated data + for ob in data.obs: + del ob.detdata["noise"] - if rank == 0: - import matplotlib.pyplot as plt + # Simulate noise using the model, with a different realization each time + sim_noise = ops.SimNoise(realization=realization) + sim_noise.apply(data) - for det in tod.local_dets: + if realization == 0: + # write timestreams to disk for debugging + if wrank == 0: + import matplotlib.pyplot as plt - check = tod.cache.reference("noise_{}".format(det)) + for ob in data.obs: + for det in ob.local_detectors: + check = ob.detdata["noise"][det] savefile = os.path.join( self.outdir, - "out_test_simnoise_tod_mc0_{}.txt" "".format(det), + "out_{}_tod-mc0_{}.txt" "".format(ob.name, det), ) np.savetxt(savefile, np.transpose([check]), delimiter=" ") @@ -438,165 +356,187 @@ def test_sim(self): ) ax.legend(loc=1) plt.title( - "First Realization of Simulated TOD " - "from toast.sim_noise_timestream()" + "Observation {}, First Realization of {}".format( + ob.name, det + ) ) savefile = os.path.join( self.outdir, - "out_test_simnoise_tod_mc0_{}.png" "".format(det), + "out_{}_tod-mc0_{}.pdf" "".format(ob.name, det), ) plt.savefig(savefile) plt.close() - for det in tod.local_dets: + for ob in data.obs[:1]: + for det in ob.local_detectors: # compute the TOD variance - ref = tod.cache.reference("noise_{}".format(det)) - dclevel = np.mean(ref) - variance = np.vdot(ref - dclevel, ref - dclevel) / ntod + tod = ob.detdata[sim_noise.out][det] + dclevel = np.mean(tod) + variance = np.vdot(tod - dclevel, tod - dclevel) / len(tod) todvar[det][realization] = variance - if hfile is not None: - if det in dset: - dset[det][realization, :] = ref[:] - # compute the PSD buffer = np.zeros(cfftlen, dtype=np.float64) - offset = (cfftlen - len(ref)) // 2 - buffer[offset : offset + len(ref)] = ref + offset = (cfftlen - len(tod)) // 2 + buffer[offset : offset + len(tod)] = tod rawpsd = np.fft.rfft(buffer) - norm = 1.0 / (self.rate * ntod) + norm = 1.0 / (sample_rate * ob.n_local_samples) rawpsd = norm * np.abs(rawpsd ** 2) bpsd = np.bincount(checkbinmap, weights=rawpsd) good = bcount > 0 bpsd[good] /= bcount[good] checkpsd[det][:, realization] = bpsd[:] - tod.cache.clear() - - if hfile is not None: - hfile.close() - - if rank == 0: - np.savetxt( - os.path.join(self.outdir, "out_test_simnoise_tod_var.txt"), - np.transpose([todvar[x] for x in tod.local_dets]), - delimiter=" ", + lds = sorted(todvar.keys()) + + if wrank == 0: + import matplotlib.pyplot as plt + + np.savetxt( + os.path.join(self.outdir, "out_tod_variance.txt"), + np.transpose([todvar[x] for x in lds]), + delimiter=" ", + ) + + for det in lds: + sig = np.mean(todvar[det]) * np.sqrt(2.0 / (ntod_var - 1)) + histrange = 5.0 * sig + histmin = psd_norm[det] - histrange + histmax = psd_norm[det] + histrange + + fig = plt.figure(figsize=(12, 8), dpi=72) + + ax = fig.add_subplot(1, 1, 1, aspect="auto") + plt.hist( + todvar[det], + 10, + range=(histmin, histmax), + facecolor="magenta", + alpha=0.75, + label="{}: PSD integral = {:0.1f} expected sigma = " + "{:0.1f}".format(det, psd_norm[det], sig), + ) + ax.legend(loc=1) + plt.title( + "Detector {} Distribution of TOD Variance for {} " + "Realizations".format(det, self.nmc) ) - if rank == 0: - import matplotlib.pyplot as plt - - for det in tod.local_dets: - savefile = os.path.join( - self.outdir, "out_test_simnoise_tod_var_{}.txt".format(det) - ) - np.savetxt(savefile, 
np.transpose([todvar[det]]), delimiter=" ") - - sig = np.mean(todvar[det]) * np.sqrt(2.0 / (ntod - 1)) - histrange = 5.0 * sig - histmin = psdnorm[det] - histrange - histmax = psdnorm[det] + histrange - - fig = plt.figure(figsize=(12, 8), dpi=72) - - ax = fig.add_subplot(1, 1, 1, aspect="auto") - plt.hist( - todvar[det], - 10, - range=(histmin, histmax), - facecolor="magenta", - alpha=0.75, - label="{}: PSD integral = {:0.1f} expected sigma = " - "{:0.1f}".format(det, psdnorm[det], sig), - ) - ax.legend(loc=1) - plt.title( - "Distribution of TOD Variance for {} " - "Realizations".format(self.nmc) - ) - - savefile = os.path.join( - self.outdir, "out_test_simnoise_tod_var_{}.png".format(det) - ) - plt.savefig(savefile) - plt.close() - - meanpsd = np.asarray( - [np.mean(checkpsd[det][x, :]) for x in range(nbins - 1)] - ) - - fig = plt.figure(figsize=(12, 8), dpi=72) + savefile = os.path.join( + self.outdir, "out_tod-variance_{}.pdf".format(det) + ) + plt.savefig(savefile) + plt.close() - ax = fig.add_subplot(1, 1, 1, aspect="auto") - ax.plot(bins, bintruth[det], c="k", label="Input Truth") - ax.plot(bins, meanpsd, c="b", marker="o", label="Mean Binned PSD") - ax.scatter( - np.repeat(bins, self.nmc), - checkpsd[det].flatten(), - marker="x", - color="r", - label="Binned PSD", - ) - # ax.set_xscale("log") - # ax.set_yscale("log") - ax.legend(loc=1) - plt.title( - "Detector {} Binned PSDs for {} Realizations" - "".format(det, self.nmc) - ) + meanpsd = np.asarray( + [np.mean(checkpsd[det][x, :]) for x in range(nbins - 1)] + ) - savefile = os.path.join( - self.outdir, "out_test_simnoise_binpsd_dist_{}.png".format(det) - ) - plt.savefig(savefile) - plt.close() + fig = plt.figure(figsize=(12, 8), dpi=72) + + ax = fig.add_subplot(1, 1, 1, aspect="auto") + ax.plot(bins, bintruth[det], c="k", label="Input Truth") + ax.plot(bins, meanpsd, c="b", marker="o", label="Mean Binned PSD") + ax.scatter( + np.repeat(bins, self.nmc), + checkpsd[det].flatten(), + marker="x", + color="r", + label="Binned PSD", + ) + # ax.set_xscale("log") + # ax.set_yscale("log") + ax.legend(loc=1) + plt.title( + "Detector {} Binned PSDs for {} Realizations" + "".format(det, self.nmc) + ) - # The data will likely not be gaussian distributed. - # Just check that the mean is "close enough" to the truth. - errest = np.absolute(np.mean((meanpsd - tpsd) / tpsd)) - # print("Det {} avg rel error = {}".format(det, errest), flush=True) - if nse.fknee(det) < 0.1: - self.assertTrue(errest < 0.1) - - # Verify that Parseval's theorem holds- that the variance of - # the TOD equals the integral of the PSD. We do this for an - # ensemble of realizations - # - # and compare the TOD variance to the integral of the PSD - # accounting for the error on the variance due to finite - # numbers of samples. - # - - for det in tod.local_dets: - sig = np.mean(todvar[det]) * np.sqrt(2.0 / (ntod - 1)) - over3sig = np.where( - np.absolute(todvar[det] - psdnorm[det]) > 3.0 * sig - )[0] - overfrac = float(len(over3sig)) / self.nmc - # print(det, " : ", overfrac, flush=True) - if nse.fknee(det) < 0.01: - self.assertTrue(overfrac < 0.1) - return + savefile = os.path.join( + self.outdir, "out_psd-histogram_{}.pdf".format(det) + ) + plt.savefig(savefile) + plt.close() + + # The data will likely not be gaussian distributed. + # Just check that the mean is "close enough" to the truth. 
+ errest = np.absolute(np.mean((meanpsd - tpsd) / tpsd)) + # print("Det {} avg rel error = {}".format(det, errest), flush=True) + if nse.fknee(det) < 0.1: + self.assertTrue(errest < 0.1) + + # Verify that Parseval's theorem holds- that the variance of the TOD equals the + # integral of the PSD. We do this for an ensemble of realizations and compare + # the TOD variance to the integral of the PSD accounting for the error on the + # variance due to finite numbers of samples. + + ntod = data.obs[0].n_local_samples + for det in lds: + sig = np.mean(todvar[det]) * np.sqrt(2.0 / (ntod - 1)) + over3sig = np.where(np.absolute(todvar[det] - psd_norm[det]) > 3.0 * sig)[0] + overfrac = float(len(over3sig)) / self.nmc + # print(det, " : ", overfrac, flush=True) + if nse.fknee(det) < 0.1: + self.assertTrue(overfrac < 0.1) + + del data def test_sim_correlated(self): - # Test the correlated noise generation. - opnoise = OpSimNoise(realization=0) - opnoise.exec(self.data_corr) + # Create a fake satellite data set for testing + data = create_satellite_data(self.comm) + + # Create an uncorrelated noise model from focalplane detector properties + noise_model = ops.DefaultNoiseModel() + noise_model.apply(data) + + # Construct a correlated analytic noise model for the detectors for each + # observation. + for ob in data.obs: + nse = ob[noise_model.noise_model] + corr_freqs = { + "noise_{}".format(i): nse.freq(x) + for i, x in enumerate(ob.local_detectors) + } + corr_psds = { + "noise_{}".format(i): nse.psd(x) + for i, x in enumerate(ob.local_detectors) + } + corr_indices = { + "noise_{}".format(i): 100 + i for i, x in enumerate(ob.local_detectors) + } + corr_mix = dict() + for i, x in enumerate(ob.local_detectors): + dmix = np.random.uniform( + low=-1.0, high=1.0, size=len(ob.local_detectors) + ) + corr_mix[x] = { + "noise_{}".format(y): dmix[y] + for y in range(len(ob.local_detectors)) + } + ob["noise_model_corr"] = Noise( + detectors=ob.local_detectors, + freqs=corr_freqs, + psds=corr_psds, + mixmatrix=corr_mix, + indices=corr_indices, + ) + + # Simulate noise using this model + sim_noise = ops.SimNoise(noise_model="noise_model_corr") + sim_noise.apply(data) total = None - for ob in self.data.obs: - tod = ob["tod"] - for det in tod.local_dets: + for ob in data.obs: + for det in ob.local_detectors: # compute the TOD variance - ref = tod.cache.reference("noise_{}".format(det)) - self.assertTrue(np.std(ref) > 0) + tod = ob.detdata[sim_noise.out][det] + self.assertTrue(np.std(tod) > 0) if total is None: - total = ref.copy() - else: - total[:] += ref - del ref + total = np.zeros(ob.n_local_samples, dtype=np.float64) + total[:] += tod # np.testing.assert_almost_equal(np.std(total), 0) + del data return diff --git a/src/toast/tests/runner.py b/src/toast/tests/runner.py index f8056748a..83fed9128 100644 --- a/src/toast/tests/runner.py +++ b/src/toast/tests/runner.py @@ -6,6 +6,7 @@ import os import sys + import unittest from .mpi import MPITestRunner @@ -30,10 +31,9 @@ from . import config as test_config from . import ops_sim_satellite as test_ops_sim_satellite - from . import ops_memory_counter as test_ops_memory_counter - from . import ops_pointing_healpix as test_ops_pointing_healpix +from . 
import ops_sim_tod_noise as test_ops_sim_tod_noise # @@ -98,7 +98,7 @@ def test(name=None, verbosity=2): comm = MPI.COMM_WORLD rank = comm.rank - set_matplotlib_backend(backend="agg") + # set_matplotlib_backend(backend="agg") outdir = "toast_test_output" @@ -117,7 +117,7 @@ def test(name=None, verbosity=2): # Run python tests. loader = unittest.TestLoader() - mpirunner = MPITestRunner(comm, verbosity=verbosity, warnings="ignore") + mpirunner = MPITestRunner(verbosity=verbosity, warnings="ignore") suite = unittest.TestSuite() if name is None: @@ -141,13 +141,11 @@ def test(name=None, verbosity=2): suite.addTest(loader.loadTestsFromModule(test_ops_sim_satellite)) suite.addTest(loader.loadTestsFromModule(test_ops_memory_counter)) suite.addTest(loader.loadTestsFromModule(test_ops_pointing_healpix)) + suite.addTest(loader.loadTestsFromModule(test_ops_sim_tod_noise)) - # suite.addTest(loader.loadTestsFromModule(testcache)) # # suite.addTest(loader.loadTestsFromModule(testtod)) - # suite.addTest(loader.loadTestsFromModule(testtodsat)) # - # suite.addTest(loader.loadTestsFromModule(testopssimnoise)) # suite.addTest(loader.loadTestsFromModule(testopssimsss)) # suite.addTest(loader.loadTestsFromModule(testopsapplygain)) # suite.addTest(loader.loadTestsFromModule(testcov)) @@ -155,7 +153,6 @@ def test(name=None, verbosity=2): # suite.addTest(loader.loadTestsFromModule(testopsgroundfilter)) # suite.addTest(loader.loadTestsFromModule(testsimfocalplane)) # suite.addTest(loader.loadTestsFromModule(testopspolyfilter)) - # suite.addTest(loader.loadTestsFromModule(testopsmemorycounter)) # suite.addTest(loader.loadTestsFromModule(testopsgainscrambler)) # suite.addTest(loader.loadTestsFromModule(testpsdmath)) # suite.addTest(loader.loadTestsFromModule(testopsmadam))
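
Note on the normalization exercised by the rewritten test_sim_mc above: the test relies on the variance of a noise timestream matching twice the integral of its one-sided periodogram, psd_norm = 2 * sum(psd * dfreq), where the periodogram is normalized as |rfft(tod)|**2 / (rate * nsamp). The standalone sketch below illustrates that same normalization with plain white noise; it does not use toast, and the sample rate, sigma, seed, and tolerance are illustrative values, not taken from the patch.

    import numpy as np

    rate = 100.0      # Hz; illustrative, not taken from the patch
    nsamp = 2 ** 16   # number of samples
    sigma = 10.0      # white-noise RMS per sample (hypothetical)

    rng = np.random.default_rng(12345)
    tod = rng.normal(0.0, sigma, nsamp)

    # Periodogram with the same normalization used in test_sim_mc:
    # psd = |rfft(tod)|^2 / (rate * nsamp), on a frequency grid of spacing rate / nsamp.
    psd = np.abs(np.fft.rfft(tod)) ** 2 / (rate * nsamp)
    dfreq = rate / nsamp

    # Factor of 2 accounts for the negative-frequency half of the spectrum.
    psd_integral = 2.0 * np.sum(psd * dfreq)
    variance = np.var(tod)

    # Agreement is limited only by the sampling error on the variance,
    # which is roughly sqrt(2 / nsamp) in relative terms.
    assert abs(psd_integral - variance) / variance < 0.05

With this convention, the Parseval-style check in the test reduces to comparing the per-realization TOD variance against psd_norm within a few standard errors, which is exactly what the over3sig / overfrac logic does.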