class TXGammaTRandoms(TXTwoPoint):
    """
    Systematics null-test stage: measure the tangential shear (gamma_t)
    around points drawn from the random catalogue instead of real lenses.

    For a clean shear catalogue this signal should be consistent with zero.
    All of the measurement machinery is inherited from TXTwoPoint; this
    class only overrides sample selection and output writing.
    """
    name = "TXGammaTRandoms"
    inputs = [
        ('shear_catalog', ShearCatalog),
        ('shear_tomography_catalog', TomographyCatalog),
        ('shear_photoz_stack', HDFFile),
        ('lens_tomography_catalog', TomographyCatalog),
        ('lens_photoz_stack', HDFFile),
        ('random_cats', RandomsCatalog),
        # NOTE(review): star_catalog looks like a copy-paste leftover from the
        # star-based gamma_t stages -- nothing in this class reads it.  Kept so
        # the pipeline wiring is unchanged; confirm before removing.
        ('star_catalog', HDFFile),
        ('patch_centers', TextFile),
    ]
    outputs = [
        ('gammat_randoms', SACCFile),
        ('gammat_randoms_plot', PNGFile),
    ]
    # Add values to the config file that are not previously defined
    config_options = {
        'calcs': [0, 1, 2],
        'min_sep': 2.5,
        'max_sep': 100,
        'nbins': 20,
        'bin_slop': 0.1,
        'sep_units': 'arcmin',
        'flip_g2': True,
        'cores_per_task': 20,
        'verbose': 1,
        'reduce_randoms_size': 1.0,
        # NOTE(review): treecorr's option is 'var_method' (singular) -- verify
        # this key is actually read somewhere, otherwise jackknife errors are
        # silently not being requested.
        'var_methods': 'jackknife',
        'npatch': 5,
        'shear_catalog_type': 'metacal',
    }

    def run(self):
        # The parent class expects source_bins / lens_bins config options;
        # set both to [-1], meaning "choose automatically" (see read_nbin).
        import matplotlib
        matplotlib.use('agg')
        self.config['source_bins'] = [-1]
        self.config['lens_bins'] = [-1]
        super().run()

    def read_nbin(self, data):
        # A single source bin (the complete 2D sample) and a single lens
        # bin (the randoms).
        data['source_list'] = [0]
        data['lens_list'] = [0]

    def load_random_catalog(self, data):
        # Deliberately a no-op override of the parent method: the randoms
        # are already the lens sample here, and subtracting randoms from
        # randoms would give nothing.
        pass

    def load_lens_catalog(self, data):
        # Use the random points as the "lenses".
        f = self.open_input('random_cats')
        group = f['randoms']
        data['lens_ra'] = group['ra'][:]
        data['lens_dec'] = group['dec'][:]
        f.close()

        # Everything goes into the single lens bin 0.
        npoint = data['lens_ra'].size
        data['lens_bin'] = np.zeros(npoint)

    def load_tomography(self, data):
        # Run the parent class tomography selection, then squash all the
        # selected bins 0 .. nbin-1 down to bin 0.  Objects selected in any
        # tomographic bin end up in the same 2D bin; unselected objects
        # keep bin -1.
        super().load_tomography(data)
        data['source_bin'][:] = data['source_bin'].clip(-1, 0)

    def select_calculations(self, data):
        # Only one measurement: gamma_t of the sources around the randoms.
        return [(0, 0, SHEAR_POS)]

    def write_output(self, data, meta, results):
        # Save to a SACC file for later use, and make a diagnostic plot.
        self.write_output_sacc(data, meta, results)
        self.write_output_plot(results)

    def write_output_plot(self, results):
        """Plot theta * gamma_t with errors, labelled with chi^2/dof vs zero."""
        import matplotlib.pyplot as plt

        d = results[0]
        dvalue = d.object.xi
        derror = np.sqrt(d.object.varxi)
        dtheta = np.exp(d.object.meanlogr)

        fig = self.open_output('gammat_randoms_plot', wrapper=True)

        # chi^2 per degree of freedom against the null (zero-signal) model.
        z = dvalue / derror
        chi2 = np.sum(z ** 2)
        chi2dof = chi2 / (len(dtheta) - 1)

        # Raw string so the backslash in \chi is not treated as an escape.
        plt.errorbar(dtheta, dtheta * dvalue, dtheta * derror, fmt='ro',
                     capsize=3, label=r'$\chi^2/dof = $' + str(chi2dof))
        plt.legend(loc='best')
        plt.xscale('log')

        plt.xlabel(r"$\theta$ / arcmin")
        plt.ylabel(r"$\theta \cdot \gamma_t(\theta)$")
        plt.title("Randoms Tangential Shear")

        fig.close()

    def write_output_sacc(self, data, meta, results):
        # The results are written slightly differently here than in the
        # parent class because they go to a different file and have
        # different tracers and tags.
        import sacc
        dt = "galaxyRandoms_shearDensity_xi_t"

        S = sacc.Sacc()

        # n(z) of the combined 2D source sample, attached to the source tracer.
        f = self.open_input('shear_photoz_stack')
        z = f['n_of_z/source2d/z'][:]
        Nz = f['n_of_z/source2d/bin_0'][:]
        f.close()

        # Record the two tracers the data points will reference.
        S.add_tracer('misc', 'randoms')
        S.add_tracer('NZ', 'source2d', z, Nz)

        # select_calculations returned exactly one measurement.
        assert len(results) == 1
        d = results[0]
        dvalue = d.object.xi
        derror = np.sqrt(d.object.varxi)
        dtheta = np.exp(d.object.meanlogr)
        dnpair = d.object.npairs
        dweight = d.object.weight

        # One SACC data point per angular bin.
        for i in range(len(dvalue)):
            S.add_data_point(dt, ('source2d', 'randoms'), dvalue[i],
                             theta=dtheta[i], error=derror[i],
                             npair=dnpair[i], weight=dweight[i])

        self.write_metadata(S, meta)

        # Data points may currently be in any order depending on which
        # processes ran which calculations -- re-order them canonically.
        S.to_canonical_order()

        # Finally, save the output to the SACC file.
        S.save_fits(self.get_output('gammat_randoms'), overwrite=True)