adding TXGammaTRandoms stage #101

Merged: 5 commits, Jun 25, 2020
2 changes: 2 additions & 0 deletions examples/laptop_pipeline.yml
@@ -18,6 +18,8 @@ stages:
threads_per_process: 2
- name: TXGammaTBrightStars
threads_per_process: 2
- name: TXGammaTRandoms
threads_per_process: 2
- name: TXGammaTDimStars
threads_per_process: 2
- name: TXRoweStatistics
2 changes: 2 additions & 0 deletions txpipe/random_cats.py
@@ -137,6 +137,8 @@ def run(self):
### Interpolate those random values to a redshift value given by the cdf
# z_photo_rand = np.interp(cdf_rand_val,z_cdf_norm,z_photo_arr)
z_interp_func = scipy.interpolate.interp1d(z_cdf_norm,z_photo_arr)
        # The uniform draws can fall slightly outside the tabulated CDF range - clip them into it
cdf_rand_val = cdf_rand_val.clip(z_cdf_norm.min(), z_cdf_norm.max())
z_photo_rand = z_interp_func(cdf_rand_val)

# Save output
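The clip added above prevents `interp1d` from raising a range error when a uniform draw falls below the first tabulated CDF value. A minimal, self-contained sketch of the same inverse-CDF sampling, using a made-up redshift grid and PDF purely for illustration:

```python
import numpy as np
import scipy.interpolate

rng = np.random.default_rng(42)

# Made-up photo-z grid and a Gaussian-ish PDF (illustration only)
z_photo_arr = np.linspace(0.0, 2.0, 201)
pdf = np.exp(-0.5 * ((z_photo_arr - 0.8) / 0.3) ** 2)

# Normalized cumulative distribution
z_cdf_norm = np.cumsum(pdf)
z_cdf_norm /= z_cdf_norm[-1]

# Map uniform draws through the inverse CDF to sample redshifts
cdf_rand_val = rng.uniform(0, 1, size=10_000)
z_interp_func = scipy.interpolate.interp1d(z_cdf_norm, z_photo_arr)

# Draws below z_cdf_norm.min() would be outside the interpolation
# range and raise a ValueError - clip them in, as the patch does
cdf_rand_val = cdf_rand_val.clip(z_cdf_norm.min(), z_cdf_norm.max())
z_photo_rand = z_interp_func(cdf_rand_val)
```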
166 changes: 166 additions & 0 deletions txpipe/twopoint.py
@@ -1528,6 +1528,172 @@ def write_output_sacc(self, data, meta, results):

# Also make a plot of the data

class TXGammaTRandoms(TXTwoPoint):
"""
    This subclass of the standard TXTwoPoint uses the points of the
    random catalog as "lenses", as a systematics null test.
"""
name = "TXGammaTRandoms"
inputs = [
('shear_catalog', ShearCatalog),
('shear_tomography_catalog', TomographyCatalog),
('shear_photoz_stack', HDFFile),
('lens_tomography_catalog', TomographyCatalog),
('lens_photoz_stack', HDFFile),
('random_cats', RandomsCatalog),
('star_catalog', HDFFile),
('patch_centers', TextFile),
]
outputs = [
('gammat_randoms', SACCFile),
('gammat_randoms_plot', PNGFile),
]
# Add values to the config file that are not previously defined
config_options = {
'calcs':[0,1,2],
'min_sep':2.5,
'max_sep':100,
'nbins':20,
'bin_slop':0.1,
'sep_units':'arcmin',
'flip_g2':True,
'cores_per_task':20,
'verbose':1,
'reduce_randoms_size':1.0,
'var_methods': 'jackknife',
'npatch': 5,
'shear_catalog_type': 'metacal',
}

def run(self):
# Before running the parent class we add source_bins and lens_bins
# options that it is expecting, both set to -1 to indicate that we
# will choose them automatically (below).
import matplotlib
matplotlib.use('agg')
self.config['source_bins'] = [-1]
self.config['lens_bins'] = [-1]
super().run()

def read_nbin(self, data):
# We use only a single source and lens bin in this case -
# the source is the complete 2D field and the lens is the
# field centers
data['source_list'] = [0]
data['lens_list'] = [0]

def load_random_catalog(self, data):
# override the parent method
# so that we don't load the randoms here,
# because if we subtract randoms from randoms
# we get nothing.
pass

def load_lens_catalog(self, data):
# We load the randoms to use as lenses
f = self.open_input('random_cats')
group = f['randoms']
data['lens_ra'] = group['ra'][:]
data['lens_dec'] = group['dec'][:]
f.close()

npoint = data['lens_ra'].size
data['lens_bin'] = np.zeros(npoint)

def load_tomography(self, data):
# We run the parent class tomography selection but then
        # override it to squash all of the bins 0 .. nbin-1
# down to the zero bin. This means that any selected
# objects (from any tomographic bin) are now in the same
# bin, and unselected objects still have bin -1
super().load_tomography(data)
data['source_bin'][:] = data['source_bin'].clip(-1,0)
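A quick illustration of what that clip does, with a toy bin array (not pipeline data):

```python
import numpy as np

# Toy tomographic bin assignments; -1 means "not selected"
source_bin = np.array([-1, 0, 1, 2, -1, 3])

# clip(-1, 0) sends every selected bin (0 .. nbin-1) to 0 and
# leaves the unselected -1 values untouched
print(source_bin.clip(-1, 0))  # [-1  0  0  0 -1  0]
```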

def select_calculations(self, data):
        # We only want a single calculation, the gamma_t around
        # the random points
return [(0,0,SHEAR_POS)]

def write_output(self, data, meta, results):
# we write output both to file for later and to
# a plot
self.write_output_sacc(data, meta, results)
self.write_output_plot(results)

def write_output_plot(self, results):
import matplotlib.pyplot as plt
d = results[0]
dvalue = d.object.xi
derror = np.sqrt(d.object.varxi)
dtheta = np.exp(d.object.meanlogr)

fig = self.open_output('gammat_randoms_plot', wrapper=True)

# compute the mean and the chi^2/dof
flat1 = 0
z = (dvalue - flat1) / derror
chi2 = np.sum(z ** 2)
chi2dof = chi2 / (len(dtheta) - 1)
        print('error:', derror)

        plt.errorbar(dtheta, dtheta*dvalue, dtheta*derror, fmt='ro', capsize=3,
                     label=rf'$\chi^2/dof = {chi2dof:.2f}$')
plt.legend(loc='best')
plt.xscale('log')

plt.xlabel(r"$\theta$ / arcmin")
plt.ylabel(r"$\theta \cdot \gamma_t(\theta)$")
plt.title("Randoms Tangential Shear")

print('type',type(fig))
fig.close()
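Since this is a null test (the signal around randoms should be consistent with zero), the chi^2 in the legend could also be quoted as a p-value. A sketch with made-up numbers, following the same dof convention as the code above; scipy is assumed to be available:

```python
import numpy as np
from scipy import stats

# Made-up measurement and errors, for illustration only
dvalue = np.array([1.0e-5, -2.0e-5, 3.0e-6, 8.0e-6])
derror = np.array([2.0e-5, 2.0e-5, 2.0e-5, 2.0e-5])

chi2 = np.sum((dvalue / derror) ** 2)
dof = len(dvalue) - 1  # same convention as write_output_plot
p_value = stats.chi2.sf(chi2, dof)  # survival function = 1 - CDF
print(f"chi2/dof = {chi2 / dof:.2f}, p = {p_value:.3f}")
```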

def write_output_sacc(self, data, meta, results):
# We write out the results slightly differently here
        # because they go to a different file and have different
# tracers and tags.
import sacc
dt = "galaxyRandoms_shearDensity_xi_t"

S = sacc.Sacc()

f = self.open_input('shear_photoz_stack')
z = f['n_of_z/source2d/z'][:]
        Nz = f['n_of_z/source2d/bin_0'][:]
f.close()

# Add the data points that we have one by one, recording which
# tracer they each require
S.add_tracer('misc', 'randoms')
S.add_tracer('NZ', 'source2d', z, Nz)

        assert len(results) == 1
        d = results[0]
dvalue = d.object.xi
derror = np.sqrt(d.object.varxi)
dtheta = np.exp(d.object.meanlogr)
dnpair = d.object.npairs
dweight = d.object.weight


# Each of our Measurement objects contains various theta values,
# and we loop through and add them all
n = len(dvalue)
for i in range(n):
S.add_data_point(dt, ('source2d', 'randoms'), dvalue[i],
theta=dtheta[i], error=derror[i], npair=dnpair[i], weight=dweight[i])

self.write_metadata(S, meta)

print(S)
# Our data points may currently be in any order depending on which processes
# ran which calculations. Re-order them.
S.to_canonical_order()

# Finally, save the output to Sacc file
S.save_fits(self.get_output('gammat_randoms'), overwrite=True)

# Also make a plot of the data
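For downstream checks, the saved measurement can be read back with the sacc library. A sketch, where the file path is hypothetical (the real one comes from the pipeline's `gammat_randoms` output tag):

```python
import sacc

# Hypothetical path; in the pipeline this comes from get_output()
s = sacc.Sacc.load_fits("gammat_randoms.sacc")

# Retrieve the single randoms tangential-shear measurement
theta, xi = s.get_theta_xi("galaxyRandoms_shearDensity_xi_t",
                           "source2d", "randoms")
```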

class TXJackknifeCenters(PipelineStage):
"""
This is the pipeline stage that is run to generate the patch centers for