From 53e43b5abeeab760c63d5259ae82c2710a885805 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Mon, 30 Oct 2023 07:36:26 -0700 Subject: [PATCH 01/36] CADC-12805 - stop partial Function creation from keywords, so that Range by blueprint works. --- caom2utils/caom2utils/caom2blueprint.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index eba614ac..46196d9d 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -3958,7 +3958,9 @@ def augment_custom(self, chunk): delta = self.wcs.cd[custom_axis_index][custom_axis_index] else: delta = self.wcs.cdelt[custom_axis_index] - naxis.function = CoordFunction1D(custom_axis_length, delta, self._get_ref_coord(custom_axis_index)) + ref_coord = self._get_ref_coord(custom_axis_index) + if delta and ref_coord: + naxis.function = CoordFunction1D(custom_axis_length, delta, ref_coord) if not chunk.custom: chunk.custom = CustomWCS(naxis) else: @@ -3995,7 +3997,9 @@ def augment_energy(self, chunk): delta = self.wcs.cd[energy_axis_index][energy_axis_index] else: delta = self.wcs.cdelt[energy_axis_index] - naxis.function = CoordFunction1D(energy_axis_length, delta, self._get_ref_coord(energy_axis_index)) + ref_coord = self._get_ref_coord(energy_axis_index) + if delta and ref_coord: + naxis.function = CoordFunction1D(energy_axis_length, delta, ref_coord) specsys = _to_str(self.wcs.specsys) if not chunk.energy: @@ -4128,7 +4132,9 @@ def augment_polarization(self, chunk): delta = self.wcs.cd[polarization_axis_index][polarization_axis_index] else: delta = self.wcs.cdelt[polarization_axis_index] - naxis.function = CoordFunction1D(axis_length, delta, self._get_ref_coord(polarization_axis_index)) + ref_coord = self._get_ref_coord(polarization_axis_index) + if delta and ref_coord: + naxis.function = CoordFunction1D(axis_length, delta, ref_coord) if not chunk.polarization: chunk.polarization = PolarizationWCS(naxis) else: @@ -4263,7 +4269,7 @@ def _get_ref_coord(self, index): aug_crpix = _to_float(self._sanitize(self.wcs.crpix[index])) aug_crval = _to_float(self._sanitize(self.wcs.crval[index])) aug_ref_coord = None - if aug_crpix is not None and aug_crval is not None: + if aug_crpix and aug_crval: aug_ref_coord = RefCoord(aug_crpix, aug_crval) return aug_ref_coord @@ -4274,8 +4280,11 @@ def _get_spatial_axis(self, xindex, yindex): if aug_dimension is None: return None - aug_ref_coord = Coord2D(self._get_ref_coord(xindex), - self._get_ref_coord(yindex)) + x_ref_coord = self._get_ref_coord(xindex) + y_ref_coord = self._get_ref_coord(yindex) + aug_ref_coord = None + if x_ref_coord and y_ref_coord: + aug_ref_coord = Coord2D(x_ref_coord, y_ref_coord) aug_cd11, aug_cd12, aug_cd21, aug_cd22 = \ self._get_cd(xindex, yindex) From 6e3b94b7e51e48795f633f9f9433169db621949d Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Thu, 2 Nov 2023 16:27:34 -0700 Subject: [PATCH 02/36] CADC-12805 - interim commit. 
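The HD36486 test case added here exercises a blueprint that declares only range endpoints (pix/val pairs) for the time and energy axes, with all of the function keywords left as None. As an illustrative sketch only, not part of the patch, the time-axis range declarations in HD36486.blueprint correspond to the following caom2 construction (the same constructor calls caom2blueprint.py already uses); this is what the expected HD36486.xml records as a range-only TemporalWCS with no CoordFunction1D:

    from caom2 import Axis, CoordAxis1D, CoordRange1D, RefCoord, TemporalWCS

    # Chunk.time.axis.range.start.pix/.val and .end.pix/.val from the blueprint
    start = RefCoord(0.5, 59468.76826100005)
    end = RefCoord(1.5, 59640.65652099997)
    axis = CoordAxis1D(Axis('TIME', 'd'), range=CoordRange1D(start, end))
    time_wcs = TemporalWCS(axis)
    time_wcs.timesys = 'UTC'   # Chunk.time.timesys
    time_wcs.exposure = 1.0    # Chunk.time.exposure
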
--- .../data/brite/HD36486/HD36486.blueprint | 93 ++++++++++++ .../tests/data/brite/HD36486/HD36486.module | 13 ++ .../data/brite/HD36486/HD36486.orig.header | 0 .../tests/data/brite/HD36486/HD36486.py | 13 ++ .../tests/data/brite/HD36486/HD36486.xml | 135 ++++++++++++++++++ .../caom2utils/tests/test_collections.py | 45 ++++-- .../caom2utils/tests/test_fits2caom2.py | 2 +- 7 files changed, 285 insertions(+), 16 deletions(-) create mode 100644 caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.blueprint create mode 100644 caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.module create mode 100644 caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.orig.header create mode 100644 caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.py create mode 100644 caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml diff --git a/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.blueprint b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.blueprint new file mode 100644 index 00000000..2a0c672d --- /dev/null +++ b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.blueprint @@ -0,0 +1,93 @@ +Observation.observationID = ['OBSID'], default = None +Observation.type = object +Observation.intent = science +Observation.metaRelease = 2023-10-31T00:45:40.102340 +Observation.metaProducer = brite2caom2/0.1.1 +Observation.algorithm.name = exposure +Observation.instrument.name = UniBRITE +Observation.instrument.keywords = None +Observation.target.name = HD37043 +Observation.target.standard = False +Observation.telescope.geoLocationX = None +Observation.telescope.geoLocationY = None +Observation.telescope.geoLocationZ = None +Observation.telescope.name = UniBRITE +Observation.proposal.id = None +Observation.environment.ambientTemp = None +Plane.productID = timeseries +Plane.metaRelease = 2023-10-31T00:45:40.102340 +Plane.dataRelease = 2023-10-31T00:45:40.102340 +Plane.dataProductType = timeseries +Plane.calibrationLevel = 2 +Plane.metaProducer = brite2caom2/0.1.1 +Plane.provenance.name = BRITE Specific Aperture Photometry - chopping mode +Plane.provenance.version = APa3s2chop +Plane.provenance.project = BRITE-Constellation Nano-Satellites for Astrophysics +Plane.provenance.producer = Adam Popowicz +Plane.provenance.runID = R4 +Plane.provenance.reference = http://brite-wiki.astro.uni.wroc.pl/bwiki/doku.php?id=start +Plane.provenance.lastExecuted = 2023-12-31T00:45:40 +Artifact.productType = _get_artifact_product_type(uri) +Artifact.releaseType = data +Artifact.metaProducer = brite2caom2/0.1.1 +Chunk = include +Chunk.metaProducer = brite2caom2/0.1.1 +Chunk.position.coordsys = ICRS +Chunk.position.equinox = None +Chunk.position.axis.axis1.ctype = RA---TAN +Chunk.position.axis.axis1.cunit = deg +Chunk.position.axis.axis2.ctype = DEC--TAN +Chunk.position.axis.axis2.cunit = deg +Chunk.position.axis.error1.syser = None +Chunk.position.axis.error1.rnder = None +Chunk.position.axis.error2.syser = None +Chunk.position.axis.error2.rnder = None +Chunk.position.axis.function.cd11 = 0.0375 +Chunk.position.axis.function.cd12 = 0.0 +Chunk.position.axis.function.cd21 = 0.0 +Chunk.position.axis.function.cd22 = 0.0375 +Chunk.position.axis.function.dimension.naxis1 = 1 +Chunk.position.axis.function.dimension.naxis2 = 1 +Chunk.position.axis.function.refCoord.coord1.pix = 1.0 +Chunk.position.axis.function.refCoord.coord1.val = 83.85825794708 +Chunk.position.axis.function.refCoord.coord2.pix = 1.0 +Chunk.position.axis.function.refCoord.coord2.val = -5.9099009825 +Chunk.energy.specsys = TOPOCENT 
+Chunk.energy.ssysobs = None +Chunk.energy.restfrq = None +Chunk.energy.restwav = None +Chunk.energy.velosys = None +Chunk.energy.zsource = None +Chunk.energy.ssyssrc = None +Chunk.energy.velang = None +Chunk.energy.bandpassName = Red +Chunk.energy.resolvingPower = 3.8750000000000018 +Chunk.energy.axis.axis.ctype = WAVE +Chunk.energy.axis.axis.cunit = m +Chunk.energy.axis.error.syser = None +Chunk.energy.axis.error.rnder = None +Chunk.energy.axis.function.naxis = None +Chunk.energy.axis.function.delta = None +Chunk.energy.axis.function.refCoord.pix = None +Chunk.energy.axis.function.refCoord.val = None +Chunk.energy.axis.range.start.pix = 0.5 +Chunk.energy.axis.range.start.val = 5.4e-07 +Chunk.energy.axis.range.end.pix = 1.5 +Chunk.energy.axis.range.end.val = 7e-07 +Chunk.time.exposure = 1.0 +Chunk.time.resolution = None +Chunk.time.timesys = UTC +Chunk.time.trefpos = None +Chunk.time.mjdref = None +Chunk.time.axis.axis.ctype = TIME +Chunk.time.axis.axis.cunit = d +Chunk.time.axis.error.syser = None +Chunk.time.axis.error.rnder = None +Chunk.time.axis.function.naxis = None +Chunk.time.axis.function.delta = None +Chunk.time.axis.function.refCoord.pix = None +Chunk.time.axis.function.refCoord.val = None +Chunk.time.axis.range.start.pix = 0.5 +Chunk.time.axis.range.start.val = _get_time_axis_range_start_val(uri) +Chunk.time.axis.range.end.pix = 1.5 +Chunk.time.axis.range.end.val = _get_time_axis_range_end_val(uri) \ No newline at end of file diff --git a/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.module b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.module new file mode 100644 index 00000000..243fcc91 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.module @@ -0,0 +1,13 @@ +from caom2 import ProductType + + +def _get_artifact_product_type(uri): + return ProductType.SCIENCE + + +def _get_time_axis_range_end_val(uri): + return 59640.65652099997 + + +def _get_time_axis_range_start_val(uri): + return 59468.76826100005 diff --git a/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.orig.header b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.orig.header new file mode 100644 index 00000000..e69de29b diff --git a/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.py b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.py new file mode 100644 index 00000000..243fcc91 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.py @@ -0,0 +1,13 @@ +from caom2 import ProductType + + +def _get_artifact_product_type(uri): + return ProductType.SCIENCE + + +def _get_time_axis_range_end_val(uri): + return 59640.65652099997 + + +def _get_time_axis_range_start_val(uri): + return 59468.76826100005 diff --git a/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml new file mode 100644 index 00000000..91b2dfe1 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml @@ -0,0 +1,135 @@ + + + BRITE-Constellation + HD36486_65-Ori-VIII-2021_BAb_1_5_A + 2023-10-31T00:45:40.102 + + exposure + + object + science + + HD37043 + + + UniBRITE + + + UniBRITE + + + + HD36486_65-Ori-VIII-2021_BAb_1_5_A + 2023-10-31T00:45:40.102 + 2023-10-31T00:45:40.102 + timeseries + 2 + + BRITE Specific Aperture Photometry - chopping mode + APa3s2chop + BRITE-Constellation Nano-Satellites for Astrophysics + Adam Popowicz<Adam.Popowicz@polsl.pl> + R4 + http://brite-wiki.astro.uni.wroc.pl/bwiki/doku.php?id=start + 2023-12-31T00:45:40.000 + + + + 
ad:BRITE-Constellation/HD36486.orig + science + data + text/plain + 1600290 + md5:4d3e7821c9ac7167349f999feec20536 + + + 0 + + + 4 + 1 + 2 + 3 + + + + RA---TAN + deg + + + DEC--TAN + deg + + + + 1 + 1 + + + + 1.0 + 83.85825794708 + + + 1.0 + -5.9099009825 + + + 0.0375 + 0.0 + 0.0 + 0.0375 + + + ICRS + + + + + WAVE + m + + + + 0.5 + 5.4e-07 + + + 1.5 + 7e-07 + + + + TOPOCENT + Red + 3.8750000000000018 + + + + + TIME + d + + + + 0.5 + 59468.76826100005 + + + 1.5 + 59640.65652099997 + + + + UTC + 1.0 + + + + + + + + + + diff --git a/caom2utils/caom2utils/tests/test_collections.py b/caom2utils/caom2utils/tests/test_collections.py index 802c9c06..916d5b5b 100644 --- a/caom2utils/caom2utils/tests/test_collections.py +++ b/caom2utils/caom2utils/tests/test_collections.py @@ -66,6 +66,8 @@ # *********************************************************************** # +from astropy.io import fits + from cadcdata import FileInfo from caom2utils import legacy, caom2blueprint, data_util from caom2 import ObservationReader, ObservationWriter @@ -105,7 +107,7 @@ def test_differences(directory): product_id = f'--productID {prod_id}' collection_id = expected.collection data_files = _get_files( - ['header', 'png', 'gif', 'cat', 'fits', 'h5'], directory) + ['header', 'png', 'gif', 'cat', 'fits', 'h5', 'orig'], directory) assert data_files file_meta = _get_uris(collection_id, data_files, expected) @@ -172,20 +174,31 @@ def _vos_client_meta(subject, uri): file_type='application/fits') def _header(fqn): - # during operation, want to use astropy on FITS files - # but during testing want to use headers and built-in Python file - # operations - from urllib.parse import urlparse - from astropy.io import fits - file_uri = urlparse(fqn) - try: - fits_header = open(file_uri.path).read() - headers = data_util.make_headers_from_string(fits_header) - except UnicodeDecodeError: - hdulist = fits.open(fqn, memmap=True, lazy_load_hdus=True) - hdulist.verify('fix') - hdulist.close() - headers = [h.header for h in hdulist] + if '.fits' in fqn: + # during operation, want to use astropy on FITS files + # but during testing want to use headers and built-in Python file + # operations + from urllib.parse import urlparse + file_uri = urlparse(fqn) + try: + fits_header = open(file_uri.path).read() + headers = data_util.make_headers_from_string(fits_header) + except UnicodeDecodeError: + hdulist = fits.open(fqn, memmap=True, lazy_load_hdus=True) + hdulist.verify('fix') + hdulist.close() + headers = [h.header for h in hdulist] + else: + # the BRITE record tests blueprint range declarations over-riding function declarations. Do the minimal + # header construction to get that portion of the test to run. 
+ header = fits.Header() + header['BITPIX'] = 8 + header['NAXIS'] = 4 + header['NAXIS1'] = 1 + header['NAXIS2'] = 1 + header['NAXIS3'] = 1 + header['NAXIS4'] = 1 + headers = [header] return headers swc_si_mock.return_value.cadcinfo.side_effect = info_mock @@ -236,6 +249,8 @@ def _get_cardinality(directory): return '--lineage def/cadc:def/def.h5' else: return '--lineage star04239531/cadc:TAOSII/taos2_20220201T201317Z_star04239531.h5' + elif 'brite' in directory: + return '--lineage HD36486_65-Ori-VIII-2021_BAb_1_5_A/ad:BRITE-Constellation/HD36486.orig' else: return '' diff --git a/caom2utils/caom2utils/tests/test_fits2caom2.py b/caom2utils/caom2utils/tests/test_fits2caom2.py index 5a33083f..966f7c95 100755 --- a/caom2utils/caom2utils/tests/test_fits2caom2.py +++ b/caom2utils/caom2utils/tests/test_fits2caom2.py @@ -1545,7 +1545,7 @@ def get_time_exposure(self, ext): hdr2['BITPIX'] = -32 hdr2['CTYPE1'] = 'TIME' hdr2['CUNIT1'] = 'd' - hdr2['CRPIX1'] = '1' + hdr2['CRPIX1'] = 1.0 hdr2['CRVAL1'] = 590000.00000 test_blueprint = ObsBlueprint(instantiated_class=test_instantiated) test_blueprint.configure_time_axis(1) From e982c3e13a77bbfae80a07a759188c1ba0409a4b Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Fri, 3 Nov 2023 14:05:12 -0700 Subject: [PATCH 03/36] CADC-12805 - interim commit. --- caom2utils/caom2utils/caom2blueprint.py | 133 ++++++++++++------ .../tests/data/brite/HD36486/HD36486.xml | 1 - .../caom2utils/tests/test_fits2caom2.py | 2 +- 3 files changed, 90 insertions(+), 46 deletions(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index 46196d9d..04e9a3c2 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -2268,42 +2268,54 @@ def augment_artifact(self, artifact, index): 'Chunk.metaProducer', index=0, current=chunk.meta_producer) self._get_chunk_naxis(chunk, index) + + # order by which the blueprint is used to set WCS information: + # 1 - try to construct the information for an axis from WCS information + # 2 - if the WCS information is insufficient, try to construct the information from the blueprint + # 3 - Always try to fill the range metadata from the blueprint. 
if self.blueprint._pos_axes_configed: self._wcs_parser.augment_position(chunk) - if chunk.position is None: + if chunk.position is None or chunk.position.axis is None or chunk.position.axis.function is None: self._try_position_with_blueprint(chunk, index) - if chunk.position: - chunk.position.resolution = _to_float(self._get_from_list( - 'Chunk.position.resolution', index=index, current=chunk.position.resolution)) + if self.blueprint._energy_axis_configed: self._wcs_parser.augment_energy(chunk) - if chunk.energy: - chunk.energy.bandpass_name = self._get_from_list( - 'Chunk.energy.bandpassName', index=index) - chunk.energy.transition = self._get_energy_transition( - chunk.energy.transition) - chunk.energy.resolving_power = _to_float(self._get_from_list( - 'Chunk.energy.resolvingPower', index=index)) - else: - if self.blueprint._energy_axis_configed: + if chunk.energy is None or chunk.energy.axis is None or chunk.energy.axis.function is None: self._try_energy_with_blueprint(chunk, index) + if self.blueprint._time_axis_configed: self._wcs_parser.augment_temporal(chunk) - if chunk.time is None: + if chunk.time is None or chunk.time.axis is None or chunk.time.axis.function is None: self._try_time_with_blueprint(chunk, index) + if self.blueprint._polarization_axis_configed: self._wcs_parser.augment_polarization(chunk) - if chunk.polarization is None: + if chunk.polarization is None or chunk.polarization.axis is None or chunk.polarization.axis.function is None: self._try_polarization_with_blueprint(chunk, index) + if self.blueprint._obs_axis_configed: self._wcs_parser.augment_observable(chunk) + # ObservableAxis is defined with dependent, independent, instead of one of the CoordAxis* + # so the checks for "already created" are inconsistent with other axes if chunk.observable is None and chunk.observable_axis is None: self._try_observable_with_blueprint(chunk, index) + if self.blueprint._custom_axis_configed: self._wcs_parser.augment_custom(chunk) + if chunk.custom is None or chunk.custom.axis is None or chunk.custom.axis.function is None: + self._try_custom_with_blueprint(chunk, index) + + if chunk.position: + chunk.position.resolution = _to_float(self._get_from_list( + 'Chunk.position.resolution', index=index, current=chunk.position.resolution)) + if chunk.energy: + chunk.energy.bandpass_name = self._get_from_list( + 'Chunk.energy.bandpassName', index=index) + chunk.energy.transition = self._get_energy_transition( + chunk.energy.transition) + chunk.energy.resolving_power = _to_float(self._get_from_list( + 'Chunk.energy.resolvingPower', index=index)) - # try to set smaller bits of the chunk WCS elements from the - # blueprint self._try_range_with_blueprint(chunk, index) self.logger.debug( @@ -2587,17 +2599,16 @@ def _get_metrics(self, current): self.logger.debug('End Metrics augmentation.') return metrics - def _get_naxis(self, label, index): + def _get_axis(self, label, index): """Helper function to construct a CoordAxis1D instance, with all it's members, from the blueprint. - :param label: axis name - must be one of 'energy', 'time', or - 'polarization', as it's used for the blueprint lookup. + :param label: axis name - must be one of 'custom', 'energy', 'time', or 'polarization', as it's used for the + blueprint lookup. 
:param index: which blueprint index to find a value in :return an instance of CoordAxis1D """ - self.logger.debug( - f'Begin {label} naxis construction from blueprint.') + self.logger.debug(f'Begin {label} axis construction from blueprint.') aug_axis_ctype = self._get_from_list( f'Chunk.{label}.axis.axis.ctype', index) @@ -2633,6 +2644,16 @@ def _get_naxis(self, label, index): f'Creating {label} function for {self.uri} from blueprint') aug_naxis = None + # aug_range = self._try_range_return(index, label) + # if aug_axis is not None: + # if aug_range is not None: + # aug_naxis = CoordAxis1D(axis=aug_axis, error=aug_error, range=aug_range) + # aug_naxis.function = None + # self.logger.error(f'Creating range {label} CoordAxis1D for {self.uri} from blueprint') + # elif aug_function is not None: + # aug_naxis = CoordAxis1D(aug_axis, aug_error, None, None, aug_function) + # self.logger.error(f'Creating function {label} CoordAxis1D for {self.uri} from blueprint') + if aug_function is None: aug_range = self._try_range_return(index, label) if aug_axis is not None and aug_range is not None: @@ -2648,8 +2669,7 @@ def _get_naxis(self, label, index): self.logger.debug( f'Creating function {label} CoordAxis1D for {self.uri} ' f'from blueprint') - self.logger.debug( - f'End {label} naxis construction from blueprint.') + self.logger.debug(f'End {label} axis construction from blueprint.') return aug_naxis def _get_observable(self, current): @@ -2887,6 +2907,25 @@ def _cast_as_bool(self, from_value): result = True return result + def _try_custom_with_blueprint(self, chunk, index): + """ + A mechanism to augment the Custom WCS completely from the blueprint. Do nothing if the WCS information cannot + be correctly created. + + :param chunk: The chunk to modify with the addition of custom information. + :param index: The index in the blueprint for looking up plan information. + """ + self.logger.debug('Begin augmentation with blueprint for custom.') + aug_naxis = self._get_naxis('custom', index) + if aug_naxis is None: + self.logger.debug('No blueprint custom information.') + else: + if chunk.custom: + chunk.custom.axis = aug_naxis + else: + chunk.custom = CustomWCS(aug_naxis) + self.logger.debug('End augmentation with blueprint for custom.') + def _try_energy_with_blueprint(self, chunk, index): """ A mechanism to augment the Energy WCS completely from the blueprint. @@ -2898,16 +2937,15 @@ def _try_energy_with_blueprint(self, chunk, index): information. 
""" self.logger.debug('Begin augmentation with blueprint for energy.') - aug_naxis = self._get_naxis('energy', index) - + aug_axis = self._get_axis('energy', index) specsys = _to_str(self._get_from_list('Chunk.energy.specsys', index)) - if aug_naxis is None: + if aug_axis is None: self.logger.debug('No blueprint energy information.') else: if not chunk.energy: - chunk.energy = SpectralWCS(aug_naxis, specsys) + chunk.energy = SpectralWCS(aug_axis, specsys) else: - chunk.energy.naxis = aug_naxis + chunk.energy.axis = aug_axis chunk.energy.specsys = specsys if chunk.energy is not None: @@ -2972,12 +3010,12 @@ def _try_polarization_with_blueprint(self, chunk, index): 'polarization.') chunk.polarization_axis = _to_int( self._get_from_list('Chunk.polarizationAxis', index)) - aug_naxis = self._get_naxis('polarization', index) - if aug_naxis is not None: + aug_axis = self._get_axis('polarization', index) + if aug_axis is not None: if chunk.polarization: - chunk.polarization.naxis = aug_naxis + chunk.polarization.axis = aug_axis else: - chunk.polarization = PolarizationWCS(aug_naxis) + chunk.polarization = PolarizationWCS(aug_axis) self.logger.debug( f'Creating PolarizationWCS for {self.uri} from blueprint') @@ -2985,8 +3023,7 @@ def _try_polarization_with_blueprint(self, chunk, index): def _try_position_range(self, chunk, index): self.logger.debug('Try to set the range for position from blueprint, since there is no function') - if (self.blueprint._pos_axes_configed and chunk.position is not None - and chunk.position.axis is not None and chunk.position.axis.function is None): + if self.blueprint._pos_axes_configed and chunk.position is not None and chunk.position.axis is not None: aug_range_c1_start = self._two_param_constructor( 'Chunk.position.axis.range.start.coord1.pix', 'Chunk.position.axis.range.start.coord1.val', @@ -3130,16 +3167,24 @@ def _try_range_with_blueprint(self, chunk, index): not covered by the *WcsParser classes. 
Per PD 19/04/18, bounds and range are not covered by WCS keywords.""" - for i in ['energy', 'time', 'polarization']: + for i in ['custom', 'energy', 'time', 'polarization']: axis_configed = getattr(self.blueprint, f'_{i}_axis_configed') if axis_configed: wcs = getattr(chunk, i) if wcs is not None and wcs.axis is not None: - # only try to set the Range information if the Function doesn't exist - if wcs.axis.range is None and wcs.axis.function is None: - self._try_range(wcs, index, i) + self._try_range(wcs, index, i) + if wcs.axis.function is not None and wcs.axis.range is not None: + # prefer range if set by the blueprint, as that is meant to override WCS + wcs.axis.function = None + setattr(chunk, f'{i}_axis', None) + self._try_position_range(chunk, index) + if (chunk.position is not None and chunk.position.axis is not None + and chunk.position.axis.function is not None and chunk.position.axis.range is not None): + chunk.position.axis.function = None + chunk.position_axis_1 = None + chunk.position_axis_2 = None def _try_time_with_blueprint(self, chunk, index): """ @@ -3154,12 +3199,12 @@ def _try_time_with_blueprint(self, chunk, index): self.logger.debug('Begin augmentation with blueprint for temporal.') chunk.time_axis = _to_int(self._get_from_list('Chunk.timeAxis', index)) - aug_naxis = self._get_naxis('time', index) - if aug_naxis is not None: + aug_axis = self._get_axis('time', index) + if aug_axis is not None: if chunk.time: - chunk.time.naxis = aug_naxis + chunk.time.axis = aug_axis else: - chunk.time = TemporalWCS(aug_naxis) + chunk.time = TemporalWCS(aug_axis) self.logger.debug('Creating TemporalWCS for {} from blueprint'. format(self.uri)) if chunk.time is not None: @@ -4269,7 +4314,7 @@ def _get_ref_coord(self, index): aug_crpix = _to_float(self._sanitize(self.wcs.crpix[index])) aug_crval = _to_float(self._sanitize(self.wcs.crval[index])) aug_ref_coord = None - if aug_crpix and aug_crval: + if aug_crpix is not None and aug_crval is not None: aug_ref_coord = RefCoord(aug_crpix, aug_crval) return aug_ref_coord diff --git a/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml index 91b2dfe1..0ceef5e9 100644 --- a/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml +++ b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml @@ -49,7 +49,6 @@ 4 1 2 - 3 diff --git a/caom2utils/caom2utils/tests/test_fits2caom2.py b/caom2utils/caom2utils/tests/test_fits2caom2.py index 966f7c95..e6b4a211 100755 --- a/caom2utils/caom2utils/tests/test_fits2caom2.py +++ b/caom2utils/caom2utils/tests/test_fits2caom2.py @@ -156,7 +156,7 @@ def test_augment_energy(): ex = _get_from_str_xml(EXPECTED_ENERGY_XML, ObservationReader()._get_spectral_wcs, 'energy') result = get_differences(ex, energy) - assert result is None, repr(energy) + assert result is None, result def test_hdf5_wcs_parser_set_wcs(): From bced5686707e40dee4123da00bd690726a53b77c Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Sat, 4 Nov 2023 17:16:50 -0700 Subject: [PATCH 04/36] CADC-12805 - interim commit - clean up finished, regression testing to occur. 
--- caom2utils/caom2utils/caom2blueprint.py | 395 +++++++----------- .../caom2utils/tests/test_fits2caom2.py | 67 ++- 2 files changed, 207 insertions(+), 255 deletions(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index 04e9a3c2..c281c752 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -2280,43 +2280,23 @@ def augment_artifact(self, artifact, index): if self.blueprint._energy_axis_configed: self._wcs_parser.augment_energy(chunk) - if chunk.energy is None or chunk.energy.axis is None or chunk.energy.axis.function is None: - self._try_energy_with_blueprint(chunk, index) + self._try_energy_with_blueprint(chunk, index) if self.blueprint._time_axis_configed: self._wcs_parser.augment_temporal(chunk) - if chunk.time is None or chunk.time.axis is None or chunk.time.axis.function is None: - self._try_time_with_blueprint(chunk, index) + self._try_time_with_blueprint(chunk, index) if self.blueprint._polarization_axis_configed: self._wcs_parser.augment_polarization(chunk) - if chunk.polarization is None or chunk.polarization.axis is None or chunk.polarization.axis.function is None: - self._try_polarization_with_blueprint(chunk, index) + self._try_polarization_with_blueprint(chunk, index) if self.blueprint._obs_axis_configed: self._wcs_parser.augment_observable(chunk) - # ObservableAxis is defined with dependent, independent, instead of one of the CoordAxis* - # so the checks for "already created" are inconsistent with other axes - if chunk.observable is None and chunk.observable_axis is None: - self._try_observable_with_blueprint(chunk, index) + self._try_observable_with_blueprint(chunk, index) if self.blueprint._custom_axis_configed: self._wcs_parser.augment_custom(chunk) - if chunk.custom is None or chunk.custom.axis is None or chunk.custom.axis.function is None: - self._try_custom_with_blueprint(chunk, index) - - if chunk.position: - chunk.position.resolution = _to_float(self._get_from_list( - 'Chunk.position.resolution', index=index, current=chunk.position.resolution)) - if chunk.energy: - chunk.energy.bandpass_name = self._get_from_list( - 'Chunk.energy.bandpassName', index=index) - chunk.energy.transition = self._get_energy_transition( - chunk.energy.transition) - chunk.energy.resolving_power = _to_float(self._get_from_list( - 'Chunk.energy.resolvingPower', index=index)) - - self._try_range_with_blueprint(chunk, index) + self._try_custom_with_blueprint(chunk, index) self.logger.debug( f'End content artifact augmentation for {artifact.uri}.') @@ -2599,7 +2579,7 @@ def _get_metrics(self, current): self.logger.debug('End Metrics augmentation.') return metrics - def _get_axis(self, label, index): + def _get_axis_wcs(self, label, wcs, index): """Helper function to construct a CoordAxis1D instance, with all it's members, from the blueprint. 
@@ -2610,65 +2590,44 @@ def _get_axis(self, label, index): """ self.logger.debug(f'Begin {label} axis construction from blueprint.') - aug_axis_ctype = self._get_from_list( - f'Chunk.{label}.axis.axis.ctype', index) - aug_axis_cunit = self._get_from_list( - f'Chunk.{label}.axis.axis.cunit', index) aug_axis = None - if aug_axis_ctype is not None: - aug_axis = Axis(aug_axis_ctype, aug_axis_cunit) - self.logger.debug( - f'Creating {label} Axis for {self.uri} from blueprint') - - aug_error = self._two_param_constructor( - f'Chunk.{label}.axis.error.syser', - f'Chunk.{label}.axis.error.rnder', - index, _to_float, CoordError) - aug_ref_coord = self._two_param_constructor( - f'Chunk.{label}.axis.function.refCoord.pix', - f'Chunk.{label}.axis.function.refCoord.val', - index, _to_float, RefCoord) - aug_delta = _to_float( - self._get_from_list(f'Chunk.{label}.axis.function.delta', - index)) - aug_length = _to_int( - self._get_from_list(f'Chunk.{label}.axis.function.naxis', - index)) - - aug_function = None - if (aug_length is not None and aug_delta is not None and - aug_ref_coord is not None): - aug_function = \ - CoordFunction1D(aug_length, aug_delta, aug_ref_coord) - self.logger.debug( - f'Creating {label} function for {self.uri} from blueprint') + aug_error = None + if wcs is not None and wcs.axis is not None and wcs.axis.axis is not None: + aug_axis = wcs.axis.axis + aug_error = wcs.axis.error + else: + aug_axis_ctype = self._get_from_list(f'Chunk.{label}.axis.axis.ctype', index) + aug_axis_cunit = self._get_from_list(f'Chunk.{label}.axis.axis.cunit', index) + if aug_axis_ctype is not None: + aug_axis = Axis(aug_axis_ctype, aug_axis_cunit) + self.logger.debug(f'Creating {label} Axis for {self.uri} from blueprint') + + aug_error = self._two_param_constructor( + f'Chunk.{label}.axis.error.syser', + f'Chunk.{label}.axis.error.rnder', + index, _to_float, CoordError) aug_naxis = None - # aug_range = self._try_range_return(index, label) - # if aug_axis is not None: - # if aug_range is not None: - # aug_naxis = CoordAxis1D(axis=aug_axis, error=aug_error, range=aug_range) - # aug_naxis.function = None - # self.logger.error(f'Creating range {label} CoordAxis1D for {self.uri} from blueprint') - # elif aug_function is not None: - # aug_naxis = CoordAxis1D(aug_axis, aug_error, None, None, aug_function) - # self.logger.error(f'Creating function {label} CoordAxis1D for {self.uri} from blueprint') - - if aug_function is None: - aug_range = self._try_range_return(index, label) - if aug_axis is not None and aug_range is not None: - aug_naxis = CoordAxis1D( - axis=aug_axis, error=aug_error, range=aug_range) - self.logger.debug( - f'Creating range {label} CoordAxis1D for {self.uri} from ' - f'blueprint') - else: - if aug_axis is not None and aug_function is not None: - aug_naxis = CoordAxis1D(aug_axis, aug_error, None, None, - aug_function) - self.logger.debug( - f'Creating function {label} CoordAxis1D for {self.uri} ' - f'from blueprint') + aug_range = self._try_range(index, label) + if aug_axis is not None: + if aug_range is None: + if wcs is None or wcs.axis is None or wcs.axis.function is None: + aug_ref_coord = self._two_param_constructor( + f'Chunk.{label}.axis.function.refCoord.pix', + f'Chunk.{label}.axis.function.refCoord.val', + index, _to_float, RefCoord) + aug_delta = _to_float(self._get_from_list(f'Chunk.{label}.axis.function.delta', index)) + aug_length = _to_int(self._get_from_list(f'Chunk.{label}.axis.function.naxis', index)) + aug_function = None + if aug_length is not None and aug_delta is not 
None and aug_ref_coord is not None: + aug_function = CoordFunction1D(aug_length, aug_delta, aug_ref_coord) + aug_naxis = CoordAxis1D(aug_axis, aug_error, None, None, aug_function) + self.logger.debug(f'Creating function {label} CoordAxis1D for {self.uri} from blueprint') + else: + aug_naxis = CoordAxis1D(axis=aug_axis, error=aug_error, range=aug_range) + aug_naxis.function = None + self.logger.debug(f'Creating range {label} CoordAxis1D for {self.uri} from blueprint') + self.logger.debug(f'End {label} axis construction from blueprint.') return aug_naxis @@ -2916,14 +2875,13 @@ def _try_custom_with_blueprint(self, chunk, index): :param index: The index in the blueprint for looking up plan information. """ self.logger.debug('Begin augmentation with blueprint for custom.') - aug_naxis = self._get_naxis('custom', index) + aug_naxis = self._get_axis_wcs('custom', chunk.custom, index) if aug_naxis is None: self.logger.debug('No blueprint custom information.') else: - if chunk.custom: - chunk.custom.axis = aug_naxis - else: - chunk.custom = CustomWCS(aug_naxis) + # always create a new CustomWCS instance because there's no setter for 'axis' parameter + chunk.custom = CustomWCS(aug_naxis) + chunk.custom_axis = _to_int(self._get_from_list('Chunk.customAxis', index)) self.logger.debug('End augmentation with blueprint for custom.') def _try_energy_with_blueprint(self, chunk, index): @@ -2937,7 +2895,7 @@ def _try_energy_with_blueprint(self, chunk, index): information. """ self.logger.debug('Begin augmentation with blueprint for energy.') - aug_axis = self._get_axis('energy', index) + aug_axis = self._get_axis_wcs('energy', chunk.energy, index) specsys = _to_str(self._get_from_list('Chunk.energy.specsys', index)) if aug_axis is None: self.logger.debug('No blueprint energy information.') @@ -2947,28 +2905,19 @@ def _try_energy_with_blueprint(self, chunk, index): else: chunk.energy.axis = aug_axis chunk.energy.specsys = specsys + chunk.energy_axis = _to_int(self._get_from_list('Chunk.energyAxis', index)) - if chunk.energy is not None: - chunk.energy.ssysobs = self._get_from_list( - 'Chunk.energy.ssysobs', index) - chunk.energy.restfrq = self._get_from_list( - 'Chunk.energy.restfrq', index) - chunk.energy.restwav = self._get_from_list( - 'Chunk.energy.restwav', index) - chunk.energy.velosys = self._get_from_list( - 'Chunk.energy.velosys', index) - chunk.energy.zsource = self._get_from_list( - 'Chunk.energy.zsource', index) - chunk.energy.ssyssrc = self._get_from_list( - 'Chunk.energy.ssyssrc', index) - chunk.energy.velang = self._get_from_list( - 'Chunk.energy.velang', index) - chunk.energy.bandpass_name = self._get_from_list( - 'Chunk.energy.bandpassName', index) - chunk.energy.transition = self._get_from_list( - 'Chunk.energy.transition', index) - chunk.energy.resolving_power = _to_float(self._get_from_list( - 'Chunk.energy.resolvingPower', index)) + if chunk.energy: + chunk.energy.ssysobs = self._get_from_list('Chunk.energy.ssysobs', index) + chunk.energy.restfrq = self._get_from_list('Chunk.energy.restfrq', index) + chunk.energy.restwav = self._get_from_list('Chunk.energy.restwav', index) + chunk.energy.velosys = self._get_from_list('Chunk.energy.velosys', index) + chunk.energy.zsource = self._get_from_list('Chunk.energy.zsource', index) + chunk.energy.ssyssrc = self._get_from_list('Chunk.energy.ssyssrc', index) + chunk.energy.velang = self._get_from_list('Chunk.energy.velang', index) + chunk.energy.bandpass_name = self._get_from_list( 'Chunk.energy.bandpassName', index) + chunk.energy.transition = 
self._get_energy_transition(chunk.energy.transition) + chunk.energy.resolving_power = _to_float(self._get_from_list('Chunk.energy.resolvingPower', index)) self.logger.debug('End augmentation with blueprint for energy.') def _try_observable_with_blueprint(self, chunk, index): @@ -2984,8 +2933,6 @@ def _try_observable_with_blueprint(self, chunk, index): """ self.logger.debug('Begin augmentation with blueprint for ' 'observable.') - chunk.observable_axis = _to_int( - self._get_from_list('Chunk.observableAxis', index)) aug_axis = self._two_param_constructor( 'Chunk.observable.dependent.axis.ctype', 'Chunk.observable.dependent.axis.cunit', index, _to_str, Axis) @@ -2993,6 +2940,7 @@ def _try_observable_with_blueprint(self, chunk, index): self._get_from_list('Chunk.observable.dependent.bin', index)) if aug_axis is not None and aug_bin is not None: chunk.observable = ObservableAxis(Slice(aug_axis, aug_bin)) + chunk.observable_axis = _to_int(self._get_from_list('Chunk.observableAxis', index)) self.logger.debug('End augmentation with blueprint for polarization.') def _try_polarization_with_blueprint(self, chunk, index): @@ -3008,44 +2956,43 @@ def _try_polarization_with_blueprint(self, chunk, index): """ self.logger.debug('Begin augmentation with blueprint for ' 'polarization.') - chunk.polarization_axis = _to_int( - self._get_from_list('Chunk.polarizationAxis', index)) - aug_axis = self._get_axis('polarization', index) + aug_axis = self._get_axis_wcs('polarization', chunk.polarization, index) if aug_axis is not None: if chunk.polarization: chunk.polarization.axis = aug_axis else: chunk.polarization = PolarizationWCS(aug_axis) - self.logger.debug( - f'Creating PolarizationWCS for {self.uri} from blueprint') + chunk.polarization_axis = _to_int(self._get_from_list('Chunk.polarizationAxis', index)) + self.logger.debug(f'Creating PolarizationWCS for {self.uri} from blueprint') self.logger.debug('End augmentation with blueprint for polarization.') - def _try_position_range(self, chunk, index): + def _try_position_range(self, index): self.logger.debug('Try to set the range for position from blueprint, since there is no function') - if self.blueprint._pos_axes_configed and chunk.position is not None and chunk.position.axis is not None: - aug_range_c1_start = self._two_param_constructor( - 'Chunk.position.axis.range.start.coord1.pix', - 'Chunk.position.axis.range.start.coord1.val', - index, _to_float, RefCoord) - aug_range_c1_end = self._two_param_constructor( - 'Chunk.position.axis.range.end.coord1.pix', - 'Chunk.position.axis.range.end.coord1.val', - index, _to_float, RefCoord) - aug_range_c2_start = self._two_param_constructor( - 'Chunk.position.axis.range.start.coord2.pix', - 'Chunk.position.axis.range.start.coord2.val', - index, _to_float, RefCoord) - aug_range_c2_end = self._two_param_constructor( - 'Chunk.position.axis.range.end.coord2.pix', - 'Chunk.position.axis.range.end.coord2.val', - index, _to_float, RefCoord) - if (aug_range_c1_start and aug_range_c1_end and aug_range_c2_start - and aug_range_c2_end): - chunk.position.axis.range = CoordRange2D( - Coord2D(aug_range_c1_start, aug_range_c1_end), - Coord2D(aug_range_c2_start, aug_range_c2_end)) - self.logger.debug('Completed setting range for position') + aug_range = None + aug_range_c1_start = self._two_param_constructor( + 'Chunk.position.axis.range.start.coord1.pix', + 'Chunk.position.axis.range.start.coord1.val', + index, _to_float, RefCoord) + aug_range_c1_end = self._two_param_constructor( + 'Chunk.position.axis.range.end.coord1.pix', + 
'Chunk.position.axis.range.end.coord1.val', + index, _to_float, RefCoord) + aug_range_c2_start = self._two_param_constructor( + 'Chunk.position.axis.range.start.coord2.pix', + 'Chunk.position.axis.range.start.coord2.val', + index, _to_float, RefCoord) + aug_range_c2_end = self._two_param_constructor( + 'Chunk.position.axis.range.end.coord2.pix', + 'Chunk.position.axis.range.end.coord2.val', + index, _to_float, RefCoord) + if (aug_range_c1_start and aug_range_c1_end and aug_range_c2_start + and aug_range_c2_end): + aug_range = CoordRange2D( + Coord2D(aug_range_c1_start, aug_range_c1_end), + Coord2D(aug_range_c2_start, aug_range_c2_end)) + self.logger.debug('Completed setting range for position') + return aug_range def _try_position_with_blueprint(self, chunk, index): """ @@ -3058,63 +3005,69 @@ def _try_position_with_blueprint(self, chunk, index): information. """ self.logger.debug('Begin augmentation with blueprint for position.') - - aug_x_axis = self._two_param_constructor( - 'Chunk.position.axis.axis1.ctype', - 'Chunk.position.axis.axis1.cunit', index, _to_str, Axis) - aug_y_axis = self._two_param_constructor( - 'Chunk.position.axis.axis2.ctype', - 'Chunk.position.axis.axis2.cunit', index, _to_str, Axis) - aug_x_error = self._two_param_constructor( - 'Chunk.position.axis.error1.syser', - 'Chunk.position.axis.error1.rnder', index, _to_float, CoordError) - aug_y_error = self._two_param_constructor( - 'Chunk.position.axis.error2.syser', - 'Chunk.position.axis.error2.rnder', index, _to_float, CoordError) - aug_dimension = self._two_param_constructor( - 'Chunk.position.axis.function.dimension.naxis1', - 'Chunk.position.axis.function.dimension.naxis2', - index, _to_int, Dimension2D) - aug_x_ref_coord = self._two_param_constructor( - 'Chunk.position.axis.function.refCoord.coord1.pix', - 'Chunk.position.axis.function.refCoord.coord1.val', - index, _to_float, RefCoord) - aug_y_ref_coord = self._two_param_constructor( - 'Chunk.position.axis.function.refCoord.coord2.pix', - 'Chunk.position.axis.function.refCoord.coord2.val', - index, _to_float, RefCoord) - aug_cd11 = _to_float(self._get_from_list( - 'Chunk.position.axis.function.cd11', index)) - aug_cd12 = _to_float(self._get_from_list( - 'Chunk.position.axis.function.cd12', index)) - aug_cd21 = _to_float(self._get_from_list( - 'Chunk.position.axis.function.cd21', index)) - aug_cd22 = _to_float(self._get_from_list( - 'Chunk.position.axis.function.cd22', index)) - - aug_ref_coord = None - if aug_x_ref_coord is not None and aug_y_ref_coord is not None: - aug_ref_coord = Coord2D(aug_x_ref_coord, aug_y_ref_coord) - self.logger.debug( - f'Creating position Coord2D for {self.uri}') - - aug_function = None - if (aug_dimension is not None and aug_ref_coord is not None and - aug_cd11 is not None and aug_cd12 is not None and - aug_cd21 is not None and aug_cd22 is not None): - aug_function = CoordFunction2D(aug_dimension, aug_ref_coord, - aug_cd11, aug_cd12, aug_cd21, - aug_cd22) - self.logger.debug( - f'Creating position CoordFunction2D for {self.uri}') - - aug_axis = None - if (aug_x_axis is not None and aug_y_axis is not None and - aug_function is not None): - aug_axis = CoordAxis2D(aug_x_axis, aug_y_axis, aug_x_error, - aug_y_error, None, None, aug_function) - self.logger.debug( - f'Creating position CoordAxis2D for {self.uri}') + if (chunk.position is not None and chunk.position.axis is not None and chunk.position.axis.axis1 is not None + and chunk.position.axis.axis2 is not None): + aug_x_axis = chunk.position.axis.axis1 + aug_y_axis = 
chunk.position.axis.axis2 + aug_x_error = chunk.position.axis.error1 + aug_y_error = chunk.position.axis.error2 + else: + aug_x_axis = self._two_param_constructor( + 'Chunk.position.axis.axis1.ctype', + 'Chunk.position.axis.axis1.cunit', index, _to_str, Axis) + aug_y_axis = self._two_param_constructor( + 'Chunk.position.axis.axis2.ctype', + 'Chunk.position.axis.axis2.cunit', index, _to_str, Axis) + aug_x_error = self._two_param_constructor( + 'Chunk.position.axis.error1.syser', + 'Chunk.position.axis.error1.rnder', index, _to_float, CoordError) + aug_y_error = self._two_param_constructor( + 'Chunk.position.axis.error2.syser', + 'Chunk.position.axis.error2.rnder', index, _to_float, CoordError) + aug_range = self._try_position_range(index) + if aug_range is None: + if chunk.position is None or chunk.position.axis is None or chunk.position.axis.function is None: + aug_dimension = self._two_param_constructor( + 'Chunk.position.axis.function.dimension.naxis1', + 'Chunk.position.axis.function.dimension.naxis2', + index, _to_int, Dimension2D) + aug_x_ref_coord = self._two_param_constructor( + 'Chunk.position.axis.function.refCoord.coord1.pix', + 'Chunk.position.axis.function.refCoord.coord1.val', + index, _to_float, RefCoord) + aug_y_ref_coord = self._two_param_constructor( + 'Chunk.position.axis.function.refCoord.coord2.pix', + 'Chunk.position.axis.function.refCoord.coord2.val', + index, _to_float, RefCoord) + aug_cd11 = _to_float(self._get_from_list('Chunk.position.axis.function.cd11', index)) + aug_cd12 = _to_float(self._get_from_list('Chunk.position.axis.function.cd12', index)) + aug_cd21 = _to_float(self._get_from_list('Chunk.position.axis.function.cd21', index)) + aug_cd22 = _to_float(self._get_from_list('Chunk.position.axis.function.cd22', index)) + + aug_ref_coord = None + if aug_x_ref_coord is not None and aug_y_ref_coord is not None: + aug_ref_coord = Coord2D(aug_x_ref_coord, aug_y_ref_coord) + self.logger.debug(f'Creating position Coord2D for {self.uri}') + + aug_function = None + if (aug_dimension is not None and aug_ref_coord is not None and + aug_cd11 is not None and aug_cd12 is not None and + aug_cd21 is not None and aug_cd22 is not None): + aug_function = CoordFunction2D(aug_dimension, aug_ref_coord, aug_cd11, aug_cd12, aug_cd21, + aug_cd22) + self.logger.debug(f'Creating position CoordFunction2D for {self.uri}') + + aug_axis = None + if (aug_x_axis is not None and aug_y_axis is not None and + aug_function is not None): + aug_axis = CoordAxis2D(aug_x_axis, aug_y_axis, aug_x_error, + aug_y_error, None, None, aug_function) + self.logger.debug(f'Creating position CoordAxis2D for {self.uri}') + + chunk.position_axis_1 = _to_int(self._get_from_list('Chunk.positionAxis1', index)) + chunk.position_axis_2 = _to_int(self._get_from_list('Chunk.positionAxis2', index)) + else: + aug_axis = CoordAxis2D(aug_x_axis, aug_y_axis, aug_x_error, aug_y_error, range=aug_range) if aug_axis is not None: if chunk.position: @@ -3131,21 +3084,7 @@ def _try_position_with_blueprint(self, chunk, index): 'Chunk.position.resolution', index) self.logger.debug('End augmentation with blueprint for position.') - def _try_range(self, wcs, index, lookup): - self.logger.debug(f'Try to set the range for {lookup}') - aug_range_start = self._two_param_constructor( - f'Chunk.{lookup}.axis.range.start.pix', - f'Chunk.{lookup}.axis.range.start.val', - index, _to_float, RefCoord) - aug_range_end = self._two_param_constructor( - f'Chunk.{lookup}.axis.range.end.pix', - f'Chunk.{lookup}.axis.range.end.val', - index, _to_float, 
RefCoord) - if aug_range_start and aug_range_end: - wcs.axis.range = CoordRange1D(aug_range_start, aug_range_end) - self.logger.debug(f'Completed setting range for {lookup}') - - def _try_range_return(self, index, lookup): + def _try_range(self, index, lookup): self.logger.debug(f'Try to set the range for {lookup}') result = None aug_range_start = self._two_param_constructor( @@ -3161,31 +3100,6 @@ def _try_range_return(self, index, lookup): self.logger.debug(f'Completed setting range with return for {lookup}') return result - def _try_range_with_blueprint(self, chunk, index): - """Use the blueprint to set elements and attributes that - are not in the scope of astropy and files content, and therefore are - not covered by the *WcsParser classes. Per PD 19/04/18, bounds and - range are not covered by WCS keywords.""" - - for i in ['custom', 'energy', 'time', 'polarization']: - axis_configed = getattr(self.blueprint, - f'_{i}_axis_configed') - if axis_configed: - wcs = getattr(chunk, i) - if wcs is not None and wcs.axis is not None: - self._try_range(wcs, index, i) - if wcs.axis.function is not None and wcs.axis.range is not None: - # prefer range if set by the blueprint, as that is meant to override WCS - wcs.axis.function = None - setattr(chunk, f'{i}_axis', None) - - self._try_position_range(chunk, index) - if (chunk.position is not None and chunk.position.axis is not None - and chunk.position.axis.function is not None and chunk.position.axis.range is not None): - chunk.position.axis.function = None - chunk.position_axis_1 = None - chunk.position_axis_2 = None - def _try_time_with_blueprint(self, chunk, index): """ A mechanism to augment the Time WCS completely from the blueprint. @@ -3198,8 +3112,7 @@ def _try_time_with_blueprint(self, chunk, index): """ self.logger.debug('Begin augmentation with blueprint for temporal.') - chunk.time_axis = _to_int(self._get_from_list('Chunk.timeAxis', index)) - aug_axis = self._get_axis('time', index) + aug_axis = self._get_axis_wcs('time', chunk.time, index) if aug_axis is not None: if chunk.time: chunk.time.axis = aug_axis @@ -3207,7 +3120,9 @@ def _try_time_with_blueprint(self, chunk, index): chunk.time = TemporalWCS(aug_axis) self.logger.debug('Creating TemporalWCS for {} from blueprint'. 
format(self.uri)) - if chunk.time is not None: + chunk.time_axis = _to_int(self._get_from_list('Chunk.timeAxis', index)) + + if chunk.time: chunk.time.exposure = _to_float( self._get_from_list('Chunk.time.exposure', index)) chunk.time.resolution = _to_float( diff --git a/caom2utils/caom2utils/tests/test_fits2caom2.py b/caom2utils/caom2utils/tests/test_fits2caom2.py index e6b4a211..86355a95 100755 --- a/caom2utils/caom2utils/tests/test_fits2caom2.py +++ b/caom2utils/caom2utils/tests/test_fits2caom2.py @@ -79,11 +79,9 @@ from caom2utils.caom2blueprint import _visit, _load_plugin from caom2utils.caom2blueprint import _get_and_update_artifact_meta -from caom2 import ObservationWriter, SimpleObservation, Algorithm -from caom2 import Artifact, ProductType, ReleaseType, ObservationIntentType -from caom2 import get_differences, obs_reader_writer, ObservationReader, Chunk -from caom2 import SpectralWCS, TemporalWCS, PolarizationWCS, SpatialWCS -from caom2 import Axis, CoordAxis1D, CoordAxis2D, ChecksumURI, DataProductType +from caom2 import ObservationWriter, SimpleObservation, Algorithm, Artifact, ProductType, ReleaseType, DataProductType +from caom2 import get_differences, obs_reader_writer, ObservationReader, Chunk, ObservationIntentType, ChecksumURI +from caom2 import CustomWCS, SpectralWCS, TemporalWCS, PolarizationWCS, SpatialWCS, Axis, CoordAxis1D, CoordAxis2D from caom2 import CalibrationLevel import logging @@ -226,7 +224,7 @@ def test_augment_artifact_energy_from_blueprint(): ObservationReader()._get_spectral_wcs, 'energy') result = get_differences(ex, test_chunk.energy) - assert result is None + assert result is None, result EXPECTED_POLARIZATION_XML = \ @@ -1171,6 +1169,27 @@ def test_visit(): _visit(test_class_plugin_module, test_fitsparser, test_obs, visit_local=None, **kwargs) +EXPECTED_CUSTOM_RANGE_BOUNDS_XML = ''' + + + + RM + m / s ** 2 + + + + 145.0 + -60000.0 + + + -824.46002 + 1 + + + + + +''' EXPECTED_ENERGY_RANGE_BOUNDS_XML = ''' @@ -1190,7 +1209,8 @@ def test_visit(): - TOPOCENT + LSRK + 1420406000.0 ''' @@ -1278,9 +1298,12 @@ def test_visit(): def test_augment_artifact_bounds_range_from_blueprint(): - test_blueprint = ObsBlueprint(energy_axis=1, time_axis=2, - polarization_axis=3, - position_axes=(4, 5)) + test_blueprint = ObsBlueprint( + energy_axis=1, time_axis=2, polarization_axis=3, position_axes=(4, 5), custom_axis=6) + test_blueprint.set('Chunk.custom.axis.range.start.pix', '145.0') + test_blueprint.set('Chunk.custom.axis.range.start.val', '-60000.0') + test_blueprint.set('Chunk.custom.axis.range.end.pix', '-824.46002') + test_blueprint.set('Chunk.custom.axis.range.end.val', '1') test_blueprint.set('Chunk.energy.axis.range.start.pix', '145.0') test_blueprint.set('Chunk.energy.axis.range.start.val', '-60000.0') test_blueprint.set('Chunk.energy.axis.range.end.pix', '-824.46002') @@ -1308,12 +1331,18 @@ def test_augment_artifact_bounds_range_from_blueprint(): test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, uri='ad:TEST/test_blueprint') test_chunk = Chunk() + test_chunk.custom = CustomWCS(CoordAxis1D(Axis('RM', 'm / s ** 2'))) test_chunk.energy = SpectralWCS(CoordAxis1D(Axis('WAVE', 'm')), 'TOPOCENT') test_chunk.time = TemporalWCS(CoordAxis1D(Axis('TIME', 'd'))) test_chunk.polarization = PolarizationWCS(CoordAxis1D(Axis('STOKES'))) test_chunk.position = SpatialWCS(CoordAxis2D(Axis('RA', 'deg'), Axis('DEC', 'deg'))) - test_fitsparser._try_range_with_blueprint(test_chunk, 0) + test_fitsparser._try_position_with_blueprint(test_chunk, 0) + 
test_fitsparser._try_energy_with_blueprint(test_chunk, 0) + test_fitsparser._try_time_with_blueprint(test_chunk, 0) + test_fitsparser._try_polarization_with_blueprint(test_chunk, 0) + test_fitsparser._try_observable_with_blueprint(test_chunk, 0) + test_fitsparser._try_custom_with_blueprint(test_chunk, 0) assert test_chunk.energy.axis.range is not None, \ 'chunk.energy.axis.range should be declared' @@ -1323,13 +1352,14 @@ def test_augment_artifact_bounds_range_from_blueprint(): 'chunk.polarization.axis.range should be declared' assert test_chunk.position.axis.range is not None, \ 'chunk.position.axis.range should be declared' + assert test_chunk.custom.axis.range is not None, 'chunk.custom.axis.range should be declared' ex = _get_from_str_xml(EXPECTED_ENERGY_RANGE_BOUNDS_XML, ObservationReader()._get_spectral_wcs, 'energy') assert ex is not None, \ 'energy string from expected output should be declared' result = get_differences(ex, test_chunk.energy) - assert result is None + assert result is None, f'energy\n{result}' ex = _get_from_str_xml(EXPECTED_TIME_RANGE_BOUNDS_XML, ObservationReader()._get_temporal_wcs, @@ -1337,7 +1367,7 @@ def test_augment_artifact_bounds_range_from_blueprint(): assert ex is not None, \ 'time string from expected output should be declared' result = get_differences(ex, test_chunk.time) - assert result is None + assert result is None, f'time\n{result}' ex = _get_from_str_xml(EXPECTED_POL_RANGE_BOUNDS_XML, ObservationReader()._get_polarization_wcs, @@ -1345,7 +1375,7 @@ def test_augment_artifact_bounds_range_from_blueprint(): assert ex is not None, \ 'polarization string from expected output should be declared' result = get_differences(ex, test_chunk.polarization) - assert result is None + assert result is None, f'polarization\n{result}' ex = _get_from_str_xml(EXPECTED_POS_RANGE_BOUNDS_XML, ObservationReader()._get_spatial_wcs, @@ -1353,7 +1383,14 @@ def test_augment_artifact_bounds_range_from_blueprint(): assert ex is not None, \ 'position string from expected output should be declared' result = get_differences(ex, test_chunk.position) - assert result is None + assert result is None, f'position\n{result}' + + ex = _get_from_str_xml(EXPECTED_CUSTOM_RANGE_BOUNDS_XML, + ObservationReader()._get_custom_wcs, + 'custom') + assert ex is not None, 'custom string from expected output should be declared' + result = get_differences(ex, test_chunk.custom) + assert result is None, f'custom\n{result}' def test_visit_generic_parser(): From e82a5e6f79432e5c3d46cf948b3b54cbbcafda8d Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Sun, 5 Nov 2023 15:02:28 -0800 Subject: [PATCH 05/36] CADC-12805 - interim commit. 
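This commit continues the pattern of reusing values already derived from file content and only falling back to the blueprint for the missing pieces; it also records the chunk-level axis index (chunk.custom_axis, chunk.energy_axis, ...) only when a CoordFunction1D is present, since a range-only axis cannot support cutouts. A hedged sketch of that pattern with illustrative values (the 'RM' axis mirrors the unit used in the tests; the function values are made up for this example):

    from caom2 import Axis, Chunk, CoordAxis1D, CoordFunction1D, CustomWCS, RefCoord

    naxis = CoordAxis1D(Axis('RM', 'm / s ** 2'))
    naxis.function = CoordFunction1D(200, 1.5, RefCoord(1.0, 0.0))

    chunk = Chunk()
    # CustomWCS has no setter for its axis, so a new instance is always created
    chunk.custom = CustomWCS(naxis)
    if naxis.function is not None:
        # only a function-based axis supports cutouts, so only then keep the index
        chunk.custom_axis = 1
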
--- caom2utils/caom2utils/caom2blueprint.py | 83 +++++---- .../N20030325S0098/N20030325S0098.blueprint | 63 +++++++ .../N20030325S0098.expected.xml | 122 ++++++++++++ .../N20030325S0098/N20030325S0098.fits.header | 176 ++++++++++++++++++ .../N20030325S0098/N20030325S0098.module | 12 ++ .../gemini/N20030325S0098/N20030325S0098.py | 12 ++ .../20220201T200117/20220201T200117.xml | 3 - .../20220201T200117/taos.blueprint | 1 - .../caom2utils/tests/test_collections.py | 2 + 9 files changed, 430 insertions(+), 44 deletions(-) create mode 100644 caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.blueprint create mode 100644 caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.expected.xml create mode 100644 caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.fits.header create mode 100644 caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.module create mode 100644 caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.py diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index c281c752..770b95ed 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -2275,8 +2275,7 @@ def augment_artifact(self, artifact, index): # 3 - Always try to fill the range metadata from the blueprint. if self.blueprint._pos_axes_configed: self._wcs_parser.augment_position(chunk) - if chunk.position is None or chunk.position.axis is None or chunk.position.axis.function is None: - self._try_position_with_blueprint(chunk, index) + self._try_position_with_blueprint(chunk, index) if self.blueprint._energy_axis_configed: self._wcs_parser.augment_energy(chunk) @@ -2609,6 +2608,7 @@ def _get_axis_wcs(self, label, wcs, index): aug_naxis = None aug_range = self._try_range(index, label) + aug_naxis_index = None if aug_axis is not None: if aug_range is None: if wcs is None or wcs.axis is None or wcs.axis.function is None: @@ -2622,14 +2622,16 @@ def _get_axis_wcs(self, label, wcs, index): if aug_length is not None and aug_delta is not None and aug_ref_coord is not None: aug_function = CoordFunction1D(aug_length, aug_delta, aug_ref_coord) aug_naxis = CoordAxis1D(aug_axis, aug_error, None, None, aug_function) + if aug_function is not None: + # if the WCS is described with a Function, cutouts can be supported, so specify an axis + aug_naxis_index = _to_int(self._get_from_list(f'Chunk.{label}Axis', index)) self.logger.debug(f'Creating function {label} CoordAxis1D for {self.uri} from blueprint') else: aug_naxis = CoordAxis1D(axis=aug_axis, error=aug_error, range=aug_range) - aug_naxis.function = None self.logger.debug(f'Creating range {label} CoordAxis1D for {self.uri} from blueprint') self.logger.debug(f'End {label} axis construction from blueprint.') - return aug_naxis + return aug_naxis, aug_naxis_index def _get_observable(self, current): """ @@ -2875,13 +2877,14 @@ def _try_custom_with_blueprint(self, chunk, index): :param index: The index in the blueprint for looking up plan information. 
""" self.logger.debug('Begin augmentation with blueprint for custom.') - aug_naxis = self._get_axis_wcs('custom', chunk.custom, index) + aug_naxis, aug_naxis_index = self._get_axis_wcs('custom', chunk.custom, index) if aug_naxis is None: self.logger.debug('No blueprint custom information.') else: # always create a new CustomWCS instance because there's no setter for 'axis' parameter chunk.custom = CustomWCS(aug_naxis) - chunk.custom_axis = _to_int(self._get_from_list('Chunk.customAxis', index)) + chunk.custom_axis = aug_naxis_index + self.logger.debug(f'Updating CustomWCS for {self.uri}.') self.logger.debug('End augmentation with blueprint for custom.') def _try_energy_with_blueprint(self, chunk, index): @@ -2895,29 +2898,32 @@ def _try_energy_with_blueprint(self, chunk, index): information. """ self.logger.debug('Begin augmentation with blueprint for energy.') - aug_axis = self._get_axis_wcs('energy', chunk.energy, index) + aug_axis, aug_naxis_index = self._get_axis_wcs('energy', chunk.energy, index) specsys = _to_str(self._get_from_list('Chunk.energy.specsys', index)) if aug_axis is None: self.logger.debug('No blueprint energy information.') else: - if not chunk.energy: - chunk.energy = SpectralWCS(aug_axis, specsys) - else: + if chunk.energy: chunk.energy.axis = aug_axis chunk.energy.specsys = specsys - chunk.energy_axis = _to_int(self._get_from_list('Chunk.energyAxis', index)) + else: + chunk.energy = SpectralWCS(aug_axis, specsys) + self.logger.debug(f'Creating SpectralWCS for {self.uri} from blueprint') + chunk.energy_axis = aug_naxis_index if chunk.energy: - chunk.energy.ssysobs = self._get_from_list('Chunk.energy.ssysobs', index) - chunk.energy.restfrq = self._get_from_list('Chunk.energy.restfrq', index) - chunk.energy.restwav = self._get_from_list('Chunk.energy.restwav', index) - chunk.energy.velosys = self._get_from_list('Chunk.energy.velosys', index) - chunk.energy.zsource = self._get_from_list('Chunk.energy.zsource', index) - chunk.energy.ssyssrc = self._get_from_list('Chunk.energy.ssyssrc', index) - chunk.energy.velang = self._get_from_list('Chunk.energy.velang', index) - chunk.energy.bandpass_name = self._get_from_list( 'Chunk.energy.bandpassName', index) + chunk.energy.ssysobs = self._get_from_list('Chunk.energy.ssysobs', index, chunk.energy.ssysobs) + chunk.energy.restfrq = self._get_from_list('Chunk.energy.restfrq', index, chunk.energy.restfrq) + chunk.energy.restwav = self._get_from_list('Chunk.energy.restwav', index, chunk.energy.restwav) + chunk.energy.velosys = self._get_from_list('Chunk.energy.velosys', index, chunk.energy.velosys) + chunk.energy.zsource = self._get_from_list('Chunk.energy.zsource', index, chunk.energy.zsource) + chunk.energy.ssyssrc = self._get_from_list('Chunk.energy.ssyssrc', index, chunk.energy.ssyssrc) + chunk.energy.velang = self._get_from_list('Chunk.energy.velang', index, chunk.energy.velang) + chunk.energy.bandpass_name = self._get_from_list( + 'Chunk.energy.bandpassName', index, chunk.energy.bandpass_name) chunk.energy.transition = self._get_energy_transition(chunk.energy.transition) - chunk.energy.resolving_power = _to_float(self._get_from_list('Chunk.energy.resolvingPower', index)) + chunk.energy.resolving_power = _to_float( + self._get_from_list('Chunk.energy.resolvingPower', index, chunk.energy.resolving_power)) self.logger.debug('End augmentation with blueprint for energy.') def _try_observable_with_blueprint(self, chunk, index): @@ -2956,14 +2962,14 @@ def _try_polarization_with_blueprint(self, chunk, index): """ 
self.logger.debug('Begin augmentation with blueprint for ' 'polarization.') - aug_axis = self._get_axis_wcs('polarization', chunk.polarization, index) + aug_axis, aug_naxis_index = self._get_axis_wcs('polarization', chunk.polarization, index) if aug_axis is not None: if chunk.polarization: chunk.polarization.axis = aug_axis else: chunk.polarization = PolarizationWCS(aug_axis) - chunk.polarization_axis = _to_int(self._get_from_list('Chunk.polarizationAxis', index)) - self.logger.debug(f'Creating PolarizationWCS for {self.uri} from blueprint') + self.logger.debug(f'Creating PolarizationWCS for {self.uri} from blueprint') + chunk.polarization_axis = aug_naxis_index self.logger.debug('End augmentation with blueprint for polarization.') @@ -3005,8 +3011,10 @@ def _try_position_with_blueprint(self, chunk, index): information. """ self.logger.debug('Begin augmentation with blueprint for position.') + aug_axis = None if (chunk.position is not None and chunk.position.axis is not None and chunk.position.axis.axis1 is not None and chunk.position.axis.axis2 is not None): + # preserve the values obtained from file data aug_x_axis = chunk.position.axis.axis1 aug_y_axis = chunk.position.axis.axis2 aug_x_error = chunk.position.axis.error1 @@ -3057,7 +3065,6 @@ def _try_position_with_blueprint(self, chunk, index): aug_cd22) self.logger.debug(f'Creating position CoordFunction2D for {self.uri}') - aug_axis = None if (aug_x_axis is not None and aug_y_axis is not None and aug_function is not None): aug_axis = CoordAxis2D(aug_x_axis, aug_y_axis, aug_x_error, @@ -3074,14 +3081,14 @@ def _try_position_with_blueprint(self, chunk, index): chunk.position.axis = aug_axis else: chunk.position = SpatialWCS(aug_axis) + self.logger.debug(f'Creating SpatialWCS for {self.uri} from blueprint') if chunk.position: - chunk.position.coordsys = self._get_from_list( - 'Chunk.position.coordsys', index) + chunk.position.coordsys = self._get_from_list('Chunk.position.coordsys', index, chunk.position.coordsys) chunk.position.equinox = _to_float(self._get_from_list( - 'Chunk.position.equinox', index)) + 'Chunk.position.equinox', index, chunk.position.equinox)) chunk.position.resolution = self._get_from_list( - 'Chunk.position.resolution', index) + 'Chunk.position.resolution', index, chunk.position.resolution) self.logger.debug('End augmentation with blueprint for position.') def _try_range(self, index, lookup): @@ -3112,26 +3119,22 @@ def _try_time_with_blueprint(self, chunk, index): """ self.logger.debug('Begin augmentation with blueprint for temporal.') - aug_axis = self._get_axis_wcs('time', chunk.time, index) + aug_axis, aug_axis_index = self._get_axis_wcs('time', chunk.time, index) if aug_axis is not None: if chunk.time: chunk.time.axis = aug_axis else: chunk.time = TemporalWCS(aug_axis) - self.logger.debug('Creating TemporalWCS for {} from blueprint'. 
- format(self.uri)) - chunk.time_axis = _to_int(self._get_from_list('Chunk.timeAxis', index)) + self.logger.debug(f'Creating TemporalWCS for {self.uri} from blueprint') + chunk.time_axis = aug_axis_index if chunk.time: - chunk.time.exposure = _to_float( - self._get_from_list('Chunk.time.exposure', index)) + chunk.time.exposure = _to_float( self._get_from_list('Chunk.time.exposure', index, chunk.time.exposure)) chunk.time.resolution = _to_float( - self._get_from_list('Chunk.time.resolution', index)) - chunk.time.timesys = _to_str( - self._get_from_list('Chunk.time.timesys', index)) - chunk.time.trefpos = self._get_from_list('Chunk.time.trefpos', - index) - chunk.time.mjdref = self._get_from_list('Chunk.time.mjdref', index) + self._get_from_list('Chunk.time.resolution', index, chunk.time.resolution)) + chunk.time.timesys = _to_str(self._get_from_list('Chunk.time.timesys', index, chunk.time.timesys)) + chunk.time.trefpos = self._get_from_list('Chunk.time.trefpos', index, chunk.time.trefpos) + chunk.time.mjdref = self._get_from_list('Chunk.time.mjdref', index, chunk.time.mjdref) self.logger.debug('End augmentation with blueprint for temporal.') diff --git a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.blueprint b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.blueprint new file mode 100644 index 00000000..01a66474 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.blueprint @@ -0,0 +1,63 @@ +Observation.observationID = ['OBSID'], default = None +Observation.type = OBJECT +Observation.intent = science +Observation.metaRelease = 2003-03-25T06:01:59.700 +Observation.metaProducer = gemini2caom2/0.0.0 +Observation.algorithm.name = exposure +Observation.instrument.name = NIRI +Observation.instrument.keywords = ['INSTMODE'], default = None +Observation.telescope.name = ['TELESCOP'], default = None +Observation.telescope.geoLocationX = -5464284.386715498 +Observation.telescope.geoLocationY = -2493782.309107667 +Observation.telescope.geoLocationZ = 2150786.380689657 +Observation.environment.ambientTemp = ['TEMPERAT'], default = None +Plane.productID = N20030325S0098 +Plane.metaRelease = 2003-03-25T06:01:59.700 +Plane.dataRelease = 2004-09-25T00:00:00.000 +Plane.dataProductType = image +Plane.calibrationLevel = 1 +Plane.metaProducer = gemini2caom2/0.0.0 +Plane.provenance.name = Gemini Observatory Data +Plane.provenance.project = Gemini Archive +Plane.provenance.producer = ['ORIGIN'], default = Gemini Observatory +Plane.provenance.reference = http://archive.gemini.edu/searchform/GN-2003A-Q-51-2-004 +Plane.provenance.lastExecuted = ['DATE-FTS'], default = None +Artifact.productType = science +Artifact.releaseType = data +Artifact.uri = gemini:GEMINI/N20030325S0098.fits +Artifact.metaProducer = gemini2caom2/0.0.0 +Chunk = include +Chunk.metaProducer = gemini2caom2/0.0.0 +Chunk.position.coordsys = ['RADESYS'], default = None +Chunk.position.equinox = ['EPOCH'], default = 2000.0 +Chunk.position.axis.axis1.ctype = ['CTYPE1'], default = None +Chunk.position.axis.axis1.cunit = ['CUNIT1'], default = None +Chunk.position.axis.axis2.ctype = ['CTYPE2'], default = None +Chunk.position.axis.axis2.cunit = ['CUNIT2'], default = None +Chunk.position.axis.error1.syser = ['CSYER1'], default = None +Chunk.position.axis.error1.rnder = ['CRDER1'], default = None +Chunk.position.axis.error2.syser = ['CSYER2'], default = None +Chunk.position.axis.error2.rnder = ['CRDER2'], default = None +Chunk.position.axis.function.cd11 = ['CD1_1'], 
default = None +Chunk.position.axis.function.cd12 = ['CD1_2'], default = None +Chunk.position.axis.function.cd21 = ['CD2_1'], default = None +Chunk.position.axis.function.cd22 = ['CD2_2'], default = None +Chunk.position.axis.function.dimension.naxis1 = ['NAXIS1'], default = None +Chunk.position.axis.function.dimension.naxis2 = ['NAXIS2'], default = None +Chunk.position.axis.function.refCoord.coord1.pix = ['CRPIX1'], default = None +Chunk.position.axis.function.refCoord.coord1.val = ['CRVAL1'], default = None +Chunk.position.axis.function.refCoord.coord2.pix = ['CRPIX2'], default = None +Chunk.position.axis.function.refCoord.coord2.val = ['CRVAL2'], default = None +Chunk.time.exposure = get_exposure(uri) +Chunk.time.resolution = get_exposure(uri) +Chunk.time.timesys = ['TIMESYS'], default = None +Chunk.time.trefpos = ['TREFPOS'], default = None +Chunk.time.mjdref = ['MJDREF'], default = None +Chunk.time.axis.axis.ctype = TIME +Chunk.time.axis.axis.cunit = d +Chunk.time.axis.error.syser = 1e-07 +Chunk.time.axis.error.rnder = 1e-07 +Chunk.time.axis.function.naxis = 1 +Chunk.time.axis.function.delta = get_time_delta() +Chunk.time.axis.function.refCoord.pix = 0.5 +Chunk.time.axis.function.refCoord.val = 52723.25138541667 diff --git a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.expected.xml b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.expected.xml new file mode 100644 index 00000000..d952d774 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.expected.xml @@ -0,0 +1,122 @@ + + + GEMINI + GN-2003A-Q-51-2-004 + 2003-03-25T06:01:59.700 + + exposure + + OBJECT + science + + B2 0902+34--NE + + + Gemini-North + -5464284.386715498 + -2493782.3091076654 + 2150786.380689657 + + + NIRI + + + + GN-2003A-Q-51-2-004 + 2003-03-25T06:01:59.700 + 2004-09-25T00:00:00.000 + image + 1 + + Gemini Observatory Data + Gemini Archive + NOAO-IRAF FITS Image Kernel July 1999 + http://archive.gemini.edu/searchform/GN-2003A-Q-51-2-004 + + + + ad:GEMINI/N20030325S0098.fits + science + data + application/fits + 4210560 + md5:e385beb60c74bccf297813c26e35fa9b + + + 0 + + + + 1 + + + 2 + 1 + 2 + 4 + + + + RA---TAN + deg + + + DEC--TAN + deg + + + + 1024 + 1024 + + + + 545.1860865851 + 136.37590805622 + + + 489.04922960385 + 34.130334909061 + + + -3.2524270027565e-55 + 1.2869585606175e-07 + -8.9987106577858e-09 + 3.2335265472292e-05 + + + FK5 + 2000.0 + + + + + TIME + d + + + 1e-07 + 1e-07 + + + 1 + 0.000520868055555555 + + 0.5 + 52723.25138541667 + + + + UTC + 45.003 + 45.003 + + + + + + + + + + diff --git a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.fits.header b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.fits.header new file mode 100644 index 00000000..351b2fa5 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.fits.header @@ -0,0 +1,176 @@ +Filename: N20030325S0098.fits.bz2 + +AstroData Tags: {'UNPREPARED', 'SIDEREAL', 'RAW', 'NIRI', 'IMAGE', 'GEMINI', 'NORTH'} + + +--- PHU --- +SIMPLE = T / Fits standard +BITPIX = 16 / Bits per pixel +NAXIS = 0 / Number of axes +EXTEND = T / FITS dataset may contain extensions +ORIGIN = 'NOAO-IRAF FITS Image Kernel July 1999' / FITS file originator +DATE = '2003-04-07T20:23:14' / Date FITS file was generated +IRAF-TLM= '13:25:08 (04/04/2003)' / Time of last modification +COMMENT FITS (Flexible Image Transport System) format defined in Astronomy and +COMMENT Astrophysics Supplement Series v44/p363, v44/p371, 
v73/p359, v73/p365. +COMMENT Contact the NASA Science Office of Standards and Technology for the +COMMENT FITS Definition document #100 and other FITS information. +INSTRUME= 'NIRI ' / Instrument used to acquire data. +OBJECT = 'B2 0902+34--NE' / Object Name +OBSTYPE = 'OBJECT ' / Observation type +GEMPRGID= 'GN-2003A-Q-51' / Gemini Science Program ID +OBSID = 'GN-2003A-Q-51-2' / Observation ID / Data label +DATALAB = 'GN-2003A-Q-51-2-004' / Datalabel +OBSERVER= 'F.Rigaut' / Observer +OBSERVAT= 'Gemini-North' / Observatory +TELESCOP= 'Gemini-North' / Telescope +PARALLAX= 0. / Parallax of Target +RADVEL = 0. / Heliocentric Radial Velocity +EPOCH = 2000. / Epoch for Target coordinates +EQUINOX = 2000. / Equinox for Target coordinates +TRKEQUIN= 2000. / Tracking equinox +SSA = 'B.Walls ' / SSA +RA = 136.37583333 / RA of Target +DEC = 34.12916667 / Declination of Target +ELEVATIO= 67.8446069444445 / Current Elevation +AZIMUTH = 45.988375 / Current Azimuth +CRPA = -144.412040662382 / Current Cass Rotator Position Angle +HA = '-01:16:26.35' / Telescope hour angle +LT = '20:01:59.2' / Local time at start of observation +TRKFRAME= ' ' / Tracking co-ordinate +DECTRACK= 0. / Differential tracking rate Dec +TRKEPOCH= 0. / Differential tracking reference epoch +RATRACK = 0. / Differential tracking rate RA +FRAME = 'FK5 ' / Target coordinate system +PMDEC = 0. / Proper Motion in Declination +PMRA = 0. / Proper Motion in RA +WAVELENG= 21200. / Effective Target Wavelength +RAWIQ = '70-percentile' / Raw Image Quality +RAWCC = '70-percentile' / Raw Cloud Cover +RAWWV = '80-percentile' / Raw Water Vapour/Transparency +RAWBG = '80-percentile' / Raw Background +RAWPIREQ= 'YES ' / PI Requirements Met +RAWGEMQA= 'USABLE ' / Gemini Quality Assessment +CGUIDMOD= 'Basic ' / Driving mode for carousel +UT = '06:01:59.7' / Beginning of Observation (UT) +M2BAFFLE= 'NEAR IR ' / Position of M2 baffle +M2CENBAF= 'CLOSED ' / Position of M2 central hole baffle +ST = '07:49:16.4' / Sidereal time at the start of the exposure +XOFFSET = -3.999945169 / Telescope offset in x in arcsec +YOFFSET = -0.0209438553276 / Telescope offset in y in arcsec +RAOFFSET= -7.65706942296E-14 / Telescope offset in RA in arcsec +DECOFFSE= -4.00000000001 / Telescope offset in DEC in arcsec +PA = 0. / Sky Position Angle at start of exposure +SFRT2 = -1.391 / Science fold rotation angle (degrees) +SFTILT = 45.03 / Science fold tilt angle (degrees) +SFLINEAR= 8. / Science fold linear position (mm) +P2ARA = 136.39191667 / RA of PWFS2 guide star +P2ARV = 0. / PWFS2 Heliocentric Radial Velocity +P2AWAVEL= 5000. / PWFS2 Effective Target Wavelength +P2ADEC = 34.01855556 / Declination of PWFS2 guide star +P2AEPOCH= 2000. / Epoch for PWFS2 guide star coordinates +P2AEQUIN= 2000. / Equinox for PWFS2 guide star coordinates +P2AFRAME= 'FK5 ' / PWFS2 Target co-ordinate system +P2AOBJEC= 'GSC249500968' / Object Name for PWFS 2, Chop A +P2APMDEC= 0. / PWFS2 Proper Motion in Declination +P2APMRA = 0. / PWFS2 Proper Motion in RA +P2APARAL= 0. 
/ PWFS2 Parallax of Target +ARRAYID = '47911 ' / Array identification +ARRAYTYP= 'ALADDIN_II' / Array type +CAMERA = 'f6 ' / NIRI camera (f6|f14|f32) +COADDS = 3 / Number of coadds summed +DATE-OBS= '2003-03-25' / Observation date (UT) +EXPTIME = 15.001 / Exposure time (s) for each frame +FILTER1 = 'Kprime_G0206' / Filter 1 name +FILTER2 = 'open ' / Filter 2 name +FILTER3 = 'pupil38_G5207' / Pupil mask, grism, or filter name +FOCUSNAM= 'INDEF ' / Focus position name +FOCUSPOS= -3.35 / Focus stage position in mm +FPMASK = 'f6-cam_G5208' / Focal plane mask name +BEAMSPLT= 'f6 ' / Beam splitter position name +WINDCOVR= 'open ' / Window cover position name (open|closed) +FRMSPCYC= 2 / Frames per cycle,1=sep,2=stare +HDRTIMIN= 'INDEF ' / Header info updated before, after or both +INPORT = 3 / Number of ISS port where NIRI was located +LNRS = 1 / Number of non-destructive read pairs +MODE = 'STARE ' / Stare or sep +NDAVGS = 16 / Number of digital averages +PVIEW = 'out ' / Pupil viewer position name (in|out) +TDETABS = 33.086 / Science detector temperature (K) +TIME = '06:02:00.2' / Beginning of Observation (UT) +TIME-OBS= '06:02:00.2' / Beginning of Observation (UT) +TMOUNT = 22.978 / Mount temperature (K) +UCODENAM= 'gnAII_1024xSU01_4u' / Array microcode file name +UCODETYP= 'RDD ' / Array microcode version +VDDCL1 = -1.157 / VDDCL hi detector clock voltage +VDDCL2 = -3.496 / VDDCL lo detector clock voltage +VDDUC = -3.491 / VDDUC detector bias voltage +VDET = -2.907 / VDETCOM detector bias voltage +VGGCL1 = -4.871 / VGGCL lo detector clock voltage +VGGCL2 = -2.704 / VGGCL hi detector clock voltage +VSET = -1.871 / Vset detector bias voltage +A_TDETAB= 33.086 / Science detector temperature (K) (post exposure +A_TMOUNT= 22.978 / Mount temperature (K) (post exposure) +A_VDDCL1= -1.157 / VDDCL hi detector clock voltage (post exposure) +A_VDDCL2= -3.496 / VDDCL lo detector clock voltage (post exposure) +A_VDDUC = -3.491 / VDDUC detector bias voltage (post exposure) +A_VDET = -2.907 / VDETCOM detector bias voltage (post exposure) +A_VGGCL1= -4.871 / VGGCL lo detector clock voltage (post exposure) +A_VGGCL2= -2.704 / VGGCL hi detector clock voltage (post exposure) +A_VSET = -1.871 / Vset detector bias voltage (post exposure) +UTEND = '06:02:49.2' / End of Observation (UT) +OBSEPOCH= 2003.22724547 / Epoch at start of exposure +AIRMASS = 1.079 / Mean airmass for the observation +AMSTART = 1.08 / Airmass at start of exposure +AMEND = 1.079 / Airmass at end of exposure +CTYPE1 = 'RA---TAN' / the coordinate type for the first axis +CRPIX1 = 545.186086585101 / x-coordinate of reference pixel +CRVAL1 = 136.375908056217 / first axis value at ref pixel +CTYPE2 = 'DEC--TAN' / the coordinate type for the second axis +CRPIX2 = 489.049229603846 / y-coordinate of reference pixel +CRVAL2 = 34.1303349090605 / second axis value at ref pixel +CD1_1 = -3.25242700275654E-05 / partial of first axis coord w.r.t. x +CD1_2 = 1.28695856061755E-07 / partial of first axis coord w.r.t. y +CD2_1 = -8.99871065778577E-09 / partial of second axis coord w.r.t. x +CD2_2 = 3.23352654722922E-05 / partial of second axis coord w.r.t. 
y +MJD_OBS = 52723.2526998586 / Mean Julian day of observation +INTEGRIT= 'OK ' / Observation status (OK|STOP|ABORT) +FRAME = 'FK5 ' / Target coordinate system +RELEASE = '2004-09-25' / End of proprietary period YYYY-MM-DD +ORIGNAME= 'N20030325S0098.fits' / Original filename prior to processing +HISTORY Corrected metadata: automated fixes from PyFITS + +--- HDU 0 --- +XTENSION= 'IMAGE ' / Image extension +BITPIX = 32 / Bits per pixel +NAXIS = 2 / Number of axes +NAXIS1 = 1024 / Axis length +NAXIS2 = 1024 / Axis length +PCOUNT = 0 / No 'random' parameters +GCOUNT = 1 / Only one group +EXTVER = 1 / Added by AstroData +ORIGIN = 'NOAO-IRAF FITS Image Kernel July 1999' / FITS file originator +INHERIT = F / Inherits global header +DATE = '2003-04-04T23:25:10' / Date FITS file was generated +IRAF-TLM= '13:25:08 (04/04/2003)' / Time of last modification +OBJECT = 'B2 0902+34--NE' / Name of the object observed +CTYPE1 = 'RA---TAN' / the coordinate type for the first axis +CRPIX1 = 545.1860865851 / x-coordinate of reference pixel +CRVAL1 = 136.37590805622 / first axis value at ref pixel +CTYPE2 = 'DEC--TAN' / the coordinate type for the second axis +CRPIX2 = 489.04922960385 / y-coordinate of reference pixel +CRVAL2 = 34.130334909061 / second axis value at ref pixel +CD1_1 = -3.2524270027565E-55 / partial of first axis coord w.r.t. x +CD1_2 = 1.2869585606175E-7 / partial of first axis coord w.r.t. y +CD2_1 = -8.9987106577858E-9 / partial of second axis coord w.r.t. x +CD2_2 = 3.2335265472292E-5 / partial of second axis coord w.r.t. y +MJD_OBS = 52723.252699859 / Mean Julian day of observation +DROINUM = 0 / Which Region of Interest +LOWROW = 0 / Start row of region of interest +LOWCOL = 0 / Start col of region of interest +HIROW = 1023 / End row of region of interest +HICOL = 1023 / End col of region of interest +FRMNAME = 'N20030325S0099:0' / Frame name +FRAMEID = '0 ' / Frame ID +DATATYP = ' ' / Data type +EXTNAME = 'SCI ' / Added by AstroData diff --git a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.module b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.module new file mode 100644 index 00000000..dbca3713 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.module @@ -0,0 +1,12 @@ +from caom2pipe.manage_composable import convert_to_days, make_datetime, to_float + +def get_exposure(uri): + return 45.003 + + +def get_time_delta(uri): + result = None + exptime = get_exposure(0) + if exptime is not None: + result = convert_to_days(to_float(exptime)) + return result diff --git a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.py b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.py new file mode 100644 index 00000000..dbca3713 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.py @@ -0,0 +1,12 @@ +from caom2pipe.manage_composable import convert_to_days, make_datetime, to_float + +def get_exposure(uri): + return 45.003 + + +def get_time_delta(uri): + result = None + exptime = get_exposure(0) + if exptime is not None: + result = convert_to_days(to_float(exptime)) + return result diff --git a/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/20220201T200117.xml b/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/20220201T200117.xml index c1cb7bed..895e8b5f 100644 --- a/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/20220201T200117.xml +++ 
b/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/20220201T200117.xml @@ -40,7 +40,6 @@ 1 2 - 3 @@ -102,7 +101,6 @@ 1 2 - 3 @@ -164,7 +162,6 @@ 1 2 - 3 diff --git a/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/taos.blueprint b/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/taos.blueprint index 98f209a7..ee38e282 100644 --- a/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/taos.blueprint +++ b/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/taos.blueprint @@ -29,7 +29,6 @@ Chunk.position.axis.error2.syser = None Chunk.position.axis.error2.rnder = None Chunk.position.coordsys = None -Chunk.timeAxis = 3 Chunk.time.axis.axis.ctype = TIME Chunk.time.axis.axis.cunit = s Chunk.time.axis.range.start.pix = 0 diff --git a/caom2utils/caom2utils/tests/test_collections.py b/caom2utils/caom2utils/tests/test_collections.py index 916d5b5b..692382f6 100644 --- a/caom2utils/caom2utils/tests/test_collections.py +++ b/caom2utils/caom2utils/tests/test_collections.py @@ -251,6 +251,8 @@ def _get_cardinality(directory): return '--lineage star04239531/cadc:TAOSII/taos2_20220201T201317Z_star04239531.h5' elif 'brite' in directory: return '--lineage HD36486_65-Ori-VIII-2021_BAb_1_5_A/ad:BRITE-Constellation/HD36486.orig' + elif 'gemini' in directory: + return '--lineage GN-2003A-Q-51-2-004/ad:GEMINI/N20030325S0098.fits' else: return '' From ad79c3a2108748c7686bd7662fdcc3a944358625 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Mon, 6 Nov 2023 10:34:09 -0800 Subject: [PATCH 06/36] CADC-12805 - remove dependency on deprecated imp package. --- caom2repo/caom2repo/core.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/caom2repo/caom2repo/core.py b/caom2repo/caom2repo/core.py index c7cec5e3..031d8980 100755 --- a/caom2repo/caom2repo/core.py +++ b/caom2repo/caom2repo/core.py @@ -67,7 +67,8 @@ # *********************************************************************** # import argparse -import imp +import importlib.util +import importlib.machinery import logging import multiprocessing from multiprocessing import Pool @@ -440,9 +441,14 @@ def _load_plugin_class(self, filepath): mod_name, file_ext = os.path.splitext(os.path.split(filepath)[-1]) if file_ext.lower() == '.pyc': - py_mod = imp.load_compiled(mod_name, filepath) + loader = importlib.machinery.SourcelessFileLoader(mod_name, filepath) else: - py_mod = imp.load_source(mod_name, filepath) + loader = importlib.machinery.SourceFileLoader(mod_name, filepath) + spec = importlib.util.spec_from_file_location(mod_name, filepath, loader=loader) + py_mod = importlib.util.module_from_spec(spec) + # cache the module + sys.modules[py_mod.__name__] = py_mod + spec.loader.exec_module(py_mod) if hasattr(py_mod, expected_class): self.plugin = getattr(py_mod, expected_class)() From d8140fe2f5da6769189f6a9a019d1dc0808d9a33 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Mon, 6 Nov 2023 13:09:18 -0800 Subject: [PATCH 07/36] CADC-12805 - remove dependency that is not installed in test environment. 
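The substance of the change is the time-delta helper in the gemini test module: rather than importing caom2pipe's convert_to_days and to_float, the seconds-to-days conversion is done inline. A self-contained equivalent, for illustration only (the hard-coded exposure mirrors the test data):

    SECONDS_PER_DAY = 24.0 * 3600.0


    def get_exposure(uri):
        # fixed exposure, in seconds, used by the gemini test data
        return 45.003


    def get_time_delta(uri):
        exptime = get_exposure(uri)
        return exptime / SECONDS_PER_DAY if exptime else None


    if __name__ == '__main__':
        # 45.003 s -> ~0.000520868 d, the time-axis delta in N20030325S0098.expected.xml
        print(get_time_delta('gemini:GEMINI/N20030325S0098.fits'))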
--- .../tests/data/gemini/N20030325S0098/N20030325S0098.module | 6 ++---- .../tests/data/gemini/N20030325S0098/N20030325S0098.py | 6 ++---- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.module b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.module index dbca3713..1d4382b5 100644 --- a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.module +++ b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.module @@ -1,5 +1,3 @@ -from caom2pipe.manage_composable import convert_to_days, make_datetime, to_float - def get_exposure(uri): return 45.003 @@ -7,6 +5,6 @@ def get_exposure(uri): def get_time_delta(uri): result = None exptime = get_exposure(0) - if exptime is not None: - result = convert_to_days(to_float(exptime)) + if exptime: + result = exptime / ( 24.0 * 3600.0 ) return result diff --git a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.py b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.py index dbca3713..1d4382b5 100644 --- a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.py +++ b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.py @@ -1,5 +1,3 @@ -from caom2pipe.manage_composable import convert_to_days, make_datetime, to_float - def get_exposure(uri): return 45.003 @@ -7,6 +5,6 @@ def get_exposure(uri): def get_time_delta(uri): result = None exptime = get_exposure(0) - if exptime is not None: - result = convert_to_days(to_float(exptime)) + if exptime: + result = exptime / ( 24.0 * 3600.0 ) return result From cbcbb832dc809dd06b9e25f1738b667ec015fc93 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Tue, 7 Nov 2023 14:19:21 -0800 Subject: [PATCH 08/36] CADC-12805 - flake8. 
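The hunks below are flake8 fixes: continuation-line alignment, a long comment re-wrapped, and a commented-out log line removed. The re-wrapped comment in _finish_chunk_position describes how a spatial resolution is derived from the WCS; for context, a standalone sketch of that derivation (illustrative only, not the caom2utils code), using astropy's proj_plane_pixel_scales:

    from astropy.wcs import WCS
    from astropy.wcs.utils import proj_plane_pixel_scales


    def position_resolution(wcs):
        try:
            # projection-plane pixel scales (degrees per pixel) along each axis
            return proj_plane_pixel_scales(wcs)[0]
        except Exception:
            # e.g. a singular CD matrix - leave the resolution unset
            return None


    if __name__ == '__main__':
        w = WCS(naxis=2)
        w.wcs.ctype = ['RA---TAN', 'DEC--TAN']
        w.wcs.cd = [[-3.2524e-05, 1.2870e-07], [-9.0e-09, 3.2335e-05]]
        print(position_resolution(w))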
--- caom2utils/caom2utils/caom2blueprint.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index 770b95ed..31afcc99 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -3013,7 +3013,7 @@ def _try_position_with_blueprint(self, chunk, index): self.logger.debug('Begin augmentation with blueprint for position.') aug_axis = None if (chunk.position is not None and chunk.position.axis is not None and chunk.position.axis.axis1 is not None - and chunk.position.axis.axis2 is not None): + and chunk.position.axis.axis2 is not None): # preserve the values obtained from file data aug_x_axis = chunk.position.axis.axis1 aug_y_axis = chunk.position.axis.axis2 @@ -3062,13 +3062,13 @@ def _try_position_with_blueprint(self, chunk, index): aug_cd11 is not None and aug_cd12 is not None and aug_cd21 is not None and aug_cd22 is not None): aug_function = CoordFunction2D(aug_dimension, aug_ref_coord, aug_cd11, aug_cd12, aug_cd21, - aug_cd22) + aug_cd22) self.logger.debug(f'Creating position CoordFunction2D for {self.uri}') if (aug_x_axis is not None and aug_y_axis is not None and aug_function is not None): aug_axis = CoordAxis2D(aug_x_axis, aug_y_axis, aug_x_error, - aug_y_error, None, None, aug_function) + aug_y_error, None, None, aug_function) self.logger.debug(f'Creating position CoordAxis2D for {self.uri}') chunk.position_axis_1 = _to_int(self._get_from_list('Chunk.positionAxis1', index)) @@ -3129,7 +3129,7 @@ def _try_time_with_blueprint(self, chunk, index): chunk.time_axis = aug_axis_index if chunk.time: - chunk.time.exposure = _to_float( self._get_from_list('Chunk.time.exposure', index, chunk.time.exposure)) + chunk.time.exposure = _to_float(self._get_from_list('Chunk.time.exposure', index, chunk.time.exposure)) chunk.time.resolution = _to_float( self._get_from_list('Chunk.time.resolution', index, chunk.time.resolution)) chunk.time.timesys = _to_str(self._get_from_list('Chunk.time.timesys', index, chunk.time.timesys)) @@ -4583,7 +4583,8 @@ def _finish_chunk_position(self, chunk): if chunk.position.resolution is None: try: # JJK 30-01-23 - # In a spatial data chunk the resolution is 2 times the pixel size. We can get the pixel size from the wcs + # In a spatial data chunk the resolution is 2 times the pixel size. We can get the pixel size from + # the wcs temp = utils.proj_plane_pixel_scales(self._wcs) chunk.position.resolution = temp[0] except SingularMatrixError as e: @@ -4650,7 +4651,6 @@ def _finish_time(self): self._wcs.wcs.trefpos = x x = self._blueprint._get('Chunk.time.mjdref', self._extension) if x and not ObsBlueprint.needs_lookup(x): -# logging.error(f'{x} {self._wcs.wcs.mjdref}') self._wcs.wcs.mjdref = [x, x] From 5fcbb7ece1786fae9f05c4bfdd47cd04aedcd7d4 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Tue, 7 Nov 2023 14:27:07 -0800 Subject: [PATCH 09/36] CADC-12805 - flake8. 
--- caom2utils/caom2utils/tests/test_fits2caom2.py | 1 + 1 file changed, 1 insertion(+) diff --git a/caom2utils/caom2utils/tests/test_fits2caom2.py b/caom2utils/caom2utils/tests/test_fits2caom2.py index 86355a95..d494e385 100755 --- a/caom2utils/caom2utils/tests/test_fits2caom2.py +++ b/caom2utils/caom2utils/tests/test_fits2caom2.py @@ -1169,6 +1169,7 @@ def test_visit(): _visit(test_class_plugin_module, test_fitsparser, test_obs, visit_local=None, **kwargs) + EXPECTED_CUSTOM_RANGE_BOUNDS_XML = ''' From 5ac4790c66d73541feb133059946dcc4fa5c8314 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Thu, 16 Nov 2023 15:27:56 -0800 Subject: [PATCH 10/36] CADC-12805 - code review comments. --- caom2utils/caom2utils/tests/test_collections.py | 1 - 1 file changed, 1 deletion(-) diff --git a/caom2utils/caom2utils/tests/test_collections.py b/caom2utils/caom2utils/tests/test_collections.py index 692382f6..7555c9b5 100644 --- a/caom2utils/caom2utils/tests/test_collections.py +++ b/caom2utils/caom2utils/tests/test_collections.py @@ -178,7 +178,6 @@ def _header(fqn): # during operation, want to use astropy on FITS files # but during testing want to use headers and built-in Python file # operations - from urllib.parse import urlparse file_uri = urlparse(fqn) try: fits_header = open(file_uri.path).read() From 7809eb1ca797b677aefb4515e74900427626da85 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Thu, 16 Nov 2023 15:49:32 -0800 Subject: [PATCH 11/36] CADC-12805 - bump versions. --- caom2repo/setup.cfg | 2 +- caom2utils/setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/caom2repo/setup.cfg b/caom2repo/setup.cfg index bef1d626..c2596500 100644 --- a/caom2repo/setup.cfg +++ b/caom2repo/setup.cfg @@ -32,7 +32,7 @@ url = http://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/caom2 edit_on_github = False github_project = opencadc/caom2tools # version should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386) -version = 1.6 +version = 1.6.1 [options] install_requires = diff --git a/caom2utils/setup.cfg b/caom2utils/setup.cfg index 233ccd50..fc6813bb 100644 --- a/caom2utils/setup.cfg +++ b/caom2utils/setup.cfg @@ -33,7 +33,7 @@ url = https://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/caom2 edit_on_github = False github_project = opencadc/caom2tools # version should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386) -version = 1.7.1 +version = 1.7.2 [options] install_requires = From 2dafefcc9abf99eacf2de4f0ed3e6bb388e31ead Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Thu, 16 Nov 2023 16:37:17 -0800 Subject: [PATCH 12/36] CADC-12805 - remove 'import imp' from setup.py --- caom2/setup.py | 9 ++------- caom2repo/setup.py | 8 +++----- caom2utils/setup.py | 8 +++----- 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/caom2/setup.py b/caom2/setup.py index e3e4f467..cd89a16e 100755 --- a/caom2/setup.py +++ b/caom2/setup.py @@ -4,16 +4,11 @@ import glob import os import sys -import imp from setuptools.command.test import test as TestCommand from setuptools import find_packages from setuptools import setup -import distutils.cmd -import distutils.log -import subprocess - # read the README.rst file and return as string. 
def readme(): with open('README.rst') as r_obj: @@ -41,7 +36,7 @@ def readme(): # generate the version file with open(os.path.join(PACKAGENAME, 'version.py'), 'w') as f: - f.write('version = \'{}\'\n'.format(VERSION)) + f.write('version = \'{}\'\n'.format(VERSION)) # Treat everything in scripts except README.rst as a script to be installed scripts = [fname for fname in glob.glob(os.path.join('scripts', '*')) @@ -71,7 +66,7 @@ def run_tests(self): import pytest err_no = pytest.main(self.pytest_args) sys.exit(err_no) - + # Note that requires and provides should not be included in the call to # ``setup``, since these are now deprecated. See this link for more details: # https://groups.google.com/forum/#!topic/astropy-dev/urYO8ckB2uM diff --git a/caom2repo/setup.py b/caom2repo/setup.py index 1a56f1f2..6a81a311 100755 --- a/caom2repo/setup.py +++ b/caom2repo/setup.py @@ -4,7 +4,6 @@ import glob import os import sys -import imp from setuptools.command.test import test as TestCommand from setuptools import find_packages @@ -12,7 +11,6 @@ import distutils.cmd import distutils.log -import subprocess # read the README.rst file and return as string. def readme(): @@ -42,7 +40,7 @@ def readme(): # generate the version file with open(os.path.join(PACKAGENAME, 'version.py'), 'w') as f: - f.write('version = \'{}\'\n'.format(VERSION)) + f.write('version = \'{}\'\n'.format(VERSION)) # Treat everything in scripts except README.rst as a script to be installed scripts = [fname for fname in glob.glob(os.path.join('scripts', '*')) @@ -72,13 +70,13 @@ def run_tests(self): import pytest err_no = pytest.main(self.pytest_args) sys.exit(err_no) - + class IntTestCommand(distutils.cmd.Command): """A custom command to run integration tests.""" description = 'Integration tests for caom2repo' user_options = [] - + def initialize_options(self): """Set default values for options.""" # Each user option must be listed here with their default value. diff --git a/caom2utils/setup.py b/caom2utils/setup.py index 2553885e..e44aeaf1 100755 --- a/caom2utils/setup.py +++ b/caom2utils/setup.py @@ -4,7 +4,6 @@ import glob import os import sys -import imp from setuptools.command.test import test as TestCommand from setuptools import find_packages @@ -12,7 +11,6 @@ import distutils.cmd import distutils.log -import subprocess # read the README.rst file and return as string. def readme(): @@ -42,7 +40,7 @@ def readme(): # generate the version file with open(os.path.join(PACKAGENAME, 'version.py'), 'w') as f: - f.write('version = \'{}\'\n'.format(VERSION)) + f.write('version = \'{}\'\n'.format(VERSION)) # Treat everything in scripts except README.rst as a script to be installed scripts = [fname for fname in glob.glob(os.path.join('scripts', '*')) @@ -72,13 +70,13 @@ def run_tests(self): import pytest err_no = pytest.main(self.pytest_args) sys.exit(err_no) - + class IntTestCommand(distutils.cmd.Command): """A custom command to run integration tests.""" description = 'Integration tests' user_options = [] - + def initialize_options(self): """Set default values for options.""" # Each user option must be listed here with their default value. From 4d6886e38451bff6638dd70152870857cb5f4e4a Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Mon, 20 Nov 2023 16:31:43 -0800 Subject: [PATCH 13/36] CADC-12805 - update the default resource id. 
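The default --resource-id moves from the fits2caom2 service to the global raven locator, with the help text and the expected content length in test_fits2caom2.py updated to match (the latter presumably because the new default URI is two characters longer). A plain-argparse sketch of the resulting command-line behaviour, not the cadcutils get_base_parser implementation:

    import argparse

    DEFAULT_RESOURCE_ID = 'ivo://cadc.nrc.ca/global/raven'


    def get_parser():
        parser = argparse.ArgumentParser()
        parser.add_argument(
            '--resource-id',
            default=DEFAULT_RESOURCE_ID,
            help=f'resource identifier (default {DEFAULT_RESOURCE_ID})',
        )
        return parser


    if __name__ == '__main__':
        # the default applies unless the caller overrides it on the command line
        print(get_parser().parse_args([]).resource_id)
        print(get_parser().parse_args(['--resource-id', 'ivo://cadc.nrc.ca/fits2caom2']).resource_id)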
--- caom2utils/caom2utils/caom2blueprint.py | 2 +- caom2utils/caom2utils/tests/data/help.txt | 2 +- caom2utils/caom2utils/tests/test_fits2caom2.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index 31afcc99..d5cf13bb 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -5189,7 +5189,7 @@ def _get_common_arg_parser(): fits2caom2 and caom2gen :return: args parser """ - resource_id = "ivo://cadc.nrc.ca/fits2caom2" + resource_id = "ivo://cadc.nrc.ca/global/raven" parser = util.get_base_parser(subparsers=False, version=version.version, default_resource_id=resource_id) diff --git a/caom2utils/caom2utils/tests/data/help.txt b/caom2utils/caom2utils/tests/data/help.txt index 129f2cb0..f89c8f5a 100644 --- a/caom2utils/caom2utils/tests/data/help.txt +++ b/caom2utils/caom2utils/tests/data/help.txt @@ -40,7 +40,7 @@ optional arguments: -q, --quiet run quietly --resource-id RESOURCE_ID resource identifier (default - ivo://cadc.nrc.ca/fits2caom2) + ivo://cadc.nrc.ca/global/raven) -u, --user USER name of user to authenticate. Note: application prompts for the corresponding password! -v, --verbose verbose messages diff --git a/caom2utils/caom2utils/tests/test_fits2caom2.py b/caom2utils/caom2utils/tests/test_fits2caom2.py index d494e385..9eaacbc0 100755 --- a/caom2utils/caom2utils/tests/test_fits2caom2.py +++ b/caom2utils/caom2utils/tests/test_fits2caom2.py @@ -1118,7 +1118,7 @@ def _get_obs(from_xml_string): thumbnail data text/plain - 2484 + 2486 md5:e6c08f3b8309f05a5a3330e27e3b44eb file://""" + text_file + """ From cb48cd52d7b096069013ddcdcb104ce1e23eb1e8 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Wed, 3 Jan 2024 10:31:45 -0800 Subject: [PATCH 14/36] CADC-12858 - comma in keywords for splitting --- caom2utils/caom2utils/caom2blueprint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index d5cf13bb..b2d73e9a 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -3175,7 +3175,7 @@ def _add_keywords(keywords, current, to_set): if isinstance(keywords, set): to_set.keywords.update(keywords) else: - for k in keywords.split(): + for k in keywords.split(','): to_set.keywords.add(k) else: if current is not None: From bad24bf059ec38b15336d14ee4aa05c810f9d898 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Sat, 6 Jan 2024 11:32:51 -0800 Subject: [PATCH 15/36] CADC-12858 - add WcsParser handling for non-file based content. --- caom2utils/caom2utils/caom2blueprint.py | 843 ++++++++++++++++-------- 1 file changed, 573 insertions(+), 270 deletions(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index b2d73e9a..bdf548a7 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -66,6 +66,31 @@ # *********************************************************************** # +""" +There is a dual inheritance hierarchy in this module: + + + BlueprintParser + ^ + | + ContentParser <>--------------------------- WcsParser + ^ ^ + | | + --------------------- ------------------------ + | | | | + | Hdf5Parser <>----- Hdf5WcsParser | + | | + FitsParser <>--------------------------------------------- FitsWcsParser + +The *WcsParser hierarchy uses astropy.wcs for WCS construction and correctness when building CAOM records. 
+ +When building CAOM records, use: +- the BlueprintParser for records with no WCS information, +- the ContentParser for populating records with WCS information from database queries, etc, +- the FitsParser for populating records with WCS information from FITS files, and +- the Hdf5Parser for populating with WCS information from HDF5 files. + +""" import argparse from datetime import datetime from logging.handlers import TimedRotatingFileHandler @@ -2229,11 +2254,12 @@ class ContentParser(BlueprintParser): def __init__(self, obs_blueprint=None, uri=None): super().__init__(obs_blueprint, uri) - self._wcs_parser = WcsParser() + self._wcs_parser = WcsParser(obs_blueprint, extension=0) def _get_chunk_naxis(self, chunk, index): chunk.naxis = self._get_from_list( - 'Chunk.naxis', index, self._wcs_parser.wcs.wcs.naxis) + 'Chunk.naxis', index, self.blueprint.get_configed_axes_count()) + # 'Chunk.naxis', index, self._wcs_parser.wcs.wcs.naxis) def augment_artifact(self, artifact, index): """ @@ -3161,6 +3187,14 @@ def _two_param_constructor(self, lookup1, lookup2, index, to_type, ctor): new_object = ctor(param1, param2) return new_object + # TODO - is this the right implementation? + def ignore_chunks(self, artifact, index=0): + result = True + if self.blueprint.has_chunk(index): + artifact.parts.add(Part(str(index))) + result = False + return result + @staticmethod def _add_keywords(keywords, current, to_set): """ @@ -3890,9 +3924,179 @@ class WcsParser: POLARIZATION_AXIS = 'polarization' TIME_AXIS = 'time' - def __init__(self): + def __init__(self, blueprint, extension): self.logger = logging.getLogger(self.__class__.__name__) + self._wcs = None self.wcs = None + self._blueprint = blueprint + self._axes = { + 'ra': [0, False], + 'dec': [0, False], + 'time': [0, False], + 'energy': [0, False], + 'polarization': [0, False], + 'observable': [0, False], + 'custom': [0, False], + } + self._extension = extension + self._set_wcs() + + # @property + # def wcs(self): + # return self._wcs.wcs + # # return self._wcs + + # @wcs.setter + # def wcs(self, value): + # self._wcs = value + + def _assign_cd(self, key, cd, count): + x = self._blueprint._get(key, self._extension) + if x is not None: + if ObsBlueprint.needs_lookup(x): + cd[count][count] = 1.0 + else: + cd[count][count] = x + + def assign_sanitize(self, assignee, index, key, sanitize=True): + """ + Do not want to blindly assign None to astropy.wcs attributes, so + use this method for conditional assignment. + + The current implementation is that ff there is a legitimate need to + assign None to a value, either use 'set' in the Hdf5ObsBlueprint, and + specifically assign None, or execute a function to set it to None + conditionally. There will be no support for a Default value of None + with HDF5 files. 
+ """ + x = self._blueprint._get(key, self._extension) + if sanitize: + x = self._sanitize(x) + if x is not None and not ObsBlueprint.needs_lookup(x): + assignee[index] = x + + def _set_wcs(self): + self._wcs = WCS(naxis=self._blueprint.get_configed_axes_count()) + self.wcs = self._wcs.wcs + array_shape = [0] * self._blueprint.get_configed_axes_count() + crder = [0] * self._blueprint.get_configed_axes_count() + crpix = [0] * self._blueprint.get_configed_axes_count() + crval = [0] * self._blueprint.get_configed_axes_count() + csyer = [0] * self._blueprint.get_configed_axes_count() + ctype = [0] * self._blueprint.get_configed_axes_count() + cunit = [0] * self._blueprint.get_configed_axes_count() + temp = [0] * self._blueprint.get_configed_axes_count() + cd = [temp.copy() for _ in range(self._blueprint.get_configed_axes_count())] + count = 0 + if self._blueprint._pos_axes_configed: + self._axes['ra'][1] = True + self._axes['dec'][1] = True + self._axes['ra'][0] = count + self._axes['dec'][0] = count + 1 + self.assign_sanitize(ctype, count, 'Chunk.position.axis.axis1.ctype') + self.assign_sanitize(ctype, count + 1, 'Chunk.position.axis.axis2.ctype') + self.assign_sanitize(cunit, count, 'Chunk.position.axis.axis1.cunit') + self.assign_sanitize(cunit, count + 1, 'Chunk.position.axis.axis2.cunit') + self.assign_sanitize(array_shape, count, 'Chunk.position.axis.function.dimension.naxis1') + self.assign_sanitize(array_shape, count + 1, 'Chunk.position.axis.function.dimension.naxis2') + self.assign_sanitize(crpix, count, 'Chunk.position.axis.function.refCoord.coord1.pix') + self.assign_sanitize(crpix, count + 1, 'Chunk.position.axis.function.refCoord.coord2.pix') + self.assign_sanitize(crval, count, 'Chunk.position.axis.function.refCoord.coord1.val') + self.assign_sanitize(crval, count + 1, 'Chunk.position.axis.function.refCoord.coord2.val') + x = self._blueprint._get('Chunk.position.axis.function.cd11', + self._extension) + if x is not None and not ObsBlueprint.needs_lookup(x): + cd[count][0] = x + x = self._blueprint._get('Chunk.position.axis.function.cd12', + self._extension) + if x is not None and not ObsBlueprint.needs_lookup(x): + cd[count][1] = x + x = self._blueprint._get('Chunk.position.axis.function.cd21', + self._extension) + if x is not None and not ObsBlueprint.needs_lookup(x): + cd[count + 1][0] = x + x = self._blueprint._get('Chunk.position.axis.function.cd22', + self._extension) + if x is not None and not ObsBlueprint.needs_lookup(x): + cd[count + 1][1] = x + self.assign_sanitize(crder, count, 'Chunk.position.axis.error1.rnder') + self.assign_sanitize(crder, count + 1, 'Chunk.position.axis.error2.rnder') + self.assign_sanitize(csyer, count, 'Chunk.position.axis.error1.syser') + self.assign_sanitize(csyer, count + 1, 'Chunk.position.axis.error2.syser') + count += 2 + if self._blueprint._time_axis_configed: + self._axes['time'][1] = True + self._axes['time'][0] = count + self.assign_sanitize(ctype, count, 'Chunk.time.axis.axis.ctype', False) + self.assign_sanitize(cunit, count, 'Chunk.time.axis.axis.cunit', False) + self.assign_sanitize(array_shape, count, 'Chunk.time.axis.function.naxis', False) + self.assign_sanitize(crpix, count, 'Chunk.time.axis.function.refCoord.pix', False) + self.assign_sanitize(crval, count, 'Chunk.time.axis.function.refCoord.val', False) + self.assign_sanitize(crder, count, 'Chunk.time.axis.error.rnder') + self.assign_sanitize(csyer, count, 'Chunk.time.axis.error.syser') + self._assign_cd('Chunk.time.axis.function.delta', cd, count) + count += 1 + if 
self._blueprint._energy_axis_configed: + self._axes['energy'][1] = True + self._axes['energy'][0] = count + self.assign_sanitize(ctype, count, 'Chunk.energy.axis.axis.ctype', False) + self.assign_sanitize(cunit, count, 'Chunk.energy.axis.axis.cunit', False) + self.assign_sanitize(array_shape, count, 'Chunk.energy.axis.function.naxis', False) + self.assign_sanitize(crpix, count, 'Chunk.energy.axis.function.refCoord.pix', False) + self.assign_sanitize(crval, count, 'Chunk.energy.axis.function.refCoord.val', False) + self.assign_sanitize(crder, count, 'Chunk.energy.axis.error.rnder') + self.assign_sanitize(csyer, count, 'Chunk.energy.axis.error.syser') + self._assign_cd('Chunk.energy.axis.function.delta', cd, count) + count += 1 + if self._blueprint._polarization_axis_configed: + self._axes['polarization'][1] = True + self._axes['polarization'][0] = count + self.assign_sanitize(ctype, count, 'Chunk.polarization.axis.axis.ctype', False) + self.assign_sanitize(cunit, count, 'Chunk.polarization.axis.axis.cunit', False) + self.assign_sanitize(array_shape, count, 'Chunk.polarization.axis.function.naxis', False) + self.assign_sanitize(crpix, count, 'Chunk.polarization.axis.function.refCoord.pix', False) + self.assign_sanitize(crval, count, 'Chunk.polarization.axis.function.refCoord.val', False) + self._assign_cd('Chunk.polarization.axis.function.delta', cd, count) + count += 1 + if self._blueprint._obs_axis_configed: + self._axes['observable'][1] = True + self._axes['observable'][0] = count + self.assign_sanitize(ctype, count, 'Chunk.observable.axis.axis.ctype', False) + self.assign_sanitize(cunit, count, 'Chunk.observable.axis.axis.cunit', False) + array_shape[count] = 1.0 + self.assign_sanitize(crpix, count, 'Chunk.observable.axis.function.refCoord.pix', False) + crval[count] = 0.0 + cd[count][count] = 1.0 + count += 1 + if self._blueprint._custom_axis_configed: + self._axes['custom'][1] = True + self._axes['custom'][0] = count + self.assign_sanitize(ctype, count, 'Chunk.custom.axis.axis.ctype', False) + self.assign_sanitize(cunit, count, 'Chunk.custom.axis.axis.cunit', False) + self.assign_sanitize(array_shape, count, 'Chunk.custom.axis.function.naxis', False) + self.assign_sanitize(crpix, count, 'Chunk.custom.axis.function.refCoord.pix', False) + self.assign_sanitize(crval, count, 'Chunk.custom.axis.function.refCoord.val', False) + self._assign_cd('Chunk.custom.axis.function.delta', cd, count) + count += 1 + + if not all(val == 0 for val in array_shape): + self._wcs.array_shape = array_shape + if not all(val == 0 for val in cunit): + self._wcs.wcs.cunit = cunit + if not all(val == 0 for val in ctype): + self._wcs.wcs.ctype = ctype + if not all(val == 0 for val in crpix): + self._wcs.wcs.crpix = crpix + if not all(val == 0 for val in crval): + self._wcs.wcs.crval = crval + if not all(val == 0 for val in crder): + self._wcs.wcs.crder = crder + if not all(val == 0 for val in csyer): + self._wcs.wcs.csyer = csyer + self._wcs.wcs.cd = cd + self._finish_position() + self._finish_time() + self._finish_energy() def augment_custom(self, chunk): """ @@ -4125,11 +4329,93 @@ def augment_observable(self, chunk): self._finish_chunk_observable(chunk) self.logger.debug('End Observable WCS augmentation.') + # def _finish_chunk_position(self, chunk): + # pass + + # def _finish_chunk_time(self, chunk): + # raise NotImplementedError + + def _finish_chunk_observable(self, chunk): + ctype = self._wcs.wcs.ctype[chunk.observable_axis-1] + cunit = self._wcs.wcs.ctype[chunk.observable_axis-1] + pix_bin = 
_to_int(self._wcs.wcs.crpix[chunk.observable_axis-1]) + if ctype is not None and cunit is not None and pix_bin is not None: + chunk.observable = ObservableAxis( + Slice(self._get_axis(0, ctype, cunit), pix_bin)) + def _finish_chunk_position(self, chunk): - pass + if chunk.position.resolution is None: + try: + # JJK 30-01-23 + # In a spatial data chunk the resolution is 2 times the pixel size. We can get the pixel size from + # the wcs + temp = utils.proj_plane_pixel_scales(self._wcs) + chunk.position.resolution = temp[0] + except SingularMatrixError as e: + # cannot calculate position.resolution, ignore and continue on + self.logger.warning(f'Not calculating resolution due to {e}') def _finish_chunk_time(self, chunk): - raise NotImplementedError + if not math.isnan(self._wcs.wcs.xposure): + chunk.time.exposure = self._wcs.wcs.xposure + if self._wcs.wcs.timesys is not None and self._wcs.wcs.timesys != '': + chunk.time.timesys = self._wcs.wcs.timesys + if self._wcs.wcs.trefpos is not None and self._wcs.wcs.trefpos != '': + chunk.time.trefpos = self._wcs.wcs.trefpos + if self._wcs.wcs.mjdref is not None and self._wcs.wcs.mjdref[0] != '' and self._wcs.wcs.mjdref[0] != 0.0: + # the astropy value is an array of length 2, use the first value + chunk.time.mjdref = self._wcs.wcs.mjdref[0] + + def _finish_energy(self): + if self._blueprint._energy_axis_configed: + x = self._blueprint._get('Chunk.energy.specsys', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.specsys = x + x = self._blueprint._get('Chunk.energy.ssysobs', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.ssysobs = x + x = self._blueprint._get('Chunk.energy.restfrq', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.restfrq = _to_float(x) + x = self._blueprint._get('Chunk.energy.restwav', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.restwav = x + x = self._blueprint._get('Chunk.energy.velosys', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.velosys = x + x = self._blueprint._get('Chunk.energy.zsource', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.zsource = x + x = self._blueprint._get('Chunk.energy.ssyssrc', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.ssyssrc = x + x = self._blueprint._get('Chunk.energy.velang', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.velangl = x + + def _finish_position(self): + if self._blueprint._pos_axes_configed: + x = self._blueprint._get('Chunk.position.coordsys', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.radesys = x + x = self._blueprint._get('Chunk.position.equinox', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.equinox = _to_float(x) + + def _finish_time(self): + if self._blueprint._time_axis_configed: + x = self._blueprint._get('Chunk.time.exposure', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.xposure = x + x = self._blueprint._get('Chunk.time.timesys', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.timesys = x + x = self._blueprint._get('Chunk.time.trefpos', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.trefpos = x + x = self._blueprint._get('Chunk.time.mjdref', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.mjdref = [x, x] def _get_axis(self, index, 
over_ctype=None, over_cunit=None): """ Assemble a generic axis """ @@ -4161,15 +4447,28 @@ def _get_axis_index(self, keywords): break return axis - def _get_axis_length(self, index): - raise NotImplementedError - - def _get_cd(self, x_index, y_index): - """ returns cd info""" + # def _get_axis_length(self, index): + # raise NotImplementedError - try: - if self.wcs.has_cd(): - cd11 = self.wcs.cd[x_index][x_index] + def _get_axis_length(self, for_axis): + if self._wcs.array_shape is None: + return 0 + else: + if len(self._wcs.array_shape) == 1: + result = self._wcs.array_shape[0] + else: + result = self._wcs.array_shape[for_axis-1] + if isinstance(result, tuple): + # the blueprint is incompletely configured + raise ValueError(f'Could not find axis length for axis {for_axis}') + return _to_int(result) + + def _get_cd(self, x_index, y_index): + """ returns cd info""" + + try: + if self.wcs.has_cd(): + cd11 = self.wcs.cd[x_index][x_index] cd12 = self.wcs.cd[x_index][y_index] cd21 = self.wcs.cd[y_index][x_index] cd22 = self.wcs.cd[y_index][y_index] @@ -4307,7 +4606,8 @@ def __init__(self, header, file, extension): :param extension: which HDU WCS axes methods of this class. """ - super().__init__() + # super().__init__() + self.logger = logging.getLogger(self.__class__.__name__) self.log_filter = HDULoggingFilter() self.log_filter.extension(extension) self.logger.addFilter(self.log_filter) @@ -4330,6 +4630,9 @@ def _finish_chunk_observable(self, chunk): chunk.observable = ObservableAxis( Slice(self._get_axis(0, ctype, cunit), pix_bin)) + def _finish_chunk_position(self, chunk): + pass + def _finish_chunk_time(self, chunk): """ The expected caom2 - FITS keywords mapping is: @@ -4373,27 +4676,27 @@ def __init__(self, blueprint, extension): """ super().__init__() self._wcs = None - self._axes = { - 'ra': [0, False], - 'dec': [0, False], - 'time': [0, False], - 'energy': [0, False], - 'polarization': [0, False], - 'observable': [0, False], - 'custom': [0, False], - } + # self._axes = { + # 'ra': [0, False], + # 'dec': [0, False], + # 'time': [0, False], + # 'energy': [0, False], + # 'polarization': [0, False], + # 'observable': [0, False], + # 'custom': [0, False], + # } self._blueprint = blueprint # int - index into blueprint._plan extensions self._extension = extension self._set_wcs() - @property - def wcs(self): - return self._wcs.wcs + # @property + # def wcs(self): + # return self._wcs.wcs - @wcs.setter - def wcs(self, value): - self._wcs = value + # @wcs.setter + # def wcs(self, value): + # self._wcs = value def _get_axis_index(self, keywords): result = self._axes['custom'][0] @@ -4411,247 +4714,247 @@ def _get_axis_index(self, keywords): result = self._axes['observable'][0] return result - def _get_axis_length(self, for_axis): - if self._wcs.array_shape is None: - return 0 - else: - if len(self._wcs.array_shape) == 1: - result = self._wcs.array_shape[0] - else: - result = self._wcs.array_shape[for_axis-1] - if isinstance(result, tuple): - # the blueprint is incompletely configured - raise ValueError(f'Could not find axis length for axis {for_axis}') - return _to_int(result) - - def assign_sanitize(self, assignee, index, key, sanitize=True): - """ - Do not want to blindly assign None to astropy.wcs attributes, so - use this method for conditional assignment. - - The current implementation is that ff there is a legitimate need to - assign None to a value, either use 'set' in the Hdf5ObsBlueprint, and - specifically assign None, or execute a function to set it to None - conditionally. 
There will be no support for a Default value of None - with HDF5 files. - """ - x = self._blueprint._get(key, self._extension) - if sanitize: - x = self._sanitize(x) - if x is not None and not ObsBlueprint.needs_lookup(x): - assignee[index] = x - - def _assign_cd(self, key, cd, count): - x = self._blueprint._get(key, self._extension) - if x is not None: - if ObsBlueprint.needs_lookup(x): - cd[count][count] = 1.0 - else: - cd[count][count] = x - - def _set_wcs(self): - self._wcs = WCS(naxis=self._blueprint.get_configed_axes_count()) - array_shape = [0] * self._blueprint.get_configed_axes_count() - crder = [0] * self._blueprint.get_configed_axes_count() - crpix = [0] * self._blueprint.get_configed_axes_count() - crval = [0] * self._blueprint.get_configed_axes_count() - csyer = [0] * self._blueprint.get_configed_axes_count() - ctype = [0] * self._blueprint.get_configed_axes_count() - cunit = [0] * self._blueprint.get_configed_axes_count() - temp = [0] * self._blueprint.get_configed_axes_count() - cd = [temp.copy() for _ in range(self._blueprint.get_configed_axes_count())] - count = 0 - if self._blueprint._pos_axes_configed: - self._axes['ra'][1] = True - self._axes['dec'][1] = True - self._axes['ra'][0] = count - self._axes['dec'][0] = count + 1 - self.assign_sanitize(ctype, count, 'Chunk.position.axis.axis1.ctype') - self.assign_sanitize(ctype, count + 1, 'Chunk.position.axis.axis2.ctype') - self.assign_sanitize(cunit, count, 'Chunk.position.axis.axis1.cunit') - self.assign_sanitize(cunit, count + 1, 'Chunk.position.axis.axis2.cunit') - self.assign_sanitize(array_shape, count, 'Chunk.position.axis.function.dimension.naxis1') - self.assign_sanitize(array_shape, count + 1, 'Chunk.position.axis.function.dimension.naxis2') - self.assign_sanitize(crpix, count, 'Chunk.position.axis.function.refCoord.coord1.pix') - self.assign_sanitize(crpix, count + 1, 'Chunk.position.axis.function.refCoord.coord2.pix') - self.assign_sanitize(crval, count, 'Chunk.position.axis.function.refCoord.coord1.val') - self.assign_sanitize(crval, count + 1, 'Chunk.position.axis.function.refCoord.coord2.val') - x = self._blueprint._get('Chunk.position.axis.function.cd11', - self._extension) - if x is not None and not ObsBlueprint.needs_lookup(x): - cd[count][0] = x - x = self._blueprint._get('Chunk.position.axis.function.cd12', - self._extension) - if x is not None and not ObsBlueprint.needs_lookup(x): - cd[count][1] = x - x = self._blueprint._get('Chunk.position.axis.function.cd21', - self._extension) - if x is not None and not ObsBlueprint.needs_lookup(x): - cd[count + 1][0] = x - x = self._blueprint._get('Chunk.position.axis.function.cd22', - self._extension) - if x is not None and not ObsBlueprint.needs_lookup(x): - cd[count + 1][1] = x - self.assign_sanitize(crder, count, 'Chunk.position.axis.error1.rnder') - self.assign_sanitize(crder, count + 1, 'Chunk.position.axis.error2.rnder') - self.assign_sanitize(csyer, count, 'Chunk.position.axis.error1.syser') - self.assign_sanitize(csyer, count + 1, 'Chunk.position.axis.error2.syser') - count += 2 - if self._blueprint._time_axis_configed: - self._axes['time'][1] = True - self._axes['time'][0] = count - self.assign_sanitize(ctype, count, 'Chunk.time.axis.axis.ctype', False) - self.assign_sanitize(cunit, count, 'Chunk.time.axis.axis.cunit', False) - self.assign_sanitize(array_shape, count, 'Chunk.time.axis.function.naxis', False) - self.assign_sanitize(crpix, count, 'Chunk.time.axis.function.refCoord.pix', False) - self.assign_sanitize(crval, count, 
'Chunk.time.axis.function.refCoord.val', False) - self.assign_sanitize(crder, count, 'Chunk.time.axis.error.rnder') - self.assign_sanitize(csyer, count, 'Chunk.time.axis.error.syser') - self._assign_cd('Chunk.time.axis.function.delta', cd, count) - count += 1 - if self._blueprint._energy_axis_configed: - self._axes['energy'][1] = True - self._axes['energy'][0] = count - self.assign_sanitize(ctype, count, 'Chunk.energy.axis.axis.ctype', False) - self.assign_sanitize(cunit, count, 'Chunk.energy.axis.axis.cunit', False) - self.assign_sanitize(array_shape, count, 'Chunk.energy.axis.function.naxis', False) - self.assign_sanitize(crpix, count, 'Chunk.energy.axis.function.refCoord.pix', False) - self.assign_sanitize(crval, count, 'Chunk.energy.axis.function.refCoord.val', False) - self.assign_sanitize(crder, count, 'Chunk.energy.axis.error.rnder') - self.assign_sanitize(csyer, count, 'Chunk.energy.axis.error.syser') - self._assign_cd('Chunk.energy.axis.function.delta', cd, count) - count += 1 - if self._blueprint._polarization_axis_configed: - self._axes['polarization'][1] = True - self._axes['polarization'][0] = count - self.assign_sanitize(ctype, count, 'Chunk.polarization.axis.axis.ctype', False) - self.assign_sanitize(cunit, count, 'Chunk.polarization.axis.axis.cunit', False) - self.assign_sanitize(array_shape, count, 'Chunk.polarization.axis.function.naxis', False) - self.assign_sanitize(crpix, count, 'Chunk.polarization.axis.function.refCoord.pix', False) - self.assign_sanitize(crval, count, 'Chunk.polarization.axis.function.refCoord.val', False) - self._assign_cd('Chunk.polarization.axis.function.delta', cd, count) - count += 1 - if self._blueprint._obs_axis_configed: - self._axes['observable'][1] = True - self._axes['observable'][0] = count - self.assign_sanitize(ctype, count, 'Chunk.observable.axis.axis.ctype', False) - self.assign_sanitize(cunit, count, 'Chunk.observable.axis.axis.cunit', False) - array_shape[count] = 1.0 - self.assign_sanitize(crpix, count, 'Chunk.observable.axis.function.refCoord.pix', False) - crval[count] = 0.0 - cd[count][count] = 1.0 - count += 1 - if self._blueprint._custom_axis_configed: - self._axes['custom'][1] = True - self._axes['custom'][0] = count - self.assign_sanitize(ctype, count, 'Chunk.custom.axis.axis.ctype', False) - self.assign_sanitize(cunit, count, 'Chunk.custom.axis.axis.cunit', False) - self.assign_sanitize(array_shape, count, 'Chunk.custom.axis.function.naxis', False) - self.assign_sanitize(crpix, count, 'Chunk.custom.axis.function.refCoord.pix', False) - self.assign_sanitize(crval, count, 'Chunk.custom.axis.function.refCoord.val', False) - self._assign_cd('Chunk.custom.axis.function.delta', cd, count) - count += 1 - - if not all(val == 0 for val in array_shape): - self._wcs.array_shape = array_shape - if not all(val == 0 for val in cunit): - self._wcs.wcs.cunit = cunit - if not all(val == 0 for val in ctype): - self._wcs.wcs.ctype = ctype - if not all(val == 0 for val in crpix): - self._wcs.wcs.crpix = crpix - if not all(val == 0 for val in crval): - self._wcs.wcs.crval = crval - if not all(val == 0 for val in crder): - self._wcs.wcs.crder = crder - if not all(val == 0 for val in csyer): - self._wcs.wcs.csyer = csyer - self._wcs.wcs.cd = cd - self._finish_position() - self._finish_time() - self._finish_energy() - - def _finish_chunk_observable(self, chunk): - ctype = self._wcs.wcs.ctype[chunk.observable_axis-1] - cunit = self._wcs.wcs.ctype[chunk.observable_axis-1] - pix_bin = _to_int(self._wcs.wcs.crpix[chunk.observable_axis-1]) - if ctype 
is not None and cunit is not None and pix_bin is not None: - chunk.observable = ObservableAxis( - Slice(self._get_axis(0, ctype, cunit), pix_bin)) - - def _finish_chunk_position(self, chunk): - if chunk.position.resolution is None: - try: - # JJK 30-01-23 - # In a spatial data chunk the resolution is 2 times the pixel size. We can get the pixel size from - # the wcs - temp = utils.proj_plane_pixel_scales(self._wcs) - chunk.position.resolution = temp[0] - except SingularMatrixError as e: - # cannot calculate position.resolution, ignore and continue on - self.logger.warning(f'Not calculating resolution due to {e}') - - def _finish_chunk_time(self, chunk): - if not math.isnan(self._wcs.wcs.xposure): - chunk.time.exposure = self._wcs.wcs.xposure - if self._wcs.wcs.timesys is not None and self._wcs.wcs.timesys != '': - chunk.time.timesys = self._wcs.wcs.timesys - if self._wcs.wcs.trefpos is not None and self._wcs.wcs.trefpos != '': - chunk.time.trefpos = self._wcs.wcs.trefpos - if self._wcs.wcs.mjdref is not None and self._wcs.wcs.mjdref[0] != '' and self._wcs.wcs.mjdref[0] != 0.0: - # the astropy value is an array of length 2, use the first value - chunk.time.mjdref = self._wcs.wcs.mjdref[0] - - def _finish_energy(self): - if self._blueprint._energy_axis_configed: - x = self._blueprint._get('Chunk.energy.specsys', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.specsys = x - x = self._blueprint._get('Chunk.energy.ssysobs', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.ssysobs = x - x = self._blueprint._get('Chunk.energy.restfrq', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.restfrq = _to_float(x) - x = self._blueprint._get('Chunk.energy.restwav', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.restwav = x - x = self._blueprint._get('Chunk.energy.velosys', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.velosys = x - x = self._blueprint._get('Chunk.energy.zsource', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.zsource = x - x = self._blueprint._get('Chunk.energy.ssyssrc', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.ssyssrc = x - x = self._blueprint._get('Chunk.energy.velang', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.velangl = x - - def _finish_position(self): - if self._blueprint._pos_axes_configed: - x = self._blueprint._get('Chunk.position.coordsys', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.radesys = x - x = self._blueprint._get('Chunk.position.equinox', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.equinox = _to_float(x) - - def _finish_time(self): - if self._blueprint._time_axis_configed: - x = self._blueprint._get('Chunk.time.exposure', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.xposure = x - x = self._blueprint._get('Chunk.time.timesys', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.timesys = x - x = self._blueprint._get('Chunk.time.trefpos', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.trefpos = x - x = self._blueprint._get('Chunk.time.mjdref', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.mjdref = [x, x] + # def _get_axis_length(self, for_axis): + # if self._wcs.array_shape is None: + # return 0 + # else: + # if 
len(self._wcs.array_shape) == 1: + # result = self._wcs.array_shape[0] + # else: + # result = self._wcs.array_shape[for_axis-1] + # if isinstance(result, tuple): + # # the blueprint is incompletely configured + # raise ValueError(f'Could not find axis length for axis {for_axis}') + # return _to_int(result) + + # def assign_sanitize(self, assignee, index, key, sanitize=True): + # """ + # Do not want to blindly assign None to astropy.wcs attributes, so + # use this method for conditional assignment. + + # The current implementation is that ff there is a legitimate need to + # assign None to a value, either use 'set' in the Hdf5ObsBlueprint, and + # specifically assign None, or execute a function to set it to None + # conditionally. There will be no support for a Default value of None + # with HDF5 files. + # """ + # x = self._blueprint._get(key, self._extension) + # if sanitize: + # x = self._sanitize(x) + # if x is not None and not ObsBlueprint.needs_lookup(x): + # assignee[index] = x + + # def _assign_cd(self, key, cd, count): + # x = self._blueprint._get(key, self._extension) + # if x is not None: + # if ObsBlueprint.needs_lookup(x): + # cd[count][count] = 1.0 + # else: + # cd[count][count] = x + + # def _set_wcs(self): + # self._wcs = WCS(naxis=self._blueprint.get_configed_axes_count()) + # array_shape = [0] * self._blueprint.get_configed_axes_count() + # crder = [0] * self._blueprint.get_configed_axes_count() + # crpix = [0] * self._blueprint.get_configed_axes_count() + # crval = [0] * self._blueprint.get_configed_axes_count() + # csyer = [0] * self._blueprint.get_configed_axes_count() + # ctype = [0] * self._blueprint.get_configed_axes_count() + # cunit = [0] * self._blueprint.get_configed_axes_count() + # temp = [0] * self._blueprint.get_configed_axes_count() + # cd = [temp.copy() for _ in range(self._blueprint.get_configed_axes_count())] + # count = 0 + # if self._blueprint._pos_axes_configed: + # self._axes['ra'][1] = True + # self._axes['dec'][1] = True + # self._axes['ra'][0] = count + # self._axes['dec'][0] = count + 1 + # self.assign_sanitize(ctype, count, 'Chunk.position.axis.axis1.ctype') + # self.assign_sanitize(ctype, count + 1, 'Chunk.position.axis.axis2.ctype') + # self.assign_sanitize(cunit, count, 'Chunk.position.axis.axis1.cunit') + # self.assign_sanitize(cunit, count + 1, 'Chunk.position.axis.axis2.cunit') + # self.assign_sanitize(array_shape, count, 'Chunk.position.axis.function.dimension.naxis1') + # self.assign_sanitize(array_shape, count + 1, 'Chunk.position.axis.function.dimension.naxis2') + # self.assign_sanitize(crpix, count, 'Chunk.position.axis.function.refCoord.coord1.pix') + # self.assign_sanitize(crpix, count + 1, 'Chunk.position.axis.function.refCoord.coord2.pix') + # self.assign_sanitize(crval, count, 'Chunk.position.axis.function.refCoord.coord1.val') + # self.assign_sanitize(crval, count + 1, 'Chunk.position.axis.function.refCoord.coord2.val') + # x = self._blueprint._get('Chunk.position.axis.function.cd11', + # self._extension) + # if x is not None and not ObsBlueprint.needs_lookup(x): + # cd[count][0] = x + # x = self._blueprint._get('Chunk.position.axis.function.cd12', + # self._extension) + # if x is not None and not ObsBlueprint.needs_lookup(x): + # cd[count][1] = x + # x = self._blueprint._get('Chunk.position.axis.function.cd21', + # self._extension) + # if x is not None and not ObsBlueprint.needs_lookup(x): + # cd[count + 1][0] = x + # x = self._blueprint._get('Chunk.position.axis.function.cd22', + # self._extension) + # if x is not None and not 
ObsBlueprint.needs_lookup(x): + # cd[count + 1][1] = x + # self.assign_sanitize(crder, count, 'Chunk.position.axis.error1.rnder') + # self.assign_sanitize(crder, count + 1, 'Chunk.position.axis.error2.rnder') + # self.assign_sanitize(csyer, count, 'Chunk.position.axis.error1.syser') + # self.assign_sanitize(csyer, count + 1, 'Chunk.position.axis.error2.syser') + # count += 2 + # if self._blueprint._time_axis_configed: + # self._axes['time'][1] = True + # self._axes['time'][0] = count + # self.assign_sanitize(ctype, count, 'Chunk.time.axis.axis.ctype', False) + # self.assign_sanitize(cunit, count, 'Chunk.time.axis.axis.cunit', False) + # self.assign_sanitize(array_shape, count, 'Chunk.time.axis.function.naxis', False) + # self.assign_sanitize(crpix, count, 'Chunk.time.axis.function.refCoord.pix', False) + # self.assign_sanitize(crval, count, 'Chunk.time.axis.function.refCoord.val', False) + # self.assign_sanitize(crder, count, 'Chunk.time.axis.error.rnder') + # self.assign_sanitize(csyer, count, 'Chunk.time.axis.error.syser') + # self._assign_cd('Chunk.time.axis.function.delta', cd, count) + # count += 1 + # if self._blueprint._energy_axis_configed: + # self._axes['energy'][1] = True + # self._axes['energy'][0] = count + # self.assign_sanitize(ctype, count, 'Chunk.energy.axis.axis.ctype', False) + # self.assign_sanitize(cunit, count, 'Chunk.energy.axis.axis.cunit', False) + # self.assign_sanitize(array_shape, count, 'Chunk.energy.axis.function.naxis', False) + # self.assign_sanitize(crpix, count, 'Chunk.energy.axis.function.refCoord.pix', False) + # self.assign_sanitize(crval, count, 'Chunk.energy.axis.function.refCoord.val', False) + # self.assign_sanitize(crder, count, 'Chunk.energy.axis.error.rnder') + # self.assign_sanitize(csyer, count, 'Chunk.energy.axis.error.syser') + # self._assign_cd('Chunk.energy.axis.function.delta', cd, count) + # count += 1 + # if self._blueprint._polarization_axis_configed: + # self._axes['polarization'][1] = True + # self._axes['polarization'][0] = count + # self.assign_sanitize(ctype, count, 'Chunk.polarization.axis.axis.ctype', False) + # self.assign_sanitize(cunit, count, 'Chunk.polarization.axis.axis.cunit', False) + # self.assign_sanitize(array_shape, count, 'Chunk.polarization.axis.function.naxis', False) + # self.assign_sanitize(crpix, count, 'Chunk.polarization.axis.function.refCoord.pix', False) + # self.assign_sanitize(crval, count, 'Chunk.polarization.axis.function.refCoord.val', False) + # self._assign_cd('Chunk.polarization.axis.function.delta', cd, count) + # count += 1 + # if self._blueprint._obs_axis_configed: + # self._axes['observable'][1] = True + # self._axes['observable'][0] = count + # self.assign_sanitize(ctype, count, 'Chunk.observable.axis.axis.ctype', False) + # self.assign_sanitize(cunit, count, 'Chunk.observable.axis.axis.cunit', False) + # array_shape[count] = 1.0 + # self.assign_sanitize(crpix, count, 'Chunk.observable.axis.function.refCoord.pix', False) + # crval[count] = 0.0 + # cd[count][count] = 1.0 + # count += 1 + # if self._blueprint._custom_axis_configed: + # self._axes['custom'][1] = True + # self._axes['custom'][0] = count + # self.assign_sanitize(ctype, count, 'Chunk.custom.axis.axis.ctype', False) + # self.assign_sanitize(cunit, count, 'Chunk.custom.axis.axis.cunit', False) + # self.assign_sanitize(array_shape, count, 'Chunk.custom.axis.function.naxis', False) + # self.assign_sanitize(crpix, count, 'Chunk.custom.axis.function.refCoord.pix', False) + # self.assign_sanitize(crval, count, 
'Chunk.custom.axis.function.refCoord.val', False) + # self._assign_cd('Chunk.custom.axis.function.delta', cd, count) + # count += 1 + + # if not all(val == 0 for val in array_shape): + # self._wcs.array_shape = array_shape + # if not all(val == 0 for val in cunit): + # self._wcs.wcs.cunit = cunit + # if not all(val == 0 for val in ctype): + # self._wcs.wcs.ctype = ctype + # if not all(val == 0 for val in crpix): + # self._wcs.wcs.crpix = crpix + # if not all(val == 0 for val in crval): + # self._wcs.wcs.crval = crval + # if not all(val == 0 for val in crder): + # self._wcs.wcs.crder = crder + # if not all(val == 0 for val in csyer): + # self._wcs.wcs.csyer = csyer + # self._wcs.wcs.cd = cd + # self._finish_position() + # self._finish_time() + # self._finish_energy() + + # def _finish_chunk_observable(self, chunk): + # ctype = self._wcs.wcs.ctype[chunk.observable_axis-1] + # cunit = self._wcs.wcs.ctype[chunk.observable_axis-1] + # pix_bin = _to_int(self._wcs.wcs.crpix[chunk.observable_axis-1]) + # if ctype is not None and cunit is not None and pix_bin is not None: + # chunk.observable = ObservableAxis( + # Slice(self._get_axis(0, ctype, cunit), pix_bin)) + + # def _finish_chunk_position(self, chunk): + # if chunk.position.resolution is None: + # try: + # # JJK 30-01-23 + # # In a spatial data chunk the resolution is 2 times the pixel size. We can get the pixel size from + # # the wcs + # temp = utils.proj_plane_pixel_scales(self._wcs) + # chunk.position.resolution = temp[0] + # except SingularMatrixError as e: + # # cannot calculate position.resolution, ignore and continue on + # self.logger.warning(f'Not calculating resolution due to {e}') + + # def _finish_chunk_time(self, chunk): + # if not math.isnan(self._wcs.wcs.xposure): + # chunk.time.exposure = self._wcs.wcs.xposure + # if self._wcs.wcs.timesys is not None and self._wcs.wcs.timesys != '': + # chunk.time.timesys = self._wcs.wcs.timesys + # if self._wcs.wcs.trefpos is not None and self._wcs.wcs.trefpos != '': + # chunk.time.trefpos = self._wcs.wcs.trefpos + # if self._wcs.wcs.mjdref is not None and self._wcs.wcs.mjdref[0] != '' and self._wcs.wcs.mjdref[0] != 0.0: + # # the astropy value is an array of length 2, use the first value + # chunk.time.mjdref = self._wcs.wcs.mjdref[0] + + # def _finish_energy(self): + # if self._blueprint._energy_axis_configed: + # x = self._blueprint._get('Chunk.energy.specsys', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.specsys = x + # x = self._blueprint._get('Chunk.energy.ssysobs', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.ssysobs = x + # x = self._blueprint._get('Chunk.energy.restfrq', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.restfrq = _to_float(x) + # x = self._blueprint._get('Chunk.energy.restwav', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.restwav = x + # x = self._blueprint._get('Chunk.energy.velosys', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.velosys = x + # x = self._blueprint._get('Chunk.energy.zsource', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.zsource = x + # x = self._blueprint._get('Chunk.energy.ssyssrc', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.ssyssrc = x + # x = self._blueprint._get('Chunk.energy.velang', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.velangl = x + + # def 
_finish_position(self): + # if self._blueprint._pos_axes_configed: + # x = self._blueprint._get('Chunk.position.coordsys', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.radesys = x + # x = self._blueprint._get('Chunk.position.equinox', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.equinox = _to_float(x) + + # def _finish_time(self): + # if self._blueprint._time_axis_configed: + # x = self._blueprint._get('Chunk.time.exposure', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.xposure = x + # x = self._blueprint._get('Chunk.time.timesys', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.timesys = x + # x = self._blueprint._get('Chunk.time.trefpos', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.trefpos = x + # x = self._blueprint._get('Chunk.time.mjdref', self._extension) + # if x and not ObsBlueprint.needs_lookup(x): + # self._wcs.wcs.mjdref = [x, x] def _to_str(value): From 73624e63efc7bae998de29e3b08856cea93a9590 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Mon, 8 Jan 2024 11:05:12 -0800 Subject: [PATCH 16/36] CADC-12858/CADC-13010 - refactor triggered by LoTSS collection, which uses metadata from a database query to create a record. --- caom2utils/caom2utils/caom2blueprint.py | 317 ++---------------- .../tests/data/brite/HD36486/HD36486.xml | 2 +- .../tests/data/cfht/1709071og/1709071og.xml | 4 +- .../data/cfht/1709071og/cfhtwircam.override | 18 +- .../tests/data/cfht/1709071p/1709071p.xml | 2 +- .../tests/data/cfht/1916216i/1916216i.xml | 2 +- .../tests/data/cfht/1916216o/1916216o.xml | 2 +- .../tests/data/cfht/1916216p/1916216p.xml | 2 +- .../tests/data/cfht/2087482o/2087482o.xml | 2 +- .../tests/data/cfht/2087482p/2087482p.xml | 2 +- .../tests/data/cfht/2136164o/2136164o.xml | 2 +- .../tests/data/cfht/2136164p/2136164p.xml | 2 +- .../tests/data/cfht/2216850f/2216850f.xml | 2 +- .../tests/data/cfht/2216860b/2216860b.xml | 2 +- .../caom2utils/tests/data/cfhtsg/mp9801/y.xml | 6 +- .../caom2utils/tests/data/cgps/myOBS/cgps.xml | 2 +- .../dao_c122_2016_007777.override | 6 +- .../dao_c122_2016_007777.xml | 6 +- .../dao_c122_2016_007830.override | 6 +- .../dao_c122_2016_007830.xml | 6 +- .../dao_c122_2016_007939.override | 14 +- .../dao_c122_2016_007939.xml | 6 +- .../dao_c122_2016_007942.override | 6 +- .../dao_c122_2016_007942.xml | 6 +- .../N20030325S0098.expected.xml | 2 +- .../data/lotss/P124+62/P124+62.fits.header | 32 ++ .../tests/data/lotss/P124+62/P124+62.xml | 138 ++++++++ .../data/lotss/P124+62/mosaic.fits.blueprint | 61 ++++ .../tests/data/lotss/P124+62/mosaic.module | 7 + .../tests/data/lotss/P124+62/mosaic.py | 7 + .../tests/data/omm/Cdemo_ext2_SCIRED/y.xml | 2 +- .../caom2utils/tests/test_collections.py | 29 +- 32 files changed, 342 insertions(+), 361 deletions(-) create mode 100644 caom2utils/caom2utils/tests/data/lotss/P124+62/P124+62.fits.header create mode 100644 caom2utils/caom2utils/tests/data/lotss/P124+62/P124+62.xml create mode 100644 caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.fits.blueprint create mode 100644 caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.module create mode 100644 caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.py diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index bdf548a7..01e06e83 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -3209,7 +3209,7 @@ 
def _add_keywords(keywords, current, to_set): if isinstance(keywords, set): to_set.keywords.update(keywords) else: - for k in keywords.split(','): + for k in keywords.split(): to_set.keywords.add(k) else: if current is not None: @@ -3925,7 +3925,6 @@ class WcsParser: TIME_AXIS = 'time' def __init__(self, blueprint, extension): - self.logger = logging.getLogger(self.__class__.__name__) self._wcs = None self.wcs = None self._blueprint = blueprint @@ -3938,18 +3937,11 @@ def __init__(self, blueprint, extension): 'observable': [0, False], 'custom': [0, False], } + # int - index into blueprint._plan extensions self._extension = extension + self.logger = logging.getLogger(self.__class__.__name__) self._set_wcs() - # @property - # def wcs(self): - # return self._wcs.wcs - # # return self._wcs - - # @wcs.setter - # def wcs(self, value): - # self._wcs = value - def _assign_cd(self, key, cd, count): x = self._blueprint._get(key, self._extension) if x is not None: @@ -4329,21 +4321,18 @@ def augment_observable(self, chunk): self._finish_chunk_observable(chunk) self.logger.debug('End Observable WCS augmentation.') - # def _finish_chunk_position(self, chunk): - # pass - - # def _finish_chunk_time(self, chunk): - # raise NotImplementedError - def _finish_chunk_observable(self, chunk): + self.logger.debug('Begin _finish_chunk_observable') ctype = self._wcs.wcs.ctype[chunk.observable_axis-1] cunit = self._wcs.wcs.ctype[chunk.observable_axis-1] pix_bin = _to_int(self._wcs.wcs.crpix[chunk.observable_axis-1]) if ctype is not None and cunit is not None and pix_bin is not None: chunk.observable = ObservableAxis( Slice(self._get_axis(0, ctype, cunit), pix_bin)) + self.logger.debug('End _finish_chunk_observable') def _finish_chunk_position(self, chunk): + self.logger.debug('Begin _finish_chunk_position') if chunk.position.resolution is None: try: # JJK 30-01-23 @@ -4354,8 +4343,10 @@ def _finish_chunk_position(self, chunk): except SingularMatrixError as e: # cannot calculate position.resolution, ignore and continue on self.logger.warning(f'Not calculating resolution due to {e}') + self.logger.debug('End _finish_chunk_position') def _finish_chunk_time(self, chunk): + self.logger.debug('Begin _finish_chunk_time') if not math.isnan(self._wcs.wcs.xposure): chunk.time.exposure = self._wcs.wcs.xposure if self._wcs.wcs.timesys is not None and self._wcs.wcs.timesys != '': @@ -4365,8 +4356,10 @@ def _finish_chunk_time(self, chunk): if self._wcs.wcs.mjdref is not None and self._wcs.wcs.mjdref[0] != '' and self._wcs.wcs.mjdref[0] != 0.0: # the astropy value is an array of length 2, use the first value chunk.time.mjdref = self._wcs.wcs.mjdref[0] + self.logger.debug('End _finish_chunk_time') def _finish_energy(self): + self.logger.debug('Begin _finish_energy') if self._blueprint._energy_axis_configed: x = self._blueprint._get('Chunk.energy.specsys', self._extension) if x and not ObsBlueprint.needs_lookup(x): @@ -4392,8 +4385,10 @@ def _finish_energy(self): x = self._blueprint._get('Chunk.energy.velang', self._extension) if x and not ObsBlueprint.needs_lookup(x): self._wcs.wcs.velangl = x + self.logger.debug('End _finish_energy') def _finish_position(self): + self.logger.debug('Begin _finish_position') if self._blueprint._pos_axes_configed: x = self._blueprint._get('Chunk.position.coordsys', self._extension) if x and not ObsBlueprint.needs_lookup(x): @@ -4401,12 +4396,14 @@ def _finish_position(self): x = self._blueprint._get('Chunk.position.equinox', self._extension) if x and not ObsBlueprint.needs_lookup(x): 
self._wcs.wcs.equinox = _to_float(x) + self.logger.debug('End _finish_position') def _finish_time(self): + self.logger.debug('Begin _finish_time') if self._blueprint._time_axis_configed: x = self._blueprint._get('Chunk.time.exposure', self._extension) if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.xposure = x + self._wcs.wcs.xposure = _to_float(x) x = self._blueprint._get('Chunk.time.timesys', self._extension) if x and not ObsBlueprint.needs_lookup(x): self._wcs.wcs.timesys = x @@ -4416,6 +4413,7 @@ def _finish_time(self): x = self._blueprint._get('Chunk.time.mjdref', self._extension) if x and not ObsBlueprint.needs_lookup(x): self._wcs.wcs.mjdref = [x, x] + self.logger.debug('End _finish_time') def _get_axis(self, index, over_ctype=None, over_cunit=None): """ Assemble a generic axis """ @@ -4447,9 +4445,6 @@ def _get_axis_index(self, keywords): break return axis - # def _get_axis_length(self, index): - # raise NotImplementedError - def _get_axis_length(self, for_axis): if self._wcs.array_shape is None: return 0 @@ -4606,7 +4601,6 @@ def __init__(self, header, file, extension): :param extension: which HDU WCS axes methods of this class. """ - # super().__init__() self.logger = logging.getLogger(self.__class__.__name__) self.log_filter = HDULoggingFilter() self.log_filter.extension(extension) @@ -4623,12 +4617,14 @@ def __init__(self, header, file, extension): self.extension = extension def _finish_chunk_observable(self, chunk): + self.logger.debug('Begin _finish_chunk_observable') ctype = self.header.get(f'CTYPE{chunk.observable_axis}') cunit = self.header.get(f'CUNIT{chunk.observable_axis}') pix_bin = self.header.get(f'CRPIX{chunk.observable_axis}') if ctype is not None and cunit is not None and pix_bin is not None: chunk.observable = ObservableAxis( Slice(self._get_axis(0, ctype, cunit), pix_bin)) + self.logger.debug('End _finish_chunk_observable') def _finish_chunk_position(self, chunk): pass @@ -4643,12 +4639,14 @@ def _finish_chunk_time(self, chunk): time.trefpos = TREFPOS time.mjdref = MJDREF | MJDDATE """ + self.logger.debug('Begin _finish_chunk_time') chunk.time.exposure = _to_float(self.header.get('EXPTIME')) chunk.time.resolution = _to_float(self.header.get('TIMEDEL')) chunk.time.timesys = str(self.header.get('TIMESYS', 'UTC')) chunk.time.trefpos = self.header.get('TREFPOS', None) chunk.time.mjdref = self.header.get('MJDREF', self.header.get('MJDDATE')) + self.logger.debug('End _finish_chunk_time') def _get_axis_length(self, for_axis): # try ZNAXIS first in order to get the size of the original @@ -4674,29 +4672,7 @@ def __init__(self, blueprint, extension): """ :param blueprint: ObsBlueprint """ - super().__init__() - self._wcs = None - # self._axes = { - # 'ra': [0, False], - # 'dec': [0, False], - # 'time': [0, False], - # 'energy': [0, False], - # 'polarization': [0, False], - # 'observable': [0, False], - # 'custom': [0, False], - # } - self._blueprint = blueprint - # int - index into blueprint._plan extensions - self._extension = extension - self._set_wcs() - - # @property - # def wcs(self): - # return self._wcs.wcs - - # @wcs.setter - # def wcs(self, value): - # self._wcs = value + super().__init__(blueprint, extension) def _get_axis_index(self, keywords): result = self._axes['custom'][0] @@ -4714,248 +4690,6 @@ def _get_axis_index(self, keywords): result = self._axes['observable'][0] return result - # def _get_axis_length(self, for_axis): - # if self._wcs.array_shape is None: - # return 0 - # else: - # if len(self._wcs.array_shape) == 1: - # result = 
self._wcs.array_shape[0] - # else: - # result = self._wcs.array_shape[for_axis-1] - # if isinstance(result, tuple): - # # the blueprint is incompletely configured - # raise ValueError(f'Could not find axis length for axis {for_axis}') - # return _to_int(result) - - # def assign_sanitize(self, assignee, index, key, sanitize=True): - # """ - # Do not want to blindly assign None to astropy.wcs attributes, so - # use this method for conditional assignment. - - # The current implementation is that ff there is a legitimate need to - # assign None to a value, either use 'set' in the Hdf5ObsBlueprint, and - # specifically assign None, or execute a function to set it to None - # conditionally. There will be no support for a Default value of None - # with HDF5 files. - # """ - # x = self._blueprint._get(key, self._extension) - # if sanitize: - # x = self._sanitize(x) - # if x is not None and not ObsBlueprint.needs_lookup(x): - # assignee[index] = x - - # def _assign_cd(self, key, cd, count): - # x = self._blueprint._get(key, self._extension) - # if x is not None: - # if ObsBlueprint.needs_lookup(x): - # cd[count][count] = 1.0 - # else: - # cd[count][count] = x - - # def _set_wcs(self): - # self._wcs = WCS(naxis=self._blueprint.get_configed_axes_count()) - # array_shape = [0] * self._blueprint.get_configed_axes_count() - # crder = [0] * self._blueprint.get_configed_axes_count() - # crpix = [0] * self._blueprint.get_configed_axes_count() - # crval = [0] * self._blueprint.get_configed_axes_count() - # csyer = [0] * self._blueprint.get_configed_axes_count() - # ctype = [0] * self._blueprint.get_configed_axes_count() - # cunit = [0] * self._blueprint.get_configed_axes_count() - # temp = [0] * self._blueprint.get_configed_axes_count() - # cd = [temp.copy() for _ in range(self._blueprint.get_configed_axes_count())] - # count = 0 - # if self._blueprint._pos_axes_configed: - # self._axes['ra'][1] = True - # self._axes['dec'][1] = True - # self._axes['ra'][0] = count - # self._axes['dec'][0] = count + 1 - # self.assign_sanitize(ctype, count, 'Chunk.position.axis.axis1.ctype') - # self.assign_sanitize(ctype, count + 1, 'Chunk.position.axis.axis2.ctype') - # self.assign_sanitize(cunit, count, 'Chunk.position.axis.axis1.cunit') - # self.assign_sanitize(cunit, count + 1, 'Chunk.position.axis.axis2.cunit') - # self.assign_sanitize(array_shape, count, 'Chunk.position.axis.function.dimension.naxis1') - # self.assign_sanitize(array_shape, count + 1, 'Chunk.position.axis.function.dimension.naxis2') - # self.assign_sanitize(crpix, count, 'Chunk.position.axis.function.refCoord.coord1.pix') - # self.assign_sanitize(crpix, count + 1, 'Chunk.position.axis.function.refCoord.coord2.pix') - # self.assign_sanitize(crval, count, 'Chunk.position.axis.function.refCoord.coord1.val') - # self.assign_sanitize(crval, count + 1, 'Chunk.position.axis.function.refCoord.coord2.val') - # x = self._blueprint._get('Chunk.position.axis.function.cd11', - # self._extension) - # if x is not None and not ObsBlueprint.needs_lookup(x): - # cd[count][0] = x - # x = self._blueprint._get('Chunk.position.axis.function.cd12', - # self._extension) - # if x is not None and not ObsBlueprint.needs_lookup(x): - # cd[count][1] = x - # x = self._blueprint._get('Chunk.position.axis.function.cd21', - # self._extension) - # if x is not None and not ObsBlueprint.needs_lookup(x): - # cd[count + 1][0] = x - # x = self._blueprint._get('Chunk.position.axis.function.cd22', - # self._extension) - # if x is not None and not ObsBlueprint.needs_lookup(x): - # cd[count + 
1][1] = x - # self.assign_sanitize(crder, count, 'Chunk.position.axis.error1.rnder') - # self.assign_sanitize(crder, count + 1, 'Chunk.position.axis.error2.rnder') - # self.assign_sanitize(csyer, count, 'Chunk.position.axis.error1.syser') - # self.assign_sanitize(csyer, count + 1, 'Chunk.position.axis.error2.syser') - # count += 2 - # if self._blueprint._time_axis_configed: - # self._axes['time'][1] = True - # self._axes['time'][0] = count - # self.assign_sanitize(ctype, count, 'Chunk.time.axis.axis.ctype', False) - # self.assign_sanitize(cunit, count, 'Chunk.time.axis.axis.cunit', False) - # self.assign_sanitize(array_shape, count, 'Chunk.time.axis.function.naxis', False) - # self.assign_sanitize(crpix, count, 'Chunk.time.axis.function.refCoord.pix', False) - # self.assign_sanitize(crval, count, 'Chunk.time.axis.function.refCoord.val', False) - # self.assign_sanitize(crder, count, 'Chunk.time.axis.error.rnder') - # self.assign_sanitize(csyer, count, 'Chunk.time.axis.error.syser') - # self._assign_cd('Chunk.time.axis.function.delta', cd, count) - # count += 1 - # if self._blueprint._energy_axis_configed: - # self._axes['energy'][1] = True - # self._axes['energy'][0] = count - # self.assign_sanitize(ctype, count, 'Chunk.energy.axis.axis.ctype', False) - # self.assign_sanitize(cunit, count, 'Chunk.energy.axis.axis.cunit', False) - # self.assign_sanitize(array_shape, count, 'Chunk.energy.axis.function.naxis', False) - # self.assign_sanitize(crpix, count, 'Chunk.energy.axis.function.refCoord.pix', False) - # self.assign_sanitize(crval, count, 'Chunk.energy.axis.function.refCoord.val', False) - # self.assign_sanitize(crder, count, 'Chunk.energy.axis.error.rnder') - # self.assign_sanitize(csyer, count, 'Chunk.energy.axis.error.syser') - # self._assign_cd('Chunk.energy.axis.function.delta', cd, count) - # count += 1 - # if self._blueprint._polarization_axis_configed: - # self._axes['polarization'][1] = True - # self._axes['polarization'][0] = count - # self.assign_sanitize(ctype, count, 'Chunk.polarization.axis.axis.ctype', False) - # self.assign_sanitize(cunit, count, 'Chunk.polarization.axis.axis.cunit', False) - # self.assign_sanitize(array_shape, count, 'Chunk.polarization.axis.function.naxis', False) - # self.assign_sanitize(crpix, count, 'Chunk.polarization.axis.function.refCoord.pix', False) - # self.assign_sanitize(crval, count, 'Chunk.polarization.axis.function.refCoord.val', False) - # self._assign_cd('Chunk.polarization.axis.function.delta', cd, count) - # count += 1 - # if self._blueprint._obs_axis_configed: - # self._axes['observable'][1] = True - # self._axes['observable'][0] = count - # self.assign_sanitize(ctype, count, 'Chunk.observable.axis.axis.ctype', False) - # self.assign_sanitize(cunit, count, 'Chunk.observable.axis.axis.cunit', False) - # array_shape[count] = 1.0 - # self.assign_sanitize(crpix, count, 'Chunk.observable.axis.function.refCoord.pix', False) - # crval[count] = 0.0 - # cd[count][count] = 1.0 - # count += 1 - # if self._blueprint._custom_axis_configed: - # self._axes['custom'][1] = True - # self._axes['custom'][0] = count - # self.assign_sanitize(ctype, count, 'Chunk.custom.axis.axis.ctype', False) - # self.assign_sanitize(cunit, count, 'Chunk.custom.axis.axis.cunit', False) - # self.assign_sanitize(array_shape, count, 'Chunk.custom.axis.function.naxis', False) - # self.assign_sanitize(crpix, count, 'Chunk.custom.axis.function.refCoord.pix', False) - # self.assign_sanitize(crval, count, 'Chunk.custom.axis.function.refCoord.val', False) - # 
self._assign_cd('Chunk.custom.axis.function.delta', cd, count) - # count += 1 - - # if not all(val == 0 for val in array_shape): - # self._wcs.array_shape = array_shape - # if not all(val == 0 for val in cunit): - # self._wcs.wcs.cunit = cunit - # if not all(val == 0 for val in ctype): - # self._wcs.wcs.ctype = ctype - # if not all(val == 0 for val in crpix): - # self._wcs.wcs.crpix = crpix - # if not all(val == 0 for val in crval): - # self._wcs.wcs.crval = crval - # if not all(val == 0 for val in crder): - # self._wcs.wcs.crder = crder - # if not all(val == 0 for val in csyer): - # self._wcs.wcs.csyer = csyer - # self._wcs.wcs.cd = cd - # self._finish_position() - # self._finish_time() - # self._finish_energy() - - # def _finish_chunk_observable(self, chunk): - # ctype = self._wcs.wcs.ctype[chunk.observable_axis-1] - # cunit = self._wcs.wcs.ctype[chunk.observable_axis-1] - # pix_bin = _to_int(self._wcs.wcs.crpix[chunk.observable_axis-1]) - # if ctype is not None and cunit is not None and pix_bin is not None: - # chunk.observable = ObservableAxis( - # Slice(self._get_axis(0, ctype, cunit), pix_bin)) - - # def _finish_chunk_position(self, chunk): - # if chunk.position.resolution is None: - # try: - # # JJK 30-01-23 - # # In a spatial data chunk the resolution is 2 times the pixel size. We can get the pixel size from - # # the wcs - # temp = utils.proj_plane_pixel_scales(self._wcs) - # chunk.position.resolution = temp[0] - # except SingularMatrixError as e: - # # cannot calculate position.resolution, ignore and continue on - # self.logger.warning(f'Not calculating resolution due to {e}') - - # def _finish_chunk_time(self, chunk): - # if not math.isnan(self._wcs.wcs.xposure): - # chunk.time.exposure = self._wcs.wcs.xposure - # if self._wcs.wcs.timesys is not None and self._wcs.wcs.timesys != '': - # chunk.time.timesys = self._wcs.wcs.timesys - # if self._wcs.wcs.trefpos is not None and self._wcs.wcs.trefpos != '': - # chunk.time.trefpos = self._wcs.wcs.trefpos - # if self._wcs.wcs.mjdref is not None and self._wcs.wcs.mjdref[0] != '' and self._wcs.wcs.mjdref[0] != 0.0: - # # the astropy value is an array of length 2, use the first value - # chunk.time.mjdref = self._wcs.wcs.mjdref[0] - - # def _finish_energy(self): - # if self._blueprint._energy_axis_configed: - # x = self._blueprint._get('Chunk.energy.specsys', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.specsys = x - # x = self._blueprint._get('Chunk.energy.ssysobs', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.ssysobs = x - # x = self._blueprint._get('Chunk.energy.restfrq', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.restfrq = _to_float(x) - # x = self._blueprint._get('Chunk.energy.restwav', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.restwav = x - # x = self._blueprint._get('Chunk.energy.velosys', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.velosys = x - # x = self._blueprint._get('Chunk.energy.zsource', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.zsource = x - # x = self._blueprint._get('Chunk.energy.ssyssrc', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.ssyssrc = x - # x = self._blueprint._get('Chunk.energy.velang', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.velangl = x - - # def _finish_position(self): - # if 
self._blueprint._pos_axes_configed: - # x = self._blueprint._get('Chunk.position.coordsys', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.radesys = x - # x = self._blueprint._get('Chunk.position.equinox', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.equinox = _to_float(x) - - # def _finish_time(self): - # if self._blueprint._time_axis_configed: - # x = self._blueprint._get('Chunk.time.exposure', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.xposure = x - # x = self._blueprint._get('Chunk.time.timesys', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.timesys = x - # x = self._blueprint._get('Chunk.time.trefpos', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.trefpos = x - # x = self._blueprint._get('Chunk.time.mjdref', self._extension) - # if x and not ObsBlueprint.needs_lookup(x): - # self._wcs.wcs.mjdref = [x, x] - def _to_str(value): return str(value).strip() if value is not None else None @@ -5240,9 +4974,12 @@ def _augment(obs, product_id, uri, blueprint, subject, dumpconfig=False, if ('.header' in local or data_util.get_file_type(local) == 'application/fits'): - logging.debug( - f'Using a FitsParser for local file {local}') - parser = FitsParser(local, blueprint, uri=uri) + if uri.startswith('cadc'): + logging.debug(f'Using a FitsParser for local file {local}') + parser = FitsParser(local, blueprint, uri=uri) + else: + logging.debug(f'Using a ContentParser for local file {local}') + parser = ContentParser(blueprint, uri) elif '.h5' in local: logging.debug( f'Using an Hdf5Parser for local file {local}') diff --git a/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml index 0ceef5e9..1af10111 100644 --- a/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml +++ b/caom2utils/caom2utils/tests/data/brite/HD36486/HD36486.xml @@ -35,7 +35,7 @@ - ad:BRITE-Constellation/HD36486.orig + cadc:BRITE-Constellation/HD36486.orig science data text/plain diff --git a/caom2utils/caom2utils/tests/data/cfht/1709071og/1709071og.xml b/caom2utils/caom2utils/tests/data/cfht/1709071og/1709071og.xml index d4682e76..67569117 100644 --- a/caom2utils/caom2utils/tests/data/cfht/1709071og/1709071og.xml +++ b/caom2utils/caom2utils/tests/data/cfht/1709071og/1709071og.xml @@ -47,7 +47,7 @@ - ad:CFHT/1709071g.fits.gz + cadc:CFHT/1709071g.fits.gz auxiliary data application/fits @@ -571,7 +571,7 @@ - ad:CFHT/1709071o.fits.fz + cadc:CFHT/1709071o.fits.fz science data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfht/1709071og/cfhtwircam.override b/caom2utils/caom2utils/tests/data/cfht/1709071og/cfhtwircam.override index aed9071f..8be195d7 100644 --- a/caom2utils/caom2utils/tests/data/cfht/1709071og/cfhtwircam.override +++ b/caom2utils/caom2utils/tests/data/cfht/1709071og/cfhtwircam.override @@ -19,7 +19,7 @@ resolvingPower = 5.64 Chunk = {ignore} -?ad:CFHT/1709071o.fits.fz +?cadc:CFHT/1709071o.fits.fz artifact.productType = science artifact.contentChecksum = md5:88bfd03471053a916067a4e6f80d332d CRPIX3 = 0.500000000000 @@ -30,7 +30,7 @@ time.exposure= 15.000000000000 NAXIS3 = 3 -?ad:CFHT/1709071g.fits.gz +?cadc:CFHT/1709071g.fits.gz artifact.productType = auxiliary artifact.contentChecksum = md5:47cdd15371f82893ed384dec96240ae2 CD1_1 = -0.000083333333 @@ -43,36 +43,36 @@ CDELT3 = 0.000000231481 time.resolution= 0.020000000000 time.exposure= 0.020000000000 
NAXIS3 = 1964 -?ad:CFHT/1709071g.fits.gz#[0] +?cadc:CFHT/1709071g.fits.gz#[0] CRPIX1 = 7.00000000 CRPIX2 = 7.00000000 CRVAL1 = 210.551666667 CRVAL2 = 54.526222222 -?ad:CFHT/1709071g.fits.gz#[1] +?cadc:CFHT/1709071g.fits.gz#[1] CRPIX1 = 7.00000000 CRPIX2 = 7.00000000 CRVAL1 = 210.551666667 CRVAL2 = 54.526222222 -?ad:CFHT/1709071g.fits.gz#[2] +?cadc:CFHT/1709071g.fits.gz#[2] CRPIX1 = 7.00000000 CRPIX2 = 7.00000000 CRVAL1 = 210.508333333 CRVAL2 = 54.345555556 -?ad:CFHT/1709071g.fits.gz#[3] +?cadc:CFHT/1709071g.fits.gz#[3] CRPIX1 = 7.00000000 CRPIX2 = 7.00000000 CRVAL1 = 210.898333333 CRVAL2 = 54.341916667 -?ad:CFHT/1709071g.fits.gz#[4] +?cadc:CFHT/1709071g.fits.gz#[4] CRPIX1 = 7.00000000 CRPIX2 = 7.00000000 CRVAL1 = 210.942083333 CRVAL2 = 54.446805556 -?ad:CFHT/1709071g.fits.gz#[5] +?cadc:CFHT/1709071g.fits.gz#[5] CRPIX1 = 7.00000000 CRPIX2 = 7.00000000 CRVAL1 = 0.000000000 CRVAL2 = 0.000000000 -?ad:CFHT/1709071g.fits.gz#[6] +?cadc:CFHT/1709071g.fits.gz#[6] BITPIX = 0 diff --git a/caom2utils/caom2utils/tests/data/cfht/1709071p/1709071p.xml b/caom2utils/caom2utils/tests/data/cfht/1709071p/1709071p.xml index 7a4de40b..6da01859 100644 --- a/caom2utils/caom2utils/tests/data/cfht/1709071p/1709071p.xml +++ b/caom2utils/caom2utils/tests/data/cfht/1709071p/1709071p.xml @@ -55,7 +55,7 @@ - ad:CFHT/1709071p.fits.fz + cadc:CFHT/1709071p.fits.fz science data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfht/1916216i/1916216i.xml b/caom2utils/caom2utils/tests/data/cfht/1916216i/1916216i.xml index 7f60ad1b..58c82504 100644 --- a/caom2utils/caom2utils/tests/data/cfht/1916216i/1916216i.xml +++ b/caom2utils/caom2utils/tests/data/cfht/1916216i/1916216i.xml @@ -59,7 +59,7 @@ - ad:CFHT/1916216i.fits.gz + cadc:CFHT/1916216i.fits.gz science data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfht/1916216o/1916216o.xml b/caom2utils/caom2utils/tests/data/cfht/1916216o/1916216o.xml index ea89c57b..9c0f31c4 100644 --- a/caom2utils/caom2utils/tests/data/cfht/1916216o/1916216o.xml +++ b/caom2utils/caom2utils/tests/data/cfht/1916216o/1916216o.xml @@ -45,7 +45,7 @@ 1 - ad:CFHT/1916216o.fits.gz + cadc:CFHT/1916216o.fits.gz science data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfht/1916216p/1916216p.xml b/caom2utils/caom2utils/tests/data/cfht/1916216p/1916216p.xml index 1cd1efce..d57864e9 100644 --- a/caom2utils/caom2utils/tests/data/cfht/1916216p/1916216p.xml +++ b/caom2utils/caom2utils/tests/data/cfht/1916216p/1916216p.xml @@ -62,7 +62,7 @@ - ad:CFHT/1916216p.fits.gz + cadc:CFHT/1916216p.fits.gz science data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfht/2087482o/2087482o.xml b/caom2utils/caom2utils/tests/data/cfht/2087482o/2087482o.xml index 8e1dd8d8..ce757635 100644 --- a/caom2utils/caom2utils/tests/data/cfht/2087482o/2087482o.xml +++ b/caom2utils/caom2utils/tests/data/cfht/2087482o/2087482o.xml @@ -49,7 +49,7 @@ - ad:CFHT/2087482o.fits.fz + cadc:CFHT/2087482o.fits.fz science data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfht/2087482p/2087482p.xml b/caom2utils/caom2utils/tests/data/cfht/2087482p/2087482p.xml index 98a3bde2..bc8e3595 100644 --- a/caom2utils/caom2utils/tests/data/cfht/2087482p/2087482p.xml +++ b/caom2utils/caom2utils/tests/data/cfht/2087482p/2087482p.xml @@ -54,7 +54,7 @@ - ad:CFHT/2087482p.fits.fz + cadc:CFHT/2087482p.fits.fz science data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfht/2136164o/2136164o.xml b/caom2utils/caom2utils/tests/data/cfht/2136164o/2136164o.xml index 
a482706d..d56d18c0 100644 --- a/caom2utils/caom2utils/tests/data/cfht/2136164o/2136164o.xml +++ b/caom2utils/caom2utils/tests/data/cfht/2136164o/2136164o.xml @@ -50,7 +50,7 @@ - ad:CFHT/2136164o.fits.fz + cadc:CFHT/2136164o.fits.fz science data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfht/2136164p/2136164p.xml b/caom2utils/caom2utils/tests/data/cfht/2136164p/2136164p.xml index 0d024fa1..10df67e9 100644 --- a/caom2utils/caom2utils/tests/data/cfht/2136164p/2136164p.xml +++ b/caom2utils/caom2utils/tests/data/cfht/2136164p/2136164p.xml @@ -475,7 +475,7 @@ - ad:CFHT/2136164p.fits + cadc:CFHT/2136164p.fits science data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfht/2216850f/2216850f.xml b/caom2utils/caom2utils/tests/data/cfht/2216850f/2216850f.xml index 647732fa..cc3e7a21 100644 --- a/caom2utils/caom2utils/tests/data/cfht/2216850f/2216850f.xml +++ b/caom2utils/caom2utils/tests/data/cfht/2216850f/2216850f.xml @@ -43,7 +43,7 @@ 1 - ad:CFHT/2216850f.fits.gz + cadc:CFHT/2216850f.fits.gz calibration data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfht/2216860b/2216860b.xml b/caom2utils/caom2utils/tests/data/cfht/2216860b/2216860b.xml index 37604fc4..043be4b9 100644 --- a/caom2utils/caom2utils/tests/data/cfht/2216860b/2216860b.xml +++ b/caom2utils/caom2utils/tests/data/cfht/2216860b/2216860b.xml @@ -43,7 +43,7 @@ 1 - ad:CFHT/2216860b.fits.gz + cadc:CFHT/2216860b.fits.gz calibration data application/fits diff --git a/caom2utils/caom2utils/tests/data/cfhtsg/mp9801/y.xml b/caom2utils/caom2utils/tests/data/cfhtsg/mp9801/y.xml index b9a57bea..3f2808ee 100644 --- a/caom2utils/caom2utils/tests/data/cfhtsg/mp9801/y.xml +++ b/caom2utils/caom2utils/tests/data/cfhtsg/mp9801/y.xml @@ -44,7 +44,7 @@ - ad:CFHTSG/MegaPipe.080.156.Z.MP9801.fits.gif + cadc:CFHTSG/MegaPipe.080.156.Z.MP9801.fits.gif preview data image/gif @@ -52,7 +52,7 @@ md5:3882865ea398f4197928c9f3f1fba65b - ad:CFHTSG/MegaPipe.080.156.Z.MP9801.weight.fits + cadc:CFHTSG/MegaPipe.080.156.Z.MP9801.weight.fits weight data application/fits @@ -139,7 +139,7 @@ - ad:CFHTSG/MegaPipe.080.156.Z.MP9801.fits + cadc:CFHTSG/MegaPipe.080.156.Z.MP9801.fits science data application/fits diff --git a/caom2utils/caom2utils/tests/data/cgps/myOBS/cgps.xml b/caom2utils/caom2utils/tests/data/cgps/myOBS/cgps.xml index 6f5ece3e..42ab766b 100644 --- a/caom2utils/caom2utils/tests/data/cgps/myOBS/cgps.xml +++ b/caom2utils/caom2utils/tests/data/cgps/myOBS/cgps.xml @@ -21,7 +21,7 @@ 2 - ad:CGPS/CGPS_MA1_HI_line_image.fits + cadc:CGPS/CGPS_MA1_HI_line_image.fits science data application/fits diff --git a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007777/dao_c122_2016_007777.override b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007777/dao_c122_2016_007777.override index 40080b6f..d9107561 100644 --- a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007777/dao_c122_2016_007777.override +++ b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007777/dao_c122_2016_007777.override @@ -75,12 +75,12 @@ CRPIX5 = 0.500000 CRVAL5 = 57598.4482755 CDELT5 = 0.041667 -?ad:DAO/dao_c122_2016_007777.fits.gz +?cadc:DAO/dao_c122_2016_007777.fits.gz artifact.productType = science -?ad:DAO/dao_c122_2016_007777_256.png +?cadc:DAO/dao_c122_2016_007777_256.png artifact.productType = thumbnail -?ad:DAO/dao_c122_2016_007777_1024.png +?cadc:DAO/dao_c122_2016_007777_1024.png artifact.productType = preview diff --git a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007777/dao_c122_2016_007777.xml 
b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007777/dao_c122_2016_007777.xml index a1759979..8575d61a 100644 --- a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007777/dao_c122_2016_007777.xml +++ b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007777/dao_c122_2016_007777.xml @@ -46,7 +46,7 @@ - ad:DAO/dao_c122_2016_007777.fits.gz + cadc:DAO/dao_c122_2016_007777.fits.gz science data application/fits @@ -151,7 +151,7 @@ - ad:DAO/dao_c122_2016_007777_1024.png + cadc:DAO/dao_c122_2016_007777_1024.png preview data image/png @@ -160,7 +160,7 @@ - ad:DAO/dao_c122_2016_007777_256.png + cadc:DAO/dao_c122_2016_007777_256.png thumbnail data image/png diff --git a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007830/dao_c122_2016_007830.override b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007830/dao_c122_2016_007830.override index 7657a13d..beb9b4c8 100644 --- a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007830/dao_c122_2016_007830.override +++ b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007830/dao_c122_2016_007830.override @@ -75,12 +75,12 @@ CRPIX5 = 0.500000 CRVAL5 = 57598.7862269 CDELT5 = 0.000035 -?ad:DAO/dao_c122_2016_007830.fits.gz +?cadc:DAO/dao_c122_2016_007830.fits.gz artifact.productType = calibration -?ad:DAO/dao_c122_2016_007830_256.png +?cadc:DAO/dao_c122_2016_007830_256.png artifact.productType = thumbnail -?ad:DAO/dao_c122_2016_007830_1024.png +?cadc:DAO/dao_c122_2016_007830_1024.png artifact.productType = preview diff --git a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007830/dao_c122_2016_007830.xml b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007830/dao_c122_2016_007830.xml index a48babf3..59224c81 100644 --- a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007830/dao_c122_2016_007830.xml +++ b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007830/dao_c122_2016_007830.xml @@ -46,7 +46,7 @@ - ad:DAO/dao_c122_2016_007830.fits.gz + cadc:DAO/dao_c122_2016_007830.fits.gz calibration data application/fits @@ -115,7 +115,7 @@ - ad:DAO/dao_c122_2016_007830_1024.png + cadc:DAO/dao_c122_2016_007830_1024.png preview data image/png @@ -124,7 +124,7 @@ - ad:DAO/dao_c122_2016_007830_256.png + cadc:DAO/dao_c122_2016_007830_256.png thumbnail data image/png diff --git a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007939/dao_c122_2016_007939.override b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007939/dao_c122_2016_007939.override index 2bc22b4b..088bd6d8 100644 --- a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007939/dao_c122_2016_007939.override +++ b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007939/dao_c122_2016_007939.override @@ -31,7 +31,7 @@ CRVAL2 = 1 CRPIX2 = 1 CTYPE2 = FLUX CUNIT2 = COUNTS -CDELT2 = +CDELT2 = telescope.name = DAO 1.2-m telescope.geoLocationX = -2331226.7883358444 @@ -56,13 +56,13 @@ artifact.releaseType = data proposal.pi = Koubsky proposal.id = DAO122_2016C1 -provenance.lastExecuted = +provenance.lastExecuted = provenance.project = DAO Science Archive provenance.producer = NRC Herzberg provenance.reference = http://www.cadc.hia.nrc.gc.ca/dao/ -provenance.version = +provenance.version = provenance.name = DAO unprocessed data -provenance.inputs = +provenance.inputs = environment.photometric = false target.moving = false @@ -75,12 +75,12 @@ CRPIX5 = 0.500000 CRVAL5 = 57600.5146644 CDELT5 = 0.000058 -?ad:DAO/dao_c122_2016_007939.fits.gz +?cadc:DAO/dao_c122_2016_007939.fits.gz artifact.productType = calibration -?ad:DAO/dao_c122_2016_007939_256.png 
+?cadc:DAO/dao_c122_2016_007939_256.png artifact.productType = thumbnail -?ad:DAO/dao_c122_2016_007939_1024.png +?cadc:DAO/dao_c122_2016_007939_1024.png artifact.productType = preview diff --git a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007939/dao_c122_2016_007939.xml b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007939/dao_c122_2016_007939.xml index 7544e18a..6c0d4974 100644 --- a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007939/dao_c122_2016_007939.xml +++ b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007939/dao_c122_2016_007939.xml @@ -45,7 +45,7 @@ - ad:DAO/dao_c122_2016_007939.fits.gz + cadc:DAO/dao_c122_2016_007939.fits.gz calibration data application/fits @@ -114,7 +114,7 @@ - ad:DAO/dao_c122_2016_007939_1024.png + cadc:DAO/dao_c122_2016_007939_1024.png preview data image/png @@ -123,7 +123,7 @@ - ad:DAO/dao_c122_2016_007939_256.png + cadc:DAO/dao_c122_2016_007939_256.png thumbnail data image/png diff --git a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007942/dao_c122_2016_007942.override b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007942/dao_c122_2016_007942.override index 416f7cf4..5dfb8a2b 100644 --- a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007942/dao_c122_2016_007942.override +++ b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007942/dao_c122_2016_007942.override @@ -75,12 +75,12 @@ CRPIX5 = 0.500000 CRVAL5 = 57600.536713 CDELT5 = 0.000000 -?ad:DAO/dao_c122_2016_007942.fits.gz +?cadc:DAO/dao_c122_2016_007942.fits.gz artifact.productType = calibration -?ad:DAO/dao_c122_2016_007942_256.png +?cadc:DAO/dao_c122_2016_007942_256.png artifact.productType = thumbnail -?ad:DAO/dao_c122_2016_007942_1024.png +?cadc:DAO/dao_c122_2016_007942_1024.png artifact.productType = preview diff --git a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007942/dao_c122_2016_007942.xml b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007942/dao_c122_2016_007942.xml index dafb284c..def56c0c 100644 --- a/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007942/dao_c122_2016_007942.xml +++ b/caom2utils/caom2utils/tests/data/dao/dao_c122_2016_007942/dao_c122_2016_007942.xml @@ -45,7 +45,7 @@ - ad:DAO/dao_c122_2016_007942.fits.gz + cadc:DAO/dao_c122_2016_007942.fits.gz calibration data application/fits @@ -93,7 +93,7 @@ - ad:DAO/dao_c122_2016_007942_1024.png + cadc:DAO/dao_c122_2016_007942_1024.png preview data image/png @@ -102,7 +102,7 @@ - ad:DAO/dao_c122_2016_007942_256.png + cadc:DAO/dao_c122_2016_007942_256.png thumbnail data image/png diff --git a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.expected.xml b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.expected.xml index d952d774..0876bc38 100644 --- a/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.expected.xml +++ b/caom2utils/caom2utils/tests/data/gemini/N20030325S0098/N20030325S0098.expected.xml @@ -35,7 +35,7 @@ - ad:GEMINI/N20030325S0098.fits + cadc:GEMINI/N20030325S0098.fits science data application/fits diff --git a/caom2utils/caom2utils/tests/data/lotss/P124+62/P124+62.fits.header b/caom2utils/caom2utils/tests/data/lotss/P124+62/P124+62.fits.header new file mode 100644 index 00000000..f452b6a3 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/lotss/P124+62/P124+62.fits.header @@ -0,0 +1,32 @@ +visit +# HDU 0 in P124+62: +SIMPLE = T / conforms to FITS standard +BITPIX = -64 / array data type +NAXIS = 2 / number of array dimensions +NAXIS1 = 9264 +NAXIS2 = 9264 +WCSAXES = 2 +CTYPE1 = 'RA---SIN' 
+CTYPE2 = 'DEC--SIN' +CUNIT1 = 'deg ' +CUNIT2 = 'deg ' +CRPIX1 = 4632 +CRPIX2 = 4632 +CRVAL1 = 124.678 +CRVAL2 = 61.9895 +CDELT1 = -0.00041666666666666 +CDELT2 = 0.000416666666666666 +RADESYS = 'ICRS ' +EQUINOX = 2000.0 +LONPOLE = 180.0 +LATPOLE = 61.9895 +BMAJ = 0.001666666666666667 +BMIN = 0.001666666666666667 +BPA = 90 +TELESCOP= 'LOFAR ' +RESTFRQ = 143650000.0 +OBSERVER= 'LoTSS ' +BUNIT = 'JY/BEAM ' +BTYPE = 'Intensity' +OBJECT = 'P124+62 ' +ORIGIN = 'ddf-pipeline v2.2-201-g2e77149' diff --git a/caom2utils/caom2utils/tests/data/lotss/P124+62/P124+62.xml b/caom2utils/caom2utils/tests/data/lotss/P124+62/P124+62.xml new file mode 100644 index 00000000..72fc83a5 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/lotss/P124+62/P124+62.xml @@ -0,0 +1,138 @@ + + LOTSS + P124+62_dr2 + 2023-01-01T00:00:00.000 + + mosaic + + OBJECT + science + + LoTSS + T.W. Shimwell + LOFAR Two-metre Sky Survey + + surveys + techniques: image processing + catalogs + radio continuum: general + + + + P124+62 + field + + + LOFAR + 3826577.461999999 + 461022.6239999999 + 5064892.525999999 + + + LOFAR.HBA + + + + P124+62_mosaic + 2023-01-01T00:00:00.000 + 2023-01-01T00:00:00.000 + image + 4 + + + astron:LOTSS/P124+62/mosaic.fits + science + data + application/fits + 686577600 + md5:abc + + + 0 + + + 4 + 1 + 2 + 3 + + + + RA---SIN + deg + + + DEC--SIN + deg + + + + 9264 + 9264 + + + + 4632.0 + 124.678 + + + 4632.0 + 61.9895 + + + -0.0004166669968981296 + 0.0 + 0.0 + 0.0004166669968981296 + + + ICRS + 2000.0 + + + + + WAVE + m + + + + 0.5 + 2.49827 + + + 1.5 + 1.78448 + + + + TOPOCENT + 120-168MHz + + + + + TIME + d + + + 1 + 0.6666666666666666 + + 0.5 + 58350.241377315 + + + + UTC + 8.0 + + + + + + + + + + diff --git a/caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.fits.blueprint b/caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.fits.blueprint new file mode 100644 index 00000000..cc3d9fea --- /dev/null +++ b/caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.fits.blueprint @@ -0,0 +1,61 @@ +DerivedObservation.members = [] +Observation.observationID = P124+62_dr2 +Observation.type = OBJECT +Observation.metaRelease = 2023-01-01T00:00:00.000 +Observation.metaProducer = None +Observation.algorithm.name = mosaic +Observation.instrument.name = LOFAR.HBA +Observation.proposal.id = LoTSS +Observation.proposal.pi = T.W. 
Shimwell +Observation.proposal.title = LOFAR Two-metre Sky Survey +Observation.proposal.keywords = _get_observation_proposal_keywords() +Observation.target.name = P124+62 +Observation.target.type = field +Observation.telescope.name = LOFAR +Observation.telescope.geoLocationX = 3826577.461999999 +Observation.telescope.geoLocationY = 461022.6239999999 +Observation.telescope.geoLocationZ = 5064892.525999999 +Plane.productID = P124+62_mosaic +Plane.metaRelease = 2023-01-01T00:00:00.000 +Plane.dataRelease = 2023-01-01T00:00:00.000 +Plane.dataProductType = image +Plane.calibrationLevel = 4 +Plane.metaProducer = None +Artifact.productType = science +Artifact.releaseType = data +Artifact.metaProducer = None +Chunk = include +Chunk.metaProducer = None +Chunk.position.coordsys = ICRS +Chunk.position.equinox = 2000.0 +Chunk.position.axis.axis1.ctype = RA---SIN +Chunk.position.axis.axis1.cunit = deg +Chunk.position.axis.axis2.ctype = DEC--SIN +Chunk.position.axis.axis2.cunit = deg +Chunk.position.axis.function.cd11 = -0.0004166669968981296 +Chunk.position.axis.function.cd12 = 0.0 +Chunk.position.axis.function.cd21 = 0.0 +Chunk.position.axis.function.cd22 = 0.0004166669968981296 +Chunk.position.axis.function.dimension.naxis1 = 9264 +Chunk.position.axis.function.dimension.naxis2 = 9264 +Chunk.position.axis.function.refCoord.coord1.pix = 4632.0 +Chunk.position.axis.function.refCoord.coord1.val = 124.678 +Chunk.position.axis.function.refCoord.coord2.pix = 4632.0 +Chunk.position.axis.function.refCoord.coord2.val = 61.9895 +Chunk.energy.specsys = TOPOCENT +Chunk.energy.bandpassName = 120-168MHz +Chunk.energy.axis.axis.ctype = WAVE +Chunk.energy.axis.axis.cunit = m +Chunk.energy.axis.range.start.pix = 0.5 +Chunk.energy.axis.range.start.val = 2.49827 +Chunk.energy.axis.range.end.pix = 1.5 +Chunk.energy.axis.range.end.val = 1.78448 +Chunk.timeAxis = None +Chunk.time.resolution = 8 +Chunk.time.timesys = UTC +Chunk.time.axis.axis.ctype = TIME +Chunk.time.axis.axis.cunit = d +Chunk.time.axis.function.naxis = 1 +Chunk.time.axis.function.delta = 0.6666666666666666 +Chunk.time.axis.function.refCoord.pix = 0.5 +Chunk.time.axis.function.refCoord.val = 58350.241377315 diff --git a/caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.module b/caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.module new file mode 100644 index 00000000..0dbed045 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.module @@ -0,0 +1,7 @@ +def _get_observation_proposal_keywords(base): + temp = set() + temp.add('surveys') + temp.add('catalogs') + temp.add('radio continuum: general') + temp.add('techniques: image processing') + return temp diff --git a/caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.py b/caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.py new file mode 100644 index 00000000..0dbed045 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/lotss/P124+62/mosaic.py @@ -0,0 +1,7 @@ +def _get_observation_proposal_keywords(base): + temp = set() + temp.add('surveys') + temp.add('catalogs') + temp.add('radio continuum: general') + temp.add('techniques: image processing') + return temp diff --git a/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/y.xml b/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/y.xml index 31bb540b..1d6cf98e 100644 --- a/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/y.xml +++ b/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/y.xml @@ -26,7 +26,7 @@ 2 - ad:OMM/Cdemo_ext2_SCIRED.fits.gz + cadc:OMM/Cdemo_ext2_SCIRED.fits.gz science data 
application/fits diff --git a/caom2utils/caom2utils/tests/test_collections.py b/caom2utils/caom2utils/tests/test_collections.py index 7555c9b5..8ba21bee 100644 --- a/caom2utils/caom2utils/tests/test_collections.py +++ b/caom2utils/caom2utils/tests/test_collections.py @@ -156,7 +156,6 @@ def info_mock(uri): archive = uri.split(':')[1].split('/')[0] file_id = uri.split('/')[-1] temp = file_meta[1][(archive, file_id)] - logging.error(temp) return temp def _get_vos_headers(uri, subject=None): @@ -227,18 +226,18 @@ def _get_cardinality(directory): # alignment of product id / artifact URI works if '/cfhtsg/' in directory: return '--lineage ' \ - 'MegaPipe.080.156.Z.MP9801/ad:CFHTSG/' \ + 'MegaPipe.080.156.Z.MP9801/cadc:CFHTSG/' \ 'MegaPipe.080.156.Z.MP9801.weight.fits ' \ - 'MegaPipe.080.156.Z.MP9801/ad:CFHTSG/' \ + 'MegaPipe.080.156.Z.MP9801/cadc:CFHTSG/' \ 'MegaPipe.080.156.Z.MP9801.fits ' \ - 'MegaPipe.080.156.Z.MP9801/ad:CFHTSG/' \ + 'MegaPipe.080.156.Z.MP9801/cadc:CFHTSG/' \ 'MegaPipe.080.156.Z.MP9801.fits.gif' elif '/omm/' in directory: if 'SCIRED' in directory: - return '--lineage Cdemo_ext2_SCIRED/ad:OMM/' \ + return '--lineage Cdemo_ext2_SCIRED/cadc:OMM/' \ 'Cdemo_ext2_SCIRED.fits.gz' else: - return '--lineage C190531_0432_SCI/ad:OMM/' \ + return '--lineage C190531_0432_SCI/cadc:OMM/' \ 'C190531_0432_SCI.fits.gz' elif 'apass/catalog' in directory: return '--lineage catalog/vos://cadc.nrc.ca!vospace/CAOMworkshop/' \ @@ -249,9 +248,11 @@ def _get_cardinality(directory): else: return '--lineage star04239531/cadc:TAOSII/taos2_20220201T201317Z_star04239531.h5' elif 'brite' in directory: - return '--lineage HD36486_65-Ori-VIII-2021_BAb_1_5_A/ad:BRITE-Constellation/HD36486.orig' + return '--lineage HD36486_65-Ori-VIII-2021_BAb_1_5_A/cadc:BRITE-Constellation/HD36486.orig' elif 'gemini' in directory: - return '--lineage GN-2003A-Q-51-2-004/ad:GEMINI/N20030325S0098.fits' + return '--lineage GN-2003A-Q-51-2-004/cadc:GEMINI/N20030325S0098.fits' + elif 'lotss' in directory: + return '--lineage P124+62_mosaic/astron:LOTSS/P124+62/mosaic.fits' else: return '' @@ -319,19 +320,17 @@ def _get_uris(collection, fnames, obs): f = os.path.basename(fname).replace('.header', '') for p in obs.planes.values(): for a in p.artifacts.values(): - if (f'ad:{collection}/{f}' in a.uri or - (a.uri.startswith('vos') and f in a.uri)): + if (f'cadc:{collection}/{f}' in a.uri or + (a.uri.startswith('vos') and f in a.uri) or + (a.uri == 'astron:LOTSS/P124+62/mosaic.fits')): uris.append(a.uri) meta = FileInfo(id=a.uri, file_type=a.content_type, size=a.content_length, md5sum=a.content_checksum.checksum) file_url = urlparse(a.uri) - if file_url.scheme not in ['ad', 'vos', 'cadc']: - # TODO add hook to support other service providers - raise NotImplementedError( - 'Only ad, vos type URIs supported') - archive, file_id = file_url.path.split('/')[-2:] + file_id = file_url.path.split('/')[-1] + archive = file_url.path.split('/')[0] file_meta[(archive, file_id)] = meta return uris, file_meta else: From 261d3c86d0b617475a30ceae6cbe7d3df22a905f Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Tue, 16 Jan 2024 11:20:27 -0800 Subject: [PATCH 17/36] CADC-12858 - addresss code review comments. 
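
The diff below renames the parsers' ignore_chunks() hook to add_parts()
and inverts its return value: it now answers "was a Part added for this
extension?" instead of "should this extension be skipped?". It also lifts
the raven resource ID into a module-level GLOBAL_STORAGE_RESOURCE_ID
constant. A minimal sketch of how a caller reads the new return value;
the file path, artifact URI, and blueprint axes below are placeholders,
not part of this change:

    from caom2 import Artifact, ProductType, ReleaseType
    from caom2utils import FitsParser, ObsBlueprint

    # placeholder inputs, for illustration only
    parser = FitsParser('/tmp/example.fits', ObsBlueprint(position_axes=(1, 2)))
    artifact = Artifact('cadc:TEST/example.fits', ProductType.SCIENCE, ReleaseType.DATA)
    for index in range(len(parser.headers)):
        if parser.add_parts(artifact, index):
            # a Part was created for a data-bearing HDU, so Chunk/WCS
            # augmentation applies to this extension
            print(f'HDU {index}: Part added, Chunk augmentation will run')
        else:
            # an empty Part is still created, but Chunk augmentation is skipped
            print(f'HDU {index}: empty Part, Chunk augmentation skipped')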
--- caom2utils/caom2utils/caom2blueprint.py | 125 +++++++++++------------- 1 file changed, 59 insertions(+), 66 deletions(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index 01e06e83..0e93e10a 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -197,6 +197,7 @@ 'observable', 'FLUX'] +GLOBAL_STORAGE_RESOURCE_ID = "ivo://cadc.nrc.ca/global/raven" class Caom2Exception(Exception): """Exception raised when an attempt to create or update a CAOM2 record @@ -2277,51 +2278,47 @@ def augment_artifact(self, artifact, index): f'No WCS Data. End content artifact augmentation for ' f'{artifact.uri}.') - if self.ignore_chunks(artifact, index): - return + if self.add_parts(artifact, index): + part = artifact.parts[str(index)] + part.product_type = self._get_from_list('Part.productType', index) + part.meta_producer = self._get_from_list('Part.metaProducer', index=0, current=part.meta_producer) + + # each Part has one Chunk, if it's not an empty part as determined + # just previously + if not part.chunks: + part.chunks.append(Chunk()) + chunk = part.chunks[0] + chunk.meta_producer = self._get_from_list('Chunk.metaProducer', index=0, current=chunk.meta_producer) + + self._get_chunk_naxis(chunk, index) + + # order by which the blueprint is used to set WCS information: + # 1 - try to construct the information for an axis from WCS information + # 2 - if the WCS information is insufficient, try to construct the information from the blueprint + # 3 - Always try to fill the range metadata from the blueprint. + if self.blueprint._pos_axes_configed: + self._wcs_parser.augment_position(chunk) + self._try_position_with_blueprint(chunk, index) + + if self.blueprint._energy_axis_configed: + self._wcs_parser.augment_energy(chunk) + self._try_energy_with_blueprint(chunk, index) + + if self.blueprint._time_axis_configed: + self._wcs_parser.augment_temporal(chunk) + self._try_time_with_blueprint(chunk, index) + + if self.blueprint._polarization_axis_configed: + self._wcs_parser.augment_polarization(chunk) + self._try_polarization_with_blueprint(chunk, index) + + if self.blueprint._obs_axis_configed: + self._wcs_parser.augment_observable(chunk) + self._try_observable_with_blueprint(chunk, index) - part = artifact.parts[str(index)] - part.product_type = self._get_from_list('Part.productType', index) - part.meta_producer = self._get_from_list( - 'Part.metaProducer', index=0, current=part.meta_producer) - - # each Part has one Chunk, if it's not an empty part as determined - # just previously - if not part.chunks: - part.chunks.append(Chunk()) - chunk = part.chunks[0] - chunk.meta_producer = self._get_from_list( - 'Chunk.metaProducer', index=0, current=chunk.meta_producer) - - self._get_chunk_naxis(chunk, index) - - # order by which the blueprint is used to set WCS information: - # 1 - try to construct the information for an axis from WCS information - # 2 - if the WCS information is insufficient, try to construct the information from the blueprint - # 3 - Always try to fill the range metadata from the blueprint. 
- if self.blueprint._pos_axes_configed: - self._wcs_parser.augment_position(chunk) - self._try_position_with_blueprint(chunk, index) - - if self.blueprint._energy_axis_configed: - self._wcs_parser.augment_energy(chunk) - self._try_energy_with_blueprint(chunk, index) - - if self.blueprint._time_axis_configed: - self._wcs_parser.augment_temporal(chunk) - self._try_time_with_blueprint(chunk, index) - - if self.blueprint._polarization_axis_configed: - self._wcs_parser.augment_polarization(chunk) - self._try_polarization_with_blueprint(chunk, index) - - if self.blueprint._obs_axis_configed: - self._wcs_parser.augment_observable(chunk) - self._try_observable_with_blueprint(chunk, index) - - if self.blueprint._custom_axis_configed: - self._wcs_parser.augment_custom(chunk) - self._try_custom_with_blueprint(chunk, index) + if self.blueprint._custom_axis_configed: + self._wcs_parser.augment_custom(chunk) + self._try_custom_with_blueprint(chunk, index) self.logger.debug( f'End content artifact augmentation for {artifact.uri}.') @@ -3188,11 +3185,11 @@ def _two_param_constructor(self, lookup1, lookup2, index, to_type, ctor): return new_object # TODO - is this the right implementation? - def ignore_chunks(self, artifact, index=0): - result = True + def add_parts(self, artifact, index=0): + result = False if self.blueprint.has_chunk(index): artifact.parts.add(Part(str(index))) - result = False + result = True return result @staticmethod @@ -3298,7 +3295,7 @@ def headers(self): """ return self._headers - def ignore_chunks(self, artifact, index): + def add_parts(self, artifact, index): # there is one Part per extension, the name is the extension number if ( FitsParser._has_data_array(self._headers[index]) @@ -3308,11 +3305,11 @@ def ignore_chunks(self, artifact, index): # TODO use extension name? artifact.parts.add(Part(str(index))) self.logger.debug(f'Part created for HDU {index}.') - result = False + result = True else: artifact.parts.add(Part(str(index))) self.logger.debug(f'Create empty part for HDU {index}') - result = True + result = False return result def apply_blueprint(self): @@ -3459,7 +3456,7 @@ def augment_artifact(self, artifact, index=0): artifact.uri)) for i, header in enumerate(self.headers): - if self.ignore_chunks(artifact, i): + if not self.add_parts(artifact, i): # artifact-level attributes still require updating BlueprintParser.augment_artifact(self, artifact, 0) continue @@ -3910,9 +3907,9 @@ def augment_artifact(self, artifact, index=0): def _get_chunk_naxis(self, chunk, index): chunk.naxis = self._get_from_list('Chunk.naxis', index, chunk.naxis) - def ignore_chunks(self, artifact, index=0): + def add_parts(self, artifact, index=0): artifact.parts.add(Part(str(index))) - return False + return True class WcsParser: @@ -3955,11 +3952,14 @@ def assign_sanitize(self, assignee, index, key, sanitize=True): Do not want to blindly assign None to astropy.wcs attributes, so use this method for conditional assignment. - The current implementation is that ff there is a legitimate need to + The current implementation is that if there is a legitimate need to assign None to a value, either use 'set' in the Hdf5ObsBlueprint, and specifically assign None, or execute a function to set it to None conditionally. There will be no support for a Default value of None with HDF5 files. + + By the time this method is called, if the value still passes the "ObsBlueprint.needs_lookup" + check, the value should be ignored for fulfilling the WCS needs of the record under construction. 
""" x = self._blueprint._get(key, self._extension) if sanitize: @@ -3968,17 +3968,11 @@ def assign_sanitize(self, assignee, index, key, sanitize=True): assignee[index] = x def _set_wcs(self): - self._wcs = WCS(naxis=self._blueprint.get_configed_axes_count()) + num_axes = self._blueprint.get_configed_axes_count() + self._wcs = WCS(naxis=num_axes) self.wcs = self._wcs.wcs - array_shape = [0] * self._blueprint.get_configed_axes_count() - crder = [0] * self._blueprint.get_configed_axes_count() - crpix = [0] * self._blueprint.get_configed_axes_count() - crval = [0] * self._blueprint.get_configed_axes_count() - csyer = [0] * self._blueprint.get_configed_axes_count() - ctype = [0] * self._blueprint.get_configed_axes_count() - cunit = [0] * self._blueprint.get_configed_axes_count() - temp = [0] * self._blueprint.get_configed_axes_count() - cd = [temp.copy() for _ in range(self._blueprint.get_configed_axes_count())] + array_shape, crder, crpix, crval, csyer, ctype, cunit, temp = [[0] * num_axes for _ in range(8)] + cd = [temp.copy() for _ in range(num_axes)] count = 0 if self._blueprint._pos_axes_configed: self._axes['ra'][1] = True @@ -5229,10 +5223,9 @@ def _get_common_arg_parser(): fits2caom2 and caom2gen :return: args parser """ - resource_id = "ivo://cadc.nrc.ca/global/raven" parser = util.get_base_parser(subparsers=False, version=version.version, - default_resource_id=resource_id) + default_resource_id=GLOBAL_STORAGE_RESOURCE_ID) parser.description = ( 'Augments an observation with information in one or more fits files.') From 1afe62d376ce89c0e9e6f2306e5b6ccdfe4d2d4e Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Tue, 16 Jan 2024 13:44:56 -0800 Subject: [PATCH 18/36] CADC-12858 - flake8 --- caom2utils/caom2utils/caom2blueprint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index 0e93e10a..ed100413 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -199,6 +199,7 @@ GLOBAL_STORAGE_RESOURCE_ID = "ivo://cadc.nrc.ca/global/raven" + class Caom2Exception(Exception): """Exception raised when an attempt to create or update a CAOM2 record fails for some reason.""" @@ -2260,7 +2261,6 @@ def __init__(self, obs_blueprint=None, uri=None): def _get_chunk_naxis(self, chunk, index): chunk.naxis = self._get_from_list( 'Chunk.naxis', index, self.blueprint.get_configed_axes_count()) - # 'Chunk.naxis', index, self._wcs_parser.wcs.wcs.naxis) def augment_artifact(self, artifact, index): """ From bdc2e3f2225dde53ae6f31e422e385387fe77acd Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Tue, 16 Jan 2024 16:14:34 -0800 Subject: [PATCH 19/36] Refactor caom2blueprint.py content into separate modules. 
--- caom2utils/caom2utils/blueprints.py | 1737 +++++++ caom2utils/caom2utils/caom2blueprint.py | 4620 +---------------- caom2utils/caom2utils/legacy.py | 18 +- caom2utils/caom2utils/parsers.py | 2159 ++++++++ .../tests/test_convert_from_java.py | 2 +- .../caom2utils/tests/test_fits2caom2.py | 10 +- .../caom2utils/tests/test_obs_blueprint.py | 2 +- caom2utils/caom2utils/wcs_parsers.py | 923 ++++ 8 files changed, 4859 insertions(+), 4612 deletions(-) create mode 100644 caom2utils/caom2utils/blueprints.py create mode 100644 caom2utils/caom2utils/parsers.py create mode 100644 caom2utils/caom2utils/wcs_parsers.py diff --git a/caom2utils/caom2utils/blueprints.py b/caom2utils/caom2utils/blueprints.py new file mode 100644 index 00000000..f4dd3a7d --- /dev/null +++ b/caom2utils/caom2utils/blueprints.py @@ -0,0 +1,1737 @@ +# *********************************************************************** +# ****************** CANADIAN ASTRONOMY DATA CENTRE ******************* +# ************* CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +# +# (c) 2024. (c) 2024. +# Government of Canada Gouvernement du Canada +# National Research Council Conseil national de recherches +# Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +# All rights reserved Tous droits réservés +# +# NRC disclaims any warranties, Le CNRC dénie toute garantie +# expressed, implied, or énoncée, implicite ou légale, +# statutory, of any kind with de quelque nature que ce +# respect to the software, soit, concernant le logiciel, +# including without limitation y compris sans restriction +# any warranty of merchantability toute garantie de valeur +# or fitness for a particular marchande ou de pertinence +# purpose. NRC shall not be pour un usage particulier. +# liable in any event for any Le CNRC ne pourra en aucun cas +# damages, whether direct or être tenu responsable de tout +# indirect, special or general, dommage, direct ou indirect, +# consequential or incidental, particulier ou général, +# arising from the use of the accessoire ou fortuit, résultant +# software. Neither the name de l'utilisation du logiciel. Ni +# of the National Research le nom du Conseil National de +# Council of Canada nor the Recherches du Canada ni les noms +# names of its contributors may de ses participants ne peuvent +# be used to endorse or promote être utilisés pour approuver ou +# products derived from this promouvoir les produits dérivés +# software without specific prior de ce logiciel sans autorisation +# written permission. préalable et particulière +# par écrit. +# +# This file is part of the Ce fichier fait partie du projet +# OpenCADC project. OpenCADC. +# +# OpenCADC is free software: OpenCADC est un logiciel libre ; +# you can redistribute it and/or vous pouvez le redistribuer ou le +# modify it under the terms of modifier suivant les termes de +# the GNU Affero General Public la “GNU Affero General Public +# License as published by the License” telle que publiée +# Free Software Foundation, par la Free Software Foundation +# either version 3 of the : soit la version 3 de cette +# License, or (at your option) licence, soit (à votre gré) +# any later version. toute version ultérieure. 
+# +# OpenCADC is distributed in the OpenCADC est distribué +# hope that it will be useful, dans l’espoir qu’il vous +# but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +# without even the implied GARANTIE : sans même la garantie +# warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +# or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +# PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +# General Public License for Générale Publique GNU Affero +# more details. pour plus de détails. +# +# You should have received Vous devriez avoir reçu une +# a copy of the GNU Affero copie de la Licence Générale +# General Public License along Publique GNU Affero avec +# with OpenCADC. If not, see OpenCADC ; si ce n’est +# . pas le cas, consultez : +# . +# +# Revision: 4 +# +# *********************************************************************** +# + +import logging + +from caom2 import CalibrationLevel, DataProductType, ReleaseType +from caom2.caom_util import int_32 + + +__all__ = [ + 'Hdf5ObsBlueprint', + 'ObsBlueprint', + '_to_float', + '_to_int', + '_to_int_32', + '_to_str', +] + + +class classproperty: + """ + Class property used for CAOM2_ELEMENTS in ObsBleprint + """ + def __init__(self, f): + self.f = f + + def __get__(self, obj, owner): + return self.f(owner) + + +class ObsBlueprint: + """ + Class that represents the blueprint of a CAOM2 Observation that can be + used to build an observation. + + The following CAOM2 elements can be specified in the blueprint: + _CAOM2_ELEMENTS + + The blueprint designates the source of each of these attributes as either + FITS keywords with possible default values or sets the actual values. + The blueprint can be checked by simply displaying it. + + For example: + + # display the default blueprint when WCS axes are not specified + print(ObsBlueprint()) + + # display the default blueprint when WCS axes are specified + print(ObsBlueprint(position_axis=(1, 2), energy_axis=3, + polarization_axis=4, time_axis=5)) + + # create a blueprint and customize it + ob = ObsBlueprint(position_axis=(1, 2), energy_axis=3, + polarization_axis=4, time_axis=5)) + ob.set('Observation.algorithm.name', 'exposure') + ob.add_attribute('Chunk.energy.axis.axis.ctype', ['MYCTYPE'], + extension=1) + ob.add_attribute('Chunk.energy.axis.axis.ctype', 'MYCTYPE2', + extension=1) + ob.set('Chunk.energy.velang', 33, extension=1) + ob.set_default('Chunk.position.coordsys', 'RA-DEC', extension=1) + + ob.set('Chunk.energy.velang', 44, extension=2) + print(ob) + + """ + _CAOM2_ELEMENTS = [ + 'CompositeObservation.members', + 'DerivedObservation.members', + 'Observation.observationID', + 'Observation.type', + 'Observation.intent', + 'Observation.sequenceNumber', + 'Observation.metaRelease', + 'Observation.metaReadGroups', + 'Observation.metaProducer', + 'Observation.requirements.flag', + + 'Observation.algorithm.name', + + 'Observation.instrument.name', + 'Observation.instrument.keywords', + + 'Observation.proposal.id', + 'Observation.proposal.pi', + 'Observation.proposal.project', + 'Observation.proposal.title', + 'Observation.proposal.keywords', + + 'Observation.target.name', + 'Observation.target.type', + 'Observation.target.standard', + 'Observation.target.redshift', + 'Observation.target.keywords', + 'Observation.target.moving', + 'Observation.target.targetID', + + 'Observation.target_position.point.cval1', + 'Observation.target_position.point.cval2', + 'Observation.target_position.coordsys', + 'Observation.target_position.equinox', + + 
'Observation.telescope.name', + 'Observation.telescope.geoLocationX', + 'Observation.telescope.geoLocationY', + 'Observation.telescope.geoLocationZ', + 'Observation.telescope.keywords', + + 'Observation.environment.seeing', + 'Observation.environment.humidity', + 'Observation.environment.elevation', + 'Observation.environment.tau', + 'Observation.environment.wavelengthTau', + 'Observation.environment.ambientTemp', + 'Observation.environment.photometric', + + 'Plane.productID', + 'Plane.metaRelease', + 'Plane.dataRelease', + 'Plane.dataProductType', + 'Plane.calibrationLevel', + 'Plane.dataQuality', + 'Plane.metaReadGroups', + 'Plane.dataReadGroups', + 'Plane.metaProducer', + + 'Plane.provenance.name', + 'Plane.provenance.version', + 'Plane.provenance.project', + 'Plane.provenance.producer', + 'Plane.provenance.runID', + 'Plane.provenance.reference', + 'Plane.provenance.lastExecuted', + 'Plane.provenance.keywords', + 'Plane.provenance.inputs', + + 'Plane.metrics.sourceNumberDensity', + 'Plane.metrics.background', + 'Plane.metrics.backgroundStddev', + 'Plane.metrics.fluxDensityLimit', + 'Plane.metrics.magLimit', + 'Plane.metrics.sampleSNR', + + 'Plane.observable.ucd', + + 'Artifact.productType', + 'Artifact.releaseType', + 'Artifact.contentChecksum', + 'Artifact.contentLength', + 'Artifact.contentType', + 'Artifact.contentRelease', + 'Artifact.contentReadGroups', + 'Artifact.uri', + 'Artifact.metaProducer', + + 'Part.name', + 'Part.productType', + 'Part.metaProducer', + + 'Chunk', + 'Chunk.naxis', + 'Chunk.observableAxis', + 'Chunk.positionAxis1', + 'Chunk.positionAxis2', + 'Chunk.energyAxis', + 'Chunk.timeAxis', + 'Chunk.polarizationAxis', + 'Chunk.metaProducer', + + 'Chunk.observable.dependent.bin', + 'Chunk.observable.dependent.axis.ctype', + 'Chunk.observable.dependent.axis.cunit', + 'Chunk.observable.independent.bin', + 'Chunk.observable.independent.axis.ctype', + 'Chunk.observable.independent.axis.cunit', + + 'Chunk.position.coordsys', + 'Chunk.position.equinox', + 'Chunk.position.resolution', + 'Chunk.position.axis.axis1.ctype', + 'Chunk.position.axis.axis1.cunit', + 'Chunk.position.axis.axis2.ctype', + 'Chunk.position.axis.axis2.cunit', + 'Chunk.position.axis.error1.syser', + 'Chunk.position.axis.error1.rnder', + 'Chunk.position.axis.error2.syser', + 'Chunk.position.axis.error2.rnder', + 'Chunk.position.axis.function.cd11', + 'Chunk.position.axis.function.cd12', + 'Chunk.position.axis.function.cd21', + 'Chunk.position.axis.function.cd22', + 'Chunk.position.axis.function.dimension.naxis1', + 'Chunk.position.axis.function.dimension.naxis2', + 'Chunk.position.axis.function.refCoord.coord1.pix', + 'Chunk.position.axis.function.refCoord.coord1.val', + 'Chunk.position.axis.function.refCoord.coord2.pix', + 'Chunk.position.axis.function.refCoord.coord2.val', + 'Chunk.position.axis.range.start.coord1.pix', + 'Chunk.position.axis.range.start.coord1.val', + 'Chunk.position.axis.range.start.coord2.pix', + 'Chunk.position.axis.range.start.coord2.val', + 'Chunk.position.axis.range.end.coord1.pix', + 'Chunk.position.axis.range.end.coord1.val', + 'Chunk.position.axis.range.end.coord2.pix', + 'Chunk.position.axis.range.end.coord2.val', + + 'Chunk.energy.specsys', + 'Chunk.energy.ssysobs', + 'Chunk.energy.restfrq', + 'Chunk.energy.restwav', + 'Chunk.energy.velosys', + 'Chunk.energy.zsource', + 'Chunk.energy.ssyssrc', + 'Chunk.energy.velang', + 'Chunk.energy.bandpassName', + 'Chunk.energy.resolvingPower', + 'Chunk.energy.transition', + 'Chunk.energy.transition.species', + 
'Chunk.energy.transition.transition', + 'Chunk.energy.axis.axis.ctype', + 'Chunk.energy.axis.axis.cunit', + 'Chunk.energy.axis.bounds.samples', + 'Chunk.energy.axis.error.syser', + 'Chunk.energy.axis.error.rnder', + 'Chunk.energy.axis.function.naxis', + 'Chunk.energy.axis.function.delta', + 'Chunk.energy.axis.function.refCoord.pix', + 'Chunk.energy.axis.function.refCoord.val', + 'Chunk.energy.axis.range.start.pix', + 'Chunk.energy.axis.range.start.val', + 'Chunk.energy.axis.range.end.pix', + 'Chunk.energy.axis.range.end.val', + + 'Chunk.polarization.axis.axis.ctype', + 'Chunk.polarization.axis.axis.cunit', + 'Chunk.polarization.axis.bounds.samples', + 'Chunk.polarization.axis.error.syser', + 'Chunk.polarization.axis.error.rnder', + 'Chunk.polarization.axis.function.naxis', + 'Chunk.polarization.axis.function.delta', + 'Chunk.polarization.axis.function.refCoord.pix', + 'Chunk.polarization.axis.function.refCoord.val', + 'Chunk.polarization.axis.range.start.pix', + 'Chunk.polarization.axis.range.start.val', + 'Chunk.polarization.axis.range.end.pix', + 'Chunk.polarization.axis.range.end.val', + + 'Chunk.time.exposure', + 'Chunk.time.resolution', + 'Chunk.time.timesys', + 'Chunk.time.trefpos', + 'Chunk.time.mjdref', + 'Chunk.time.axis.axis.ctype', + 'Chunk.time.axis.axis.cunit', + 'Chunk.time.axis.bounds.samples', + 'Chunk.time.axis.error.syser', + 'Chunk.time.axis.error.rnder', + 'Chunk.time.axis.function.naxis', + 'Chunk.time.axis.function.delta', + 'Chunk.time.axis.function.refCoord.pix', + 'Chunk.time.axis.function.refCoord.val', + 'Chunk.time.axis.range.start.pix', + 'Chunk.time.axis.range.start.val', + 'Chunk.time.axis.range.end.pix', + 'Chunk.time.axis.range.end.val', + + 'Chunk.observable.axis.axis.ctype', + 'Chunk.observable.axis.axis.cunit', + 'Chunk.observable.axis.function.refCoord.pix', + + 'Chunk.custom.axis.axis.ctype', + 'Chunk.custom.axis.axis.cunit', + 'Chunk.custom.axis.bounds.samples', + 'Chunk.custom.axis.error.syser', + 'Chunk.custom.axis.error.rnder', + 'Chunk.custom.axis.function.naxis', + 'Chunk.custom.axis.function.delta', + 'Chunk.custom.axis.function.refCoord.pix', + 'Chunk.custom.axis.function.refCoord.val', + 'Chunk.custom.axis.range.start.pix', + 'Chunk.custom.axis.range.start.val', + 'Chunk.custom.axis.range.end.pix', + 'Chunk.custom.axis.range.end.val' + ] + + # replace _CAOM2_ELEMENTS in __doc__ with the real elements + __doc__ = __doc__.replace('_CAOM2_ELEMENTS', '\n'.join(['\t\t{}'.format( + elem) for elem in _CAOM2_ELEMENTS])) + + def __init__(self, position_axes=None, energy_axis=None, + polarization_axis=None, time_axis=None, + obs_axis=None, custom_axis=None, module=None, + update=True, instantiated_class=None): + """ + Ctor + :param position_axes: tuple of form (int, int) indicating the indexes + of position axis + :param energy_axis: index of energy axis (int) + :param polarization_axis: index of polarization axis (int) + :param time_axis: index of time axis (int) + :param obs_axis: index of observable axis (int) + :param custom_axis: index of custom axis (int) + :param module: user-provided code, will be loaded with + importlib.import_module if a value is provided. + """ + + if position_axes and isinstance(position_axes, tuple) and\ + (len(position_axes) != 2): + raise ValueError( + 'Invalid position axis: {}. Must be tuple with 2 elements'. 
+ format(str(position_axes))) + + self.logger = logging.getLogger(__name__) + + # this is the default blueprint + self._plan = {} + tmp = {'Observation.metaRelease': + (['DATE', 'DATE-OBS', 'UTCOBS', 'UTCDATE', + 'UTC-DATE', 'MJDOBS', 'MJD_OBS'], None), + 'Observation.instrument.name': (['INSTRUME'], None), + 'Observation.type': (['OBSTYPE'], None), + 'Observation.environment.ambientTemp': (['TEMPERAT'], + None), + # set the default for SimpleObservation construction + 'Observation.algorithm.name': (['PROCNAME'], 'exposure'), + 'Observation.instrument.keywords': (['INSTMODE'], None), + 'Observation.proposal.id': (['RUNID'], None), + 'Observation.target.name': (['OBJECT'], None), + 'Observation.telescope.name': (['TELESCOP'], None), + 'Observation.telescope.geoLocationX': (['OBSGEO-X'], + None), + 'Observation.telescope.geoLocationY': (['OBSGEO-Y'], + None), + 'Observation.telescope.geoLocationZ': (['OBSGEO-Z'], + None), + 'Observation.observationID': (['OBSID'], None), + 'Plane.calibrationLevel': ([], CalibrationLevel.RAW_STANDARD), + 'Plane.dataProductType': ([], DataProductType.IMAGE), + 'Plane.metaRelease': (['RELEASE', 'REL_DATE'], None), + 'Plane.dataRelease': (['RELEASE', 'REL_DATE'], None), + 'Plane.productID': (['RUNID'], None), + 'Plane.provenance.name': (['XPRVNAME'], None), + 'Plane.provenance.project': (['ADC_ARCH'], None), + 'Plane.provenance.producer': (['ORIGIN'], None), + 'Plane.provenance.reference': (['XREFER'], None), + 'Plane.provenance.lastExecuted': (['DATE-FTS'], None), + 'Artifact.releaseType': ([], ReleaseType.DATA), + 'Chunk': 'include' + } + # using the tmp to make sure that the keywords are valid + for key in tmp: + self.set(key, tmp[key]) + + self._extensions = {} + + # contains the standard WCS keywords in the FITS file expected by the + # astropy.WCS package. + self._wcs_std = { + 'Chunk.naxis': 'ZNAXIS,NAXIS' + } + self._pos_axes_configed = False + self._energy_axis_configed = False + self._time_axis_configed = False + self._polarization_axis_configed = False + self._obs_axis_configed = False + self._custom_axis_configed = False + if position_axes: + self.configure_position_axes(position_axes) + + if energy_axis: + self.configure_energy_axis(energy_axis) + + if polarization_axis: + self.configure_polarization_axis(polarization_axis) + + if time_axis: + self.configure_time_axis(time_axis) + + if obs_axis: + self.configure_observable_axis(obs_axis) + + if custom_axis: + self.configure_custom_axis(custom_axis) + + if module: + self._module = module + else: + self._module = None + self._module_instance = instantiated_class + # if True, existing values are used instead of defaults + self._update = update + # a data structure to carry around twelve bits of data at a time: + # the first item in the set is the ctype index, and the second is + # whether or not the index means anything, resulting in a + # call to the blueprint configure_* methods if it's True. + self._axis_info = { + 'custom': (0, False), + 'dec': (0, False), + 'energy': (0, False), + 'obs': (0, False), + 'polarization': (0, False), + 'ra': (0, False), + 'time': (0, False)} + + def configure_custom_axis(self, axis, override=True): + """ + Set the expected FITS custom keywords by index in the blueprint + and the wcs_std lookup. + + :param axis: The index expected for the custom axis. + :param override: Set to False when reading from a file. 
+ :return: + """ + if self._custom_axis_configed: + self.logger.debug( + 'Attempt to configure already-configured custom axis.') + return + + if override: + self.set('Chunk.custom.axis.axis.ctype', + ([f'CTYPE{axis}'], None)) + self.set('Chunk.custom.axis.axis.cunit', + ([f'CUNIT{axis}'], None)) + self.set('Chunk.custom.axis.function.naxis', + ([f'NAXIS{axis}'], None)) + self.set('Chunk.custom.axis.function.delta', + ([f'CDELT{axis}'], None)) + self.set('Chunk.custom.axis.function.refCoord.pix', + ([f'CRPIX{axis}'], None)) + self.set('Chunk.custom.axis.function.refCoord.val', + ([f'CRVAL{axis}'], None)) + + self._wcs_std['Chunk.custom.axis.axis.ctype'] = f'CTYPE{axis}' + self._wcs_std['Chunk.custom.axis.axis.cunit'] = f'CUNIT{axis}' + self._wcs_std['Chunk.custom.axis.function.naxis'] = f'NAXIS{axis}' + self._wcs_std['Chunk.custom.axis.function.delta'] = f'CDELT{axis}' + self._wcs_std['Chunk.custom.axis.function.refCoord.pix'] = \ + f'CRPIX{axis}' + self._wcs_std['Chunk.custom.axis.function.refCoord.val'] = \ + f'CRVAL{axis}' + + self._custom_axis_configed = True + + def configure_position_axes(self, axes, override=True): + """ + Set the expected FITS spatial keywords by indices in the blueprint and + the wcs_std lookup. + + :param axes: The index expected for the position axes. + :return: + """ + if self._pos_axes_configed: + self.logger.debug( + 'Attempt to configure already-configured position axes.') + return + + if override: + self.set('Chunk.position.coordsys', (['RADESYS'], None)) + self.set('Chunk.position.equinox', (['EQUINOX', 'EPOCH'], None)) + self.set('Chunk.position.axis.axis1.ctype', + ([f'CTYPE{axes[0]}'], None)) + self.set('Chunk.position.axis.axis1.cunit', + ([f'CUNIT{axes[0]}'], None)) + self.set('Chunk.position.axis.axis2.ctype', + ([f'CTYPE{axes[1]}'], None)) + self.set('Chunk.position.axis.axis2.cunit', + ([f'CUNIT{axes[1]}'], None)) + self.set('Chunk.position.axis.error1.syser', + ([f'CSYER{axes[0]}'], None)) + self.set('Chunk.position.axis.error1.rnder', + ([f'CRDER{axes[0]}'], None)) + self.set('Chunk.position.axis.error2.syser', + ([f'CSYER{axes[1]}'], None)) + self.set('Chunk.position.axis.error2.rnder', + ([f'CRDER{axes[1]}'], None)) + self.set('Chunk.position.axis.function.cd11', + ([f'CD{axes[0]}_{axes[0]}'], None)) + self.set('Chunk.position.axis.function.cd12', + ([f'CD{axes[0]}_{axes[1]}'], None)) + self.set('Chunk.position.axis.function.cd21', + ([f'CD{axes[1]}_{axes[0]}'], None)) + self.set('Chunk.position.axis.function.cd22', + ([f'CD{axes[1]}_{axes[1]}'], None)) + self.set('Chunk.position.axis.function.dimension.naxis1', + ([f'ZNAXIS{axes[0]}', + f'NAXIS{axes[0]}'], None)) + self.set('Chunk.position.axis.function.dimension.naxis2', + ([f'ZNAXIS{axes[1]}', + f'NAXIS{axes[1]}'], None)) + self.set('Chunk.position.axis.function.refCoord.coord1.pix', + ([f'CRPIX{axes[0]}'], None)) + self.set('Chunk.position.axis.function.refCoord.coord1.val', + ([f'CRVAL{axes[0]}'], None)) + self.set('Chunk.position.axis.function.refCoord.coord2.pix', + ([f'CRPIX{axes[1]}'], None)) + self.set('Chunk.position.axis.function.refCoord.coord2.val', + ([f'CRVAL{axes[1]}'], None)) + + self._wcs_std['Chunk.position.coordsys'] = 'RADESYS' + self._wcs_std['Chunk.position.equinox'] = 'EQUINOX' + + self._wcs_std['Chunk.position.axis.axis1.ctype'] = \ + f'CTYPE{axes[0]}' + self._wcs_std['Chunk.position.axis.axis1.cunit'] = \ + f'CUNIT{axes[0]}' + self._wcs_std['Chunk.position.axis.axis2.ctype'] = \ + f'CTYPE{axes[1]}' + self._wcs_std['Chunk.position.axis.axis2.cunit'] = \ + 
f'CUNIT{axes[1]}' + self._wcs_std['Chunk.position.axis.error1.syser'] = \ + f'CSYER{axes[0]}' + self._wcs_std['Chunk.position.axis.error1.rnder'] = \ + f'CRDER{axes[0]}' + self._wcs_std['Chunk.position.axis.error2.syser'] = \ + f'CSYER{axes[1]}' + self._wcs_std['Chunk.position.axis.error2.rnder'] = \ + f'CRDER{axes[1]}' + self._wcs_std['Chunk.position.axis.function.cd11'] = \ + f'CD{axes[0]}_{axes[0]}' + self._wcs_std['Chunk.position.axis.function.cd12'] = \ + f'CD{axes[0]}_{axes[1]}' + self._wcs_std['Chunk.position.axis.function.cd21'] = \ + f'CD{axes[1]}_{axes[0]}' + self._wcs_std['Chunk.position.axis.function.cd22'] = \ + f'CD{axes[1]}_{axes[1]}' + self._wcs_std['Chunk.position.axis.function.dimension.naxis1'] = \ + f'NAXIS{axes[0]}' + self._wcs_std['Chunk.position.axis.function.dimension.naxis2'] = \ + f'NAXIS{axes[1]}' + self._wcs_std['Chunk.position.axis.function.refCoord.coord1.pix'] \ + = f'CRPIX{axes[0]}' + self._wcs_std['Chunk.position.axis.function.refCoord.coord1.val'] \ + = f'CRVAL{axes[0]}' + self._wcs_std['Chunk.position.axis.function.refCoord.coord2.pix'] \ + = f'CRPIX{axes[1]}' + self._wcs_std['Chunk.position.axis.function.refCoord.coord2.val'] \ + = f'CRVAL{axes[1]}' + + self._pos_axes_configed = True + + def configure_energy_axis(self, axis, override=True): + """ + Set the expected FITS energy keywords by index in the blueprint and + the wcs_std lookup. + + :param axis: The index expected for the energy axis. + :param override: Set to False when reading from a file. + :return: + """ + if self._energy_axis_configed: + self.logger.debug( + 'Attempt to configure already-configured energy axis.') + return + + if override: + self.set('Chunk.energy.specsys', (['SPECSYS'], None)) + self.set('Chunk.energy.ssysobs', (['SSYSOBS'], None)) + self.set('Chunk.energy.restfrq', (['RESTFRQ'], None)) + self.set('Chunk.energy.restwav', (['RESTWAV'], None)) + self.set('Chunk.energy.velosys', (['VELOSYS'], None)) + self.set('Chunk.energy.zsource', (['ZSOURCE'], None)) + self.set('Chunk.energy.ssyssrc', (['SSYSSRC'], None)) + self.set('Chunk.energy.velang', (['VELANG'], None)) + + self.set('Chunk.energy.bandpassName', ([], None)) + self.set('Chunk.energy.resolvingPower', ([], None)) + + self.set('Chunk.energy.axis.axis.ctype', + ([f'CTYPE{axis}'], None)) + self.set('Chunk.energy.axis.axis.cunit', + ([f'CUNIT{axis}'], None)) + self.set('Chunk.energy.axis.error.syser', + ([f'CSYER{axis}'], None)) + self.set('Chunk.energy.axis.error.rnder', + ([f'CRDER{axis}'], None)) + self.set('Chunk.energy.axis.function.naxis', + ([f'NAXIS{axis}'], None)) + self.set('Chunk.energy.axis.function.delta', + ([f'CDELT{axis}'], None)) + self.set('Chunk.energy.axis.function.refCoord.pix', + ([f'CRPIX{axis}'], None)) + self.set('Chunk.energy.axis.function.refCoord.val', + ([f'CRVAL{axis}'], None)) + + self._wcs_std['Chunk.energy.specsys'] = 'SPECSYS' + self._wcs_std['Chunk.energy.ssysobs'] = 'SSYSOBS' + self._wcs_std['Chunk.energy.restfrq'] = 'RESTFRQ' + self._wcs_std['Chunk.energy.restwav'] = 'RESTWAV' + self._wcs_std['Chunk.energy.velosys'] = 'VELOSYS' + self._wcs_std['Chunk.energy.zsource'] = 'ZSOURCE' + self._wcs_std['Chunk.energy.ssyssrc'] = 'SSYSSRC' + self._wcs_std['Chunk.energy.velang'] = 'VELANG' + + self._wcs_std['Chunk.energy.axis.axis.ctype'] = \ + f'CTYPE{axis}' + self._wcs_std['Chunk.energy.axis.axis.cunit'] = \ + f'CUNIT{axis}' + self._wcs_std['Chunk.energy.axis.error.syser'] = \ + f'CSYER{axis}' + self._wcs_std['Chunk.energy.axis.error.rnder'] = \ + f'CRDER{axis}' + 
self._wcs_std['Chunk.energy.axis.function.naxis'] = \ + f'NAXIS{axis}' + self._wcs_std['Chunk.energy.axis.function.delta'] = \ + f'CDELT{axis}' + self._wcs_std['Chunk.energy.axis.function.refCoord.pix'] = \ + f'CRPIX{axis}' + self._wcs_std['Chunk.energy.axis.function.refCoord.val'] = \ + f'CRVAL{axis}' + + self._energy_axis_configed = True + + def configure_polarization_axis(self, axis, override=True): + """ + Set the expected FITS polarization keywords by index in the blueprint + and the wcs_std lookup. + + :param axis: The index expected for the polarization axis. + :param override: Set to False when reading from a file. + :return: + """ + if self._polarization_axis_configed: + self.logger.debug( + 'Attempt to configure already-configured polarization axis.') + return + + if override: + self.set('Chunk.polarization.axis.axis.ctype', + ([f'CTYPE{axis}'], None)) + self.set('Chunk.polarization.axis.axis.cunit', + ([f'CUNIT{axis}'], None)) + self.set('Chunk.polarization.axis.function.naxis', + ([f'NAXIS{axis}'], None)) + self.set('Chunk.polarization.axis.function.delta', + ([f'CDELT{axis}'], None)) + self.set('Chunk.polarization.axis.function.refCoord.pix', + ([f'CRPIX{axis}'], None)) + self.set('Chunk.polarization.axis.function.refCoord.val', + ([f'CRVAL{axis}'], None)) + + self._wcs_std['Chunk.polarization.axis.axis.ctype'] = \ + f'CTYPE{axis}' + self._wcs_std['Chunk.polarization.axis.axis.cunit'] = \ + f'CUNIT{axis}' + self._wcs_std['Chunk.polarization.axis.function.naxis'] = \ + f'NAXIS{axis}' + self._wcs_std['Chunk.polarization.axis.function.delta'] = \ + f'CDELT{axis}' + self._wcs_std['Chunk.polarization.axis.function.refCoord.pix'] = \ + f'CRPIX{axis}' + self._wcs_std['Chunk.polarization.axis.function.refCoord.val'] = \ + f'CRVAL{axis}' + + self._polarization_axis_configed = True + + def configure_observable_axis(self, axis, override=True): + """ + Set the expected FITS observable keywords by index in the blueprint + and the wcs_std lookup. + Note: observable axis is not a standard WCS and it's not used by + astropy.wcs so, arguably, it can be removed. It is here for now for + consistency purposes. + :param axis: The index expected for the observable axis. + :param override: Set to False when reading from a file. + :return: + """ + if self._obs_axis_configed: + self.logger.debug( + 'Attempt to configure already-configured observable axis.') + return + + if override: + self.set('Chunk.observable.axis.axis.ctype', + ([f'CTYPE{axis}'], None)) + self.set('Chunk.observable.axis.axis.cunit', + ([f'CUNIT{axis}'], None)) + self.set('Chunk.observable.axis.function.refCoord.pix', + ([f'CRPIX{axis}'], None)) + + self._wcs_std['Chunk.observable.axis.axis.ctype'] = \ + f'CTYPE{axis}' + self._wcs_std['Chunk.observable.axis.axis.cunit'] = \ + f'CUNIT{axis}' + self._wcs_std['Chunk.observable.axis.function.refCoord.pix'] = \ + f'CRPIX{axis}' + + self._obs_axis_configed = True + + def configure_time_axis(self, axis, override=True): + """ + Set the expected FITS time keywords by index in the blueprint and + the wcs_std lookup. + + :param axis: The index expected for the time axis. + :param override: Set to False when reading from a file. 
+ :return: + """ + if self._time_axis_configed: + self.logger.debug( + 'Attempt to configure already-configured time axis.') + return + + if override: + self.set('Chunk.time.exposure', (['EXPTIME', 'INTTIME'], None)) + self.set('Chunk.time.timesys', (['TIMESYS'], None)) + self.set('Chunk.time.trefpos', (['TREFPOS'], None)) + self.set('Chunk.time.mjdref', (['MJDREF'], None)) + self.set('Chunk.time.resolution', (['TIMEDEL'], None)) + self.set('Chunk.time.axis.axis.ctype', + ([f'CTYPE{axis}'], None)) + self.set('Chunk.time.axis.axis.cunit', + ([f'CUNIT{axis}'], None)) + self.set('Chunk.time.axis.error.syser', + ([f'CSYER{axis}'], None)) + self.set('Chunk.time.axis.error.rnder', + ([f'CRDER{axis}'], None)) + self.set('Chunk.time.axis.function.naxis', + ([f'NAXIS{axis}'], None)) + self.set('Chunk.time.axis.function.delta', + ([f'CDELT{axis}'], None)) + self.set('Chunk.time.axis.function.refCoord.pix', + ([f'CRPIX{axis}'], None)) + self.set('Chunk.time.axis.function.refCoord.val', + ([f'CRVAL{axis}'], None)) + + self._wcs_std['Chunk.time.exposure'] = 'EXPTIME' + self._wcs_std['Chunk.time.resolution'] = 'TIMEDEL' + self._wcs_std['Chunk.time.timesys'] = 'TIMESYS' + self._wcs_std['Chunk.time.trefpos'] = 'TREFPOS' + self._wcs_std['Chunk.time.mjdref'] = 'MJDREF' + + self._wcs_std['Chunk.time.axis.axis.ctype'] = \ + f'CTYPE{axis}' + self._wcs_std['Chunk.time.axis.axis.cunit'] = \ + f'CUNIT{axis}' + self._wcs_std['Chunk.time.axis.error.syser'] = \ + f'CSYER{axis}' + self._wcs_std['Chunk.time.axis.error.rnder'] = \ + f'CRDER{axis}' + self._wcs_std['Chunk.time.axis.function.naxis'] = \ + f'NAXIS{axis}' + self._wcs_std['Chunk.time.axis.function.delta'] = \ + f'CDELT{axis}' + self._wcs_std['Chunk.time.axis.function.refCoord.pix'] = \ + f'CRPIX{axis}' + self._wcs_std['Chunk.time.axis.function.refCoord.val'] = \ + f'CRVAL{axis}' + + self._time_axis_configed = True + + def _guess_axis_info(self): + """Look for info regarding axis types in the blueprint wcs_std. + Configure the blueprint according to the guesses. 
+ """ + for ii in self._plan: + if isinstance(self._plan[ii], tuple): + for value in self._plan[ii][0]: + if (value.startswith('CTYPE')) and value[-1].isdigit(): + value = value.split('-')[0] + self._guess_axis_info_from_ctypes(ii, int(value[-1])) + else: + value = self._plan[ii] + if value is None: + continue + if (value.startswith('CTYPE')) and value[-1].isdigit(): + value = value.split('-')[0] + self._guess_axis_info_from_ctypes(ii, int(value[-1])) + + self._guess_axis_info_from_plan() + + def _guess_axis_info_from_plan(self): + for ii in self._plan: + if ii.startswith('Chunk.position') and ii.endswith('axis1.ctype') \ + and not self._axis_info['ra'][1]: + configured_index = self._get_configured_index( + self._axis_info, 'ra') + self._axis_info['ra'] = (configured_index, True) + elif ii.startswith('Chunk.position') and \ + ii.endswith('axis2.ctype') and not \ + self._axis_info['dec'][1]: + configured_index = self._get_configured_index(self._axis_info, + 'dec') + self._axis_info['dec'] = (configured_index, True) + elif ii.startswith('Chunk.energy') and not \ + self._axis_info['energy'][1]: + configured_index = self._get_configured_index(self._axis_info, + 'energy') + self._axis_info['energy'] = (configured_index, True) + elif ii.startswith('Chunk.time') and not \ + self._axis_info['time'][1]: + configured_index = self._get_configured_index(self._axis_info, + 'time') + self._axis_info['time'] = (configured_index, True) + elif ii.startswith('Chunk.polarization') \ + and not self._axis_info['polarization'][1]: + configured_index = self._get_configured_index(self._axis_info, + 'polarization') + self._axis_info['polarization'] = (configured_index, True) + elif ii.startswith('Chunk.observable') and not \ + self._axis_info['obs'][1]: + configured_index = self._get_configured_index(self._axis_info, + 'obs') + self._axis_info['obs'] = (configured_index, True) + elif ii.startswith('Chunk.custom') and not \ + self._axis_info['custom'][1]: + configured_index = self._get_configured_index(self._axis_info, + 'custom') + self._axis_info['custom'] = (configured_index, True) + + if self._axis_info['ra'][1] and self._axis_info['dec'][1]: + self.configure_position_axes( + (self._axis_info['ra'][0], self._axis_info['dec'][0]), False) + elif self._axis_info['ra'][1] or self._axis_info['dec'][1]: + raise ValueError('Only one positional axis found ' + '(ra/dec): {}/{}'. 
+ format(self._axis_info['ra'][0], + self._axis_info['dec'][0])) + else: + # assume that positional axis are 1 and 2 by default + if (self._axis_info['time'][0] in [1, 2] or + self._axis_info['energy'][0] in [1, 2] or + self._axis_info['polarization'][0] in [1, 2] or + self._axis_info['obs'][0] in [1, 2] or + self._axis_info['custom'][0] in [1, 2]): + raise ValueError('Cannot determine the positional axis') + else: + self.configure_position_axes((1, 2), False) + + if self._axis_info['time'][1]: + self.configure_time_axis(self._axis_info['time'][0], False) + if self._axis_info['energy'][1]: + self.configure_energy_axis(self._axis_info['energy'][0], False) + if self._axis_info['polarization'][1]: + self.configure_polarization_axis( + self._axis_info['polarization'][0], False) + if self._axis_info['obs'][1]: + self.configure_observable_axis(self._axis_info['obs'][0], False) + if self._axis_info['custom'][1]: + self.configure_custom_axis(self._axis_info['custom'][0], False) + + def _guess_axis_info_from_ctypes(self, lookup, counter): + """ + Check for the presence of blueprint keys in the plan, and whether or + not they indicate an index in their configuration. + + :param lookup: Blueprint plan key. + :param counter: Value to set the index to for an axis. + :param axis_info: local data structure to pass around what is + configured, and what is it's value. + """ + if lookup.startswith('Chunk.energy'): + self._axis_info['energy'] = (counter, True) + elif lookup.startswith('Chunk.polarization'): + self._axis_info['polarization'] = (counter, True) + elif lookup.startswith('Chunk.time'): + self._axis_info['time'] = (counter, True) + elif lookup.startswith('Chunk.position') and lookup.endswith( + 'axis1.ctype'): + self._axis_info['ra'] = (counter, True) + elif lookup.startswith('Chunk.position') and lookup.endswith( + 'axis2.ctype'): + self._axis_info['dec'] = (counter, True) + elif lookup.startswith('Chunk.observable'): + self._axis_info['obs'] = (counter, True) + elif lookup.startswith('Chunk.custom'): + self._axis_info['custom'] = (counter, True) + else: + raise ValueError( + f'Unrecognized axis type: {lookup}') + + def _get_configured_index(self, axis_info, lookup): + """Find the next available index value among those that are not set. + + :param axis_info: local data structure to pass around what is + configured, and what is it's value.""" + DEFAULT_INDICES = {'ra': 1, + 'dec': 2, + 'energy': 3, + 'time': 4, + 'polarization': 5, + 'obs': 6, + 'custom': 7} + + # the logic - if the default index is already used, assign the lowest + # index that is unused, otherwise use the default index + + max_index = 0 + min_index = 7 + default_index = DEFAULT_INDICES[lookup] + default_used = False + for axis in axis_info: + # do two unrelated things in this for loop + # 1. determine where to start counting + if axis_info[axis][1]: + max_index = max(max_index, axis_info[axis][0]) + min_index = min(min_index, axis_info[axis][0]) + # 2. determine if the default is used + if axis_info[axis][1] and default_index == axis_info[axis][0]: + default_used = True + + configured_index = 0 + if default_used: + if min_index == 1: + configured_index = max_index + 1 + else: + configured_index = min(1, min_index) + else: + configured_index = default_index + return configured_index + + def load_from_file(self, file_name): + """ + Load a blueprint from a file. The expected input format is the same + as is output by _serialize. This means there's lots of stripping of + extra spaces, equals signs, and the word default. 
Also manage + square brackets as list construction. + + Accept comments that start with '#'. + + :param file_name: The fully-qualified pathname for the blueprint + file on disk. + """ + with open(file_name) as file: + for line in file: + if '=' in line: + if '#' in line: + if line.find('#') == 0: + # ignore lines starting with a comment + continue + line = line.split('#')[0] + key, value = line.split('=', 1) + if 'default' in value: + temp = value.replace('default', ''). \ + replace('=', '').strip('\n').strip() + default = temp.rsplit(',')[1] + temp_list = temp.rsplit(',')[0].replace('[', ''). \ + replace(']', '').replace('\'', '').split(',') + if 'None' in default: + default = None + else: + default = default.strip() + cleaned_up_value = (temp_list, default) + else: + if '[' in value: + temp_list = value.replace('[', ''). \ + replace(']', '').replace('\'', '').split(',') + temp_list_2 = [] + for ii in temp_list: + temp_list_2.append(ii.strip().strip('\n')) + cleaned_up_value = (temp_list_2, None) + else: + cleaned_up_value = value.strip('\n').strip() + if cleaned_up_value == 'None': + cleaned_up_value = None + self.set(key.strip(), cleaned_up_value) + self._guess_axis_info() + + @classproperty + def CAOM2_ELEMENTS(cls): + """ + List of valid names of CAOM2 elements. + :return: + """ + return list(ObsBlueprint._CAOM2_ELEMENTS) # return a copy + + @classmethod + def check_caom2_element(cls, caom2_element): + """ + Checks that an element is a valid caom2_element in the blueprint. It + checks that it's part of the ObsBlueprint._CAOM2_ELEMENTS + :param caom2_element: name CAOM2 element to check + :raises KeyError + """ + if caom2_element not in cls._CAOM2_ELEMENTS: + raise KeyError( + '{} not a valid CAOM2 element name (mispelling?).'. + format(caom2_element)) + + @staticmethod + def check_chunk(caom2_element): + """ + Checks that an element is a valid Chunk-type caom2_element + :param caom2_element: name CAOM2 element to check + :raises ValueError + """ + if not caom2_element.startswith('Chunk'): + raise ValueError( + "Extension number refers to Chunk elements only") + + @staticmethod + def check_extension(extension): + if extension is not None and extension < 0: + raise ValueError( + f'Extension count failure. {extension} should be >= 0') + + def __str__(self): + plan = self._serialize(self._plan) + + extensions = '' + if self._extensions: + for key in sorted(self._extensions): + extensions = extensions + f'\nextension {key}:\n' +\ + self._serialize(self._extensions[key]) + return plan + extensions + + def _serialize(self, src): + return '\n'.join( + ['{} = {}'.format(key, '{}, default = {}'.format(src[key][0], + src[key][1]) + if isinstance(src[key], tuple) + else src[key]) + for key in ObsBlueprint._CAOM2_ELEMENTS + if key in src]) + + def set(self, caom2_element, value, extension=0): + """ + Sets the value associated with an element in the CAOM2 model. Value + cannot be a tuple. 
+ :param caom2_element: name CAOM2 element (as in + ObsBlueprint.CAOM2_ELEMEMTS) + :param value: new value of the CAOM2 element + :param extension: extension number (used only for Chunk elements) + """ + ObsBlueprint.check_caom2_element(caom2_element) + ObsBlueprint.check_extension(extension) + if extension: + ObsBlueprint.check_chunk(caom2_element) + if extension not in self._extensions: + self._extensions[extension] = {} + self._extensions[extension][caom2_element] = value + else: + self._plan[caom2_element] = value + + def add_attribute(self, caom2_element, attribute, extension=0): + """ + Adds an attribute in the list of other attributes associated + with an caom2 element. + :param caom2_element: name CAOM2 element (as in + ObsBlueprint.CAOM2_ELEMEMTS) + :param attribute: name of attribute the element is mapped to + :param extension: extension number (used only for Chunk elements) + :raises AttributeError if the caom2 element has already an associated + value or KeyError if the caom2 element does not exists. + """ + ObsBlueprint.check_caom2_element(caom2_element) + ObsBlueprint.check_extension(extension) + if extension: + ObsBlueprint.check_chunk(caom2_element) + if extension not in self._extensions: + raise AttributeError( + f'No extension {extension} in the blueprint') + else: + if caom2_element in self._extensions[extension]: + if (isinstance(self._extensions[extension][caom2_element], + tuple)): + if (attribute not in + self._extensions[extension][caom2_element][0]): + self._extensions[extension][caom2_element][0].\ + insert(0, attribute) + else: + raise AttributeError( + (f'No attributes in extension {extension} ' + f'associated with keyword {caom2_element}')) + else: + self._extensions[extension][caom2_element] = \ + ([attribute], None) + else: + if caom2_element in self._plan: + if isinstance(self._plan[caom2_element], tuple): + if attribute not in self._plan[caom2_element][0]: + self._plan[caom2_element][0].insert(0, attribute) + else: + raise AttributeError(f'No attributes associated with ' + f'keyword {caom2_element}') + else: + self._plan[caom2_element] = ([attribute], None) + + def add_table_attribute(self, caom2_element, ttype_attribute, extension=0, + index=0): + """ + Adds a FITS BINTABLE TTYPE* lookup, to a list of other FITS attributes + associated with an caom2 element. This does not co-exist with + non-table attributes. + + There is no support for default values for table attributes. + + :param caom2_element: name CAOM2 element (as in + ObsBlueprint.CAOM2_ELEMEMTS) + :param ttype_attribute: name of TTYPE attribute element is mapped to + :param extension: extension number (used only for Chunk elements) + :param index: which row values to return. If index is None, all row + values will be returned as a comma-separated list. + :raises AttributeError if the caom2 element has already an associated + value or KeyError if the caom2 element does not exists. + """ + ObsBlueprint.check_caom2_element(caom2_element) + ObsBlueprint.check_extension(extension) + if extension: + if extension in self._extensions: + if caom2_element in self._extensions[extension]: + if (ObsBlueprint.is_table( + self._extensions[extension][caom2_element])): + if (ttype_attribute not in + self._extensions[extension][caom2_element][1]): + self._extensions[extension][caom2_element][1]. 
\ + insert(0, ttype_attribute) + else: + raise AttributeError( + ('No TTYPE attributes in extension {} associated ' + 'with keyword {}').format(extension, + caom2_element)) + else: + self._extensions[extension][caom2_element] = \ + ('BINTABLE', [ttype_attribute], index) + else: + self._extensions[extension] = {} + self._extensions[extension][caom2_element] = \ + ('BINTABLE', [ttype_attribute], index) + else: + if caom2_element in self._plan: + if ObsBlueprint.is_table(self._plan[caom2_element]): + if ttype_attribute not in self._plan[caom2_element][1]: + self._plan[caom2_element][1].insert(0, ttype_attribute) + else: + raise AttributeError('No TTYPE attributes associated ' + 'with keyword {}'.format( + caom2_element)) + else: + self._plan[caom2_element] = ( + 'BINTABLE', [ttype_attribute], None) + + def set_default(self, caom2_element, default, extension=0): + """ + Sets the default value of a caom2 element that is associated with + attributes. If the element does not exist or does not have a list of + associated attributes, default is set as the associated value + of the element. + + If set is called for the same caom2_element after this, the default + value will be reset to None. + + :param caom2_element: name CAOM2 element (as in + ObsBlueprint.CAOM2_ELEMEMTS) + :param default: default value + :param extension: extension number (used only for Chunk elements) + """ + ObsBlueprint.check_caom2_element(caom2_element) + ObsBlueprint.check_extension(extension) + if extension: + ObsBlueprint.check_chunk(caom2_element) + if extension not in self._extensions: + self._extensions[extension] = {} + if caom2_element in self._extensions[extension] and \ + isinstance(self._extensions[extension][caom2_element], tuple): + self._extensions[extension][caom2_element] = \ + (self._extensions[extension][caom2_element][0], default) + else: + # default is the only value + self._extensions[extension][caom2_element] = default + else: + if (caom2_element in self._plan) and \ + isinstance(self._plan[caom2_element], tuple): + self._plan[caom2_element] = (self._plan[caom2_element][0], + default) + else: + # override the value + self._plan[caom2_element] = default + + def delete(self, caom2_element, extension=0): + """ + Deletes an element from the blueprint + :param caom2_element: name CAOM2 element (as in + ObsBlueprint.CAOM2_ELEMEMTS) + :param extension: extension number + :raises exceptions if the element or extension not found + """ + ObsBlueprint.check_caom2_element(caom2_element) + ObsBlueprint.check_extension(extension) + if extension: + ObsBlueprint.check_chunk(caom2_element) + if extension not in self._extensions: + raise ValueError('Extension {} not configured in blueprint'. + format(extension)) + if caom2_element in self._extensions[extension]: + del self._extensions[extension][caom2_element] + if len(self._extensions[extension]) == 0: + del self._extensions[extension] + else: + if caom2_element in self._plan: + del self._plan[caom2_element] + + def clear(self, caom2_element, extension=0): + """ + Clears the value for an element in the blueprint by resetting it to an + empty list with no default. 
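+
+        For example (the element name is illustrative), after
+        ob.clear('Chunk.energy.specsys') the blueprint entry for that
+        element is stored as ([], None).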
+ + :param caom2_element: name CAOM2 element (as in + ObsBlueprint.CAOM2_ELEMEMTS) + :param extension: extension number + :raises exceptions if the element or extension not found + """ + ObsBlueprint.check_caom2_element(caom2_element) + ObsBlueprint.check_extension(extension) + if extension: + ObsBlueprint.check_chunk(caom2_element) + if extension not in self._extensions: + raise ValueError('Extension {} not configured in blueprint'. + format(extension)) + if caom2_element in self._extensions[extension]: + self._extensions[extension][caom2_element] = ([], None) + else: + if caom2_element in self._plan: + self._plan[caom2_element] = ([], None) + + def _get(self, caom2_element, extension=0): + """ + Returns the source associated with a CAOM2 element + :param caom2_element: name CAOM2 element (as in + ObsBlueprint.CAOM2_ELEMEMTS) + :param extension: extension number + :return: Tuple of the form (list_of_associated_attributes, + default_value) OR the actual value associated with the CAOM2 element + """ + ObsBlueprint.check_caom2_element(caom2_element) + ObsBlueprint.check_extension(extension) + if extension: + if (extension in self._extensions) and \ + (caom2_element in self._extensions[extension]): + return self._extensions[extension][caom2_element] + + # look in the minimal plan + if caom2_element not in self._plan: + return None + else: + return self._plan[caom2_element] + + def has_chunk(self, extension): + """What does the plan say about creating chunks for an + extension? + + :return True if there should be a chunk to go along with a part + """ + value = '' + if extension is not None and extension in self._extensions: + if 'Chunk' in self._extensions[extension]: + value = self._extensions[extension]['Chunk'] + elif 'Chunk' in self._plan: + if ((extension is not None and extension == 0) or ( + extension is None)): + value = self._plan['Chunk'] + return not value == '{ignore}' + + @staticmethod + def is_table(value): + """Hide the blueprint structure from clients - they shouldn't need + to know that a value of type tuple requires special processing.""" + return ObsBlueprint.needs_lookup(value) and value[0] == 'BINTABLE' + + @staticmethod + def is_function(value): + """ + Check if a blueprint value has Python 'function' syntax. The + "'/' not in value" clause excludes strings with syntax that enables + addressing HDF5 arrays. 
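+
+        For example, a value such as 'get_exposure(uri)' (an illustrative
+        function name) is treated as a function call, while an HDF5 path
+        such as '/header/wcs/crpix(0)' is not, because it contains a '/'.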
+
+        :return: True if the value is the name of a function to be executed,
+            False otherwise
+        """
+        return (not ObsBlueprint.needs_lookup(value) and isinstance(value, str)
+                and '(' in value and ')' in value and '/' not in value)
+
+    @staticmethod
+    def has_default_value(value):
+        """Check if a blueprint value is a tuple with a default (second
+        element) that is set."""
+        return isinstance(value, tuple) and value[1]
+
+    @staticmethod
+    def has_no_value(value):
+        """If functions return None, try not to update the WCS with this
+        value."""
+        return value is None or (
+            isinstance(value, str) and 'None' in value.strip())
+
+    @staticmethod
+    def needs_lookup(value):
+        """Hide the blueprint structure from clients - they shouldn't need
+        to know that a value of type tuple requires special processing."""
+        return isinstance(value, tuple)
+
+    def get_configed_axes_count(self):
+        """:return: how many axes have been configured to read from WCS"""
+        configed_axes = 0
+        if self._pos_axes_configed:
+            configed_axes += 2
+        if self._energy_axis_configed:
+            configed_axes += 1
+        if self._time_axis_configed:
+            configed_axes += 1
+        if self._polarization_axis_configed:
+            configed_axes += 1
+        if self._obs_axis_configed:
+            configed_axes += 1
+        if self._custom_axis_configed:
+            configed_axes += 1
+        return configed_axes
+
+    @property
+    def update(self):
+        return self._update
+
+    @update.setter
+    def update(self, value):
+        self._update = value
+
+
+class Hdf5ObsBlueprint(ObsBlueprint):
+    """
+    Class that specializes the CAOM2 Observation construction based on HDF5
+    file content.
+
+    The blueprint designates the source of each attribute as either
+    HDF5 Dataset or Group values. Specific or default values may also be
+    indicated in the same fashion as for an ObsBlueprint. The blueprint can
+    be checked by simply displaying it.
+
+    HDF5-specific example:
+    # create a blueprint and customize it
+    ob = Hdf5ObsBlueprint(position_axes=(1, 2))
+
+    # lookup value starting with // means rooted at base of the hdf5 file
+    ob.add_attribute('Observation.target.name', '//header/object/obj_id')
+
+    # lookup value starting with / means rooted at the base of the
+    # "find_roots_here" parameter for Hdf5Parser
+    #
+    # (integer) means return only the value with the index of "integer"
+    # from a list
+    ob.add_attribute(
+        'Chunk.position.axis.function.refCoord.coord1.pix',
+        '/header/wcs/crpix(0)')
+
+    # (integer:integer) means return only the value with the index of
+    # "integer" from a list, followed by "integer" from the list in the
+    # list
+    ob.add_attribute(
+        'Chunk.position.axis.function.cd11', '/header/wcs/cd(0:0)')
+    print(ob)
+
+    """
+    def __init__(self, position_axes=None, energy_axis=None,
+                 polarization_axis=None, time_axis=None,
+                 obs_axis=None, custom_axis=None, module=None,
+                 update=True, instantiated_class=None):
+        """
+        There are no sensible/known HDF5 defaults for WCS construction, so
+        default to ensuring the blueprint executes with mostly values of None.
+
+        Use the attribute _wcs_std, so that the list of WCS keywords used
+        as input is known.
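+
+        A rough sketch of the defaults applied here (taken from the tmp
+        dictionary below):
+
+            ob = Hdf5ObsBlueprint()
+            # ob._get('Observation.algorithm.name') -> ([], 'exposure')
+            # ob._get('Chunk') -> 'include'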
+ """ + super().__init__( + position_axes, + energy_axis, + polarization_axis, + time_axis, + obs_axis, + custom_axis, + module, + update, + instantiated_class, + ) + tmp = { + 'Observation.algorithm.name': ([], 'exposure'), + 'Plane.calibrationLevel': ([], CalibrationLevel.RAW_STANDARD), + 'Plane.dataProductType': ([], DataProductType.IMAGE), + 'Artifact.releaseType': ([], ReleaseType.DATA), + 'Chunk': 'include' + } + # using the tmp to make sure that the keywords are valid + for key in tmp: + self.set(key, tmp[key]) + + def configure_custom_axis(self, axis, override=True): + """ + Set the expected custom keywords by index in the blueprint + and the wcs_std lookup. + + :param axis: The index expected for the custom axis. + :param override: Set to False when reading from a file. + """ + if self._custom_axis_configed: + self.logger.debug( + 'Attempt to configure already-configured custom axis.') + return + + if override: + self.set('Chunk.custom.axis.axis.ctype', ([], None)) + self.set('Chunk.custom.axis.axis.cunit', ([], None)) + self.set('Chunk.custom.axis.function.naxis', ([], 1)) + self.set('Chunk.custom.axis.function.delta', ([], None)) + self.set('Chunk.custom.axis.function.refCoord.pix', ([], None)) + self.set('Chunk.custom.axis.function.refCoord.val', ([], None)) + + self._wcs_std['Chunk.custom.axis.axis.ctype'] = '' + self._wcs_std['Chunk.custom.axis.axis.cunit'] = '' + self._wcs_std['Chunk.custom.axis.function.naxis'] = '' + self._wcs_std['Chunk.custom.axis.function.delta'] = '' + self._wcs_std['Chunk.custom.axis.function.refCoord.pix'] = '' + self._wcs_std['Chunk.custom.axis.function.refCoord.val'] = '' + self._custom_axis_configed = True + + def configure_position_axes(self, axes, override=True): + """ + Set the expected spatial keywords by indices in the blueprint and + the wcs_std lookup. + + :param axes: The index expected for the position axes. + :param override: Set to False when reading from a file. 
+ """ + if self._pos_axes_configed: + self.logger.debug( + 'Attempt to configure already-configured position axes.') + return + + if override: + self.set('Chunk.position.coordsys', ([], None)) + self.set('Chunk.position.equinox', ([], None)) + self.set('Chunk.position.axis.axis1.ctype', ([], None)) + self.set('Chunk.position.axis.axis1.cunit', ([], None)) + self.set('Chunk.position.axis.axis2.ctype', ([], None)) + self.set('Chunk.position.axis.axis2.cunit', ([], None)) + self.set('Chunk.position.axis.error1.syser', ([], None)) + self.set('Chunk.position.axis.error1.rnder', ([], None)) + self.set('Chunk.position.axis.error2.syser', ([], None)) + self.set('Chunk.position.axis.error2.rnder', ([], None)) + self.set('Chunk.position.axis.function.cd11', ([], None)) + self.set('Chunk.position.axis.function.cd12', ([], None)) + self.set('Chunk.position.axis.function.cd21', ([], None)) + self.set('Chunk.position.axis.function.cd22', ([], None)) + self.set('Chunk.position.axis.function.dimension.naxis1', + ([], 1)) + self.set('Chunk.position.axis.function.dimension.naxis2', + ([], 1)) + self.set('Chunk.position.axis.function.refCoord.coord1.pix', + ([], None)) + self.set('Chunk.position.axis.function.refCoord.coord1.val', + ([], None)) + self.set('Chunk.position.axis.function.refCoord.coord2.pix', + ([], None)) + self.set('Chunk.position.axis.function.refCoord.coord2.val', + ([], None)) + + self._wcs_std['Chunk.position.coordsys'] = '' + self._wcs_std['Chunk.position.equinox'] = '' + + self._wcs_std['Chunk.position.axis.axis1.ctype'] = '' + self._wcs_std['Chunk.position.axis.axis1.cunit'] = '' + self._wcs_std['Chunk.position.axis.axis2.ctype'] = '' + self._wcs_std['Chunk.position.axis.axis2.cunit'] = '' + self._wcs_std['Chunk.position.axis.error1.syser'] = '' + self._wcs_std['Chunk.position.axis.error1.rnder'] = '' + self._wcs_std['Chunk.position.axis.error2.syser'] = '' + self._wcs_std['Chunk.position.axis.error2.rnder'] = '' + self._wcs_std['Chunk.position.axis.function.cd11'] = '' + self._wcs_std['Chunk.position.axis.function.cd12'] = '' + self._wcs_std['Chunk.position.axis.function.cd21'] = '' + self._wcs_std['Chunk.position.axis.function.cd22'] = '' + self._wcs_std['Chunk.position.axis.function.dimension.naxis1'] = '' + self._wcs_std['Chunk.position.axis.function.dimension.naxis2'] = '' + self._wcs_std['Chunk.position.axis.function.refCoord.coord1.pix'] = '' + self._wcs_std['Chunk.position.axis.function.refCoord.coord1.val'] = '' + self._wcs_std['Chunk.position.axis.function.refCoord.coord2.pix'] = '' + self._wcs_std['Chunk.position.axis.function.refCoord.coord2.val'] = '' + + self._pos_axes_configed = True + + def configure_energy_axis(self, axis, override=True): + """ + :param axis: The index expected for the energy axis. + :param override: Set to False when reading from a file. 
+ :return: + """ + if self._energy_axis_configed: + self.logger.debug( + 'Attempt to configure already-configured energy axis.') + return + + if override: + self.set('Chunk.energy.specsys', ([], None)) + self.set('Chunk.energy.ssysobs', ([], None)) + self.set('Chunk.energy.restfrq', ([], None)) + self.set('Chunk.energy.restwav', ([], None)) + self.set('Chunk.energy.velosys', ([], None)) + self.set('Chunk.energy.zsource', ([], None)) + self.set('Chunk.energy.ssyssrc', ([], None)) + self.set('Chunk.energy.velang', ([], None)) + + self.set('Chunk.energy.bandpassName', ([], None)) + self.set('Chunk.energy.resolvingPower', ([], None)) + + self.set('Chunk.energy.axis.axis.ctype', ([], None)) + self.set('Chunk.energy.axis.axis.cunit', ([], None)) + self.set('Chunk.energy.axis.error.syser', ([], None)) + self.set('Chunk.energy.axis.error.rnder', ([], None)) + self.set('Chunk.energy.axis.function.naxis', ([], 1)) + self.set('Chunk.energy.axis.function.delta', ([], None)) + self.set('Chunk.energy.axis.function.refCoord.pix', ([], None)) + self.set('Chunk.energy.axis.function.refCoord.val', ([], None)) + + self._wcs_std['Chunk.energy.specsys'] = '' + self._wcs_std['Chunk.energy.ssysobs'] = '' + self._wcs_std['Chunk.energy.restfrq'] = '' + self._wcs_std['Chunk.energy.restwav'] = '' + self._wcs_std['Chunk.energy.velosys'] = '' + self._wcs_std['Chunk.energy.zsource'] = '' + self._wcs_std['Chunk.energy.ssyssrc'] = '' + self._wcs_std['Chunk.energy.velang'] = '' + + self._wcs_std['Chunk.energy.axis.axis.ctype'] = '' + self._wcs_std['Chunk.energy.axis.axis.cunit'] = '' + self._wcs_std['Chunk.energy.axis.error.syser'] = '' + self._wcs_std['Chunk.energy.axis.error.rnder'] = '' + self._wcs_std['Chunk.energy.axis.function.naxis'] = '' + self._wcs_std['Chunk.energy.axis.function.delta'] = '' + self._wcs_std['Chunk.energy.axis.function.refCoord.pix'] = '' + self._wcs_std['Chunk.energy.axis.function.refCoord.val'] = '' + self._energy_axis_configed = True + + def configure_polarization_axis(self, axis, override=True): + """ + Set the expected polarization keywords by index in the blueprint + and the wcs_std lookup. + + :param axis: The index expected for the polarization axis. + :param override: Set to False when reading from a file. + :return: + """ + if self._polarization_axis_configed: + self.logger.debug( + 'Attempt to configure already-configured polarization axis.') + return + + if override: + # STOKES is the only value allowed for PolarizationWCS ctype. + self.set('Chunk.polarization.axis.axis.ctype', ([], 'STOKES')) + self.set('Chunk.polarization.axis.axis.cunit', ([], None)) + self.set('Chunk.polarization.axis.function.naxis', ([], 1)) + self.set('Chunk.polarization.axis.function.delta', ([], None)) + self.set('Chunk.polarization.axis.function.refCoord.pix', + ([], None)) + self.set('Chunk.polarization.axis.function.refCoord.val', + ([], None)) + + self._wcs_std['Chunk.polarization.axis.axis.ctype'] = '' + self._wcs_std['Chunk.polarization.axis.axis.cunit'] = '' + self._wcs_std['Chunk.polarization.axis.function.naxis'] = '' + self._wcs_std['Chunk.polarization.axis.function.delta'] = '' + self._wcs_std['Chunk.polarization.axis.function.refCoord.pix'] = '' + self._wcs_std['Chunk.polarization.axis.function.refCoord.val'] = '' + + self._polarization_axis_configed = True + + def configure_observable_axis(self, axis, override=True): + """ + Set the expected observable keywords by index in the blueprint + and the wcs_std lookup. 
+ Note: observable axis is not a standard WCS and it's not used by + astropy.wcs so, arguably, it can be removed. It is here for now for + consistency purposes. + :param axis: The index expected for the observable axis. + :param override: Set to False when reading from a file. + :return: + """ + if self._obs_axis_configed: + self.logger.debug( + 'Attempt to configure already-configured observable axis.') + return + + if override: + self.set('Chunk.observable.axis.axis.ctype', ([], None)) + self.set('Chunk.observable.axis.axis.cunit', ([], None)) + self.set('Chunk.observable.axis.function.refCoord.pix', ([], None)) + + self._wcs_std['Chunk.observable.axis.axis.ctype'] = '' + self._wcs_std['Chunk.observable.axis.axis.cunit'] = '' + self._wcs_std['Chunk.observable.axis.function.refCoord.pix'] = '' + + self._obs_axis_configed = True + + def configure_time_axis(self, axis, override=True): + """ + Set the expected time keywords by index in the blueprint and + the wcs_std lookup. + + :param axis: The index expected for the time axis. + :param override: Set to False when reading from a file. + :return: + """ + if self._time_axis_configed: + self.logger.debug( + 'Attempt to configure already-configured time axis.') + return + + if override: + self.set('Chunk.time.exposure', ([], None)) + self.set('Chunk.time.timesys', ([], None)) + self.set('Chunk.time.trefpos', ([], None)) + self.set('Chunk.time.mjdref', ([], None)) + self.set('Chunk.time.resolution', ([], None)) + self.set('Chunk.time.axis.axis.ctype', ([], None)) + self.set('Chunk.time.axis.axis.cunit', ([], None)) + self.set('Chunk.time.axis.error.syser', ([], None)) + self.set('Chunk.time.axis.error.rnder', ([], None)) + self.set('Chunk.time.axis.function.naxis', ([], 1)) + self.set('Chunk.time.axis.function.delta', ([], None)) + self.set('Chunk.time.axis.function.refCoord.pix', ([], None)) + self.set('Chunk.time.axis.function.refCoord.val', ([], None)) + + self._wcs_std['Chunk.time.exposure'] = '' + self._wcs_std['Chunk.time.resolution'] = '' + self._wcs_std['Chunk.time.timesys'] = '' + self._wcs_std['Chunk.time.trefpos'] = '' + self._wcs_std['Chunk.time.mjdref'] = '' + + self._wcs_std['Chunk.time.axis.axis.ctype'] = '' + self._wcs_std['Chunk.time.axis.axis.cunit'] = '' + self._wcs_std['Chunk.time.axis.error.syser'] = '' + self._wcs_std['Chunk.time.axis.error.rnder'] = '' + self._wcs_std['Chunk.time.axis.function.naxis'] = '' + self._wcs_std['Chunk.time.axis.function.delta'] = '' + self._wcs_std['Chunk.time.axis.function.refCoord.pix'] = '' + self._wcs_std['Chunk.time.axis.function.refCoord.val'] = '' + + self._time_axis_configed = True + + def set(self, caom2_element, value, extension=0): + """ + Sets the value associated with an element in the CAOM2 model. Value + cannot be a tuple. 
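+
+        Byte strings are decoded to str before being stored, so a value read
+        from an HDF5 attribute, e.g. b'some target name' (illustrative), ends
+        up in the blueprint as 'some target name'.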
+ :param caom2_element: name CAOM2 element (as in + ObsBlueprint.CAOM2_ELEMEMTS) + :param value: new value of the CAOM2 element + :param extension: extension number (used only for Chunk elements) + """ + if hasattr(value, 'decode'): + value = value.decode('utf-8') + super().set(caom2_element, value, extension) + + def _guess_axis_info(self): + self._guess_axis_info_from_plan() + + +def _to_float(value): + return float(value) if value is not None else None + + +def _to_int(value): + return int(value) if value is not None else None + + +def _to_int_32(value): + if value is None: + return None + elif isinstance(value, str): + return int_32(value) + else: + return value + + +def _to_str(value): + return str(value).strip() if value is not None else None diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index ed100413..6f74cceb 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -92,42 +92,34 @@ """ import argparse -from datetime import datetime from logging.handlers import TimedRotatingFileHandler -import math -from astropy.wcs import SingularMatrixError, utils, Wcsprm, WCS -from astropy.io import fits -from astropy.time import Time from cadcutils import version -from caom2.caom_util import int_32 from caom2 import ( - Artifact, Part, Chunk, Plane, Observation, CoordError, - SpectralWCS, CoordAxis1D, Axis, CoordFunction1D, RefCoord, - SpatialWCS, Dimension2D, Coord2D, CoordFunction2D, - CoordAxis2D, CoordRange1D, PolarizationWCS, TemporalWCS, - ObservationReader, ObservationWriter, Algorithm, - ReleaseType, ProductType, ObservationIntentType, - DataProductType, Telescope, Environment, - Instrument, Proposal, Target, Provenance, Metrics, - CalibrationLevel, Requirements, DataQuality, PlaneURI, - SimpleObservation, DerivedObservation, ChecksumURI, - ObservationURI, ObservableAxis, Slice, Point, TargetPosition, - CoordRange2D, TypedSet, CustomWCS, Observable, - CompositeObservation, EnergyTransition + Artifact, + Algorithm, + ChecksumURI, + CompositeObservation, + DerivedObservation, + ObservationReader, + ObservationWriter, + Plane, + ProductType, + ReleaseType, + SimpleObservation, ) from caom2utils import data_util from caom2utils.caomvalidator import validate from caom2utils.wcsvalidator import InvalidWCSError +from caom2utils.blueprints import Hdf5ObsBlueprint, ObsBlueprint, _to_int, _to_str +from caom2utils.parsers import BlueprintParser, ContentParser, FitsParser, Hdf5Parser import importlib import logging import os -import re import requests import sys import tempfile import traceback -from collections import defaultdict from urllib.parse import urlparse from cadcutils import net, util from cadcdata import FileInfo @@ -137,101 +129,22 @@ APP_NAME = 'caom2gen' -__all__ = ['Caom2Exception', 'ContentParser', 'FitsParser', 'FitsWcsParser', - 'DispatchingFormatter', 'ObsBlueprint', 'get_arg_parser', 'proc', - 'POLARIZATION_CTYPES', 'gen_proc', 'get_gen_proc_arg_parser', - 'BlueprintParser', 'augment', 'get_vos_headers', - 'get_external_headers', 'Hdf5Parser', 'Hdf5ObsBlueprint', - 'Hdf5WcsParser', 'update_artifact_meta'] - -CUSTOM_CTYPES = [ - 'RM', - 'FDEP' -] - -POSITION_CTYPES = [ - ['RA', - 'GLON', - 'ELON', - 'HLON', - 'SLON'], - ['DEC', - 'GLAT', - 'ELAT', - 'HLAT', - 'SLAT'] +__all__ = [ + 'augment', + 'DispatchingFormatter', + 'gen_proc', + 'get_arg_parser', + 'get_external_headers', + 'get_gen_proc_arg_parser', + 'get_vos_headers', + 'proc', + 'update_artifact_meta', ] -ENERGY_CTYPES = [ - 
'FREQ', - 'ENER', - 'WAVN', - 'VRAD', - 'WAVE', - 'VOPT', - 'ZOPT', - 'AWAV', - 'VELO', - 'BETA'] - -# From http://hea-www.cfa.harvard.edu/~arots/TimeWCS/ -TIME_KEYWORDS = [ - 'TIME', - 'TAI', - 'TT', - 'TDT', - 'ET', - 'IAT', - 'UT1', - 'UTC', - 'GMT', - 'GPS', - 'TCG', - 'TCB', - 'TDB', - 'LOCAL'] - -POLARIZATION_CTYPES = ['STOKES'] - -OBSERVABLE_CTYPES = [ - 'observable', - 'FLUX'] GLOBAL_STORAGE_RESOURCE_ID = "ivo://cadc.nrc.ca/global/raven" -class Caom2Exception(Exception): - """Exception raised when an attempt to create or update a CAOM2 record - fails for some reason.""" - pass - - -class HDULoggingFilter(logging.Filter): - """Add the HDU number to logging messages as a default.""" - - def __init__(self): - super().__init__() - self._extension = -1 - - def filter(self, record): - record.hdu = self._extension - return True - - def extension(self, value): - self._extension = value - - -class classproperty: - """ - Class property used for CAOM2_ELEMENTS in ObsBleprint - """ - def __init__(self, f): - self.f = f - - def __get__(self, obj, owner): - return self.f(owner) - - class DispatchingFormatter: """Dispatch formatter for logger and it's sub-logger, so there can be multiple formatters.""" @@ -255,4493 +168,6 @@ def format(self, record): return formatter.format(record) -class ObsBlueprint: - """ - Class that represents the blueprint of a CAOM2 Observation that can be - used to build an observation. - - The following CAOM2 elements can be specified in the blueprint: - _CAOM2_ELEMENTS - - The blueprint designates the source of each of these attributes as either - FITS keywords with possible default values or sets the actual values. - The blueprint can be checked by simply displaying it. - - For example: - - # display the default blueprint when WCS axes are not specified - print(ObsBlueprint()) - - # display the default blueprint when WCS axes are specified - print(ObsBlueprint(position_axis=(1, 2), energy_axis=3, - polarization_axis=4, time_axis=5)) - - # create a blueprint and customize it - ob = ObsBlueprint(position_axis=(1, 2), energy_axis=3, - polarization_axis=4, time_axis=5)) - ob.set('Observation.algorithm.name', 'exposure') - ob.add_attribute('Chunk.energy.axis.axis.ctype', ['MYCTYPE'], - extension=1) - ob.add_attribute('Chunk.energy.axis.axis.ctype', 'MYCTYPE2', - extension=1) - ob.set('Chunk.energy.velang', 33, extension=1) - ob.set_default('Chunk.position.coordsys', 'RA-DEC', extension=1) - - ob.set('Chunk.energy.velang', 44, extension=2) - print(ob) - - """ - _CAOM2_ELEMENTS = [ - 'CompositeObservation.members', - 'DerivedObservation.members', - 'Observation.observationID', - 'Observation.type', - 'Observation.intent', - 'Observation.sequenceNumber', - 'Observation.metaRelease', - 'Observation.metaReadGroups', - 'Observation.metaProducer', - 'Observation.requirements.flag', - - 'Observation.algorithm.name', - - 'Observation.instrument.name', - 'Observation.instrument.keywords', - - 'Observation.proposal.id', - 'Observation.proposal.pi', - 'Observation.proposal.project', - 'Observation.proposal.title', - 'Observation.proposal.keywords', - - 'Observation.target.name', - 'Observation.target.type', - 'Observation.target.standard', - 'Observation.target.redshift', - 'Observation.target.keywords', - 'Observation.target.moving', - 'Observation.target.targetID', - - 'Observation.target_position.point.cval1', - 'Observation.target_position.point.cval2', - 'Observation.target_position.coordsys', - 'Observation.target_position.equinox', - - 'Observation.telescope.name', - 
'Observation.telescope.geoLocationX', - 'Observation.telescope.geoLocationY', - 'Observation.telescope.geoLocationZ', - 'Observation.telescope.keywords', - - 'Observation.environment.seeing', - 'Observation.environment.humidity', - 'Observation.environment.elevation', - 'Observation.environment.tau', - 'Observation.environment.wavelengthTau', - 'Observation.environment.ambientTemp', - 'Observation.environment.photometric', - - 'Plane.productID', - 'Plane.metaRelease', - 'Plane.dataRelease', - 'Plane.dataProductType', - 'Plane.calibrationLevel', - 'Plane.dataQuality', - 'Plane.metaReadGroups', - 'Plane.dataReadGroups', - 'Plane.metaProducer', - - 'Plane.provenance.name', - 'Plane.provenance.version', - 'Plane.provenance.project', - 'Plane.provenance.producer', - 'Plane.provenance.runID', - 'Plane.provenance.reference', - 'Plane.provenance.lastExecuted', - 'Plane.provenance.keywords', - 'Plane.provenance.inputs', - - 'Plane.metrics.sourceNumberDensity', - 'Plane.metrics.background', - 'Plane.metrics.backgroundStddev', - 'Plane.metrics.fluxDensityLimit', - 'Plane.metrics.magLimit', - 'Plane.metrics.sampleSNR', - - 'Plane.observable.ucd', - - 'Artifact.productType', - 'Artifact.releaseType', - 'Artifact.contentChecksum', - 'Artifact.contentLength', - 'Artifact.contentType', - 'Artifact.contentRelease', - 'Artifact.contentReadGroups', - 'Artifact.uri', - 'Artifact.metaProducer', - - 'Part.name', - 'Part.productType', - 'Part.metaProducer', - - 'Chunk', - 'Chunk.naxis', - 'Chunk.observableAxis', - 'Chunk.positionAxis1', - 'Chunk.positionAxis2', - 'Chunk.energyAxis', - 'Chunk.timeAxis', - 'Chunk.polarizationAxis', - 'Chunk.metaProducer', - - 'Chunk.observable.dependent.bin', - 'Chunk.observable.dependent.axis.ctype', - 'Chunk.observable.dependent.axis.cunit', - 'Chunk.observable.independent.bin', - 'Chunk.observable.independent.axis.ctype', - 'Chunk.observable.independent.axis.cunit', - - 'Chunk.position.coordsys', - 'Chunk.position.equinox', - 'Chunk.position.resolution', - 'Chunk.position.axis.axis1.ctype', - 'Chunk.position.axis.axis1.cunit', - 'Chunk.position.axis.axis2.ctype', - 'Chunk.position.axis.axis2.cunit', - 'Chunk.position.axis.error1.syser', - 'Chunk.position.axis.error1.rnder', - 'Chunk.position.axis.error2.syser', - 'Chunk.position.axis.error2.rnder', - 'Chunk.position.axis.function.cd11', - 'Chunk.position.axis.function.cd12', - 'Chunk.position.axis.function.cd21', - 'Chunk.position.axis.function.cd22', - 'Chunk.position.axis.function.dimension.naxis1', - 'Chunk.position.axis.function.dimension.naxis2', - 'Chunk.position.axis.function.refCoord.coord1.pix', - 'Chunk.position.axis.function.refCoord.coord1.val', - 'Chunk.position.axis.function.refCoord.coord2.pix', - 'Chunk.position.axis.function.refCoord.coord2.val', - 'Chunk.position.axis.range.start.coord1.pix', - 'Chunk.position.axis.range.start.coord1.val', - 'Chunk.position.axis.range.start.coord2.pix', - 'Chunk.position.axis.range.start.coord2.val', - 'Chunk.position.axis.range.end.coord1.pix', - 'Chunk.position.axis.range.end.coord1.val', - 'Chunk.position.axis.range.end.coord2.pix', - 'Chunk.position.axis.range.end.coord2.val', - - 'Chunk.energy.specsys', - 'Chunk.energy.ssysobs', - 'Chunk.energy.restfrq', - 'Chunk.energy.restwav', - 'Chunk.energy.velosys', - 'Chunk.energy.zsource', - 'Chunk.energy.ssyssrc', - 'Chunk.energy.velang', - 'Chunk.energy.bandpassName', - 'Chunk.energy.resolvingPower', - 'Chunk.energy.transition', - 'Chunk.energy.transition.species', - 'Chunk.energy.transition.transition', - 
'Chunk.energy.axis.axis.ctype', - 'Chunk.energy.axis.axis.cunit', - 'Chunk.energy.axis.bounds.samples', - 'Chunk.energy.axis.error.syser', - 'Chunk.energy.axis.error.rnder', - 'Chunk.energy.axis.function.naxis', - 'Chunk.energy.axis.function.delta', - 'Chunk.energy.axis.function.refCoord.pix', - 'Chunk.energy.axis.function.refCoord.val', - 'Chunk.energy.axis.range.start.pix', - 'Chunk.energy.axis.range.start.val', - 'Chunk.energy.axis.range.end.pix', - 'Chunk.energy.axis.range.end.val', - - 'Chunk.polarization.axis.axis.ctype', - 'Chunk.polarization.axis.axis.cunit', - 'Chunk.polarization.axis.bounds.samples', - 'Chunk.polarization.axis.error.syser', - 'Chunk.polarization.axis.error.rnder', - 'Chunk.polarization.axis.function.naxis', - 'Chunk.polarization.axis.function.delta', - 'Chunk.polarization.axis.function.refCoord.pix', - 'Chunk.polarization.axis.function.refCoord.val', - 'Chunk.polarization.axis.range.start.pix', - 'Chunk.polarization.axis.range.start.val', - 'Chunk.polarization.axis.range.end.pix', - 'Chunk.polarization.axis.range.end.val', - - 'Chunk.time.exposure', - 'Chunk.time.resolution', - 'Chunk.time.timesys', - 'Chunk.time.trefpos', - 'Chunk.time.mjdref', - 'Chunk.time.axis.axis.ctype', - 'Chunk.time.axis.axis.cunit', - 'Chunk.time.axis.bounds.samples', - 'Chunk.time.axis.error.syser', - 'Chunk.time.axis.error.rnder', - 'Chunk.time.axis.function.naxis', - 'Chunk.time.axis.function.delta', - 'Chunk.time.axis.function.refCoord.pix', - 'Chunk.time.axis.function.refCoord.val', - 'Chunk.time.axis.range.start.pix', - 'Chunk.time.axis.range.start.val', - 'Chunk.time.axis.range.end.pix', - 'Chunk.time.axis.range.end.val', - - 'Chunk.observable.axis.axis.ctype', - 'Chunk.observable.axis.axis.cunit', - 'Chunk.observable.axis.function.refCoord.pix', - - 'Chunk.custom.axis.axis.ctype', - 'Chunk.custom.axis.axis.cunit', - 'Chunk.custom.axis.bounds.samples', - 'Chunk.custom.axis.error.syser', - 'Chunk.custom.axis.error.rnder', - 'Chunk.custom.axis.function.naxis', - 'Chunk.custom.axis.function.delta', - 'Chunk.custom.axis.function.refCoord.pix', - 'Chunk.custom.axis.function.refCoord.val', - 'Chunk.custom.axis.range.start.pix', - 'Chunk.custom.axis.range.start.val', - 'Chunk.custom.axis.range.end.pix', - 'Chunk.custom.axis.range.end.val' - ] - - # replace _CAOM2_ELEMENTS in __doc__ with the real elements - __doc__ = __doc__.replace('_CAOM2_ELEMENTS', '\n'.join(['\t\t{}'.format( - elem) for elem in _CAOM2_ELEMENTS])) - - def __init__(self, position_axes=None, energy_axis=None, - polarization_axis=None, time_axis=None, - obs_axis=None, custom_axis=None, module=None, - update=True, instantiated_class=None): - """ - Ctor - :param position_axes: tuple of form (int, int) indicating the indexes - of position axis - :param energy_axis: index of energy axis (int) - :param polarization_axis: index of polarization axis (int) - :param time_axis: index of time axis (int) - :param obs_axis: index of observable axis (int) - :param custom_axis: index of custom axis (int) - :param module: user-provided code, will be loaded with - importlib.import_module if a value is provided. - """ - - if position_axes and isinstance(position_axes, tuple) and\ - (len(position_axes) != 2): - raise ValueError( - 'Invalid position axis: {}. Must be tuple with 2 elements'. 
- format(str(position_axes))) - - self.logger = logging.getLogger(__name__) - - # this is the default blueprint - self._plan = {} - tmp = {'Observation.metaRelease': - (['DATE', 'DATE-OBS', 'UTCOBS', 'UTCDATE', - 'UTC-DATE', 'MJDOBS', 'MJD_OBS'], None), - 'Observation.instrument.name': (['INSTRUME'], None), - 'Observation.type': (['OBSTYPE'], None), - 'Observation.environment.ambientTemp': (['TEMPERAT'], - None), - # set the default for SimpleObservation construction - 'Observation.algorithm.name': (['PROCNAME'], 'exposure'), - 'Observation.instrument.keywords': (['INSTMODE'], None), - 'Observation.proposal.id': (['RUNID'], None), - 'Observation.target.name': (['OBJECT'], None), - 'Observation.telescope.name': (['TELESCOP'], None), - 'Observation.telescope.geoLocationX': (['OBSGEO-X'], - None), - 'Observation.telescope.geoLocationY': (['OBSGEO-Y'], - None), - 'Observation.telescope.geoLocationZ': (['OBSGEO-Z'], - None), - 'Observation.observationID': (['OBSID'], None), - 'Plane.calibrationLevel': ([], CalibrationLevel.RAW_STANDARD), - 'Plane.dataProductType': ([], DataProductType.IMAGE), - 'Plane.metaRelease': (['RELEASE', 'REL_DATE'], None), - 'Plane.dataRelease': (['RELEASE', 'REL_DATE'], None), - 'Plane.productID': (['RUNID'], None), - 'Plane.provenance.name': (['XPRVNAME'], None), - 'Plane.provenance.project': (['ADC_ARCH'], None), - 'Plane.provenance.producer': (['ORIGIN'], None), - 'Plane.provenance.reference': (['XREFER'], None), - 'Plane.provenance.lastExecuted': (['DATE-FTS'], None), - 'Artifact.releaseType': ([], ReleaseType.DATA), - 'Chunk': 'include' - } - # using the tmp to make sure that the keywords are valid - for key in tmp: - self.set(key, tmp[key]) - - self._extensions = {} - - # contains the standard WCS keywords in the FITS file expected by the - # astropy.WCS package. - self._wcs_std = { - 'Chunk.naxis': 'ZNAXIS,NAXIS' - } - self._pos_axes_configed = False - self._energy_axis_configed = False - self._time_axis_configed = False - self._polarization_axis_configed = False - self._obs_axis_configed = False - self._custom_axis_configed = False - if position_axes: - self.configure_position_axes(position_axes) - - if energy_axis: - self.configure_energy_axis(energy_axis) - - if polarization_axis: - self.configure_polarization_axis(polarization_axis) - - if time_axis: - self.configure_time_axis(time_axis) - - if obs_axis: - self.configure_observable_axis(obs_axis) - - if custom_axis: - self.configure_custom_axis(custom_axis) - - if module: - self._module = module - else: - self._module = None - self._module_instance = instantiated_class - # if True, existing values are used instead of defaults - self._update = update - # a data structure to carry around twelve bits of data at a time: - # the first item in the set is the ctype index, and the second is - # whether or not the index means anything, resulting in a - # call to the blueprint configure_* methods if it's True. - self._axis_info = { - 'custom': (0, False), - 'dec': (0, False), - 'energy': (0, False), - 'obs': (0, False), - 'polarization': (0, False), - 'ra': (0, False), - 'time': (0, False)} - - def configure_custom_axis(self, axis, override=True): - """ - Set the expected FITS custom keywords by index in the blueprint - and the wcs_std lookup. - - :param axis: The index expected for the custom axis. - :param override: Set to False when reading from a file. 
- :return: - """ - if self._custom_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured custom axis.') - return - - if override: - self.set('Chunk.custom.axis.axis.ctype', - ([f'CTYPE{axis}'], None)) - self.set('Chunk.custom.axis.axis.cunit', - ([f'CUNIT{axis}'], None)) - self.set('Chunk.custom.axis.function.naxis', - ([f'NAXIS{axis}'], None)) - self.set('Chunk.custom.axis.function.delta', - ([f'CDELT{axis}'], None)) - self.set('Chunk.custom.axis.function.refCoord.pix', - ([f'CRPIX{axis}'], None)) - self.set('Chunk.custom.axis.function.refCoord.val', - ([f'CRVAL{axis}'], None)) - - self._wcs_std['Chunk.custom.axis.axis.ctype'] = f'CTYPE{axis}' - self._wcs_std['Chunk.custom.axis.axis.cunit'] = f'CUNIT{axis}' - self._wcs_std['Chunk.custom.axis.function.naxis'] = f'NAXIS{axis}' - self._wcs_std['Chunk.custom.axis.function.delta'] = f'CDELT{axis}' - self._wcs_std['Chunk.custom.axis.function.refCoord.pix'] = \ - f'CRPIX{axis}' - self._wcs_std['Chunk.custom.axis.function.refCoord.val'] = \ - f'CRVAL{axis}' - - self._custom_axis_configed = True - - def configure_position_axes(self, axes, override=True): - """ - Set the expected FITS spatial keywords by indices in the blueprint and - the wcs_std lookup. - - :param axes: The index expected for the position axes. - :return: - """ - if self._pos_axes_configed: - self.logger.debug( - 'Attempt to configure already-configured position axes.') - return - - if override: - self.set('Chunk.position.coordsys', (['RADESYS'], None)) - self.set('Chunk.position.equinox', (['EQUINOX', 'EPOCH'], None)) - self.set('Chunk.position.axis.axis1.ctype', - ([f'CTYPE{axes[0]}'], None)) - self.set('Chunk.position.axis.axis1.cunit', - ([f'CUNIT{axes[0]}'], None)) - self.set('Chunk.position.axis.axis2.ctype', - ([f'CTYPE{axes[1]}'], None)) - self.set('Chunk.position.axis.axis2.cunit', - ([f'CUNIT{axes[1]}'], None)) - self.set('Chunk.position.axis.error1.syser', - ([f'CSYER{axes[0]}'], None)) - self.set('Chunk.position.axis.error1.rnder', - ([f'CRDER{axes[0]}'], None)) - self.set('Chunk.position.axis.error2.syser', - ([f'CSYER{axes[1]}'], None)) - self.set('Chunk.position.axis.error2.rnder', - ([f'CRDER{axes[1]}'], None)) - self.set('Chunk.position.axis.function.cd11', - ([f'CD{axes[0]}_{axes[0]}'], None)) - self.set('Chunk.position.axis.function.cd12', - ([f'CD{axes[0]}_{axes[1]}'], None)) - self.set('Chunk.position.axis.function.cd21', - ([f'CD{axes[1]}_{axes[0]}'], None)) - self.set('Chunk.position.axis.function.cd22', - ([f'CD{axes[1]}_{axes[1]}'], None)) - self.set('Chunk.position.axis.function.dimension.naxis1', - ([f'ZNAXIS{axes[0]}', - f'NAXIS{axes[0]}'], None)) - self.set('Chunk.position.axis.function.dimension.naxis2', - ([f'ZNAXIS{axes[1]}', - f'NAXIS{axes[1]}'], None)) - self.set('Chunk.position.axis.function.refCoord.coord1.pix', - ([f'CRPIX{axes[0]}'], None)) - self.set('Chunk.position.axis.function.refCoord.coord1.val', - ([f'CRVAL{axes[0]}'], None)) - self.set('Chunk.position.axis.function.refCoord.coord2.pix', - ([f'CRPIX{axes[1]}'], None)) - self.set('Chunk.position.axis.function.refCoord.coord2.val', - ([f'CRVAL{axes[1]}'], None)) - - self._wcs_std['Chunk.position.coordsys'] = 'RADESYS' - self._wcs_std['Chunk.position.equinox'] = 'EQUINOX' - - self._wcs_std['Chunk.position.axis.axis1.ctype'] = \ - f'CTYPE{axes[0]}' - self._wcs_std['Chunk.position.axis.axis1.cunit'] = \ - f'CUNIT{axes[0]}' - self._wcs_std['Chunk.position.axis.axis2.ctype'] = \ - f'CTYPE{axes[1]}' - self._wcs_std['Chunk.position.axis.axis2.cunit'] = \ - 
f'CUNIT{axes[1]}' - self._wcs_std['Chunk.position.axis.error1.syser'] = \ - f'CSYER{axes[0]}' - self._wcs_std['Chunk.position.axis.error1.rnder'] = \ - f'CRDER{axes[0]}' - self._wcs_std['Chunk.position.axis.error2.syser'] = \ - f'CSYER{axes[1]}' - self._wcs_std['Chunk.position.axis.error2.rnder'] = \ - f'CRDER{axes[1]}' - self._wcs_std['Chunk.position.axis.function.cd11'] = \ - f'CD{axes[0]}_{axes[0]}' - self._wcs_std['Chunk.position.axis.function.cd12'] = \ - f'CD{axes[0]}_{axes[1]}' - self._wcs_std['Chunk.position.axis.function.cd21'] = \ - f'CD{axes[1]}_{axes[0]}' - self._wcs_std['Chunk.position.axis.function.cd22'] = \ - f'CD{axes[1]}_{axes[1]}' - self._wcs_std['Chunk.position.axis.function.dimension.naxis1'] = \ - f'NAXIS{axes[0]}' - self._wcs_std['Chunk.position.axis.function.dimension.naxis2'] = \ - f'NAXIS{axes[1]}' - self._wcs_std['Chunk.position.axis.function.refCoord.coord1.pix'] \ - = f'CRPIX{axes[0]}' - self._wcs_std['Chunk.position.axis.function.refCoord.coord1.val'] \ - = f'CRVAL{axes[0]}' - self._wcs_std['Chunk.position.axis.function.refCoord.coord2.pix'] \ - = f'CRPIX{axes[1]}' - self._wcs_std['Chunk.position.axis.function.refCoord.coord2.val'] \ - = f'CRVAL{axes[1]}' - - self._pos_axes_configed = True - - def configure_energy_axis(self, axis, override=True): - """ - Set the expected FITS energy keywords by index in the blueprint and - the wcs_std lookup. - - :param axis: The index expected for the energy axis. - :param override: Set to False when reading from a file. - :return: - """ - if self._energy_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured energy axis.') - return - - if override: - self.set('Chunk.energy.specsys', (['SPECSYS'], None)) - self.set('Chunk.energy.ssysobs', (['SSYSOBS'], None)) - self.set('Chunk.energy.restfrq', (['RESTFRQ'], None)) - self.set('Chunk.energy.restwav', (['RESTWAV'], None)) - self.set('Chunk.energy.velosys', (['VELOSYS'], None)) - self.set('Chunk.energy.zsource', (['ZSOURCE'], None)) - self.set('Chunk.energy.ssyssrc', (['SSYSSRC'], None)) - self.set('Chunk.energy.velang', (['VELANG'], None)) - - self.set('Chunk.energy.bandpassName', ([], None)) - self.set('Chunk.energy.resolvingPower', ([], None)) - - self.set('Chunk.energy.axis.axis.ctype', - ([f'CTYPE{axis}'], None)) - self.set('Chunk.energy.axis.axis.cunit', - ([f'CUNIT{axis}'], None)) - self.set('Chunk.energy.axis.error.syser', - ([f'CSYER{axis}'], None)) - self.set('Chunk.energy.axis.error.rnder', - ([f'CRDER{axis}'], None)) - self.set('Chunk.energy.axis.function.naxis', - ([f'NAXIS{axis}'], None)) - self.set('Chunk.energy.axis.function.delta', - ([f'CDELT{axis}'], None)) - self.set('Chunk.energy.axis.function.refCoord.pix', - ([f'CRPIX{axis}'], None)) - self.set('Chunk.energy.axis.function.refCoord.val', - ([f'CRVAL{axis}'], None)) - - self._wcs_std['Chunk.energy.specsys'] = 'SPECSYS' - self._wcs_std['Chunk.energy.ssysobs'] = 'SSYSOBS' - self._wcs_std['Chunk.energy.restfrq'] = 'RESTFRQ' - self._wcs_std['Chunk.energy.restwav'] = 'RESTWAV' - self._wcs_std['Chunk.energy.velosys'] = 'VELOSYS' - self._wcs_std['Chunk.energy.zsource'] = 'ZSOURCE' - self._wcs_std['Chunk.energy.ssyssrc'] = 'SSYSSRC' - self._wcs_std['Chunk.energy.velang'] = 'VELANG' - - self._wcs_std['Chunk.energy.axis.axis.ctype'] = \ - f'CTYPE{axis}' - self._wcs_std['Chunk.energy.axis.axis.cunit'] = \ - f'CUNIT{axis}' - self._wcs_std['Chunk.energy.axis.error.syser'] = \ - f'CSYER{axis}' - self._wcs_std['Chunk.energy.axis.error.rnder'] = \ - f'CRDER{axis}' - 
self._wcs_std['Chunk.energy.axis.function.naxis'] = \ - f'NAXIS{axis}' - self._wcs_std['Chunk.energy.axis.function.delta'] = \ - f'CDELT{axis}' - self._wcs_std['Chunk.energy.axis.function.refCoord.pix'] = \ - f'CRPIX{axis}' - self._wcs_std['Chunk.energy.axis.function.refCoord.val'] = \ - f'CRVAL{axis}' - - self._energy_axis_configed = True - - def configure_polarization_axis(self, axis, override=True): - """ - Set the expected FITS polarization keywords by index in the blueprint - and the wcs_std lookup. - - :param axis: The index expected for the polarization axis. - :param override: Set to False when reading from a file. - :return: - """ - if self._polarization_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured polarization axis.') - return - - if override: - self.set('Chunk.polarization.axis.axis.ctype', - ([f'CTYPE{axis}'], None)) - self.set('Chunk.polarization.axis.axis.cunit', - ([f'CUNIT{axis}'], None)) - self.set('Chunk.polarization.axis.function.naxis', - ([f'NAXIS{axis}'], None)) - self.set('Chunk.polarization.axis.function.delta', - ([f'CDELT{axis}'], None)) - self.set('Chunk.polarization.axis.function.refCoord.pix', - ([f'CRPIX{axis}'], None)) - self.set('Chunk.polarization.axis.function.refCoord.val', - ([f'CRVAL{axis}'], None)) - - self._wcs_std['Chunk.polarization.axis.axis.ctype'] = \ - f'CTYPE{axis}' - self._wcs_std['Chunk.polarization.axis.axis.cunit'] = \ - f'CUNIT{axis}' - self._wcs_std['Chunk.polarization.axis.function.naxis'] = \ - f'NAXIS{axis}' - self._wcs_std['Chunk.polarization.axis.function.delta'] = \ - f'CDELT{axis}' - self._wcs_std['Chunk.polarization.axis.function.refCoord.pix'] = \ - f'CRPIX{axis}' - self._wcs_std['Chunk.polarization.axis.function.refCoord.val'] = \ - f'CRVAL{axis}' - - self._polarization_axis_configed = True - - def configure_observable_axis(self, axis, override=True): - """ - Set the expected FITS observable keywords by index in the blueprint - and the wcs_std lookup. - Note: observable axis is not a standard WCS and it's not used by - astropy.wcs so, arguably, it can be removed. It is here for now for - consistency purposes. - :param axis: The index expected for the observable axis. - :param override: Set to False when reading from a file. - :return: - """ - if self._obs_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured observable axis.') - return - - if override: - self.set('Chunk.observable.axis.axis.ctype', - ([f'CTYPE{axis}'], None)) - self.set('Chunk.observable.axis.axis.cunit', - ([f'CUNIT{axis}'], None)) - self.set('Chunk.observable.axis.function.refCoord.pix', - ([f'CRPIX{axis}'], None)) - - self._wcs_std['Chunk.observable.axis.axis.ctype'] = \ - f'CTYPE{axis}' - self._wcs_std['Chunk.observable.axis.axis.cunit'] = \ - f'CUNIT{axis}' - self._wcs_std['Chunk.observable.axis.function.refCoord.pix'] = \ - f'CRPIX{axis}' - - self._obs_axis_configed = True - - def configure_time_axis(self, axis, override=True): - """ - Set the expected FITS time keywords by index in the blueprint and - the wcs_std lookup. - - :param axis: The index expected for the time axis. - :param override: Set to False when reading from a file. 
- :return: - """ - if self._time_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured time axis.') - return - - if override: - self.set('Chunk.time.exposure', (['EXPTIME', 'INTTIME'], None)) - self.set('Chunk.time.timesys', (['TIMESYS'], None)) - self.set('Chunk.time.trefpos', (['TREFPOS'], None)) - self.set('Chunk.time.mjdref', (['MJDREF'], None)) - self.set('Chunk.time.resolution', (['TIMEDEL'], None)) - self.set('Chunk.time.axis.axis.ctype', - ([f'CTYPE{axis}'], None)) - self.set('Chunk.time.axis.axis.cunit', - ([f'CUNIT{axis}'], None)) - self.set('Chunk.time.axis.error.syser', - ([f'CSYER{axis}'], None)) - self.set('Chunk.time.axis.error.rnder', - ([f'CRDER{axis}'], None)) - self.set('Chunk.time.axis.function.naxis', - ([f'NAXIS{axis}'], None)) - self.set('Chunk.time.axis.function.delta', - ([f'CDELT{axis}'], None)) - self.set('Chunk.time.axis.function.refCoord.pix', - ([f'CRPIX{axis}'], None)) - self.set('Chunk.time.axis.function.refCoord.val', - ([f'CRVAL{axis}'], None)) - - self._wcs_std['Chunk.time.exposure'] = 'EXPTIME' - self._wcs_std['Chunk.time.resolution'] = 'TIMEDEL' - self._wcs_std['Chunk.time.timesys'] = 'TIMESYS' - self._wcs_std['Chunk.time.trefpos'] = 'TREFPOS' - self._wcs_std['Chunk.time.mjdref'] = 'MJDREF' - - self._wcs_std['Chunk.time.axis.axis.ctype'] = \ - f'CTYPE{axis}' - self._wcs_std['Chunk.time.axis.axis.cunit'] = \ - f'CUNIT{axis}' - self._wcs_std['Chunk.time.axis.error.syser'] = \ - f'CSYER{axis}' - self._wcs_std['Chunk.time.axis.error.rnder'] = \ - f'CRDER{axis}' - self._wcs_std['Chunk.time.axis.function.naxis'] = \ - f'NAXIS{axis}' - self._wcs_std['Chunk.time.axis.function.delta'] = \ - f'CDELT{axis}' - self._wcs_std['Chunk.time.axis.function.refCoord.pix'] = \ - f'CRPIX{axis}' - self._wcs_std['Chunk.time.axis.function.refCoord.val'] = \ - f'CRVAL{axis}' - - self._time_axis_configed = True - - def _guess_axis_info(self): - """Look for info regarding axis types in the blueprint wcs_std. - Configure the blueprint according to the guesses. 
- """ - for ii in self._plan: - if isinstance(self._plan[ii], tuple): - for value in self._plan[ii][0]: - if (value.startswith('CTYPE')) and value[-1].isdigit(): - value = value.split('-')[0] - self._guess_axis_info_from_ctypes(ii, int(value[-1])) - else: - value = self._plan[ii] - if value is None: - continue - if (value.startswith('CTYPE')) and value[-1].isdigit(): - value = value.split('-')[0] - self._guess_axis_info_from_ctypes(ii, int(value[-1])) - - self._guess_axis_info_from_plan() - - def _guess_axis_info_from_plan(self): - for ii in self._plan: - if ii.startswith('Chunk.position') and ii.endswith('axis1.ctype') \ - and not self._axis_info['ra'][1]: - configured_index = self._get_configured_index( - self._axis_info, 'ra') - self._axis_info['ra'] = (configured_index, True) - elif ii.startswith('Chunk.position') and \ - ii.endswith('axis2.ctype') and not \ - self._axis_info['dec'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'dec') - self._axis_info['dec'] = (configured_index, True) - elif ii.startswith('Chunk.energy') and not \ - self._axis_info['energy'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'energy') - self._axis_info['energy'] = (configured_index, True) - elif ii.startswith('Chunk.time') and not \ - self._axis_info['time'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'time') - self._axis_info['time'] = (configured_index, True) - elif ii.startswith('Chunk.polarization') \ - and not self._axis_info['polarization'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'polarization') - self._axis_info['polarization'] = (configured_index, True) - elif ii.startswith('Chunk.observable') and not \ - self._axis_info['obs'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'obs') - self._axis_info['obs'] = (configured_index, True) - elif ii.startswith('Chunk.custom') and not \ - self._axis_info['custom'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'custom') - self._axis_info['custom'] = (configured_index, True) - - if self._axis_info['ra'][1] and self._axis_info['dec'][1]: - self.configure_position_axes( - (self._axis_info['ra'][0], self._axis_info['dec'][0]), False) - elif self._axis_info['ra'][1] or self._axis_info['dec'][1]: - raise ValueError('Only one positional axis found ' - '(ra/dec): {}/{}'. 
- format(self._axis_info['ra'][0], - self._axis_info['dec'][0])) - else: - # assume that positional axis are 1 and 2 by default - if (self._axis_info['time'][0] in [1, 2] or - self._axis_info['energy'][0] in [1, 2] or - self._axis_info['polarization'][0] in [1, 2] or - self._axis_info['obs'][0] in [1, 2] or - self._axis_info['custom'][0] in [1, 2]): - raise ValueError('Cannot determine the positional axis') - else: - self.configure_position_axes((1, 2), False) - - if self._axis_info['time'][1]: - self.configure_time_axis(self._axis_info['time'][0], False) - if self._axis_info['energy'][1]: - self.configure_energy_axis(self._axis_info['energy'][0], False) - if self._axis_info['polarization'][1]: - self.configure_polarization_axis( - self._axis_info['polarization'][0], False) - if self._axis_info['obs'][1]: - self.configure_observable_axis(self._axis_info['obs'][0], False) - if self._axis_info['custom'][1]: - self.configure_custom_axis(self._axis_info['custom'][0], False) - - def _guess_axis_info_from_ctypes(self, lookup, counter): - """ - Check for the presence of blueprint keys in the plan, and whether or - not they indicate an index in their configuration. - - :param lookup: Blueprint plan key. - :param counter: Value to set the index to for an axis. - :param axis_info: local data structure to pass around what is - configured, and what is it's value. - """ - if lookup.startswith('Chunk.energy'): - self._axis_info['energy'] = (counter, True) - elif lookup.startswith('Chunk.polarization'): - self._axis_info['polarization'] = (counter, True) - elif lookup.startswith('Chunk.time'): - self._axis_info['time'] = (counter, True) - elif lookup.startswith('Chunk.position') and lookup.endswith( - 'axis1.ctype'): - self._axis_info['ra'] = (counter, True) - elif lookup.startswith('Chunk.position') and lookup.endswith( - 'axis2.ctype'): - self._axis_info['dec'] = (counter, True) - elif lookup.startswith('Chunk.observable'): - self._axis_info['obs'] = (counter, True) - elif lookup.startswith('Chunk.custom'): - self._axis_info['custom'] = (counter, True) - else: - raise ValueError( - f'Unrecognized axis type: {lookup}') - - def _get_configured_index(self, axis_info, lookup): - """Find the next available index value among those that are not set. - - :param axis_info: local data structure to pass around what is - configured, and what is it's value.""" - DEFAULT_INDICES = {'ra': 1, - 'dec': 2, - 'energy': 3, - 'time': 4, - 'polarization': 5, - 'obs': 6, - 'custom': 7} - - # the logic - if the default index is already used, assign the lowest - # index that is unused, otherwise use the default index - - max_index = 0 - min_index = 7 - default_index = DEFAULT_INDICES[lookup] - default_used = False - for axis in axis_info: - # do two unrelated things in this for loop - # 1. determine where to start counting - if axis_info[axis][1]: - max_index = max(max_index, axis_info[axis][0]) - min_index = min(min_index, axis_info[axis][0]) - # 2. determine if the default is used - if axis_info[axis][1] and default_index == axis_info[axis][0]: - default_used = True - - configured_index = 0 - if default_used: - if min_index == 1: - configured_index = max_index + 1 - else: - configured_index = min(1, min_index) - else: - configured_index = default_index - return configured_index - - def load_from_file(self, file_name): - """ - Load a blueprint from a file. The expected input format is the same - as is output by _serialize. This means there's lots of stripping of - extra spaces, equals signs, and the word default. 
Also manage - square brackets as list construction. - - Accept comments that start with '#'. - - :param file_name: The fully-qualified pathname for the blueprint - file on disk. - """ - with open(file_name) as file: - for line in file: - if '=' in line: - if '#' in line: - if line.find('#') == 0: - # ignore lines starting with a comment - continue - line = line.split('#')[0] - key, value = line.split('=', 1) - if 'default' in value: - temp = value.replace('default', ''). \ - replace('=', '').strip('\n').strip() - default = temp.rsplit(',')[1] - temp_list = temp.rsplit(',')[0].replace('[', ''). \ - replace(']', '').replace('\'', '').split(',') - if 'None' in default: - default = None - else: - default = default.strip() - cleaned_up_value = (temp_list, default) - else: - if '[' in value: - temp_list = value.replace('[', ''). \ - replace(']', '').replace('\'', '').split(',') - temp_list_2 = [] - for ii in temp_list: - temp_list_2.append(ii.strip().strip('\n')) - cleaned_up_value = (temp_list_2, None) - else: - cleaned_up_value = value.strip('\n').strip() - if cleaned_up_value == 'None': - cleaned_up_value = None - self.set(key.strip(), cleaned_up_value) - self._guess_axis_info() - - @classproperty - def CAOM2_ELEMENTS(cls): - """ - List of valid names of CAOM2 elements. - :return: - """ - return list(ObsBlueprint._CAOM2_ELEMENTS) # return a copy - - @classmethod - def check_caom2_element(cls, caom2_element): - """ - Checks that an element is a valid caom2_element in the blueprint. It - checks that it's part of the ObsBlueprint._CAOM2_ELEMENTS - :param caom2_element: name CAOM2 element to check - :raises KeyError - """ - if caom2_element not in cls._CAOM2_ELEMENTS: - raise KeyError( - '{} not a valid CAOM2 element name (mispelling?).'. - format(caom2_element)) - - @staticmethod - def check_chunk(caom2_element): - """ - Checks that an element is a valid Chunk-type caom2_element - :param caom2_element: name CAOM2 element to check - :raises ValueError - """ - if not caom2_element.startswith('Chunk'): - raise ValueError( - "Extension number refers to Chunk elements only") - - @staticmethod - def check_extension(extension): - if extension is not None and extension < 0: - raise ValueError( - f'Extension count failure. {extension} should be >= 0') - - def __str__(self): - plan = self._serialize(self._plan) - - extensions = '' - if self._extensions: - for key in sorted(self._extensions): - extensions = extensions + f'\nextension {key}:\n' +\ - self._serialize(self._extensions[key]) - return plan + extensions - - def _serialize(self, src): - return '\n'.join( - ['{} = {}'.format(key, '{}, default = {}'.format(src[key][0], - src[key][1]) - if isinstance(src[key], tuple) - else src[key]) - for key in ObsBlueprint._CAOM2_ELEMENTS - if key in src]) - - def set(self, caom2_element, value, extension=0): - """ - Sets the value associated with an element in the CAOM2 model. Value - cannot be a tuple. 
- :param caom2_element: name CAOM2 element (as in - ObsBlueprint.CAOM2_ELEMEMTS) - :param value: new value of the CAOM2 element - :param extension: extension number (used only for Chunk elements) - """ - ObsBlueprint.check_caom2_element(caom2_element) - ObsBlueprint.check_extension(extension) - if extension: - ObsBlueprint.check_chunk(caom2_element) - if extension not in self._extensions: - self._extensions[extension] = {} - self._extensions[extension][caom2_element] = value - else: - self._plan[caom2_element] = value - - def add_attribute(self, caom2_element, attribute, extension=0): - """ - Adds an attribute in the list of other attributes associated - with an caom2 element. - :param caom2_element: name CAOM2 element (as in - ObsBlueprint.CAOM2_ELEMEMTS) - :param attribute: name of attribute the element is mapped to - :param extension: extension number (used only for Chunk elements) - :raises AttributeError if the caom2 element has already an associated - value or KeyError if the caom2 element does not exists. - """ - ObsBlueprint.check_caom2_element(caom2_element) - ObsBlueprint.check_extension(extension) - if extension: - ObsBlueprint.check_chunk(caom2_element) - if extension not in self._extensions: - raise AttributeError( - f'No extension {extension} in the blueprint') - else: - if caom2_element in self._extensions[extension]: - if (isinstance(self._extensions[extension][caom2_element], - tuple)): - if (attribute not in - self._extensions[extension][caom2_element][0]): - self._extensions[extension][caom2_element][0].\ - insert(0, attribute) - else: - raise AttributeError( - (f'No attributes in extension {extension} ' - f'associated with keyword {caom2_element}')) - else: - self._extensions[extension][caom2_element] = \ - ([attribute], None) - else: - if caom2_element in self._plan: - if isinstance(self._plan[caom2_element], tuple): - if attribute not in self._plan[caom2_element][0]: - self._plan[caom2_element][0].insert(0, attribute) - else: - raise AttributeError(f'No attributes associated with ' - f'keyword {caom2_element}') - else: - self._plan[caom2_element] = ([attribute], None) - - def add_table_attribute(self, caom2_element, ttype_attribute, extension=0, - index=0): - """ - Adds a FITS BINTABLE TTYPE* lookup, to a list of other FITS attributes - associated with an caom2 element. This does not co-exist with - non-table attributes. - - There is no support for default values for table attributes. - - :param caom2_element: name CAOM2 element (as in - ObsBlueprint.CAOM2_ELEMEMTS) - :param ttype_attribute: name of TTYPE attribute element is mapped to - :param extension: extension number (used only for Chunk elements) - :param index: which row values to return. If index is None, all row - values will be returned as a comma-separated list. - :raises AttributeError if the caom2 element has already an associated - value or KeyError if the caom2 element does not exists. - """ - ObsBlueprint.check_caom2_element(caom2_element) - ObsBlueprint.check_extension(extension) - if extension: - if extension in self._extensions: - if caom2_element in self._extensions[extension]: - if (ObsBlueprint.is_table( - self._extensions[extension][caom2_element])): - if (ttype_attribute not in - self._extensions[extension][caom2_element][1]): - self._extensions[extension][caom2_element][1]. 
\ - insert(0, ttype_attribute) - else: - raise AttributeError( - ('No TTYPE attributes in extension {} associated ' - 'with keyword {}').format(extension, - caom2_element)) - else: - self._extensions[extension][caom2_element] = \ - ('BINTABLE', [ttype_attribute], index) - else: - self._extensions[extension] = {} - self._extensions[extension][caom2_element] = \ - ('BINTABLE', [ttype_attribute], index) - else: - if caom2_element in self._plan: - if ObsBlueprint.is_table(self._plan[caom2_element]): - if ttype_attribute not in self._plan[caom2_element][1]: - self._plan[caom2_element][1].insert(0, ttype_attribute) - else: - raise AttributeError('No TTYPE attributes associated ' - 'with keyword {}'.format( - caom2_element)) - else: - self._plan[caom2_element] = ( - 'BINTABLE', [ttype_attribute], None) - - def set_default(self, caom2_element, default, extension=0): - """ - Sets the default value of a caom2 element that is associated with - attributes. If the element does not exist or does not have a list of - associated attributes, default is set as the associated value - of the element. - - If set is called for the same caom2_element after this, the default - value will be reset to None. - - :param caom2_element: name CAOM2 element (as in - ObsBlueprint.CAOM2_ELEMEMTS) - :param default: default value - :param extension: extension number (used only for Chunk elements) - """ - ObsBlueprint.check_caom2_element(caom2_element) - ObsBlueprint.check_extension(extension) - if extension: - ObsBlueprint.check_chunk(caom2_element) - if extension not in self._extensions: - self._extensions[extension] = {} - if caom2_element in self._extensions[extension] and \ - isinstance(self._extensions[extension][caom2_element], tuple): - self._extensions[extension][caom2_element] = \ - (self._extensions[extension][caom2_element][0], default) - else: - # default is the only value - self._extensions[extension][caom2_element] = default - else: - if (caom2_element in self._plan) and \ - isinstance(self._plan[caom2_element], tuple): - self._plan[caom2_element] = (self._plan[caom2_element][0], - default) - else: - # override the value - self._plan[caom2_element] = default - - def delete(self, caom2_element, extension=0): - """ - Deletes an element from the blueprint - :param caom2_element: name CAOM2 element (as in - ObsBlueprint.CAOM2_ELEMEMTS) - :param extension: extension number - :raises exceptions if the element or extension not found - """ - ObsBlueprint.check_caom2_element(caom2_element) - ObsBlueprint.check_extension(extension) - if extension: - ObsBlueprint.check_chunk(caom2_element) - if extension not in self._extensions: - raise ValueError('Extension {} not configured in blueprint'. - format(extension)) - if caom2_element in self._extensions[extension]: - del self._extensions[extension][caom2_element] - if len(self._extensions[extension]) == 0: - del self._extensions[extension] - else: - if caom2_element in self._plan: - del self._plan[caom2_element] - - def clear(self, caom2_element, extension=0): - """ - Clears the value for an element in the blueprint by resetting it to an - empty list with no default. 
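# A short sketch of how set, add_attribute and set_default above combine; the FITS
# keyword and default value are illustrative only.
from caom2utils.caom2blueprint import ObsBlueprint

ob = ObsBlueprint()
ob.set('Observation.type', 'object')                    # fixed value
ob.add_attribute('Observation.target.name', 'OBJECT')   # look the value up from a keyword
ob.set_default('Observation.target.name', 'unknown')    # fallback when the keyword is absent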
- - :param caom2_element: name CAOM2 element (as in - ObsBlueprint.CAOM2_ELEMEMTS) - :param extension: extension number - :raises exceptions if the element or extension not found - """ - ObsBlueprint.check_caom2_element(caom2_element) - ObsBlueprint.check_extension(extension) - if extension: - ObsBlueprint.check_chunk(caom2_element) - if extension not in self._extensions: - raise ValueError('Extension {} not configured in blueprint'. - format(extension)) - if caom2_element in self._extensions[extension]: - self._extensions[extension][caom2_element] = ([], None) - else: - if caom2_element in self._plan: - self._plan[caom2_element] = ([], None) - - def _get(self, caom2_element, extension=0): - """ - Returns the source associated with a CAOM2 element - :param caom2_element: name CAOM2 element (as in - ObsBlueprint.CAOM2_ELEMEMTS) - :param extension: extension number - :return: Tuple of the form (list_of_associated_attributes, - default_value) OR the actual value associated with the CAOM2 element - """ - ObsBlueprint.check_caom2_element(caom2_element) - ObsBlueprint.check_extension(extension) - if extension: - if (extension in self._extensions) and \ - (caom2_element in self._extensions[extension]): - return self._extensions[extension][caom2_element] - - # look in the minimal plan - if caom2_element not in self._plan: - return None - else: - return self._plan[caom2_element] - - def has_chunk(self, extension): - """What does the plan say about creating chunks for an - extension? - - :return True if there should be a chunk to go along with a part - """ - value = '' - if extension is not None and extension in self._extensions: - if 'Chunk' in self._extensions[extension]: - value = self._extensions[extension]['Chunk'] - elif 'Chunk' in self._plan: - if ((extension is not None and extension == 0) or ( - extension is None)): - value = self._plan['Chunk'] - return not value == '{ignore}' - - @staticmethod - def is_table(value): - """Hide the blueprint structure from clients - they shouldn't need - to know that a value of type tuple requires special processing.""" - return ObsBlueprint.needs_lookup(value) and value[0] == 'BINTABLE' - - @staticmethod - def is_function(value): - """ - Check if a blueprint value has Python 'function' syntax. The - "'/' not in value" clause excludes strings with syntax that enables - addressing HDF5 arrays. 
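# Small sketch of how has_chunk above reads the plan: the special value '{ignore}'
# for 'Chunk' in an extension suppresses Chunk creation for that part (the extension
# number is illustrative).
from caom2utils.caom2blueprint import ObsBlueprint

ob = ObsBlueprint()
ob.set('Chunk', '{ignore}', extension=1)
assert ob.has_chunk(1) is False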
- - :return: True if the value is the name of a function to be executed, - False, otherwise - """ - return (not ObsBlueprint.needs_lookup(value) and isinstance(value, str) - and isinstance(value, str) and '(' in value and ')' in value - and '/' not in value) - - @staticmethod - def has_default_value(value): - """""" - return isinstance(value, tuple) and value[1] - - @staticmethod - def has_no_value(value): - """If functions return None, try not to update the WCS with this - value.""" - return value is None or ( - isinstance(value, str) and 'None' in value.strip()) - - @staticmethod - def needs_lookup(value): - """Hide the blueprint structure from clients - they shouldn't need - to know that a value of type tuple requires special processing.""" - return isinstance(value, tuple) - - def get_configed_axes_count(self): - """:return how many axes have been configured to read from WCS""" - configed_axes = 0 - if self._pos_axes_configed: - configed_axes += 2 - if self._energy_axis_configed: - configed_axes += 1 - if self._time_axis_configed: - configed_axes += 1 - if self._polarization_axis_configed: - configed_axes += 1 - if self._obs_axis_configed: - configed_axes += 1 - if self._custom_axis_configed: - configed_axes += 1 - return configed_axes - - @property - def update(self): - return self._update - - @update.setter - def update(self, value): - self._update = value - - -class Hdf5ObsBlueprint(ObsBlueprint): - """ - Class that specializes the CAOM2 Observation construction based on HDF5 - file content. - - The blueprint designates the source of each of these attributes as either - HDF5 Dataset or Group values. Specific or default values may also be - indicated in the same fashion os for an ObsBlueprint. The blueprint can - be checked by simply displaying it. - - HDF5-specific example: - # create a blueprint and customize it - ob = Hdf5ObsBlueprint(position_axes=(1, 2) - - # lookup value starting with // means rooted at base of the hdf5 file - ob.add_attribute('Observation.target.name', '//header/object/obj_id') - - # lookup value starting with / means rooted at the base of the - # "find_roots_here" parameter for Hdf5Parser - # - # (integer) means return only the value with the index of "integer" - # from a list - ob.add_attribute( - 'Chunk.position.axis.function.refCoord.coord1.pix', - '/header/wcs/crpix(0)') - - # (integer:integer) means return only the value with the index of - # "integer" from a list, followed by "integer" from the list in the - # list - ob.add_attribute( - 'Chunk.position.axis.function.cd11', '/header/wcs/cd(0:0)') - print(ob) - - """ - def __init__(self, position_axes=None, energy_axis=None, - polarization_axis=None, time_axis=None, - obs_axis=None, custom_axis=None, module=None, - update=True, instantiated_class=None): - """ - There are no sensible/known HDF5 defaults for WCS construction, so - default to ensuring the blueprint executes with mostly values of None. - - Use the attribute _wcs_std, so that the list of WCS keywords used - as input is known. 
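# Rough illustration of how the static helpers above classify blueprint values
# (example values only):
from caom2utils.caom2blueprint import ObsBlueprint

ObsBlueprint.needs_lookup((['OBJECT'], 'unknown'))   # True - keyword list plus default
ObsBlueprint.is_function('get_time(uri)')            # True - Python call syntax
ObsBlueprint.is_function('//header/wcs/crpix(0)')    # False - '/' marks an HDF5 path
ObsBlueprint.has_no_value(None)                      # True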
- """ - super().__init__( - position_axes, - energy_axis, - polarization_axis, - time_axis, - obs_axis, - custom_axis, - module, - update, - instantiated_class, - ) - tmp = { - 'Observation.algorithm.name': ([], 'exposure'), - 'Plane.calibrationLevel': ([], CalibrationLevel.RAW_STANDARD), - 'Plane.dataProductType': ([], DataProductType.IMAGE), - 'Artifact.releaseType': ([], ReleaseType.DATA), - 'Chunk': 'include' - } - # using the tmp to make sure that the keywords are valid - for key in tmp: - self.set(key, tmp[key]) - - def configure_custom_axis(self, axis, override=True): - """ - Set the expected custom keywords by index in the blueprint - and the wcs_std lookup. - - :param axis: The index expected for the custom axis. - :param override: Set to False when reading from a file. - """ - if self._custom_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured custom axis.') - return - - if override: - self.set('Chunk.custom.axis.axis.ctype', ([], None)) - self.set('Chunk.custom.axis.axis.cunit', ([], None)) - self.set('Chunk.custom.axis.function.naxis', ([], 1)) - self.set('Chunk.custom.axis.function.delta', ([], None)) - self.set('Chunk.custom.axis.function.refCoord.pix', ([], None)) - self.set('Chunk.custom.axis.function.refCoord.val', ([], None)) - - self._wcs_std['Chunk.custom.axis.axis.ctype'] = '' - self._wcs_std['Chunk.custom.axis.axis.cunit'] = '' - self._wcs_std['Chunk.custom.axis.function.naxis'] = '' - self._wcs_std['Chunk.custom.axis.function.delta'] = '' - self._wcs_std['Chunk.custom.axis.function.refCoord.pix'] = '' - self._wcs_std['Chunk.custom.axis.function.refCoord.val'] = '' - self._custom_axis_configed = True - - def configure_position_axes(self, axes, override=True): - """ - Set the expected spatial keywords by indices in the blueprint and - the wcs_std lookup. - - :param axes: The index expected for the position axes. - :param override: Set to False when reading from a file. 
- """ - if self._pos_axes_configed: - self.logger.debug( - 'Attempt to configure already-configured position axes.') - return - - if override: - self.set('Chunk.position.coordsys', ([], None)) - self.set('Chunk.position.equinox', ([], None)) - self.set('Chunk.position.axis.axis1.ctype', ([], None)) - self.set('Chunk.position.axis.axis1.cunit', ([], None)) - self.set('Chunk.position.axis.axis2.ctype', ([], None)) - self.set('Chunk.position.axis.axis2.cunit', ([], None)) - self.set('Chunk.position.axis.error1.syser', ([], None)) - self.set('Chunk.position.axis.error1.rnder', ([], None)) - self.set('Chunk.position.axis.error2.syser', ([], None)) - self.set('Chunk.position.axis.error2.rnder', ([], None)) - self.set('Chunk.position.axis.function.cd11', ([], None)) - self.set('Chunk.position.axis.function.cd12', ([], None)) - self.set('Chunk.position.axis.function.cd21', ([], None)) - self.set('Chunk.position.axis.function.cd22', ([], None)) - self.set('Chunk.position.axis.function.dimension.naxis1', - ([], 1)) - self.set('Chunk.position.axis.function.dimension.naxis2', - ([], 1)) - self.set('Chunk.position.axis.function.refCoord.coord1.pix', - ([], None)) - self.set('Chunk.position.axis.function.refCoord.coord1.val', - ([], None)) - self.set('Chunk.position.axis.function.refCoord.coord2.pix', - ([], None)) - self.set('Chunk.position.axis.function.refCoord.coord2.val', - ([], None)) - - self._wcs_std['Chunk.position.coordsys'] = '' - self._wcs_std['Chunk.position.equinox'] = '' - - self._wcs_std['Chunk.position.axis.axis1.ctype'] = '' - self._wcs_std['Chunk.position.axis.axis1.cunit'] = '' - self._wcs_std['Chunk.position.axis.axis2.ctype'] = '' - self._wcs_std['Chunk.position.axis.axis2.cunit'] = '' - self._wcs_std['Chunk.position.axis.error1.syser'] = '' - self._wcs_std['Chunk.position.axis.error1.rnder'] = '' - self._wcs_std['Chunk.position.axis.error2.syser'] = '' - self._wcs_std['Chunk.position.axis.error2.rnder'] = '' - self._wcs_std['Chunk.position.axis.function.cd11'] = '' - self._wcs_std['Chunk.position.axis.function.cd12'] = '' - self._wcs_std['Chunk.position.axis.function.cd21'] = '' - self._wcs_std['Chunk.position.axis.function.cd22'] = '' - self._wcs_std['Chunk.position.axis.function.dimension.naxis1'] = '' - self._wcs_std['Chunk.position.axis.function.dimension.naxis2'] = '' - self._wcs_std['Chunk.position.axis.function.refCoord.coord1.pix'] = '' - self._wcs_std['Chunk.position.axis.function.refCoord.coord1.val'] = '' - self._wcs_std['Chunk.position.axis.function.refCoord.coord2.pix'] = '' - self._wcs_std['Chunk.position.axis.function.refCoord.coord2.val'] = '' - - self._pos_axes_configed = True - - def configure_energy_axis(self, axis, override=True): - """ - :param axis: The index expected for the energy axis. - :param override: Set to False when reading from a file. 
- :return: - """ - if self._energy_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured energy axis.') - return - - if override: - self.set('Chunk.energy.specsys', ([], None)) - self.set('Chunk.energy.ssysobs', ([], None)) - self.set('Chunk.energy.restfrq', ([], None)) - self.set('Chunk.energy.restwav', ([], None)) - self.set('Chunk.energy.velosys', ([], None)) - self.set('Chunk.energy.zsource', ([], None)) - self.set('Chunk.energy.ssyssrc', ([], None)) - self.set('Chunk.energy.velang', ([], None)) - - self.set('Chunk.energy.bandpassName', ([], None)) - self.set('Chunk.energy.resolvingPower', ([], None)) - - self.set('Chunk.energy.axis.axis.ctype', ([], None)) - self.set('Chunk.energy.axis.axis.cunit', ([], None)) - self.set('Chunk.energy.axis.error.syser', ([], None)) - self.set('Chunk.energy.axis.error.rnder', ([], None)) - self.set('Chunk.energy.axis.function.naxis', ([], 1)) - self.set('Chunk.energy.axis.function.delta', ([], None)) - self.set('Chunk.energy.axis.function.refCoord.pix', ([], None)) - self.set('Chunk.energy.axis.function.refCoord.val', ([], None)) - - self._wcs_std['Chunk.energy.specsys'] = '' - self._wcs_std['Chunk.energy.ssysobs'] = '' - self._wcs_std['Chunk.energy.restfrq'] = '' - self._wcs_std['Chunk.energy.restwav'] = '' - self._wcs_std['Chunk.energy.velosys'] = '' - self._wcs_std['Chunk.energy.zsource'] = '' - self._wcs_std['Chunk.energy.ssyssrc'] = '' - self._wcs_std['Chunk.energy.velang'] = '' - - self._wcs_std['Chunk.energy.axis.axis.ctype'] = '' - self._wcs_std['Chunk.energy.axis.axis.cunit'] = '' - self._wcs_std['Chunk.energy.axis.error.syser'] = '' - self._wcs_std['Chunk.energy.axis.error.rnder'] = '' - self._wcs_std['Chunk.energy.axis.function.naxis'] = '' - self._wcs_std['Chunk.energy.axis.function.delta'] = '' - self._wcs_std['Chunk.energy.axis.function.refCoord.pix'] = '' - self._wcs_std['Chunk.energy.axis.function.refCoord.val'] = '' - self._energy_axis_configed = True - - def configure_polarization_axis(self, axis, override=True): - """ - Set the expected polarization keywords by index in the blueprint - and the wcs_std lookup. - - :param axis: The index expected for the polarization axis. - :param override: Set to False when reading from a file. - :return: - """ - if self._polarization_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured polarization axis.') - return - - if override: - # STOKES is the only value allowed for PolarizationWCS ctype. - self.set('Chunk.polarization.axis.axis.ctype', ([], 'STOKES')) - self.set('Chunk.polarization.axis.axis.cunit', ([], None)) - self.set('Chunk.polarization.axis.function.naxis', ([], 1)) - self.set('Chunk.polarization.axis.function.delta', ([], None)) - self.set('Chunk.polarization.axis.function.refCoord.pix', - ([], None)) - self.set('Chunk.polarization.axis.function.refCoord.val', - ([], None)) - - self._wcs_std['Chunk.polarization.axis.axis.ctype'] = '' - self._wcs_std['Chunk.polarization.axis.axis.cunit'] = '' - self._wcs_std['Chunk.polarization.axis.function.naxis'] = '' - self._wcs_std['Chunk.polarization.axis.function.delta'] = '' - self._wcs_std['Chunk.polarization.axis.function.refCoord.pix'] = '' - self._wcs_std['Chunk.polarization.axis.function.refCoord.val'] = '' - - self._polarization_axis_configed = True - - def configure_observable_axis(self, axis, override=True): - """ - Set the expected observable keywords by index in the blueprint - and the wcs_std lookup. 
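# Hedged sketch of filling the energy axis purely from an HDF5 blueprint, by analogy
# with the position_axes example in the class docstring; the dataset path is
# illustrative, and the energy_axis keyword is assumed to reach configure_energy_axis
# through the base constructor.
from caom2utils.caom2blueprint import Hdf5ObsBlueprint

ob = Hdf5ObsBlueprint(energy_axis=1)
ob.set('Chunk.energy.axis.axis.ctype', 'WAVE')
ob.set('Chunk.energy.axis.axis.cunit', 'nm')
ob.add_attribute('Chunk.energy.axis.function.refCoord.val', '/header/wcs/crval(0)')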
- Note: observable axis is not a standard WCS and it's not used by - astropy.wcs so, arguably, it can be removed. It is here for now for - consistency purposes. - :param axis: The index expected for the observable axis. - :param override: Set to False when reading from a file. - :return: - """ - if self._obs_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured observable axis.') - return - - if override: - self.set('Chunk.observable.axis.axis.ctype', ([], None)) - self.set('Chunk.observable.axis.axis.cunit', ([], None)) - self.set('Chunk.observable.axis.function.refCoord.pix', ([], None)) - - self._wcs_std['Chunk.observable.axis.axis.ctype'] = '' - self._wcs_std['Chunk.observable.axis.axis.cunit'] = '' - self._wcs_std['Chunk.observable.axis.function.refCoord.pix'] = '' - - self._obs_axis_configed = True - - def configure_time_axis(self, axis, override=True): - """ - Set the expected time keywords by index in the blueprint and - the wcs_std lookup. - - :param axis: The index expected for the time axis. - :param override: Set to False when reading from a file. - :return: - """ - if self._time_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured time axis.') - return - - if override: - self.set('Chunk.time.exposure', ([], None)) - self.set('Chunk.time.timesys', ([], None)) - self.set('Chunk.time.trefpos', ([], None)) - self.set('Chunk.time.mjdref', ([], None)) - self.set('Chunk.time.resolution', ([], None)) - self.set('Chunk.time.axis.axis.ctype', ([], None)) - self.set('Chunk.time.axis.axis.cunit', ([], None)) - self.set('Chunk.time.axis.error.syser', ([], None)) - self.set('Chunk.time.axis.error.rnder', ([], None)) - self.set('Chunk.time.axis.function.naxis', ([], 1)) - self.set('Chunk.time.axis.function.delta', ([], None)) - self.set('Chunk.time.axis.function.refCoord.pix', ([], None)) - self.set('Chunk.time.axis.function.refCoord.val', ([], None)) - - self._wcs_std['Chunk.time.exposure'] = '' - self._wcs_std['Chunk.time.resolution'] = '' - self._wcs_std['Chunk.time.timesys'] = '' - self._wcs_std['Chunk.time.trefpos'] = '' - self._wcs_std['Chunk.time.mjdref'] = '' - - self._wcs_std['Chunk.time.axis.axis.ctype'] = '' - self._wcs_std['Chunk.time.axis.axis.cunit'] = '' - self._wcs_std['Chunk.time.axis.error.syser'] = '' - self._wcs_std['Chunk.time.axis.error.rnder'] = '' - self._wcs_std['Chunk.time.axis.function.naxis'] = '' - self._wcs_std['Chunk.time.axis.function.delta'] = '' - self._wcs_std['Chunk.time.axis.function.refCoord.pix'] = '' - self._wcs_std['Chunk.time.axis.function.refCoord.val'] = '' - - self._time_axis_configed = True - - def set(self, caom2_element, value, extension=0): - """ - Sets the value associated with an element in the CAOM2 model. Value - cannot be a tuple. - :param caom2_element: name CAOM2 element (as in - ObsBlueprint.CAOM2_ELEMEMTS) - :param value: new value of the CAOM2 element - :param extension: extension number (used only for Chunk elements) - """ - if hasattr(value, 'decode'): - value = value.decode('utf-8') - super().set(caom2_element, value, extension) - - def _guess_axis_info(self): - self._guess_axis_info_from_plan() - - -class BlueprintParser: - """ - Extract CAOM2 metadata from files with no WCS information. 
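# Minimal sketch of the HDF5-specific set() override above: byte strings, as read
# from h5py datasets, are decoded before being stored (the value is invented).
from caom2utils.caom2blueprint import Hdf5ObsBlueprint

hdf5_bp = Hdf5ObsBlueprint()
hdf5_bp.set('Observation.target.name', b'NGC 7027')
# the plan now holds the str 'NGC 7027', not bytes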
- """ - def __init__(self, obs_blueprint=None, uri=None): - if obs_blueprint: - self._blueprint = obs_blueprint - else: - self._blueprint = ObsBlueprint() - self._errors = [] - self.logger = logging.getLogger(__name__) - self.uri = uri - self.apply_blueprint() - - @property - def blueprint(self): - return self._blueprint - - @blueprint.setter - def blueprint(self, value): - self._blueprint = value - self.apply_blueprint() - - def apply_blueprint(self): - plan = self.blueprint._plan - - # first apply the functions - if (self.blueprint._module is not None or - self.blueprint._module_instance is not None): - for key, value in plan.items(): - if ObsBlueprint.is_function(value): - if self._blueprint._module_instance is None: - plan[key] = self._execute_external(value, key, 0) - else: - plan[key] = self._execute_external_instance( - value, key, 0) - - # apply defaults - for key, value in plan.items(): - if ObsBlueprint.has_default_value(value): - # there is a default value set - if key in plan: - plan[key] = value[1] - - def augment_observation(self, observation, artifact_uri, product_id=None): - """ - Augments a given observation with plane structure only. - :param observation: existing CAOM2 observation to be augmented. - :param artifact_uri: the key for finding the artifact to augment - :param product_id: the key for finding for the plane to augment - """ - self.logger.debug( - f'Begin CAOM2 observation augmentation for URI {artifact_uri}.') - if observation is None or not isinstance(observation, Observation): - raise ValueError( - f'Observation type mis-match for {observation}.') - - observation.meta_release = self._get_datetime(self._get_from_list( - 'Observation.metaRelease', index=0, - current=observation.meta_release)) - observation.meta_read_groups = self._get_from_list( - 'Observation.metaReadGroups', index=0, - current=observation.meta_read_groups) - observation.meta_producer = self._get_from_list( - 'Observation.metaProducer', index=0, - current=observation.meta_producer) - - plane = None - if not product_id: - product_id = self._get_from_list('Plane.productID', index=0) - if product_id is None: - raise ValueError('product ID required') - - for ii in observation.planes: - if observation.planes[ii].product_id == product_id: - plane = observation.planes[product_id] - break - if plane is None: - plane = Plane(product_id=product_id) - observation.planes[product_id] = plane - self.augment_plane(plane, artifact_uri) - self.logger.debug( - f'End CAOM2 observation augmentation for {artifact_uri}.') - - def augment_plane(self, plane, artifact_uri): - """ - Augments a given plane with artifact structure only. - :param plane: existing CAOM2 plane to be augmented. 
- :param artifact_uri: - """ - self.logger.debug( - f'Begin CAOM2 plane augmentation for {artifact_uri}.') - if plane is None or not isinstance(plane, Plane): - raise ValueError(f'Plane type mis-match for {plane}') - - plane.meta_release = self._get_datetime(self._get_from_list( - 'Plane.metaRelease', index=0, current=plane.meta_release)) - plane.data_release = self._get_datetime(self._get_from_list( - 'Plane.dataRelease', index=0, current=plane.data_release)) - plane.data_product_type = self._to_data_product_type( - self._get_from_list('Plane.dataProductType', index=0, - current=plane.data_product_type)) - plane.calibration_level = self._to_calibration_level(_to_int_32( - self._get_from_list('Plane.calibrationLevel', index=0, - current=plane.calibration_level))) - plane.meta_producer = self._get_from_list( - 'Plane.metaProducer', index=0, current=plane.meta_producer) - - artifact = None - for ii in plane.artifacts: - artifact = plane.artifacts[ii] - if artifact.uri == artifact_uri: - break - if artifact is None or artifact.uri != artifact_uri: - artifact = Artifact(artifact_uri, self._to_product_type( - self._get_from_list('Artifact.productType', index=0)), - self._to_release_type(self._get_from_list( - 'Artifact.releaseType', index=0))) - plane.artifacts[artifact_uri] = artifact - self.augment_artifact(artifact, 0) - self.logger.debug( - f'End CAOM2 plane augmentation for {artifact_uri}.') - - def augment_artifact(self, artifact, index): - """ - Augments a given CAOM2 artifact with available information - :param artifact: existing CAOM2 artifact to be augmented - :param index: int Part name, used in specializing classes - """ - self.logger.debug(f'Begin CAOM2 artifact augmentation for {self.uri}.') - if artifact is None or not isinstance(artifact, Artifact): - raise ValueError( - f'Artifact type mis-match for {artifact}') - - artifact.product_type = self._to_product_type(self._get_from_list( - 'Artifact.productType', index=0, current=artifact.product_type)) - artifact.release_type = self._to_release_type(self._get_from_list( - 'Artifact.releaseType', index=0, current=artifact.release_type)) - artifact.content_type = self._get_from_list( - 'Artifact.contentType', index=0, current=artifact.content_type) - artifact.content_length = self._get_from_list( - 'Artifact.contentLength', index=0, current=artifact.content_length) - artifact.content_checksum = _to_checksum_uri(self._get_from_list( - 'Artifact.contentChecksum', index=0, - current=artifact.content_checksum)) - artifact.content_release = self._get_from_list( - 'Artifact.contentRelease', index=0, - current=artifact.content_release) - artifact.content_read_groups = self._get_from_list( - 'Artifact.contentReadGroups', index=0, - current=artifact.content_read_groups) - artifact.meta_producer = self._get_from_list( - 'Artifact.metaProducer', index=0, current=artifact.meta_producer) - self.logger.debug(f'End CAOM2 artifact augmentation for {self.uri}.') - - def _get_from_list(self, lookup, index, current=None): - value = None - try: - keywords = self.blueprint._get(lookup) - except KeyError: - self.add_error(lookup, sys.exc_info()[1]) - self.logger.debug( - f'Could not find {lookup} in configuration.') - if current: - self.logger.debug( - f'{lookup}: using current value of {current!r}.') - value = current - return value - if (keywords is not None and not ObsBlueprint.needs_lookup(keywords) - and not ObsBlueprint.is_function(keywords)): - value = keywords - elif self._blueprint.update: - # The first clause: boolean attributes are used to 
represent - # three different values: True, False, and unknown. For boolean - # attributes _only_ assessed that the risk of setting to None - # accidentally was better than being unable to set a value of - # 'unknown'. - # - # The second clause: the default value for the current parameter - # in the method signature is 'None', so do not want to - # inadvertently assign the default value. - # - if isinstance(value, bool) or current is not None: - value = current - - self.logger.debug(f'{lookup}: value is {value}') - return value - - def _get_set_from_list(self, lookup, index): - value = None - keywords = None - try: - keywords = self.blueprint._get(lookup) - except KeyError: - self.add_error(lookup, sys.exc_info()[1]) - self.logger.debug(f'Could not find \'{lookup}\' in caom2blueprint ' - f'configuration.') - - # if there's something useful as a value in the keywords, - # extract it - if keywords: - if ObsBlueprint.needs_lookup(keywords): - # if there's a default value use it - if keywords[1]: - value = keywords[1] - self.logger.debug( - f'{lookup}: assigned default value {value}.') - elif not ObsBlueprint.is_function(keywords): - value = keywords - self.logger.debug(f'{lookup}: assigned value {value}.') - return value - - def add_error(self, key, message): - self._errors.append('{} {} {}'.format( - datetime.now().strftime('%Y-%m-%dT%H:%M:%S'), key, message)) - - def _to_data_product_type(self, value): - return self._to_enum_type(value, DataProductType) - - def _to_calibration_level(self, value): - return self._to_enum_type(value, CalibrationLevel) - - def _to_product_type(self, value): - return self._to_enum_type(value, ProductType) - - def _to_release_type(self, value): - return self._to_enum_type(value, ReleaseType) - - def _to_enum_type(self, value, to_enum_type): - if value is None: - raise ValueError( - f'Must set a value of {to_enum_type.__name__} for ' - f'{self.uri}.') - elif isinstance(value, to_enum_type): - return value - else: - return to_enum_type(value) - - def _execute_external(self, value, key, extension): - """Execute a function supplied by a user, assign a value to a - blueprint entry. The input parameters passed to the function are the - headers as read in by astropy, or the artifact uri. - - :param value the name of the function to apply. - :param key: - :param extension: the current extension name or number. 
- """ - # determine which of the possible values for parameter the user - # is hoping for - if 'uri' in value: - parameter = self.uri - elif 'header' in value and isinstance(self, FitsParser): - parameter = self._headers[extension] - elif isinstance(self, FitsParser): - parameter = {'uri': self.uri, - 'header': self._headers[extension]} - else: - if hasattr(self, '_file'): - parameter = {'base': self._file} - else: - parameter = {'uri': self.uri, - 'header': None} - - result = '' - execute = None - try: - execute = getattr(self.blueprint._module, value.split('(')[0]) - except Exception as e: - msg = 'Failed to find {}.{} for {}'.format( - self.blueprint._module.__name__, value.split('(')[0], key) - self.logger.error(msg) - self._errors.append(msg) - tb = traceback.format_exc() - self.logger.debug(tb) - self.logger.error(e) - try: - result = execute(parameter) - self.logger.debug(f'Key {key} calculated value of {result} using {value} type {type(result)}') - except Exception as e: - msg = 'Failed to execute {} for {} in {}'.format( - execute.__name__, key, self.uri) - self.logger.error(msg) - self.logger.debug('Input parameter was {}, value was {}'.format( - parameter, value)) - self._errors.append(msg) - tb = traceback.format_exc() - self.logger.debug(tb) - self.logger.error(e) - return result - - def _execute_external_instance(self, value, key, extension): - """Execute a function supplied by a user, assign a value to a - blueprint entry. The input parameters passed to the function are the - headers as read in by astropy, or the artifact uri. - - :param value the name of the function to apply. - :param key: - :param extension: the current extension name or number. - :raise Caom2Exception exception raised when there is a recognizable - error in the information being used to create a CAOM2 record. A - correct and consistent CAOM2 record cannot be created from the - input metadata. The client should treat the Observation instance - under construction as invalid. - """ - result = '' - try: - execute = getattr( - self.blueprint._module_instance, value.split('(')[0]) - except Exception as e: - msg = 'Failed to find {}.{} for {}'.format( - self.blueprint._module_instance.__class__.__name__, - value.split('(')[0], key) - self.logger.error(msg) - self._errors.append(msg) - tb = traceback.format_exc() - self.logger.debug(tb) - self.logger.error(e) - return result - try: - result = execute(extension) - self.logger.debug('Key {} calculated value of {} using {}'.format(key, result, value)) - except ValueError as e2: - # DB 23-03-22 - # Anything that you can do to make the CAOM2 record creation fail - # in this case of bad WCS metadata would be useful. Use - # ValueError because that happens to be what astropy is throwing - # for a SkyCoord construction failure. - raise Caom2Exception(e2) - except Exception as e: - msg = 'Failed to execute {} for {} in {}'.format( - execute, key, self.uri) - self.logger.error(msg) - self.logger.debug('Input value was {}'.format(value)) - self._errors.append(msg) - tb = traceback.format_exc() - self.logger.debug(tb) - self.logger.error(e) - return result - - def _get_datetime(self, from_value): - """ - Ensure datetime values are in MJD. Really. Just not yet. 
- :param from_value: - :return: - """ - if from_value: - if isinstance(from_value, datetime): - return from_value - elif isinstance(from_value, Time): - return from_value.datetime - else: - result = None - # CFHT 2003/03/29,01:34:54 - # CFHT 2003/03/29 - # DDO 12/02/95 - for dt_format in ['%Y-%m-%dT%H:%M:%S', '%Y-%m-%dT%H:%M:%S.%f', - '%Y-%m-%d %H:%M:%S.%f', '%Y-%m-%d', - '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S', - '%Y/%m/%d,%H:%M:%S', '%Y/%m/%d', - '%d/%m/%y', '%d/%m/%y %H:%M:%S', '%d-%m-%Y']: - try: - result = datetime.strptime(from_value, dt_format) - except ValueError: - pass - - if result is None: - self.logger.error('Cannot parse datetime {}'.format( - from_value)) - self.add_error('get_datetime', sys.exc_info()[1]) - return result - else: - return None - - -class ContentParser(BlueprintParser): - - def __init__(self, obs_blueprint=None, uri=None): - super().__init__(obs_blueprint, uri) - self._wcs_parser = WcsParser(obs_blueprint, extension=0) - - def _get_chunk_naxis(self, chunk, index): - chunk.naxis = self._get_from_list( - 'Chunk.naxis', index, self.blueprint.get_configed_axes_count()) - - def augment_artifact(self, artifact, index): - """ - Augments a given CAOM2 artifact with available content information - :param artifact: existing CAOM2 artifact to be augmented - :param index: int Part name - """ - super().augment_artifact(artifact, index) - - self.logger.debug( - f'Begin content artifact augmentation for {artifact.uri}') - - if self.blueprint.get_configed_axes_count() == 0: - raise TypeError( - f'No WCS Data. End content artifact augmentation for ' - f'{artifact.uri}.') - - if self.add_parts(artifact, index): - part = artifact.parts[str(index)] - part.product_type = self._get_from_list('Part.productType', index) - part.meta_producer = self._get_from_list('Part.metaProducer', index=0, current=part.meta_producer) - - # each Part has one Chunk, if it's not an empty part as determined - # just previously - if not part.chunks: - part.chunks.append(Chunk()) - chunk = part.chunks[0] - chunk.meta_producer = self._get_from_list('Chunk.metaProducer', index=0, current=chunk.meta_producer) - - self._get_chunk_naxis(chunk, index) - - # order by which the blueprint is used to set WCS information: - # 1 - try to construct the information for an axis from WCS information - # 2 - if the WCS information is insufficient, try to construct the information from the blueprint - # 3 - Always try to fill the range metadata from the blueprint. - if self.blueprint._pos_axes_configed: - self._wcs_parser.augment_position(chunk) - self._try_position_with_blueprint(chunk, index) - - if self.blueprint._energy_axis_configed: - self._wcs_parser.augment_energy(chunk) - self._try_energy_with_blueprint(chunk, index) - - if self.blueprint._time_axis_configed: - self._wcs_parser.augment_temporal(chunk) - self._try_time_with_blueprint(chunk, index) - - if self.blueprint._polarization_axis_configed: - self._wcs_parser.augment_polarization(chunk) - self._try_polarization_with_blueprint(chunk, index) - - if self.blueprint._obs_axis_configed: - self._wcs_parser.augment_observable(chunk) - self._try_observable_with_blueprint(chunk, index) - - if self.blueprint._custom_axis_configed: - self._wcs_parser.augment_custom(chunk) - self._try_custom_with_blueprint(chunk, index) - - self.logger.debug( - f'End content artifact augmentation for {artifact.uri}.') - - def augment_observation(self, observation, artifact_uri, product_id=None): - """ - Augments a given observation with available content information. 
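# Illustration of the date-format fall-through in _get_datetime above (example
# values only; any BlueprintParser instance will do).
from caom2utils.caom2blueprint import BlueprintParser

parser = BlueprintParser()
parser._get_datetime('2003/03/29,01:34:54')   # datetime(2003, 3, 29, 1, 34, 54)
parser._get_datetime('12/02/95')              # datetime(1995, 2, 12, 0, 0)
parser._get_datetime('not-a-date')            # None, and the failure is logged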
- :param observation: existing CAOM2 observation to be augmented. - :param artifact_uri: the key for finding the artifact to augment - :param product_id: the key for finding for the plane to augment - """ - super().augment_observation(observation, artifact_uri, product_id) - self.logger.debug( - f'Begin content observation augmentation for URI {artifact_uri}.') - members = self._get_members(observation) - if members: - if isinstance(members, TypedSet): - for m in members: - observation.members.add(m) - else: - for m in members.split(): - observation.members.add(ObservationURI(m)) - observation.algorithm = self._get_algorithm(observation) - - observation.sequence_number = _to_int(self._get_from_list( - 'Observation.sequenceNumber', index=0)) - observation.intent = self._get_from_list( - 'Observation.intent', 0, (ObservationIntentType.SCIENCE if - observation.intent is None else - observation.intent)) - observation.type = self._get_from_list('Observation.type', 0, - current=observation.type) - observation.meta_release = self._get_datetime( - self._get_from_list('Observation.metaRelease', 0, - current=observation.meta_release)) - observation.meta_read_groups = self._get_from_list( - 'Observation.metaReadGroups', 0) - observation.meta_producer = self._get_from_list( - 'Observation.metaProducer', 0, current=observation.meta_producer) - observation.requirements = self._get_requirements( - observation.requirements) - observation.instrument = self._get_instrument(observation.instrument) - observation.proposal = self._get_proposal(observation.proposal) - observation.target = self._get_target(observation.target) - observation.target_position = self._get_target_position( - observation.target_position) - observation.telescope = self._get_telescope(observation.telescope) - observation.environment = self._get_environment( - observation.environment) - self.logger.debug( - f'End content observation augmentation for {artifact_uri}.') - - def augment_plane(self, plane, artifact_uri): - """ - Augments a given plane with available content information. - :param plane: existing CAOM2 plane to be augmented. - :param artifact_uri: - """ - super().augment_plane(plane, artifact_uri) - self.logger.debug( - f'Begin content plane augmentation for {artifact_uri}.') - - plane.meta_release = self._get_datetime(self._get_from_list( - 'Plane.metaRelease', index=0, current=plane.meta_release)) - plane.data_release = self._get_datetime(self._get_from_list( - 'Plane.dataRelease', index=0)) - plane.data_product_type = self._to_data_product_type( - self._get_from_list('Plane.dataProductType', index=0, - current=plane.data_product_type)) - plane.calibration_level = self._to_calibration_level(_to_int_32( - self._get_from_list('Plane.calibrationLevel', index=0, - current=plane.calibration_level))) - plane.meta_producer = self._get_from_list( - 'Plane.metaProducer', index=0, current=plane.meta_producer) - plane.observable = self._get_observable(current=plane.observable) - plane.provenance = self._get_provenance(plane.provenance) - plane.metrics = self._get_metrics(current=plane.metrics) - plane.quality = self._get_quality(current=plane.quality) - - self.logger.debug( - f'End content plane augmentation for {artifact_uri}.') - - def _get_algorithm(self, obs): - """ - Create an Algorithm instance populated with available content - information. 
- :return: Algorithm - """ - self.logger.debug('Begin Algorithm augmentation.') - # TODO DEFAULT VALUE - name = self._get_from_list('Observation.algorithm.name', index=0, - current=obs.algorithm.name) - if name is not None and name == 'exposure' and isinstance(obs, DerivedObservation): - # stop the raising of a ValueError when adding a Plane representing a SimpleObservation to a - # DerivedObservation under construction. It results in attempting to change Algorithm.name value to - # 'exposure' otherwise. - result = obs.algorithm - else: - result = Algorithm(str(name)) if name else None - self.logger.debug('End Algorithm augmentation.') - return result - - def _get_energy_transition(self, current): - """ - Create an EnergyTransition instance populated with available content - information. - :return: EnergyTransition - """ - self.logger.debug('Begin EnergyTransition augmentation.') - species = self._get_from_list( - 'Chunk.energy.transition.species', index=0, - current=None if current is None else current.species) - transition = self._get_from_list( - 'Chunk.energy.transition.transition', index=0, - current=None if current is None else current.transition) - result = None - if species is not None and transition is not None: - result = EnergyTransition(species, transition) - self.logger.debug('End EnergyTransition augmentation.') - return result - - def _get_environment(self, current): - """ - Create an Environment instance populated with available content - information. - :current Environment instance, if one already exists in the - Observation - :return: Environment - """ - self.logger.debug('Begin Environment augmentation.') - seeing = self._get_from_list( - 'Observation.environment.seeing', index=0, - current=None if current is None else current.seeing) - humidity = _to_float( - self._get_from_list( - 'Observation.environment.humidity', index=0, - current=None if current is None else current.humidity)) - elevation = self._get_from_list( - 'Observation.environment.elevation', index=0, - current=None if current is None else current.elevation) - tau = self._get_from_list( - 'Observation.environment.tau', index=0, - current=None if current is None else current.tau) - wavelength_tau = self._get_from_list( - 'Observation.environment.wavelengthTau', index=0, - current=None if current is None else current.wavelength_tau) - ambient = _to_float( - self._get_from_list( - 'Observation.environment.ambientTemp', index=0, - current=None if current is None else current.ambient_temp)) - photometric = self._cast_as_bool(self._get_from_list( - 'Observation.environment.photometric', index=0, - current=None if current is None else current.photometric)) - enviro = None - if seeing or humidity or elevation or tau or wavelength_tau or ambient: - enviro = Environment() - enviro.seeing = seeing - enviro.humidity = humidity - enviro.elevation = elevation - enviro.tau = tau - enviro.wavelength_tau = wavelength_tau - enviro.ambient_temp = ambient - enviro.photometric = photometric - self.logger.debug('End Environment augmentation.') - return enviro - - def _get_instrument(self, current): - """ - Create an Instrument instance populated with available content - information. 
- :return: Instrument - """ - self.logger.debug('Begin Instrument augmentation.') - name = self._get_from_list( - 'Observation.instrument.name', index=0, - current=None if current is None else current.name) - keywords = self._get_set_from_list( - 'Observation.instrument.keywords', index=0) - instr = None - if name: - instr = Instrument(str(name)) - ContentParser._add_keywords(keywords, current, instr) - self.logger.debug('End Instrument augmentation.') - return instr - - def _get_members(self, obs): - """ - Returns the members of a derived observation (if specified) - :param obs: observation to augment - :return: members value - """ - members = None - self.logger.debug('Begin Members augmentation.') - if (isinstance(obs, SimpleObservation) and - (self.blueprint._get('DerivedObservation.members') or - self.blueprint._get('CompositeObservation.members'))): - raise TypeError( - 'Cannot apply blueprint for DerivedObservation to a ' - 'simple observation') - elif isinstance(obs, DerivedObservation): - lookup = self.blueprint._get('DerivedObservation.members', - extension=1) - if ObsBlueprint.is_table(lookup) and len(self.headers) > 1: - member_list = self._get_from_table( - 'DerivedObservation.members', 1) - # ensure the members are good little ObservationURIs - if member_list.startswith('caom:'): - members = member_list - else: - members = ' '.join(['caom:{}/{}'.format( - obs.collection, i) if not i.startswith('caom') else i - for i in member_list.split()]) - else: - if obs.members is None: - members = self._get_from_list( - 'DerivedObservation.members', index=0) - else: - members = self._get_from_list( - 'DerivedObservation.members', index=0, - current=obs.members) - elif isinstance(obs, CompositeObservation): - lookup = self.blueprint._get('CompositeObservation.members', - extension=1) - if ObsBlueprint.is_table(lookup) and len(self.headers) > 1: - member_list = self._get_from_table( - 'CompositeObservation.members', 1) - # ensure the members are good little ObservationURIs - if member_list.startswith('caom:'): - members = member_list - else: - members = ' '.join(['caom:{}/{}'.format( - obs.collection, i) if not i.startswith('caom') else i - for i in member_list.split()]) - else: - if obs.members is None: - members = self._get_from_list( - 'CompositeObservation.members', index=0) - else: - members = self._get_from_list( - 'CompositeObservation.members', index=0, - current=obs.members) - self.logger.debug('End Members augmentation.') - return members - - def _get_metrics(self, current): - """ - Create a Metrics instance populated with available content information. 
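# Rough restatement of the member normalization in _get_members above, outside the
# class; the collection name and member ids are invented.
member_list = 'obs1 caom:TEST/obs2'
members = ' '.join(
    ['caom:{}/{}'.format('TEST', i) if not i.startswith('caom') else i
     for i in member_list.split()])
# members == 'caom:TEST/obs1 caom:TEST/obs2'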
- :return: Metrics - """ - self.logger.debug('Begin Metrics augmentation.') - source_number_density = self._get_from_list( - 'Plane.metrics.sourceNumberDensity', index=0, - current=None if current is None else current.source_number_density) - background = self._get_from_list( - 'Plane.metrics.background', index=0, - current=None if current is None else current.background) - background_stddev = self._get_from_list( - 'Plane.metrics.backgroundStddev', index=0, - current=None if current is None else current.background_std_dev) - flux_density_limit = self._get_from_list( - 'Plane.metrics.fluxDensityLimit', index=0, - current=None if current is None else current.flux_density_limit) - mag_limit = self._get_from_list( - 'Plane.metrics.magLimit', index=0, - current=None if current is None else current.mag_limit) - sample_snr = self._get_from_list( - 'Plane.metrics.sampleSNR', index=0, - current=None if current is None else current.sample_snr) - - metrics = None - if (source_number_density or background or background_stddev or - flux_density_limit or mag_limit or sample_snr): - metrics = Metrics() - metrics.source_number_density = source_number_density - metrics.background = background - metrics.background_std_dev = background_stddev - metrics.flux_density_limit = flux_density_limit - metrics.mag_limit = mag_limit - metrics.sample_snr = sample_snr - self.logger.debug('End Metrics augmentation.') - return metrics - - def _get_axis_wcs(self, label, wcs, index): - """Helper function to construct a CoordAxis1D instance, with all - it's members, from the blueprint. - - :param label: axis name - must be one of 'custom', 'energy', 'time', or 'polarization', as it's used for the - blueprint lookup. - :param index: which blueprint index to find a value in - :return an instance of CoordAxis1D - """ - self.logger.debug(f'Begin {label} axis construction from blueprint.') - - aug_axis = None - aug_error = None - if wcs is not None and wcs.axis is not None and wcs.axis.axis is not None: - aug_axis = wcs.axis.axis - aug_error = wcs.axis.error - else: - aug_axis_ctype = self._get_from_list(f'Chunk.{label}.axis.axis.ctype', index) - aug_axis_cunit = self._get_from_list(f'Chunk.{label}.axis.axis.cunit', index) - if aug_axis_ctype is not None: - aug_axis = Axis(aug_axis_ctype, aug_axis_cunit) - self.logger.debug(f'Creating {label} Axis for {self.uri} from blueprint') - - aug_error = self._two_param_constructor( - f'Chunk.{label}.axis.error.syser', - f'Chunk.{label}.axis.error.rnder', - index, _to_float, CoordError) - - aug_naxis = None - aug_range = self._try_range(index, label) - aug_naxis_index = None - if aug_axis is not None: - if aug_range is None: - if wcs is None or wcs.axis is None or wcs.axis.function is None: - aug_ref_coord = self._two_param_constructor( - f'Chunk.{label}.axis.function.refCoord.pix', - f'Chunk.{label}.axis.function.refCoord.val', - index, _to_float, RefCoord) - aug_delta = _to_float(self._get_from_list(f'Chunk.{label}.axis.function.delta', index)) - aug_length = _to_int(self._get_from_list(f'Chunk.{label}.axis.function.naxis', index)) - aug_function = None - if aug_length is not None and aug_delta is not None and aug_ref_coord is not None: - aug_function = CoordFunction1D(aug_length, aug_delta, aug_ref_coord) - aug_naxis = CoordAxis1D(aug_axis, aug_error, None, None, aug_function) - if aug_function is not None: - # if the WCS is described with a Function, cutouts can be supported, so specify an axis - aug_naxis_index = _to_int(self._get_from_list(f'Chunk.{label}Axis', index)) - 
self.logger.debug(f'Creating function {label} CoordAxis1D for {self.uri} from blueprint') - else: - aug_naxis = CoordAxis1D(axis=aug_axis, error=aug_error, range=aug_range) - self.logger.debug(f'Creating range {label} CoordAxis1D for {self.uri} from blueprint') - - self.logger.debug(f'End {label} axis construction from blueprint.') - return aug_naxis, aug_naxis_index - - def _get_observable(self, current): - """ - Create a Observable instance populated with available content - information. - :return: Observable - """ - self.logger.debug('Begin Observable augmentation.') - ucd = self._get_from_list( - 'Plane.observable.ucd', index=0, - current=None if current is None else current.ucd) - observable = Observable(ucd) if ucd else None - self.logger.debug('End Observable augmentation.') - return observable - - def _get_proposal(self, current): - """ - Create a Proposal instance populated with available content - information. - :return: Proposal - """ - self.logger.debug('Begin Proposal augmentation.') - prop_id = self._get_from_list( - 'Observation.proposal.id', index=0, - current=None if current is None else current.id) - pi = self._get_from_list( - 'Observation.proposal.pi', index=0, - current=None if current is None else current.pi_name) - project = self._get_from_list( - 'Observation.proposal.project', index=0, - current=None if current is None else current.project) - title = self._get_from_list( - 'Observation.proposal.title', index=0, - current=None if current is None else current.title) - keywords = self._get_set_from_list( - 'Observation.proposal.keywords', index=0) - proposal = current - if prop_id: - proposal = Proposal(str(prop_id), pi, project, title) - ContentParser._add_keywords(keywords, current, proposal) - self.logger.debug(f'End Proposal augmentation {prop_id}.') - return proposal - - def _get_provenance(self, current): - """ - Create a Provenance instance populated with available Content - information. 
- :return: Provenance - """ - self.logger.debug('Begin Provenance augmentation.') - name = _to_str( - self._get_from_list( - 'Plane.provenance.name', index=0, - current=None if current is None else current.name)) - p_version = _to_str(self._get_from_list( - 'Plane.provenance.version', index=0, - current=None if current is None else current.version)) - project = _to_str( - self._get_from_list( - 'Plane.provenance.project', index=0, - current=None if current is None else current.project)) - producer = _to_str( - self._get_from_list( - 'Plane.provenance.producer', index=0, - current=None if current is None else current.producer)) - run_id = _to_str( - self._get_from_list( - 'Plane.provenance.runID', index=0, - current=None if current is None else current.run_id)) - reference = _to_str( - self._get_from_list( - 'Plane.provenance.reference', index=0, - current=None if current is None else current.reference)) - last_executed = self._get_datetime( - self._get_from_list( - 'Plane.provenance.lastExecuted', index=0, - current=None if current is None else current.last_executed)) - keywords = self._get_set_from_list( - 'Plane.provenance.keywords', index=0) - inputs = self._get_set_from_list('Plane.provenance.inputs', index=0) - prov = None - if name: - prov = Provenance(name, p_version, project, producer, run_id, - reference, last_executed) - ContentParser._add_keywords(keywords, current, prov) - if inputs: - if isinstance(inputs, TypedSet): - for i in inputs: - prov.inputs.add(i) - else: - for i in inputs.split(): - prov.inputs.add(PlaneURI(str(i))) - else: - if current is not None and len(current.inputs) > 0: - # preserve the original value - prov.inputs.update(current.inputs) - self.logger.debug('End Provenance augmentation.') - return prov - - def _get_quality(self, current): - """ - Create a Quality instance populated with available content information. - :return: Quality - """ - self.logger.debug('Begin Quality augmentation.') - flag = self._get_from_list( - 'Plane.dataQuality', index=0, - current=None if current is None else current.flag) - quality = DataQuality(flag) if flag else None - self.logger.debug('End Quality augmentation.') - return quality - - def _get_requirements(self, current): - """ - Create a Requirements instance populated with available content - information. - :return: Requirements - """ - self.logger.debug('Begin Requirement augmentation.') - flag = self._get_from_list( - 'Observation.requirements.flag', index=0, - current=None if current is None else current.flag) - reqts = Requirements(flag) if flag else None - self.logger.debug('End Requirement augmentation.') - return reqts - - def _get_target(self, current): - """ - Create a Target instance populated with available content information. 
- :return: Target - """ - self.logger.debug('Begin Target augmentation.') - name = self._get_from_list( - 'Observation.target.name', index=0, - current=None if current is None else current.name) - target_type = self._get_from_list( - 'Observation.target.type', index=0, - current=None if current is None else current.target_type) - standard = self._cast_as_bool(self._get_from_list( - 'Observation.target.standard', index=0, - current=None if current is None else current.standard)) - redshift = self._get_from_list( - 'Observation.target.redshift', index=0, - current=None if current is None else current.redshift) - keywords = self._get_set_from_list( - 'Observation.target.keywords', index=0) - moving = self._cast_as_bool( - self._get_from_list( - 'Observation.target.moving', index=0, - current=None if current is None else current.moving)) - target_id = _to_str(self._get_from_list( - 'Observation.target.targetID', index=0, - current=None if current is None else current.target_id)) - target = None - if name: - target = Target(str(name), target_type, standard, redshift, - moving=moving, target_id=target_id) - ContentParser._add_keywords(keywords, current, target) - self.logger.debug('End Target augmentation.') - return target - - def _get_target_position(self, current): - """ - Create a Target Position instance populated with available content - information. - :return: Target Position - """ - self.logger.debug('Begin CAOM2 TargetPosition augmentation.') - x = self._get_from_list( - 'Observation.target_position.point.cval1', index=0, - current=None if current is None else current.coordinates.cval1) - y = self._get_from_list( - 'Observation.target_position.point.cval2', index=0, - current=None if current is None else current.coordinates.cval2) - coordsys = self._get_from_list( - 'Observation.target_position.coordsys', index=0, - current=None if current is None else current.coordsys) - equinox = self._get_from_list( - 'Observation.target_position.equinox', index=0, - current=None if current is None else current.equinox) - aug_target_position = None - if x and y: - aug_point = Point(x, y) - aug_target_position = TargetPosition(aug_point, coordsys) - aug_target_position.equinox = _to_float(equinox) - self.logger.debug('End CAOM2 TargetPosition augmentation.') - return aug_target_position - - def _get_telescope(self, current): - """ - Create a Telescope instance populated with available content - information. - :return: Telescope - """ - self.logger.debug('Begin Telescope augmentation.') - name = self._get_from_list( - 'Observation.telescope.name', index=0, - current=None if current is None else current.name) - geo_x = _to_float( - self._get_from_list( - 'Observation.telescope.geoLocationX', index=0, - current=None if current is None else current.geo_location_x)) - geo_y = _to_float( - self._get_from_list( - 'Observation.telescope.geoLocationY', index=0, - current=None if current is None else current.geo_location_y)) - geo_z = _to_float( - self._get_from_list( - 'Observation.telescope.geoLocationZ', index=0, - current=None if current is None else current.geo_location_z)) - keywords = self._get_set_from_list( - 'Observation.telescope.keywords', index=0) - aug_tel = None - if name: - aug_tel = Telescope(str(name), geo_x, geo_y, geo_z) - ContentParser._add_keywords(keywords, current, aug_tel) - self.logger.debug('End Telescope augmentation.') - return aug_tel - - def _cast_as_bool(self, from_value): - """ - Make lower case Java booleans into capitalized python booleans. 
- :param from_value: Something that represents a boolean value - :return: a python boolean value - """ - if isinstance(from_value, bool): - return from_value - result = None - # so far, these are the only options that are coming in from the - # config files - may need to add more as more types are experienced - if from_value == 'false': - result = False - elif from_value == 'true': - result = True - return result - - def _try_custom_with_blueprint(self, chunk, index): - """ - A mechanism to augment the Custom WCS completely from the blueprint. Do nothing if the WCS information cannot - be correctly created. - - :param chunk: The chunk to modify with the addition of custom information. - :param index: The index in the blueprint for looking up plan information. - """ - self.logger.debug('Begin augmentation with blueprint for custom.') - aug_naxis, aug_naxis_index = self._get_axis_wcs('custom', chunk.custom, index) - if aug_naxis is None: - self.logger.debug('No blueprint custom information.') - else: - # always create a new CustomWCS instance because there's no setter for 'axis' parameter - chunk.custom = CustomWCS(aug_naxis) - chunk.custom_axis = aug_naxis_index - self.logger.debug(f'Updating CustomWCS for {self.uri}.') - self.logger.debug('End augmentation with blueprint for custom.') - - def _try_energy_with_blueprint(self, chunk, index): - """ - A mechanism to augment the Energy WCS completely from the blueprint. - Do nothing if the WCS information cannot be correctly created. - - :param chunk: The chunk to modify with the addition of energy - information. - :param index: The index in the blueprint for looking up plan - information. - """ - self.logger.debug('Begin augmentation with blueprint for energy.') - aug_axis, aug_naxis_index = self._get_axis_wcs('energy', chunk.energy, index) - specsys = _to_str(self._get_from_list('Chunk.energy.specsys', index)) - if aug_axis is None: - self.logger.debug('No blueprint energy information.') - else: - if chunk.energy: - chunk.energy.axis = aug_axis - chunk.energy.specsys = specsys - else: - chunk.energy = SpectralWCS(aug_axis, specsys) - self.logger.debug(f'Creating SpectralWCS for {self.uri} from blueprint') - chunk.energy_axis = aug_naxis_index - - if chunk.energy: - chunk.energy.ssysobs = self._get_from_list('Chunk.energy.ssysobs', index, chunk.energy.ssysobs) - chunk.energy.restfrq = self._get_from_list('Chunk.energy.restfrq', index, chunk.energy.restfrq) - chunk.energy.restwav = self._get_from_list('Chunk.energy.restwav', index, chunk.energy.restwav) - chunk.energy.velosys = self._get_from_list('Chunk.energy.velosys', index, chunk.energy.velosys) - chunk.energy.zsource = self._get_from_list('Chunk.energy.zsource', index, chunk.energy.zsource) - chunk.energy.ssyssrc = self._get_from_list('Chunk.energy.ssyssrc', index, chunk.energy.ssyssrc) - chunk.energy.velang = self._get_from_list('Chunk.energy.velang', index, chunk.energy.velang) - chunk.energy.bandpass_name = self._get_from_list( - 'Chunk.energy.bandpassName', index, chunk.energy.bandpass_name) - chunk.energy.transition = self._get_energy_transition(chunk.energy.transition) - chunk.energy.resolving_power = _to_float( - self._get_from_list('Chunk.energy.resolvingPower', index, chunk.energy.resolving_power)) - self.logger.debug('End augmentation with blueprint for energy.') - - def _try_observable_with_blueprint(self, chunk, index): - """ - A mechanism to augment the Observable WCS completely from the - blueprint. Do nothing if the WCS information cannot be correctly - created. 
- - :param chunk: The chunk to modify with the addition of observable - information. - :param index: The index in the blueprint for looking up plan - information. - """ - self.logger.debug('Begin augmentation with blueprint for ' - 'observable.') - aug_axis = self._two_param_constructor( - 'Chunk.observable.dependent.axis.ctype', - 'Chunk.observable.dependent.axis.cunit', index, _to_str, Axis) - aug_bin = _to_int( - self._get_from_list('Chunk.observable.dependent.bin', index)) - if aug_axis is not None and aug_bin is not None: - chunk.observable = ObservableAxis(Slice(aug_axis, aug_bin)) - chunk.observable_axis = _to_int(self._get_from_list('Chunk.observableAxis', index)) - self.logger.debug('End augmentation with blueprint for polarization.') - - def _try_polarization_with_blueprint(self, chunk, index): - """ - A mechanism to augment the Polarization WCS completely from the - blueprint. Do nothing if the WCS information cannot be correctly - created. - - :param chunk: The chunk to modify with the addition of polarization - information. - :param index: The index in the blueprint for looking up plan - information. - """ - self.logger.debug('Begin augmentation with blueprint for ' - 'polarization.') - aug_axis, aug_naxis_index = self._get_axis_wcs('polarization', chunk.polarization, index) - if aug_axis is not None: - if chunk.polarization: - chunk.polarization.axis = aug_axis - else: - chunk.polarization = PolarizationWCS(aug_axis) - self.logger.debug(f'Creating PolarizationWCS for {self.uri} from blueprint') - chunk.polarization_axis = aug_naxis_index - - self.logger.debug('End augmentation with blueprint for polarization.') - - def _try_position_range(self, index): - self.logger.debug('Try to set the range for position from blueprint, since there is no function') - aug_range = None - aug_range_c1_start = self._two_param_constructor( - 'Chunk.position.axis.range.start.coord1.pix', - 'Chunk.position.axis.range.start.coord1.val', - index, _to_float, RefCoord) - aug_range_c1_end = self._two_param_constructor( - 'Chunk.position.axis.range.end.coord1.pix', - 'Chunk.position.axis.range.end.coord1.val', - index, _to_float, RefCoord) - aug_range_c2_start = self._two_param_constructor( - 'Chunk.position.axis.range.start.coord2.pix', - 'Chunk.position.axis.range.start.coord2.val', - index, _to_float, RefCoord) - aug_range_c2_end = self._two_param_constructor( - 'Chunk.position.axis.range.end.coord2.pix', - 'Chunk.position.axis.range.end.coord2.val', - index, _to_float, RefCoord) - if (aug_range_c1_start and aug_range_c1_end and aug_range_c2_start - and aug_range_c2_end): - aug_range = CoordRange2D( - Coord2D(aug_range_c1_start, aug_range_c1_end), - Coord2D(aug_range_c2_start, aug_range_c2_end)) - self.logger.debug('Completed setting range for position') - return aug_range - - def _try_position_with_blueprint(self, chunk, index): - """ - A mechanism to augment the Position WCS completely from the blueprint. - Do nothing if the WCS information cannot be correctly created. - - :param chunk: The chunk to modify with the addition of position - information. - :param index: The index in the blueprint for looking up plan - information. 
- """ - self.logger.debug('Begin augmentation with blueprint for position.') - aug_axis = None - if (chunk.position is not None and chunk.position.axis is not None and chunk.position.axis.axis1 is not None - and chunk.position.axis.axis2 is not None): - # preserve the values obtained from file data - aug_x_axis = chunk.position.axis.axis1 - aug_y_axis = chunk.position.axis.axis2 - aug_x_error = chunk.position.axis.error1 - aug_y_error = chunk.position.axis.error2 - else: - aug_x_axis = self._two_param_constructor( - 'Chunk.position.axis.axis1.ctype', - 'Chunk.position.axis.axis1.cunit', index, _to_str, Axis) - aug_y_axis = self._two_param_constructor( - 'Chunk.position.axis.axis2.ctype', - 'Chunk.position.axis.axis2.cunit', index, _to_str, Axis) - aug_x_error = self._two_param_constructor( - 'Chunk.position.axis.error1.syser', - 'Chunk.position.axis.error1.rnder', index, _to_float, CoordError) - aug_y_error = self._two_param_constructor( - 'Chunk.position.axis.error2.syser', - 'Chunk.position.axis.error2.rnder', index, _to_float, CoordError) - aug_range = self._try_position_range(index) - if aug_range is None: - if chunk.position is None or chunk.position.axis is None or chunk.position.axis.function is None: - aug_dimension = self._two_param_constructor( - 'Chunk.position.axis.function.dimension.naxis1', - 'Chunk.position.axis.function.dimension.naxis2', - index, _to_int, Dimension2D) - aug_x_ref_coord = self._two_param_constructor( - 'Chunk.position.axis.function.refCoord.coord1.pix', - 'Chunk.position.axis.function.refCoord.coord1.val', - index, _to_float, RefCoord) - aug_y_ref_coord = self._two_param_constructor( - 'Chunk.position.axis.function.refCoord.coord2.pix', - 'Chunk.position.axis.function.refCoord.coord2.val', - index, _to_float, RefCoord) - aug_cd11 = _to_float(self._get_from_list('Chunk.position.axis.function.cd11', index)) - aug_cd12 = _to_float(self._get_from_list('Chunk.position.axis.function.cd12', index)) - aug_cd21 = _to_float(self._get_from_list('Chunk.position.axis.function.cd21', index)) - aug_cd22 = _to_float(self._get_from_list('Chunk.position.axis.function.cd22', index)) - - aug_ref_coord = None - if aug_x_ref_coord is not None and aug_y_ref_coord is not None: - aug_ref_coord = Coord2D(aug_x_ref_coord, aug_y_ref_coord) - self.logger.debug(f'Creating position Coord2D for {self.uri}') - - aug_function = None - if (aug_dimension is not None and aug_ref_coord is not None and - aug_cd11 is not None and aug_cd12 is not None and - aug_cd21 is not None and aug_cd22 is not None): - aug_function = CoordFunction2D(aug_dimension, aug_ref_coord, aug_cd11, aug_cd12, aug_cd21, - aug_cd22) - self.logger.debug(f'Creating position CoordFunction2D for {self.uri}') - - if (aug_x_axis is not None and aug_y_axis is not None and - aug_function is not None): - aug_axis = CoordAxis2D(aug_x_axis, aug_y_axis, aug_x_error, - aug_y_error, None, None, aug_function) - self.logger.debug(f'Creating position CoordAxis2D for {self.uri}') - - chunk.position_axis_1 = _to_int(self._get_from_list('Chunk.positionAxis1', index)) - chunk.position_axis_2 = _to_int(self._get_from_list('Chunk.positionAxis2', index)) - else: - aug_axis = CoordAxis2D(aug_x_axis, aug_y_axis, aug_x_error, aug_y_error, range=aug_range) - - if aug_axis is not None: - if chunk.position: - chunk.position.axis = aug_axis - else: - chunk.position = SpatialWCS(aug_axis) - self.logger.debug(f'Creating SpatialWCS for {self.uri} from blueprint') - - if chunk.position: - chunk.position.coordsys = 
self._get_from_list('Chunk.position.coordsys', index, chunk.position.coordsys) - chunk.position.equinox = _to_float(self._get_from_list( - 'Chunk.position.equinox', index, chunk.position.equinox)) - chunk.position.resolution = self._get_from_list( - 'Chunk.position.resolution', index, chunk.position.resolution) - self.logger.debug('End augmentation with blueprint for position.') - - def _try_range(self, index, lookup): - self.logger.debug(f'Try to set the range for {lookup}') - result = None - aug_range_start = self._two_param_constructor( - f'Chunk.{lookup}.axis.range.start.pix', - f'Chunk.{lookup}.axis.range.start.val', - index, _to_float, RefCoord) - aug_range_end = self._two_param_constructor( - f'Chunk.{lookup}.axis.range.end.pix', - f'Chunk.{lookup}.axis.range.end.val', - index, _to_float, RefCoord) - if aug_range_start and aug_range_end: - result = CoordRange1D(aug_range_start, aug_range_end) - self.logger.debug(f'Completed setting range with return for {lookup}') - return result - - def _try_time_with_blueprint(self, chunk, index): - """ - A mechanism to augment the Time WCS completely from the blueprint. - Do nothing if the WCS information cannot be correctly created. - - :param chunk: The chunk to modify with the addition of time - information. - :param index: The index in the blueprint for looking up plan - information. - """ - self.logger.debug('Begin augmentation with blueprint for temporal.') - - aug_axis, aug_axis_index = self._get_axis_wcs('time', chunk.time, index) - if aug_axis is not None: - if chunk.time: - chunk.time.axis = aug_axis - else: - chunk.time = TemporalWCS(aug_axis) - self.logger.debug(f'Creating TemporalWCS for {self.uri} from blueprint') - chunk.time_axis = aug_axis_index - - if chunk.time: - chunk.time.exposure = _to_float(self._get_from_list('Chunk.time.exposure', index, chunk.time.exposure)) - chunk.time.resolution = _to_float( - self._get_from_list('Chunk.time.resolution', index, chunk.time.resolution)) - chunk.time.timesys = _to_str(self._get_from_list('Chunk.time.timesys', index, chunk.time.timesys)) - chunk.time.trefpos = self._get_from_list('Chunk.time.trefpos', index, chunk.time.trefpos) - chunk.time.mjdref = self._get_from_list('Chunk.time.mjdref', index, chunk.time.mjdref) - - self.logger.debug('End augmentation with blueprint for temporal.') - - def _two_param_constructor(self, lookup1, lookup2, index, to_type, ctor): - """ - Helper function to build from the blueprint, a CAOM2 entity that - has two required parameters. - - :param lookup1: Blueprint lookup text for the first constructor - parameter. - :param lookup2: Blueprint lookup text for the second constructor - parameter. - :param index: Which index in the blueprint to do the lookup on. - :param to_type: Function to cast the blueprint value to a particular - type. - :param ctor: The constructor that has two parameters to build. - :return: The instance returned by the constructor, or None if any of - the values are undefined. - """ - param1 = to_type(self._get_from_list(lookup1, index)) - param2 = to_type(self._get_from_list(lookup2, index)) - new_object = None - if param1 is not None and param2 is not None: - new_object = ctor(param1, param2) - return new_object - - # TODO - is this the right implementation? 
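A minimal, self-contained sketch of the pattern the helper above captures: cast two blueprint lookups and call the two-argument constructor only when both values resolve. The RefCoord namedtuple and the dict-backed 'plan' below are illustrative stand-ins, not the caom2 classes or the real blueprint.

    from collections import namedtuple

    RefCoord = namedtuple('RefCoord', ['pix', 'val'])   # stand-in for the caom2 RefCoord

    def two_param_constructor(plan, lookup1, lookup2, to_type, ctor):
        # mirror the guard above: build the object only if both parameters are defined
        raw1, raw2 = plan.get(lookup1), plan.get(lookup2)
        param1 = to_type(raw1) if raw1 is not None else None
        param2 = to_type(raw2) if raw2 is not None else None
        return ctor(param1, param2) if param1 is not None and param2 is not None else None

    plan = {
        'Chunk.time.axis.function.refCoord.pix': '0.5',
        'Chunk.time.axis.function.refCoord.val': '58000.0',
    }
    ref = two_param_constructor(
        plan,
        'Chunk.time.axis.function.refCoord.pix',
        'Chunk.time.axis.function.refCoord.val',
        float,
        RefCoord,
    )
    # ref == RefCoord(pix=0.5, val=58000.0); if either key were missing, ref would be None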
- def add_parts(self, artifact, index=0): - result = False - if self.blueprint.has_chunk(index): - artifact.parts.add(Part(str(index))) - result = True - return result - - @staticmethod - def _add_keywords(keywords, current, to_set): - """ - Common code for adding keywords to a CAOM2 entity, capturing all - the weird metadata cases that happen at CADC. - - :param keywords: Keywords to add to a CAOM2 set. - :param current: Existing CAOM2 entity with a keywords attribute. - :param to_set: A CAOM2 entity with a keywords attribute. - """ - if keywords: - if isinstance(keywords, set): - to_set.keywords.update(keywords) - else: - for k in keywords.split(): - to_set.keywords.add(k) - else: - if current is not None: - # preserve the original value - to_set.keywords.update(current.keywords) - if to_set.keywords is not None and None in to_set.keywords: - to_set.keywords.remove(None) - if to_set.keywords is not None and 'none' in to_set.keywords: - to_set.keywords.remove('none') - - -class FitsParser(ContentParser): - """ - Parses a FITS file and extracts the CAOM2 related information which can - be used to augment an existing CAOM2 observation, plane or artifact. The - constructor takes either a FITS file as argument or a list of dictionaries - (FITS keyword=value) corresponding to each extension. - - The WCS-related keywords of the FITS file are consumed by the astropy.wcs - package which might display warnings with regards to compliance. - - Example 1: - parser = FitsParser(input = '/staging/700000o.fits.gz') - ... - # customize parser.headers by deleting, changing or adding attributes - - obs = Observation(collection='TEST', observation_id='700000', - algorithm='exposure') - plane = Plane(plane_id='700000-1') - obs.plane.add(plane) - - artifact = Artifact(uri='ad:CFHT/700000o.fits.gz', product_type='science', - release_type='data') - plane.artifacts.add(artifact) - - parser.augment_observation(obs) - - # further update obs - - - Example 2: - - headers = [] # list of dictionaries headers - # populate headers - parser = FitsParser(input=headers) - - parser.augment_observation(obs) - ... - - """ - - def __init__(self, src, obs_blueprint=None, uri=None): - """ - Ctor - :param src: List of headers (dictionary of FITS keywords:value) with - one header for each extension or a FITS input file. - :param obs_blueprint: externally provided blueprint - :param uri: which artifact augmentation is based on - """ - self.logger = logging.getLogger(__name__) - self._headers = [] - self.parts = 0 - self.file = '' - if isinstance(src, list): - # assume this is the list of headers - self._headers = src - else: - # assume file - self.file = src - self._headers = data_util.get_local_headers_from_fits(self.file) - if obs_blueprint: - self._blueprint = obs_blueprint - else: - self._blueprint = ObsBlueprint() - self._errors = [] - # for command-line parameter to module execution - self.uri = uri - self.apply_blueprint() - - @property - def headers(self): - """ - List of headers where each header should allow dictionary like - access to the FITS attribute in that header - :return: - """ - return self._headers - - def add_parts(self, artifact, index): - # there is one Part per extension, the name is the extension number - if ( - FitsParser._has_data_array(self._headers[index]) - and self.blueprint.has_chunk(index) - ): - if str(index) not in artifact.parts.keys(): - # TODO use extension name? 
- artifact.parts.add(Part(str(index))) - self.logger.debug(f'Part created for HDU {index}.') - result = True - else: - artifact.parts.add(Part(str(index))) - self.logger.debug(f'Create empty part for HDU {index}') - result = False - return result - - def apply_blueprint(self): - - # pointers that are short to type - exts = self.blueprint._extensions - wcs_std = self.blueprint._wcs_std - plan = self.blueprint._plan - - # firstly, apply the functions - if (self.blueprint._module is not None or - self.blueprint._module_instance is not None): - for key, value in plan.items(): - if ObsBlueprint.is_function(value): - if self._blueprint._module_instance is None: - plan[key] = self._execute_external(value, key, 0) - else: - plan[key] = self._execute_external_instance( - value, key, 0) - for extension in exts: - for key, value in exts[extension].items(): - if ObsBlueprint.is_function(value): - if self._blueprint._module_instance is None: - exts[extension][key] = self._execute_external( - value, key, extension) - else: - exts[extension][key] = \ - self._execute_external_instance( - value, key, extension) - - # apply overrides from blueprint to all extensions - for key, value in plan.items(): - if key in wcs_std: - if ObsBlueprint.needs_lookup(value): - # alternative attributes provided for standard wcs attrib. - for header in self.headers: - for v in value[0]: - if v in header and \ - v not in wcs_std[key].split(','): - keywords = wcs_std[key].split(',') - for keyword in keywords: - _set_by_type(header, keyword, - str(header[v])) - elif ObsBlueprint.is_function(value): - continue - elif ObsBlueprint.has_no_value(value): - continue - else: - # value provided for standard wcs attribute - if ObsBlueprint.needs_lookup(wcs_std[key]): - keywords = wcs_std[key][0] - elif ObsBlueprint.is_function(wcs_std[key]): - continue - else: - keywords = wcs_std[key].split(',') - for keyword in keywords: - for header in self.headers: - _set_by_type(header, keyword, str(value)) - - # apply overrides to the remaining extensions - for extension in exts: - if extension >= len(self.headers): - logging.error('More extensions configured {} than headers ' - '{}'.format(extension, len(self.headers))) - continue - hdr = self.headers[extension] - for key, value in exts[extension].items(): - if ObsBlueprint.is_table(value): - continue - keywords = wcs_std[key].split(',') - for keyword in keywords: - _set_by_type(hdr, keyword, value) - logging.debug( - '{}: set to {} in extension {}'.format(keyword, value, - extension)) - # apply defaults to all extensions - for key, value in plan.items(): - if ObsBlueprint.has_default_value(value): - for index, header in enumerate(self.headers): - for keywords in value[0]: - for keyword in keywords.split(','): - if (not header.get(keyword.strip()) and - keyword == keywords and # checking a string - keywords == value[0][-1]): # last item - # apply a default if a value does not already - # exist, and all possible values of - # keywords have been checked - _set_by_type(header, keyword.strip(), value[1]) - logging.debug( - '{}: set default value of {} in HDU {}.'. 
- format(keyword, value[1], index)) - - # TODO wcs in astropy ignores cdelt attributes when it finds a cd - # attribute even if it's in a different axis - for header in self.headers: - cd_present = False - for i in range(1, 6): - if 'CD{0}_{0}'.format(i) in header: - cd_present = True - break - if cd_present: - for i in range(1, 6): - if f'CDELT{i}' in header and \ - 'CD{0}_{0}'.format(i) not in header: - header['CD{0}_{0}'.format(i)] = \ - header[f'CDELT{i}'] - - # TODO When a projection is specified, wcslib expects corresponding - # DP arguments with NAXES attributes. Normally, omitting the attribute - # signals no distortion which is the assumption in caom2blueprint for - # energy and polarization axes. Following is a workaround for - # SIP projections. - # For more details see: - # http://www.atnf.csiro.au/people/mcalabre/WCS/dcs_20040422.pdf - for header in self.headers: - sip = False - for i in range(1, 6): - if ((f'CTYPE{i}' in header) and - isinstance(header[f'CTYPE{i}'], str) and - ('-SIP' in header[f'CTYPE{i}'])): - sip = True - break - if sip: - for i in range(1, 6): - if (f'CTYPE{i}' in header) and \ - ('-SIP' not in header[f'CTYPE{i}']) and \ - (f'DP{i}' not in header): - header[f'DP{i}'] = 'NAXES: 1' - - return - - def augment_artifact(self, artifact, index=0): - """ - Augments a given CAOM2 artifact with available FITS information - :param artifact: existing CAOM2 artifact to be augmented - """ - self.logger.debug( - 'Begin artifact augmentation for {} with {} HDUs.'.format( - artifact.uri, len(self.headers))) - - if self.blueprint.get_configed_axes_count() == 0: - raise TypeError( - 'No WCS Data. End artifact augmentation for {}.'.format( - artifact.uri)) - - for i, header in enumerate(self.headers): - if not self.add_parts(artifact, i): - # artifact-level attributes still require updating - BlueprintParser.augment_artifact(self, artifact, 0) - continue - self._wcs_parser = FitsWcsParser(header, self.file, str(i)) - super().augment_artifact(artifact, i) - - self.logger.debug( - f'End artifact augmentation for {artifact.uri}.') - - def _get_chunk_naxis(self, chunk, index=None): - # NOTE: astropy.wcs does not distinguished between WCS axes and - # data array axes. naxis in astropy.wcs represents in fact the - # number of WCS axes, whereas chunk.axis represents the naxis - # of the data array. 
Solution is to determine it directly from - # the header - if 'ZNAXIS' in self._headers[index]: - chunk.naxis = _to_int(self._headers[index]['ZNAXIS']) - elif 'NAXIS' in self._headers[index]: - chunk.naxis = _to_int(self._headers[index]['NAXIS']) - else: - super()._get_chunk_naxis(chunk) - - def _get_from_list(self, lookup, index, current=None): - value = None - try: - keys = self.blueprint._get(lookup) - except KeyError: - self.add_error(lookup, sys.exc_info()[1]) - self.logger.debug( - f'Could not find {lookup!r} in caom2blueprint configuration.') - if current: - self.logger.debug( - f'{lookup}: using current value of {current!r}.') - value = current - return value - - if ObsBlueprint.needs_lookup(keys): - for ii in keys[0]: - try: - value = self.headers[index].get(ii) - if value: - self.logger.debug( - f'{lookup}: assigned value {value} based on ' - f'keyword {ii}.') - break - except (KeyError, IndexError): - if keys[0].index(ii) == len(keys[0]) - 1: - self.add_error(lookup, sys.exc_info()[1]) - # assign a default value, if one exists - if keys[1]: - if current is None: - value = keys[1] - self.logger.debug( - f'{lookup}: assigned default value {value}.') - else: - value = current - if value is None: - # checking current does not work in the general case, - # because current might legitimately be 'None' - if self._blueprint.update: - if ( - current is not None - or (current is None and isinstance(value, bool)) - ): - value = current - self.logger.debug( - f'{lookup}: used current value {value}.') - else: - # assign a default value, if one exists - if keys[1]: - if current is None: - value = keys[1] - self.logger.debug( - f'{lookup}: assigned default value {value}.') - else: - value = current - - elif (keys is not None) and (keys != ''): - if keys == 'None': - value = None - else: - value = keys - elif current: - value = current - - self.logger.debug(f'{lookup}: value is {value}') - return value - - def _get_from_table(self, lookup, extension): - """ - Return a space-delimited list of all the row values from a column. - - This is a straight FITS BINTABLE lookup. There is no support for - default values. Unless someone provides a compelling use case. - - :param lookup: where to find the column name - :param extension: which extension - :return: A string, which is a space-delimited list of all the values. 
- """ - value = '' - try: - keywords = self.blueprint._get(lookup, extension) - except KeyError as e: - self.add_error(lookup, sys.exc_info()[1]) - self.logger.debug( - 'Could not find {!r} in fits2caom2 configuration.'.format( - lookup)) - raise e - - if isinstance(keywords, tuple) and keywords[0] == 'BINTABLE': - - # BINTABLE, so need to retrieve the data from the file - if self.file is not None and self.file != '': - with fits.open(self.file) as fits_data: - if fits_data[extension].header['XTENSION'] != 'BINTABLE': - raise ValueError( - 'Got {} when looking for a BINTABLE ' - 'extension.'.format( - fits_data[extension].header['XTENSION'])) - for ii in keywords[1]: - for jj in fits_data[extension].data[keywords[2]][ii]: - value = f'{jj} {value}' - - self.logger.debug(f'{lookup}: value is {value}') - return value - - def _get_set_from_list(self, lookup, index): - value = None - keywords = None - try: - keywords = self.blueprint._get(lookup) - except KeyError: - self.add_error(lookup, sys.exc_info()[1]) - self.logger.debug(f'Could not find \'{lookup}\' in caom2blueprint ' - f'configuration.') - - if isinstance(keywords, tuple): - for ii in keywords[0]: - try: - value = self.headers[index].get(ii) - break - except KeyError: - self.add_error(lookup, sys.exc_info()[1]) - if keywords[1]: - value = keywords[1] - self.logger.debug( - '{}: assigned default value {}.'.format(lookup, - value)) - elif keywords: - value = keywords - self.logger.debug(f'{lookup}: assigned value {value}.') - - return value - - @staticmethod - def _has_data_array(header): - """ - - :param header: - :return: - """ - naxis = 0 - if 'ZNAXIS' in header: - naxis = _to_int(header['ZNAXIS']) - elif 'NAXIS' in header: - naxis = _to_int(header['NAXIS']) - if not naxis: - return False - - data_axes = 0 - for i in range(1, naxis + 1): - axis = f'NAXIS{i}' - if axis in header: - data_axis = _to_int(header[axis]) - if not data_axes: - data_axes = data_axis - else: - data_axes = data_axes * data_axis - if not data_axes: - return False - - bitpix = 0 - if 'BITPIX' in header: - bitpix = _to_int(header['BITPIX']) - if not bitpix: - return False - return True - - -class Hdf5Parser(ContentParser): - """ - Parses an HDF5 file and extracts the CAOM2 related information which - can be used to augment an existing CAOM2 observation, plane, or artifact. - - If there is per-Chunk metadata in the file, the constructor parameter - 'find_roots_here' is the address location in the file where the N Chunk - metadata starts. - - The WCS-related keywords of the HDF5 files are used to create instances of - astropy.wcs.WCS so that verify might be called. - - There is no CADC support for the equivalent of the FITS --fhead parameter - for HDF5 files, which is why the name of the file on a local disk is - required. - - How the classes work together for HDF5 files: - - build an HDF5ObsBlueprint, with _CAOM2_ELEMENT keys, and HDF5 metadata - path names as keys - - cache the metadata from an HDF5 file in the HDF5ObsBlueprint. This - caching is done in the "apply_blueprint_from_file" method in the - Hdf5Parser class, and replaces the path names in the blueprint with - the values from the HDF5 file. The caching is done so that all HDF5 - file access is isolated to one point in time. - - use the cached metadata to build astropy.wcs instances for verification - in Hdf5WcsParser. - - use the astropy.wcs instance and other blueprint metadata to fill the - CAOM2 record. 
- """ - - def __init__( - self, obs_blueprint, uri, h5_file, find_roots_here='sitedata' - ): - """ - :param obs_blueprint: Hdf5ObsBlueprint instance - :param uri: which artifact augmentation is based on - :param h5_file: h5py file handle - :param find_roots_here: str location where Chunk metadata starts - """ - self._file = h5_file - # where N Chunk metadata starts - self._find_roots_here = find_roots_here - # the length of the array is the number of Parts in an HDF5 file, - # and the values are HDF5 lookup path names. - self._extension_names = [] - super().__init__(obs_blueprint, uri) - # used to set the astropy wcs info, resulting in a validated wcs - # that can be used to construct a valid CAOM2 record - self._wcs_parser = None - - def apply_blueprint_from_file(self): - """ - Retrieve metadata from file, cache in the blueprint. - """ - self.logger.debug('Begin apply_blueprint_from_file') - # h5py is an extra in this package since most collections do not - # require it - import h5py - individual, multi, attributes = self._extract_path_names_from_blueprint() - filtered_individual = [ii for ii in individual.keys() if '(' in ii] - - def _extract_from_item(name, object): - """ - Function signature dictated by h5py visititems implementation. - Executed for each dataset/group in an HDF5 file. - - :param name: fully-qualified HDF5 path name - :param object: what the HDF5 path name points to - """ - if name == self._find_roots_here: - for ii, path_name in enumerate(object.keys()): - # store the names and locations of the Part/Chunk metadata - temp = f'{name}/{path_name}' - self.logger.debug(f'Adding extension {temp}') - self._extension_names.append(temp) - self._blueprint._extensions[ii] = {} - - # If it's the Part/Chunk metadata, capture it to extensions. - # Syntax of the keys described in Hdf5ObsBlueprint class. 
- for part_index, part_name in enumerate(self._extension_names): - if ( - name.startswith(part_name) - and isinstance(object, h5py.Dataset) - and object.dtype.names is not None - ): - for d_name in object.dtype.names: - temp_path = f'{name.replace(part_name, "")}/{d_name}' - for path_name in multi.keys(): - if path_name == temp_path: - for jj in multi.get(path_name): - self._blueprint.set( - jj, object[d_name], part_index - ) - elif (path_name.startswith(temp_path) - and '(' in path_name): - z = path_name.split('(') - if ':' in z[1]: - a = z[1].split(')')[0].split(':') - if len(a) > 2: - raise NotImplementedError - for jj in multi.get(path_name): - self._blueprint.set( - jj, - object[d_name][int(a[0])][ - int(a[1])], - part_index, - ) - else: - index = int(z[1].split(')')[0]) - for jj in multi.get(path_name): - self._blueprint.set( - jj, - object[d_name][index], - part_index, - ) - - # if it's Observation/Plane/Artifact metadata, capture it to - # the base blueprint - if isinstance(object, h5py.Dataset): - if object.dtype.names is not None: - for d_name in object.dtype.names: - temp = f'//{name}/{d_name}' - if temp in individual.keys(): - for jj in individual.get(temp): - self._blueprint.set(jj, object[d_name], 0) - else: - for ind_path in filtered_individual: - if ind_path.startswith(temp): - z = ind_path.split('(') - index = int(z[1].split(')')[0]) - for jj in individual.get(ind_path): - self._blueprint.set(jj, object[d_name][index], 0) - - if len(individual) == 0 and len(multi) == 0: - # CFHT SITELLE - self.logger.debug(f'attrs for {self.uri}') - self._extract_from_attrs(attributes) - else: - # TAOSII - self.logger.debug(f'visititems for {self.uri}') - self._file.visititems(_extract_from_item) - self.logger.debug('Done apply_blueprint_from_file') - - def _extract_from_attrs(self, attributes): - # I don't currently see any way to have more than one Part, if relying on - # attrs for metadata - part_index = 0 - # v == list of blueprint keys - for k, v in attributes.items(): - if k in self._file.attrs: - value = self._file.attrs[k] - for entry in v: - self._blueprint.set(entry, value, part_index) - - def _extract_path_names_from_blueprint(self): - """ - :return: individual - a dictionary of lists, keys are unique path names for finding metadata once per file. - Values are _CAOM2_ELEMENT strings. - multiple - a dictionary of lists, keys are unique path names for finding metadata N times per file. Values - are _CAOM2_ELEMENT strings. - attributes - a dictionary of lists, keys reference expected content from the h5py.File().attrs data - structure and its keys. 
- """ - individual = defaultdict(list) - multi = defaultdict(list) - attributes = defaultdict(list) - for key, value in self._blueprint._plan.items(): - if ObsBlueprint.needs_lookup(value): - for ii in value[0]: - if ii.startswith('//'): - individual[ii].append(key) - elif ii.startswith('/'): - multi[ii].append(key) - else: - attributes[ii].append(key) - return individual, multi, attributes - - def apply_blueprint(self): - self.logger.debug('Begin apply_blueprint') - self.apply_blueprint_from_file() - - # after the apply_blueprint_from_file call, all the metadata from the - # file has been applied to the blueprint, so now do the bits that - # require no access to file content - - # pointers that are short to type - exts = self._blueprint._extensions - plan = self._blueprint._plan - - # apply the functions - if (self._blueprint._module is not None or - self._blueprint._module_instance is not None): - for key, value in plan.items(): - if ObsBlueprint.is_function(value): - if self._blueprint._module_instance is None: - plan[key] = self._execute_external(value, key, 0) - else: - plan[key] = self._execute_external_instance( - value, key, 0) - for extension in exts: - for key, value in exts[extension].items(): - if ObsBlueprint.is_function(value): - if self._blueprint._module_instance is None: - exts[extension][key] = self._execute_external( - value, key, extension) - else: - exts[extension][key] = \ - self._execute_external_instance( - value, key, extension) - - # blueprint already contains all the overrides, only need to make - # sure the overrides get applied to all the extensions - for extension in exts: - for key, value in exts[extension].items(): - if ( - ObsBlueprint.is_table(value) - # already been looked up - or ObsBlueprint.needs_lookup(value) - # already been executed - or ObsBlueprint.is_function(value) - # nothing to assign - or ObsBlueprint.has_no_value(value) - ): - continue - exts[extension][key] = value - self.logger.debug( - f'{key}: set to {value} in extension {extension}') - - # if no values have been set by file lookups, function execution, - # or applying overrides, apply defaults, including to all extensions - for key, value in plan.items(): - if ObsBlueprint.needs_lookup(value) and value[1]: - # there is a default value in the blueprint that can be used - for extension in exts: - q = exts[extension].get(key) - if q is None: - exts[extension][key] = value[1] - self.logger.debug( - f'Add {key} and assign default value of ' - f'{value[1]} in extension {extension}.') - elif ObsBlueprint.needs_lookup(value): - exts[extension][key] = value[1] - self.logger.debug( - f'{key}: set value to default of {value[1]} in ' - f'extension {extension}.') - plan[key] = value[1] - self.logger.debug(f'{key}: set value to default of {value[1]}') - - self.logger.debug('Done apply_blueprint') - return - - def augment_artifact(self, artifact, index=0): - self._wcs_parser = Hdf5WcsParser(self._blueprint, 0) - super().augment_artifact(artifact, 0) - for ii in range(1, len(self._blueprint._extensions)): - self._wcs_parser = Hdf5WcsParser(self._blueprint, ii) - super().augment_artifact(artifact, ii) - - def _get_chunk_naxis(self, chunk, index): - chunk.naxis = self._get_from_list('Chunk.naxis', index, chunk.naxis) - - def add_parts(self, artifact, index=0): - artifact.parts.add(Part(str(index))) - return True - - -class WcsParser: - """ - WCS axes methods. 
- """ - - ENERGY_AXIS = 'energy' - POLARIZATION_AXIS = 'polarization' - TIME_AXIS = 'time' - - def __init__(self, blueprint, extension): - self._wcs = None - self.wcs = None - self._blueprint = blueprint - self._axes = { - 'ra': [0, False], - 'dec': [0, False], - 'time': [0, False], - 'energy': [0, False], - 'polarization': [0, False], - 'observable': [0, False], - 'custom': [0, False], - } - # int - index into blueprint._plan extensions - self._extension = extension - self.logger = logging.getLogger(self.__class__.__name__) - self._set_wcs() - - def _assign_cd(self, key, cd, count): - x = self._blueprint._get(key, self._extension) - if x is not None: - if ObsBlueprint.needs_lookup(x): - cd[count][count] = 1.0 - else: - cd[count][count] = x - - def assign_sanitize(self, assignee, index, key, sanitize=True): - """ - Do not want to blindly assign None to astropy.wcs attributes, so - use this method for conditional assignment. - - The current implementation is that if there is a legitimate need to - assign None to a value, either use 'set' in the Hdf5ObsBlueprint, and - specifically assign None, or execute a function to set it to None - conditionally. There will be no support for a Default value of None - with HDF5 files. - - By the time this method is called, if the value still passes the "ObsBlueprint.needs_lookup" - check, the value should be ignored for fulfilling the WCS needs of the record under construction. - """ - x = self._blueprint._get(key, self._extension) - if sanitize: - x = self._sanitize(x) - if x is not None and not ObsBlueprint.needs_lookup(x): - assignee[index] = x - - def _set_wcs(self): - num_axes = self._blueprint.get_configed_axes_count() - self._wcs = WCS(naxis=num_axes) - self.wcs = self._wcs.wcs - array_shape, crder, crpix, crval, csyer, ctype, cunit, temp = [[0] * num_axes for _ in range(8)] - cd = [temp.copy() for _ in range(num_axes)] - count = 0 - if self._blueprint._pos_axes_configed: - self._axes['ra'][1] = True - self._axes['dec'][1] = True - self._axes['ra'][0] = count - self._axes['dec'][0] = count + 1 - self.assign_sanitize(ctype, count, 'Chunk.position.axis.axis1.ctype') - self.assign_sanitize(ctype, count + 1, 'Chunk.position.axis.axis2.ctype') - self.assign_sanitize(cunit, count, 'Chunk.position.axis.axis1.cunit') - self.assign_sanitize(cunit, count + 1, 'Chunk.position.axis.axis2.cunit') - self.assign_sanitize(array_shape, count, 'Chunk.position.axis.function.dimension.naxis1') - self.assign_sanitize(array_shape, count + 1, 'Chunk.position.axis.function.dimension.naxis2') - self.assign_sanitize(crpix, count, 'Chunk.position.axis.function.refCoord.coord1.pix') - self.assign_sanitize(crpix, count + 1, 'Chunk.position.axis.function.refCoord.coord2.pix') - self.assign_sanitize(crval, count, 'Chunk.position.axis.function.refCoord.coord1.val') - self.assign_sanitize(crval, count + 1, 'Chunk.position.axis.function.refCoord.coord2.val') - x = self._blueprint._get('Chunk.position.axis.function.cd11', - self._extension) - if x is not None and not ObsBlueprint.needs_lookup(x): - cd[count][0] = x - x = self._blueprint._get('Chunk.position.axis.function.cd12', - self._extension) - if x is not None and not ObsBlueprint.needs_lookup(x): - cd[count][1] = x - x = self._blueprint._get('Chunk.position.axis.function.cd21', - self._extension) - if x is not None and not ObsBlueprint.needs_lookup(x): - cd[count + 1][0] = x - x = self._blueprint._get('Chunk.position.axis.function.cd22', - self._extension) - if x is not None and not ObsBlueprint.needs_lookup(x): - cd[count + 
1][1] = x - self.assign_sanitize(crder, count, 'Chunk.position.axis.error1.rnder') - self.assign_sanitize(crder, count + 1, 'Chunk.position.axis.error2.rnder') - self.assign_sanitize(csyer, count, 'Chunk.position.axis.error1.syser') - self.assign_sanitize(csyer, count + 1, 'Chunk.position.axis.error2.syser') - count += 2 - if self._blueprint._time_axis_configed: - self._axes['time'][1] = True - self._axes['time'][0] = count - self.assign_sanitize(ctype, count, 'Chunk.time.axis.axis.ctype', False) - self.assign_sanitize(cunit, count, 'Chunk.time.axis.axis.cunit', False) - self.assign_sanitize(array_shape, count, 'Chunk.time.axis.function.naxis', False) - self.assign_sanitize(crpix, count, 'Chunk.time.axis.function.refCoord.pix', False) - self.assign_sanitize(crval, count, 'Chunk.time.axis.function.refCoord.val', False) - self.assign_sanitize(crder, count, 'Chunk.time.axis.error.rnder') - self.assign_sanitize(csyer, count, 'Chunk.time.axis.error.syser') - self._assign_cd('Chunk.time.axis.function.delta', cd, count) - count += 1 - if self._blueprint._energy_axis_configed: - self._axes['energy'][1] = True - self._axes['energy'][0] = count - self.assign_sanitize(ctype, count, 'Chunk.energy.axis.axis.ctype', False) - self.assign_sanitize(cunit, count, 'Chunk.energy.axis.axis.cunit', False) - self.assign_sanitize(array_shape, count, 'Chunk.energy.axis.function.naxis', False) - self.assign_sanitize(crpix, count, 'Chunk.energy.axis.function.refCoord.pix', False) - self.assign_sanitize(crval, count, 'Chunk.energy.axis.function.refCoord.val', False) - self.assign_sanitize(crder, count, 'Chunk.energy.axis.error.rnder') - self.assign_sanitize(csyer, count, 'Chunk.energy.axis.error.syser') - self._assign_cd('Chunk.energy.axis.function.delta', cd, count) - count += 1 - if self._blueprint._polarization_axis_configed: - self._axes['polarization'][1] = True - self._axes['polarization'][0] = count - self.assign_sanitize(ctype, count, 'Chunk.polarization.axis.axis.ctype', False) - self.assign_sanitize(cunit, count, 'Chunk.polarization.axis.axis.cunit', False) - self.assign_sanitize(array_shape, count, 'Chunk.polarization.axis.function.naxis', False) - self.assign_sanitize(crpix, count, 'Chunk.polarization.axis.function.refCoord.pix', False) - self.assign_sanitize(crval, count, 'Chunk.polarization.axis.function.refCoord.val', False) - self._assign_cd('Chunk.polarization.axis.function.delta', cd, count) - count += 1 - if self._blueprint._obs_axis_configed: - self._axes['observable'][1] = True - self._axes['observable'][0] = count - self.assign_sanitize(ctype, count, 'Chunk.observable.axis.axis.ctype', False) - self.assign_sanitize(cunit, count, 'Chunk.observable.axis.axis.cunit', False) - array_shape[count] = 1.0 - self.assign_sanitize(crpix, count, 'Chunk.observable.axis.function.refCoord.pix', False) - crval[count] = 0.0 - cd[count][count] = 1.0 - count += 1 - if self._blueprint._custom_axis_configed: - self._axes['custom'][1] = True - self._axes['custom'][0] = count - self.assign_sanitize(ctype, count, 'Chunk.custom.axis.axis.ctype', False) - self.assign_sanitize(cunit, count, 'Chunk.custom.axis.axis.cunit', False) - self.assign_sanitize(array_shape, count, 'Chunk.custom.axis.function.naxis', False) - self.assign_sanitize(crpix, count, 'Chunk.custom.axis.function.refCoord.pix', False) - self.assign_sanitize(crval, count, 'Chunk.custom.axis.function.refCoord.val', False) - self._assign_cd('Chunk.custom.axis.function.delta', cd, count) - count += 1 - - if not all(val == 0 for val in array_shape): - 
self._wcs.array_shape = array_shape - if not all(val == 0 for val in cunit): - self._wcs.wcs.cunit = cunit - if not all(val == 0 for val in ctype): - self._wcs.wcs.ctype = ctype - if not all(val == 0 for val in crpix): - self._wcs.wcs.crpix = crpix - if not all(val == 0 for val in crval): - self._wcs.wcs.crval = crval - if not all(val == 0 for val in crder): - self._wcs.wcs.crder = crder - if not all(val == 0 for val in csyer): - self._wcs.wcs.csyer = csyer - self._wcs.wcs.cd = cd - self._finish_position() - self._finish_time() - self._finish_energy() - - def augment_custom(self, chunk): - """ - Augments a chunk with custom WCS information - :param chunk: - :return: - """ - self.logger.debug('Begin Custom WCS augmentation.') - if chunk is None or not isinstance(chunk, Chunk): - raise ValueError(f'Chunk type mis-match for {chunk}.') - - custom_axis_index = self._get_axis_index(CUSTOM_CTYPES) - if custom_axis_index is None: - self.logger.debug('No WCS Custom info') - return - try: - custom_axis_length = self._get_axis_length(custom_axis_index + 1) - except ValueError: - self.logger.debug('No WCS Custom axis.function') - return - - if custom_axis_length: - chunk.custom_axis = custom_axis_index + 1 - naxis = CoordAxis1D(self._get_axis(custom_axis_index)) - if self.wcs.has_cd(): - delta = self.wcs.cd[custom_axis_index][custom_axis_index] - else: - delta = self.wcs.cdelt[custom_axis_index] - ref_coord = self._get_ref_coord(custom_axis_index) - if delta and ref_coord: - naxis.function = CoordFunction1D(custom_axis_length, delta, ref_coord) - if not chunk.custom: - chunk.custom = CustomWCS(naxis) - else: - chunk.custom.axis = naxis - - self.logger.debug('End Custom WCS augmentation.') - - def augment_energy(self, chunk): - """ - Augments the energy information in a chunk - :param chunk: - """ - self.logger.debug('Begin Energy WCS augmentation.') - if chunk is None or not isinstance(chunk, Chunk): - raise ValueError(f'Chunk type mis-match for {chunk}.') - - # get the energy axis - energy_axis_index = self._get_axis_index(ENERGY_CTYPES) - - if energy_axis_index is None: - self.logger.debug('No WCS Energy info.') - return - try: - energy_axis_length = self._get_axis_length(energy_axis_index + 1) - except ValueError: - self.logger.debug('No WCS Energy axis.function') - return - - if energy_axis_length: - chunk.energy_axis = energy_axis_index + 1 - naxis = CoordAxis1D(self._get_axis(energy_axis_index)) - naxis.error = self._get_coord_error(energy_axis_index) - if self.wcs.has_cd(): - delta = self.wcs.cd[energy_axis_index][energy_axis_index] - else: - delta = self.wcs.cdelt[energy_axis_index] - ref_coord = self._get_ref_coord(energy_axis_index) - if delta and ref_coord: - naxis.function = CoordFunction1D(energy_axis_length, delta, ref_coord) - - specsys = _to_str(self.wcs.specsys) - if not chunk.energy: - chunk.energy = SpectralWCS(naxis, specsys) - else: - chunk.energy.axis = naxis - chunk.energy.specsys = specsys - - chunk.energy.ssysobs = _to_str(self._sanitize(self.wcs.ssysobs)) - # wcs returns 0.0 by default - if self._sanitize(self.wcs.restfrq) != 0: - chunk.energy.restfrq = self._sanitize(self.wcs.restfrq) - if self._sanitize(self.wcs.restwav) != 0: - chunk.energy.restwav = self._sanitize(self.wcs.restwav) - chunk.energy.velosys = self._sanitize(self.wcs.velosys) - chunk.energy.zsource = self._sanitize(self.wcs.zsource) - chunk.energy.ssyssrc = _to_str(self._sanitize(self.wcs.ssyssrc)) - chunk.energy.velang = self._sanitize(self.wcs.velangl) - self.logger.debug('End Energy WCS augmentation.') - - 
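A minimal sketch of how the per-axis function above is assembled: the increment comes from the CD matrix diagonal when one is present, otherwise from CDELT, and the 1D function is only created when both the increment and the reference coordinate resolve. The RefCoord/CoordFunction1D namedtuples are stand-ins for the caom2 classes and the WCS values are invented for the example.

    from collections import namedtuple
    from astropy.wcs import WCS

    RefCoord = namedtuple('RefCoord', ['pix', 'val'])
    CoordFunction1D = namedtuple('CoordFunction1D', ['naxis', 'delta', 'ref_coord'])

    w = WCS(naxis=1)
    w.wcs.ctype = ['WAVE']
    w.wcs.cunit = ['nm']
    w.wcs.crpix = [1.0]
    w.wcs.crval = [550.0]
    w.wcs.cdelt = [0.1]

    i = 0                                            # 0-based index of the energy axis
    delta = w.wcs.cd[i][i] if w.wcs.has_cd() else w.wcs.cdelt[i]
    ref_coord = RefCoord(float(w.wcs.crpix[i]), float(w.wcs.crval[i]))
    # guard against a partially-specified axis, as in the augmentation code above
    function = CoordFunction1D(1024, delta, ref_coord) if delta and ref_coord else None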
def augment_position(self, chunk): - """ - Augments a chunk with spatial WCS information - :param chunk: - :return: - """ - self.logger.debug('Begin Spatial WCS augmentation.') - if chunk is None or not isinstance(chunk, Chunk): - raise ValueError(f'Chunk type mis-match for {chunk}.') - - position_axes_indices = self._get_position_axis() - if not position_axes_indices: - self.logger.debug('No Spatial WCS found') - return - - chunk.position_axis_1 = position_axes_indices[0] - chunk.position_axis_2 = position_axes_indices[1] - axis = self._get_spatial_axis(chunk.position_axis_1 - 1, - chunk.position_axis_2 - 1) - - if axis is None: - self.logger.debug('No WCS Position axis.function') - return - - if chunk.position: - chunk.position.axis = axis - else: - chunk.position = SpatialWCS(axis) - - chunk.position.coordsys = _to_str(self._sanitize(self.wcs.radesys)) - temp = self._sanitize(self.wcs.equinox) - if (temp is not None and 1800.0 <= temp <= 2500) or temp is None: - chunk.position.equinox = temp - - self._finish_chunk_position(chunk) - self.logger.debug('End Spatial WCS augmentation.') - - def augment_temporal(self, chunk): - """ - Augments a chunk with temporal WCS information - - :param chunk: - :return: - """ - self.logger.debug('Begin TemporalWCS augmentation.') - if chunk is None or not isinstance(chunk, Chunk): - raise ValueError(f'Chunk type mis-match for {chunk}.') - - time_axis_index = self._get_axis_index(TIME_KEYWORDS) - - if time_axis_index is None: - self.logger.debug('No WCS Time info.') - return - - chunk.time_axis = time_axis_index + 1 - # set chunk.time - self.logger.debug('Begin temporal axis augmentation.') - - try: - axis_length = self._get_axis_length(time_axis_index + 1) - except ValueError: - self.logger.debug('No WCS Temporal axis.function') - return - - if axis_length: - aug_naxis = self._get_axis(time_axis_index) - aug_error = self._get_coord_error(time_axis_index) - aug_ref_coord = self._get_ref_coord(time_axis_index) - if self.wcs.has_cd(): - delta = self.wcs.cd[time_axis_index][time_axis_index] - else: - delta = self.wcs.cdelt[time_axis_index] - if aug_ref_coord is not None: - aug_function = CoordFunction1D(axis_length, delta, aug_ref_coord) - naxis = CoordAxis1D(aug_naxis, aug_error, None, None, aug_function) - if not chunk.time: - chunk.time = TemporalWCS(naxis) - else: - chunk.time.axis = naxis - - self._finish_chunk_time(chunk) - self.logger.debug('End TemporalWCS augmentation.') - - def augment_polarization(self, chunk): - """ - Augments a chunk with polarization WCS information - :param chunk: - :return: - """ - self.logger.debug('Begin Polarization WCS augmentation.') - if chunk is None or not isinstance(chunk, Chunk): - raise ValueError(f'Chunk type mis-match for {chunk}.') - - polarization_axis_index = self._get_axis_index(POLARIZATION_CTYPES) - if polarization_axis_index is None: - self.logger.debug('No WCS Polarization info') - return - - try: - axis_length = self._get_axis_length(polarization_axis_index + 1) - except ValueError: - self.logger.debug('No WCS Polarization axis.function') - return - - if axis_length: - chunk.polarization_axis = polarization_axis_index + 1 - - naxis = CoordAxis1D(self._get_axis(polarization_axis_index)) - if self.wcs.has_cd(): - delta = self.wcs.cd[polarization_axis_index][polarization_axis_index] - else: - delta = self.wcs.cdelt[polarization_axis_index] - ref_coord = self._get_ref_coord(polarization_axis_index) - if delta and ref_coord: - naxis.function = CoordFunction1D(axis_length, delta, ref_coord) - if not 
chunk.polarization: - chunk.polarization = PolarizationWCS(naxis) - else: - chunk.polarization.axis = naxis - - self.logger.debug('End Polarization WCS augmentation.') - - def augment_observable(self, chunk): - """ - Augments a chunk with an observable axis. - - :param chunk: - :return: - """ - self.logger.debug('Begin Observable WCS augmentation.') - if chunk is None or not isinstance(chunk, Chunk): - raise ValueError(f'Chunk type mis-match for {chunk}.') - - observable_axis_index = self._get_axis_index(OBSERVABLE_CTYPES) - if observable_axis_index is None: - self.logger.debug('No Observable axis info') - return - - chunk.observable_axis = observable_axis_index + 1 - self._finish_chunk_observable(chunk) - self.logger.debug('End Observable WCS augmentation.') - - def _finish_chunk_observable(self, chunk): - self.logger.debug('Begin _finish_chunk_observable') - ctype = self._wcs.wcs.ctype[chunk.observable_axis-1] - cunit = self._wcs.wcs.ctype[chunk.observable_axis-1] - pix_bin = _to_int(self._wcs.wcs.crpix[chunk.observable_axis-1]) - if ctype is not None and cunit is not None and pix_bin is not None: - chunk.observable = ObservableAxis( - Slice(self._get_axis(0, ctype, cunit), pix_bin)) - self.logger.debug('End _finish_chunk_observable') - - def _finish_chunk_position(self, chunk): - self.logger.debug('Begin _finish_chunk_position') - if chunk.position.resolution is None: - try: - # JJK 30-01-23 - # In a spatial data chunk the resolution is 2 times the pixel size. We can get the pixel size from - # the wcs - temp = utils.proj_plane_pixel_scales(self._wcs) - chunk.position.resolution = temp[0] - except SingularMatrixError as e: - # cannot calculate position.resolution, ignore and continue on - self.logger.warning(f'Not calculating resolution due to {e}') - self.logger.debug('End _finish_chunk_position') - - def _finish_chunk_time(self, chunk): - self.logger.debug('Begin _finish_chunk_time') - if not math.isnan(self._wcs.wcs.xposure): - chunk.time.exposure = self._wcs.wcs.xposure - if self._wcs.wcs.timesys is not None and self._wcs.wcs.timesys != '': - chunk.time.timesys = self._wcs.wcs.timesys - if self._wcs.wcs.trefpos is not None and self._wcs.wcs.trefpos != '': - chunk.time.trefpos = self._wcs.wcs.trefpos - if self._wcs.wcs.mjdref is not None and self._wcs.wcs.mjdref[0] != '' and self._wcs.wcs.mjdref[0] != 0.0: - # the astropy value is an array of length 2, use the first value - chunk.time.mjdref = self._wcs.wcs.mjdref[0] - self.logger.debug('End _finish_chunk_time') - - def _finish_energy(self): - self.logger.debug('Begin _finish_energy') - if self._blueprint._energy_axis_configed: - x = self._blueprint._get('Chunk.energy.specsys', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.specsys = x - x = self._blueprint._get('Chunk.energy.ssysobs', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.ssysobs = x - x = self._blueprint._get('Chunk.energy.restfrq', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.restfrq = _to_float(x) - x = self._blueprint._get('Chunk.energy.restwav', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.restwav = x - x = self._blueprint._get('Chunk.energy.velosys', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.velosys = x - x = self._blueprint._get('Chunk.energy.zsource', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.zsource = x - x = self._blueprint._get('Chunk.energy.ssyssrc', 
self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.ssyssrc = x - x = self._blueprint._get('Chunk.energy.velang', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.velangl = x - self.logger.debug('End _finish_energy') - - def _finish_position(self): - self.logger.debug('Begin _finish_position') - if self._blueprint._pos_axes_configed: - x = self._blueprint._get('Chunk.position.coordsys', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.radesys = x - x = self._blueprint._get('Chunk.position.equinox', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.equinox = _to_float(x) - self.logger.debug('End _finish_position') - - def _finish_time(self): - self.logger.debug('Begin _finish_time') - if self._blueprint._time_axis_configed: - x = self._blueprint._get('Chunk.time.exposure', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.xposure = _to_float(x) - x = self._blueprint._get('Chunk.time.timesys', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.timesys = x - x = self._blueprint._get('Chunk.time.trefpos', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.trefpos = x - x = self._blueprint._get('Chunk.time.mjdref', self._extension) - if x and not ObsBlueprint.needs_lookup(x): - self._wcs.wcs.mjdref = [x, x] - self.logger.debug('End _finish_time') - - def _get_axis(self, index, over_ctype=None, over_cunit=None): - """ Assemble a generic axis """ - aug_ctype = str(self.wcs.ctype[index]) if over_ctype is None \ - else over_ctype - aug_cunit = str(self.wcs.cunit[index]) if over_cunit is None \ - else over_cunit - if aug_cunit is not None and len(aug_cunit) == 0: - aug_cunit = None - aug_axis = Axis(aug_ctype, aug_cunit) - return aug_axis - - def _get_axis_index(self, keywords): - """ - Return the index of a specific axis type or None of it doesn't exist - :param keywords: - :return: - """ - axis = None - for i, elem in enumerate(self.wcs.ctype): - elem = elem.split('-')[0] - if elem in keywords: - axis = i - break - elif len(elem) == 0: - check = self.wcs.ctype[i] - if check in keywords: - axis = i - break - return axis - - def _get_axis_length(self, for_axis): - if self._wcs.array_shape is None: - return 0 - else: - if len(self._wcs.array_shape) == 1: - result = self._wcs.array_shape[0] - else: - result = self._wcs.array_shape[for_axis-1] - if isinstance(result, tuple): - # the blueprint is incompletely configured - raise ValueError(f'Could not find axis length for axis {for_axis}') - return _to_int(result) - - def _get_cd(self, x_index, y_index): - """ returns cd info""" - - try: - if self.wcs.has_cd(): - cd11 = self.wcs.cd[x_index][x_index] - cd12 = self.wcs.cd[x_index][y_index] - cd21 = self.wcs.cd[y_index][x_index] - cd22 = self.wcs.cd[y_index][y_index] - else: - cd11 = self.wcs.cdelt[x_index] - cd12 = self.wcs.crota[x_index] - cd21 = self.wcs.crota[y_index] - cd22 = self.wcs.cdelt[y_index] - except AttributeError: - self.logger.debug( - f'Error searching for CD* values {sys.exc_info()[1]}') - cd11 = None - cd12 = None - cd21 = None - cd22 = None - - return cd11, cd12, cd21, cd22 - - def _get_coord_error(self, index): - aug_coord_error = None - aug_csyer = self._sanitize(self.wcs.csyer[index]) - aug_crder = self._sanitize(self.wcs.crder[index]) - if aug_csyer is not None and aug_crder is not None: - aug_coord_error = CoordError(aug_csyer, aug_crder) - return aug_coord_error - - def _get_dimension(self, 
xindex, yindex): - aug_dimension = None - try: - xindex_axis_length = self._get_axis_length(xindex + 1) - yindex_axis_length = self._get_axis_length(yindex + 1) - except ValueError: - self.logger.debug('No WCS Energy axis.function') - return None - - if xindex_axis_length > 0 and yindex_axis_length > 0: - aug_dim1 = _to_int(xindex_axis_length) - aug_dim2 = _to_int(yindex_axis_length) - if aug_dim1 and aug_dim2: - aug_dimension = Dimension2D(aug_dim1, aug_dim2) - self.logger.debug('End 2D dimension augmentation.') - return aug_dimension - - def _get_position_axis(self): - # there are two celestial axes, get the applicable indices from - # the axis_types - xindex = self._get_axis_index(POSITION_CTYPES[0]) - yindex = self._get_axis_index(POSITION_CTYPES[1]) - - if (xindex is not None) and (yindex is not None): - return xindex + 1, yindex + 1 - elif (xindex is None) and (yindex is None): - return None - else: - raise ValueError('Found only one position axis ra/dec: {}/{} in ' - '{}'. - format(xindex, yindex, self.file)) - - def _get_ref_coord(self, index): - aug_crpix = _to_float(self._sanitize(self.wcs.crpix[index])) - aug_crval = _to_float(self._sanitize(self.wcs.crval[index])) - aug_ref_coord = None - if aug_crpix is not None and aug_crval is not None: - aug_ref_coord = RefCoord(aug_crpix, aug_crval) - return aug_ref_coord - - def _get_spatial_axis(self, xindex, yindex): - """Assemble the bits to make the axis parameter needed for - SpatialWCS construction.""" - aug_dimension = self._get_dimension(xindex, yindex) - if aug_dimension is None: - return None - - x_ref_coord = self._get_ref_coord(xindex) - y_ref_coord = self._get_ref_coord(yindex) - aug_ref_coord = None - if x_ref_coord and y_ref_coord: - aug_ref_coord = Coord2D(x_ref_coord, y_ref_coord) - - aug_cd11, aug_cd12, aug_cd21, aug_cd22 = \ - self._get_cd(xindex, yindex) - - if aug_dimension is not None and \ - aug_ref_coord is not None and \ - aug_cd11 is not None and \ - aug_cd12 is not None and \ - aug_cd21 is not None and \ - aug_cd22 is not None: - aug_function = CoordFunction2D(aug_dimension, aug_ref_coord, - aug_cd11, aug_cd12, - aug_cd21, aug_cd22) - self.logger.debug('End CoordFunction2D augmentation.') - else: - aug_function = None - - aug_axis = CoordAxis2D(self._get_axis(xindex), - self._get_axis(yindex), - self._get_coord_error(xindex), - self._get_coord_error(yindex), - None, None, aug_function) - self.logger.debug('End CoordAxis2D augmentation.') - return aug_axis - - def _sanitize(self, value): - """ - Sanitizes values from content to caom2 - :param value: - :return: - """ - if value is None: - return None - elif isinstance(value, float) and math.isnan(value): - return None - elif not str(value): - return None # empty string - else: - return value - - -class FitsWcsParser(WcsParser): - """ - Parser to augment chunks with positional, temporal, energy and polarization - information based on the WCS keywords in an extension of a FITS header. - - Note: Under the hood, this class uses the astropy.wcs package to parse the - header and any inconsistencies or missing keywords are reported back as - warnings. - """ - - def __init__(self, header, file, extension): - """ - - :param header: FITS extension header - :param file: name of FITS file - :param extension: which HDU - WCS axes methods of this class. 
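The guard in _get_ref_coord and _get_spatial_axis above means the 2D reference coordinate, and therefore the CoordFunction2D, is only assembled when every ingredient is present. A minimal sketch with the caom2 classes used there; the values are invented and the helper is not part of the patch:

    from caom2 import Coord2D, CoordFunction2D, Dimension2D, RefCoord

    def make_ref_coord(crpix, crval):
        # only build a RefCoord when both the pixel and world values survive sanitizing
        if crpix is not None and crval is not None:
            return RefCoord(float(crpix), float(crval))
        return None

    x_ref = make_ref_coord(1.0, 150.1)
    y_ref = make_ref_coord(1.0, None)            # e.g. CRVAL2 missing from the header
    ref_coord = Coord2D(x_ref, y_ref) if x_ref and y_ref else None

    function = None
    if ref_coord is not None:
        # with a complete reference coordinate and CD values the function can be built
        function = CoordFunction2D(Dimension2D(2048, 2048), ref_coord, 5.0e-4, 0.0, 0.0, 5.0e-4)
    # here ref_coord is None, so the axis is left without a function instead of failing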
- """ - self.logger = logging.getLogger(self.__class__.__name__) - self.log_filter = HDULoggingFilter() - self.log_filter.extension(extension) - self.logger.addFilter(self.log_filter) - logastro = logging.getLogger('astropy') - logastro.addFilter(self.log_filter) - logastro.propagate = False - header_string = header.tostring().rstrip() - header_string = header_string.replace('END' + ' ' * 77, '') - self.wcs = Wcsprm(header_string.encode('ascii')) - self.wcs.fix() - self.header = header - self.file = file - self.extension = extension - - def _finish_chunk_observable(self, chunk): - self.logger.debug('Begin _finish_chunk_observable') - ctype = self.header.get(f'CTYPE{chunk.observable_axis}') - cunit = self.header.get(f'CUNIT{chunk.observable_axis}') - pix_bin = self.header.get(f'CRPIX{chunk.observable_axis}') - if ctype is not None and cunit is not None and pix_bin is not None: - chunk.observable = ObservableAxis( - Slice(self._get_axis(0, ctype, cunit), pix_bin)) - self.logger.debug('End _finish_chunk_observable') - - def _finish_chunk_position(self, chunk): - pass - - def _finish_chunk_time(self, chunk): - """ - The expected caom2 - FITS keywords mapping is: - - time.exposure = EXPTIME - time.resolution = TIMEDEL - time.timesys = TIMESYS default UTC - time.trefpos = TREFPOS - time.mjdref = MJDREF | MJDDATE - """ - self.logger.debug('Begin _finish_chunk_time') - chunk.time.exposure = _to_float(self.header.get('EXPTIME')) - chunk.time.resolution = _to_float(self.header.get('TIMEDEL')) - chunk.time.timesys = str(self.header.get('TIMESYS', 'UTC')) - chunk.time.trefpos = self.header.get('TREFPOS', None) - chunk.time.mjdref = self.header.get('MJDREF', - self.header.get('MJDDATE')) - self.logger.debug('End _finish_chunk_time') - - def _get_axis_length(self, for_axis): - # try ZNAXIS first in order to get the size of the original - # image in case it was FITS compressed - result = _to_int(self._sanitize( - self.header.get(f'ZNAXIS{for_axis}'))) - if result is None: - result = _to_int(self._sanitize( - self.header.get(f'NAXIS{for_axis}'))) - if result is None: - msg = f'Could not find axis length for axis {for_axis}' - raise ValueError(msg) - return result - - -class Hdf5WcsParser(WcsParser): - """ - This class initializes an astropy.wcs instance with metadata from an - Hdf5ObsBlueprint populated using an Hdf5Parser. 
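A small, self-contained sketch of the ZNAXISn-before-NAXISn lookup in _get_axis_length above; the header and its values are hypothetical and only show why the uncompressed dimensions win for a tile-compressed file:

    from astropy.io import fits

    header = fits.Header()
    header['NAXIS1'] = 1            # dimensions of the compressed representation
    header['ZNAXIS1'] = 2048        # dimensions of the original image

    def axis_length(header, for_axis):
        value = header.get(f'ZNAXIS{for_axis}', header.get(f'NAXIS{for_axis}'))
        if value is None:
            raise ValueError(f'Could not find axis length for axis {for_axis}')
        return int(value)

    print(axis_length(header, 1))    # -> 2048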
- """ - - def __init__(self, blueprint, extension): - """ - :param blueprint: ObsBlueprint - """ - super().__init__(blueprint, extension) - - def _get_axis_index(self, keywords): - result = self._axes['custom'][0] - if 'RA' in keywords: - result = self._axes['ra'][0] - elif 'DEC' in keywords: - result = self._axes['dec'][0] - elif 'TIME' in keywords: - result = self._axes['time'][0] - elif 'FREQ' in keywords: - result = self._axes['energy'][0] - elif 'STOKES' in keywords: - result = self._axes['polarization'][0] - elif 'FLUX' in keywords: - result = self._axes['observable'][0] - return result - - -def _to_str(value): - return str(value).strip() if value is not None else None - - -def _to_float(value): - return float(value) if value is not None else None - - -def _to_int(value): - return int(value) if value is not None else None - - -def _to_int_32(value): - if value is None: - return None - elif isinstance(value, str): - return int_32(value) - else: - return value - - -def _to_checksum_uri(value): - if value is None: - return None - elif isinstance(value, ChecksumURI): - return value - else: - return ChecksumURI(value) - - -def _set_by_type(header, keyword, value): - """astropy documentation says that the type of the second - parameter in the 'set' call is 'str', and then warns of expectations - for floating-point values when the code does that, so make float values - into floats, and int values into ints.""" - float_value = None - int_value = None - - try: - float_value = float(value) - except ValueError: - pass - - try: - int_value = int(value) - except ValueError: - pass - - if (float_value and not str(value).isdecimal() or - re.match(r'0\.0*', str(value))): - header.set(keyword, float_value) - elif int_value: - header.set(keyword, int_value) - else: - header.set(keyword, value) - - def get_external_headers(external_url): try: session = requests.Session() diff --git a/caom2utils/caom2utils/legacy.py b/caom2utils/caom2utils/legacy.py index 298fc9a3..338132ca 100755 --- a/caom2utils/caom2utils/legacy.py +++ b/caom2utils/caom2utils/legacy.py @@ -69,7 +69,9 @@ import logging import sys +from . import blueprints from . import caom2blueprint +from . 
import wcs_parsers import traceback APP_NAME = 'fits2caom2' @@ -370,17 +372,17 @@ def _update_axis_info(parser, defaults, overrides, config): for key, value in i.items(): if (key.startswith('CTYPE')) and key[-1].isdigit(): value = value.split('-')[0] - if value in caom2blueprint.ENERGY_CTYPES: + if value in wcs_parsers.ENERGY_CTYPES: energy_axis = key[-1] - elif value in caom2blueprint.POLARIZATION_CTYPES: + elif value in wcs_parsers.POLARIZATION_CTYPES: polarization_axis = key[-1] - elif value in caom2blueprint.TIME_KEYWORDS: + elif value in wcs_parsers.TIME_KEYWORDS: time_axis = key[-1] - elif value in caom2blueprint.POSITION_CTYPES[0]: + elif value in wcs_parsers.POSITION_CTYPES[0]: ra_axis = key[-1] - elif value in caom2blueprint.POSITION_CTYPES[1]: + elif value in wcs_parsers.POSITION_CTYPES[1]: dec_axis = key[-1] - elif value in caom2blueprint.OBSERVABLE_CTYPES: + elif value in wcs_parsers.OBSERVABLE_CTYPES: obs_axis = key[-1] else: raise ValueError(f'Unrecognized CTYPE: {value}') @@ -546,9 +548,9 @@ def main_app(): obs_blueprint = {} for i, uri in enumerate(args.fileURI): if '.h5' in uri: - obs_blueprint[uri] = caom2blueprint.Hdf5ObsBlueprint() + obs_blueprint[uri] = blueprints.Hdf5ObsBlueprint() else: - obs_blueprint[uri] = caom2blueprint.ObsBlueprint() + obs_blueprint[uri] = blueprints.ObsBlueprint() if config: result = update_blueprint(obs_blueprint[uri], uri, config, defaults, overrides) diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py new file mode 100644 index 00000000..d140015a --- /dev/null +++ b/caom2utils/caom2utils/parsers.py @@ -0,0 +1,2159 @@ +# *********************************************************************** +# ****************** CANADIAN ASTRONOMY DATA CENTRE ******************* +# ************* CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +# +# (c) 2024. (c) 2024. +# Government of Canada Gouvernement du Canada +# National Research Council Conseil national de recherches +# Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +# All rights reserved Tous droits réservés +# +# NRC disclaims any warranties, Le CNRC dénie toute garantie +# expressed, implied, or énoncée, implicite ou légale, +# statutory, of any kind with de quelque nature que ce +# respect to the software, soit, concernant le logiciel, +# including without limitation y compris sans restriction +# any warranty of merchantability toute garantie de valeur +# or fitness for a particular marchande ou de pertinence +# purpose. NRC shall not be pour un usage particulier. +# liable in any event for any Le CNRC ne pourra en aucun cas +# damages, whether direct or être tenu responsable de tout +# indirect, special or general, dommage, direct ou indirect, +# consequential or incidental, particulier ou général, +# arising from the use of the accessoire ou fortuit, résultant +# software. Neither the name de l'utilisation du logiciel. Ni +# of the National Research le nom du Conseil National de +# Council of Canada nor the Recherches du Canada ni les noms +# names of its contributors may de ses participants ne peuvent +# be used to endorse or promote être utilisés pour approuver ou +# products derived from this promouvoir les produits dérivés +# software without specific prior de ce logiciel sans autorisation +# written permission. préalable et particulière +# par écrit. +# +# This file is part of the Ce fichier fait partie du projet +# OpenCADC project. OpenCADC. 
+# +# OpenCADC is free software: OpenCADC est un logiciel libre ; +# you can redistribute it and/or vous pouvez le redistribuer ou le +# modify it under the terms of modifier suivant les termes de +# the GNU Affero General Public la “GNU Affero General Public +# License as published by the License” telle que publiée +# Free Software Foundation, par la Free Software Foundation +# either version 3 of the : soit la version 3 de cette +# License, or (at your option) licence, soit (à votre gré) +# any later version. toute version ultérieure. +# +# OpenCADC is distributed in the OpenCADC est distribué +# hope that it will be useful, dans l’espoir qu’il vous +# but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +# without even the implied GARANTIE : sans même la garantie +# warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +# or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +# PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +# General Public License for Générale Publique GNU Affero +# more details. pour plus de détails. +# +# You should have received Vous devriez avoir reçu une +# a copy of the GNU Affero copie de la Licence Générale +# General Public License along Publique GNU Affero avec +# with OpenCADC. If not, see OpenCADC ; si ce n’est +# . pas le cas, consultez : +# . +# +# Revision: 4 +# +# *********************************************************************** +# + +import logging +import re +import sys +import traceback + +from astropy.io import fits +from astropy.time import Time +from collections import defaultdict +from datetime import datetime + +import caom2 + +from caom2utils import data_util +from caom2utils.blueprints import ObsBlueprint, _to_int, _to_int_32, _to_float, _to_str +from caom2utils.wcs_parsers import FitsWcsParser, Hdf5WcsParser, WcsParser + + +class Caom2Exception(Exception): + """Exception raised when an attempt to create or update a CAOM2 record + fails for some reason.""" + pass + + +class BlueprintParser: + """ + Extract CAOM2 metadata from files with no WCS information. + """ + def __init__(self, obs_blueprint=None, uri=None): + if obs_blueprint: + self._blueprint = obs_blueprint + else: + self._blueprint = ObsBlueprint() + self._errors = [] + self.logger = logging.getLogger(__name__) + self.uri = uri + self.apply_blueprint() + + @property + def blueprint(self): + return self._blueprint + + @blueprint.setter + def blueprint(self, value): + self._blueprint = value + self.apply_blueprint() + + def apply_blueprint(self): + plan = self.blueprint._plan + + # first apply the functions + if (self.blueprint._module is not None or + self.blueprint._module_instance is not None): + for key, value in plan.items(): + if ObsBlueprint.is_function(value): + if self._blueprint._module_instance is None: + plan[key] = self._execute_external(value, key, 0) + else: + plan[key] = self._execute_external_instance( + value, key, 0) + + # apply defaults + for key, value in plan.items(): + if ObsBlueprint.has_default_value(value): + # there is a default value set + if key in plan: + plan[key] = value[1] + + def augment_observation(self, observation, artifact_uri, product_id=None): + """ + Augments a given observation with plane structure only. + :param observation: existing CAOM2 observation to be augmented. 
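A toy stand-in, not the real ObsBlueprint internals, for the two passes apply_blueprint makes above: function entries would be resolved first, then keyword-lookup entries that carry a default collapse to that default. The dictionary and its keys and values below are invented for illustration:

    # (FITS keywords to look up, default value) tuples stand in for blueprint lookup entries
    plan = {
        'Observation.type': 'OBJECT',
        'Observation.observationID': (['OBSID'], None),
        'Plane.calibrationLevel': (['PROCLVL'], 2),
    }
    for key, value in plan.items():
        if isinstance(value, tuple) and value[1] is not None:
            plan[key] = value[1]      # keep the configured default
    # plan['Plane.calibrationLevel'] is now 2; the OBSID lookup is left for the header pass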
+ :param artifact_uri: the key for finding the artifact to augment + :param product_id: the key for finding for the plane to augment + """ + self.logger.debug( + f'Begin CAOM2 observation augmentation for URI {artifact_uri}.') + if observation is None or not isinstance(observation, caom2.Observation): + raise ValueError( + f'Observation type mis-match for {observation}.') + + observation.meta_release = self._get_datetime(self._get_from_list( + 'Observation.metaRelease', index=0, + current=observation.meta_release)) + observation.meta_read_groups = self._get_from_list( + 'Observation.metaReadGroups', index=0, + current=observation.meta_read_groups) + observation.meta_producer = self._get_from_list( + 'Observation.metaProducer', index=0, + current=observation.meta_producer) + + plane = None + if not product_id: + product_id = self._get_from_list('Plane.productID', index=0) + if product_id is None: + raise ValueError('product ID required') + + for ii in observation.planes: + if observation.planes[ii].product_id == product_id: + plane = observation.planes[product_id] + break + if plane is None: + plane = caom2.Plane(product_id=product_id) + observation.planes[product_id] = plane + self.augment_plane(plane, artifact_uri) + self.logger.debug( + f'End CAOM2 observation augmentation for {artifact_uri}.') + + def augment_plane(self, plane, artifact_uri): + """ + Augments a given plane with artifact structure only. + :param plane: existing CAOM2 plane to be augmented. + :param artifact_uri: + """ + self.logger.debug( + f'Begin CAOM2 plane augmentation for {artifact_uri}.') + if plane is None or not isinstance(plane, caom2.Plane): + raise ValueError(f'Plane type mis-match for {plane}') + + plane.meta_release = self._get_datetime(self._get_from_list( + 'Plane.metaRelease', index=0, current=plane.meta_release)) + plane.data_release = self._get_datetime(self._get_from_list( + 'Plane.dataRelease', index=0, current=plane.data_release)) + plane.data_product_type = self._to_data_product_type( + self._get_from_list('Plane.dataProductType', index=0, + current=plane.data_product_type)) + plane.calibration_level = self._to_calibration_level(_to_int_32( + self._get_from_list('Plane.calibrationLevel', index=0, + current=plane.calibration_level))) + plane.meta_producer = self._get_from_list( + 'Plane.metaProducer', index=0, current=plane.meta_producer) + + artifact = None + for ii in plane.artifacts: + artifact = plane.artifacts[ii] + if artifact.uri == artifact_uri: + break + if artifact is None or artifact.uri != artifact_uri: + artifact = caom2.Artifact(artifact_uri, self._to_product_type( + self._get_from_list('Artifact.productType', index=0)), + self._to_release_type(self._get_from_list( + 'Artifact.releaseType', index=0))) + plane.artifacts[artifact_uri] = artifact + self.augment_artifact(artifact, 0) + self.logger.debug( + f'End CAOM2 plane augmentation for {artifact_uri}.') + + def augment_artifact(self, artifact, index): + """ + Augments a given CAOM2 artifact with available information + :param artifact: existing CAOM2 artifact to be augmented + :param index: int Part name, used in specializing classes + """ + self.logger.debug(f'Begin CAOM2 artifact augmentation for {self.uri}.') + if artifact is None or not isinstance(artifact, caom2.Artifact): + raise ValueError( + f'Artifact type mis-match for {artifact}') + + artifact.product_type = self._to_product_type(self._get_from_list( + 'Artifact.productType', index=0, current=artifact.product_type)) + artifact.release_type = 
self._to_release_type(self._get_from_list( + 'Artifact.releaseType', index=0, current=artifact.release_type)) + artifact.content_type = self._get_from_list( + 'Artifact.contentType', index=0, current=artifact.content_type) + artifact.content_length = self._get_from_list( + 'Artifact.contentLength', index=0, current=artifact.content_length) + artifact.content_checksum = _to_checksum_uri(self._get_from_list( + 'Artifact.contentChecksum', index=0, + current=artifact.content_checksum)) + artifact.content_release = self._get_from_list( + 'Artifact.contentRelease', index=0, + current=artifact.content_release) + artifact.content_read_groups = self._get_from_list( + 'Artifact.contentReadGroups', index=0, + current=artifact.content_read_groups) + artifact.meta_producer = self._get_from_list( + 'Artifact.metaProducer', index=0, current=artifact.meta_producer) + self.logger.debug(f'End CAOM2 artifact augmentation for {self.uri}.') + + def _get_from_list(self, lookup, index, current=None): + value = None + try: + keywords = self.blueprint._get(lookup) + except KeyError: + self.add_error(lookup, sys.exc_info()[1]) + self.logger.debug( + f'Could not find {lookup} in configuration.') + if current: + self.logger.debug( + f'{lookup}: using current value of {current!r}.') + value = current + return value + if (keywords is not None and not ObsBlueprint.needs_lookup(keywords) + and not ObsBlueprint.is_function(keywords)): + value = keywords + elif self._blueprint.update: + # The first clause: boolean attributes are used to represent + # three different values: True, False, and unknown. For boolean + # attributes _only_ assessed that the risk of setting to None + # accidentally was better than being unable to set a value of + # 'unknown'. + # + # The second clause: the default value for the current parameter + # in the method signature is 'None', so do not want to + # inadvertently assign the default value. 
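The find-or-create pattern in augment_observation and augment_plane above, sketched directly with the caom2 classes; the collection, identifiers, and URI are invented, and the SimpleObservation/Plane/Artifact constructor calls are assumptions about the caom2 API rather than part of the patch:

    import caom2

    obs = caom2.SimpleObservation('TEST', 'obs-123', caom2.Algorithm('exposure'))

    product_id = 'raw'
    plane = obs.planes.get(product_id)
    if plane is None:
        plane = caom2.Plane(product_id=product_id)
        obs.planes[product_id] = plane

    uri = 'cadc:TEST/sample.fits'
    artifact = plane.artifacts.get(uri)
    if artifact is None:
        artifact = caom2.Artifact(uri, caom2.ProductType.SCIENCE, caom2.ReleaseType.DATA)
        plane.artifacts[uri] = artifact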
+ # + if isinstance(value, bool) or current is not None: + value = current + + self.logger.debug(f'{lookup}: value is {value}') + return value + + def _get_set_from_list(self, lookup, index): + value = None + keywords = None + try: + keywords = self.blueprint._get(lookup) + except KeyError: + self.add_error(lookup, sys.exc_info()[1]) + self.logger.debug(f'Could not find \'{lookup}\' in caom2blueprint ' + f'configuration.') + + # if there's something useful as a value in the keywords, + # extract it + if keywords: + if ObsBlueprint.needs_lookup(keywords): + # if there's a default value use it + if keywords[1]: + value = keywords[1] + self.logger.debug( + f'{lookup}: assigned default value {value}.') + elif not ObsBlueprint.is_function(keywords): + value = keywords + self.logger.debug(f'{lookup}: assigned value {value}.') + return value + + def add_error(self, key, message): + self._errors.append('{} {} {}'.format( + datetime.now().strftime('%Y-%m-%dT%H:%M:%S'), key, message)) + + def _to_data_product_type(self, value): + return self._to_enum_type(value, caom2.DataProductType) + + def _to_calibration_level(self, value): + return self._to_enum_type(value, caom2.CalibrationLevel) + + def _to_product_type(self, value): + return self._to_enum_type(value, caom2.ProductType) + + def _to_release_type(self, value): + return self._to_enum_type(value, caom2.ReleaseType) + + def _to_enum_type(self, value, to_enum_type): + if value is None: + raise ValueError( + f'Must set a value of {to_enum_type.__name__} for ' + f'{self.uri}.') + elif isinstance(value, to_enum_type): + return value + else: + return to_enum_type(value) + + def _execute_external(self, value, key, extension): + """Execute a function supplied by a user, assign a value to a + blueprint entry. The input parameters passed to the function are the + headers as read in by astropy, or the artifact uri. + + :param value the name of the function to apply. + :param key: + :param extension: the current extension name or number. + """ + # determine which of the possible values for parameter the user + # is hoping for + if 'uri' in value: + parameter = self.uri + elif 'header' in value and isinstance(self, FitsParser): + parameter = self._headers[extension] + elif isinstance(self, FitsParser): + parameter = {'uri': self.uri, + 'header': self._headers[extension]} + else: + if hasattr(self, '_file'): + parameter = {'base': self._file} + else: + parameter = {'uri': self.uri, + 'header': None} + + result = '' + execute = None + try: + execute = getattr(self.blueprint._module, value.split('(')[0]) + except Exception as e: + msg = 'Failed to find {}.{} for {}'.format( + self.blueprint._module.__name__, value.split('(')[0], key) + self.logger.error(msg) + self._errors.append(msg) + tb = traceback.format_exc() + self.logger.debug(tb) + self.logger.error(e) + try: + result = execute(parameter) + self.logger.debug(f'Key {key} calculated value of {result} using {value} type {type(result)}') + except Exception as e: + msg = 'Failed to execute {} for {} in {}'.format( + execute.__name__, key, self.uri) + self.logger.error(msg) + self.logger.debug('Input parameter was {}, value was {}'.format( + parameter, value)) + self._errors.append(msg) + tb = traceback.format_exc() + self.logger.debug(tb) + self.logger.error(e) + return result + + def _execute_external_instance(self, value, key, extension): + """Execute a function supplied by a user, assign a value to a + blueprint entry. 
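The enum coercion in _to_enum_type above amounts to passing instances through and constructing the member from its raw value otherwise; a short sketch, assuming the usual string values of the caom2 enumerations:

    import caom2

    def to_enum(value, enum_type):
        # pass existing members through, otherwise build one from the raw value
        return value if isinstance(value, enum_type) else enum_type(value)

    print(to_enum('science', caom2.ProductType))
    print(to_enum('timeseries', caom2.DataProductType))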
The input parameters passed to the function are the + headers as read in by astropy, or the artifact uri. + + :param value the name of the function to apply. + :param key: + :param extension: the current extension name or number. + :raise Caom2Exception exception raised when there is a recognizable + error in the information being used to create a CAOM2 record. A + correct and consistent CAOM2 record cannot be created from the + input metadata. The client should treat the Observation instance + under construction as invalid. + """ + result = '' + try: + execute = getattr( + self.blueprint._module_instance, value.split('(')[0]) + except Exception as e: + msg = 'Failed to find {}.{} for {}'.format( + self.blueprint._module_instance.__class__.__name__, + value.split('(')[0], key) + self.logger.error(msg) + self._errors.append(msg) + tb = traceback.format_exc() + self.logger.debug(tb) + self.logger.error(e) + return result + try: + result = execute(extension) + self.logger.debug('Key {} calculated value of {} using {}'.format(key, result, value)) + except ValueError as e2: + # DB 23-03-22 + # Anything that you can do to make the CAOM2 record creation fail + # in this case of bad WCS metadata would be useful. Use + # ValueError because that happens to be what astropy is throwing + # for a SkyCoord construction failure. + raise Caom2Exception(e2) + except Exception as e: + msg = 'Failed to execute {} for {} in {}'.format( + execute, key, self.uri) + self.logger.error(msg) + self.logger.debug('Input value was {}'.format(value)) + self._errors.append(msg) + tb = traceback.format_exc() + self.logger.debug(tb) + self.logger.error(e) + return result + + def _get_datetime(self, from_value): + """ + Ensure datetime values are in MJD. Really. Just not yet. + :param from_value: + :return: + """ + if from_value: + if isinstance(from_value, datetime): + return from_value + elif isinstance(from_value, Time): + return from_value.datetime + else: + result = None + # CFHT 2003/03/29,01:34:54 + # CFHT 2003/03/29 + # DDO 12/02/95 + for dt_format in ['%Y-%m-%dT%H:%M:%S', '%Y-%m-%dT%H:%M:%S.%f', + '%Y-%m-%d %H:%M:%S.%f', '%Y-%m-%d', + '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S', + '%Y/%m/%d,%H:%M:%S', '%Y/%m/%d', + '%d/%m/%y', '%d/%m/%y %H:%M:%S', '%d-%m-%Y']: + try: + result = datetime.strptime(from_value, dt_format) + except ValueError: + pass + + if result is None: + self.logger.error('Cannot parse datetime {}'.format( + from_value)) + self.add_error('get_datetime', sys.exc_info()[1]) + return result + else: + return None + + +class ContentParser(BlueprintParser): + + def __init__(self, obs_blueprint=None, uri=None): + super().__init__(obs_blueprint, uri) + self._wcs_parser = WcsParser(obs_blueprint, extension=0) + + def _get_chunk_naxis(self, chunk, index): + chunk.naxis = self._get_from_list( + 'Chunk.naxis', index, self.blueprint.get_configed_axes_count()) + + def augment_artifact(self, artifact, index): + """ + Augments a given CAOM2 artifact with available content information + :param artifact: existing CAOM2 artifact to be augmented + :param index: int Part name + """ + super().augment_artifact(artifact, index) + + self.logger.debug( + f'Begin content artifact augmentation for {artifact.uri}') + + if self.blueprint.get_configed_axes_count() == 0: + raise TypeError( + f'No WCS Data. 
End content artifact augmentation for ' + f'{artifact.uri}.') + + if self.add_parts(artifact, index): + part = artifact.parts[str(index)] + part.product_type = self._get_from_list('Part.productType', index) + part.meta_producer = self._get_from_list('Part.metaProducer', index=0, current=part.meta_producer) + + # each Part has one Chunk, if it's not an empty part as determined + # just previously + if not part.chunks: + part.chunks.append(caom2.Chunk()) + chunk = part.chunks[0] + chunk.meta_producer = self._get_from_list('Chunk.metaProducer', index=0, current=chunk.meta_producer) + + self._get_chunk_naxis(chunk, index) + + # order by which the blueprint is used to set WCS information: + # 1 - try to construct the information for an axis from WCS information + # 2 - if the WCS information is insufficient, try to construct the information from the blueprint + # 3 - Always try to fill the range metadata from the blueprint. + if self.blueprint._pos_axes_configed: + self._wcs_parser.augment_position(chunk) + self._try_position_with_blueprint(chunk, index) + + if self.blueprint._energy_axis_configed: + self._wcs_parser.augment_energy(chunk) + self._try_energy_with_blueprint(chunk, index) + + if self.blueprint._time_axis_configed: + self._wcs_parser.augment_temporal(chunk) + self._try_time_with_blueprint(chunk, index) + + if self.blueprint._polarization_axis_configed: + self._wcs_parser.augment_polarization(chunk) + self._try_polarization_with_blueprint(chunk, index) + + if self.blueprint._obs_axis_configed: + self._wcs_parser.augment_observable(chunk) + self._try_observable_with_blueprint(chunk, index) + + if self.blueprint._custom_axis_configed: + self._wcs_parser.augment_custom(chunk) + self._try_custom_with_blueprint(chunk, index) + + self.logger.debug( + f'End content artifact augmentation for {artifact.uri}.') + + def augment_observation(self, observation, artifact_uri, product_id=None): + """ + Augments a given observation with available content information. + :param observation: existing CAOM2 observation to be augmented. 
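The multi-format date handling in _get_datetime above boils down to trying strptime formats until one matches; a reduced sketch with only a few of the listed formats:

    from datetime import datetime

    def parse_datetime(value):
        # first matching format wins; None means the value could not be parsed
        for fmt in ('%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S', '%Y/%m/%d,%H:%M:%S', '%d/%m/%y'):
            try:
                return datetime.strptime(value, fmt)
            except ValueError:
                continue
        return None

    print(parse_datetime('2003/03/29,01:34:54'))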
+ :param artifact_uri: the key for finding the artifact to augment + :param product_id: the key for finding for the plane to augment + """ + super().augment_observation(observation, artifact_uri, product_id) + self.logger.debug( + f'Begin content observation augmentation for URI {artifact_uri}.') + members = self._get_members(observation) + if members: + if isinstance(members, caom2.TypedSet): + for m in members: + observation.members.add(m) + else: + for m in members.split(): + observation.members.add(caom2.ObservationURI(m)) + observation.algorithm = self._get_algorithm(observation) + + observation.sequence_number = _to_int(self._get_from_list( + 'Observation.sequenceNumber', index=0)) + observation.intent = self._get_from_list( + 'Observation.intent', 0, (caom2.ObservationIntentType.SCIENCE if + observation.intent is None else + observation.intent)) + observation.type = self._get_from_list('Observation.type', 0, + current=observation.type) + observation.meta_release = self._get_datetime( + self._get_from_list('Observation.metaRelease', 0, + current=observation.meta_release)) + observation.meta_read_groups = self._get_from_list( + 'Observation.metaReadGroups', 0) + observation.meta_producer = self._get_from_list( + 'Observation.metaProducer', 0, current=observation.meta_producer) + observation.requirements = self._get_requirements( + observation.requirements) + observation.instrument = self._get_instrument(observation.instrument) + observation.proposal = self._get_proposal(observation.proposal) + observation.target = self._get_target(observation.target) + observation.target_position = self._get_target_position( + observation.target_position) + observation.telescope = self._get_telescope(observation.telescope) + observation.environment = self._get_environment( + observation.environment) + self.logger.debug( + f'End content observation augmentation for {artifact_uri}.') + + def augment_plane(self, plane, artifact_uri): + """ + Augments a given plane with available content information. + :param plane: existing CAOM2 plane to be augmented. + :param artifact_uri: + """ + super().augment_plane(plane, artifact_uri) + self.logger.debug( + f'Begin content plane augmentation for {artifact_uri}.') + + plane.meta_release = self._get_datetime(self._get_from_list( + 'Plane.metaRelease', index=0, current=plane.meta_release)) + plane.data_release = self._get_datetime(self._get_from_list( + 'Plane.dataRelease', index=0)) + plane.data_product_type = self._to_data_product_type( + self._get_from_list('Plane.dataProductType', index=0, + current=plane.data_product_type)) + plane.calibration_level = self._to_calibration_level(_to_int_32( + self._get_from_list('Plane.calibrationLevel', index=0, + current=plane.calibration_level))) + plane.meta_producer = self._get_from_list( + 'Plane.metaProducer', index=0, current=plane.meta_producer) + plane.observable = self._get_observable(current=plane.observable) + plane.provenance = self._get_provenance(plane.provenance) + plane.metrics = self._get_metrics(current=plane.metrics) + plane.quality = self._get_quality(current=plane.quality) + + self.logger.debug( + f'End content plane augmentation for {artifact_uri}.') + + def _get_algorithm(self, obs): + """ + Create an Algorithm instance populated with available content + information. 
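The members handling above splits a whitespace-separated blueprint value and adds each entry as an ObservationURI; a sketch with invented identifiers, assuming the DerivedObservation constructor takes collection, observation ID, and algorithm:

    import caom2

    members = 'caom:TEST/obs-1 caom:TEST/obs-2'      # hypothetical blueprint value
    obs = caom2.DerivedObservation('TEST', 'combined-1', caom2.Algorithm('stack'))
    for m in members.split():
        obs.members.add(caom2.ObservationURI(m))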
+ :return: Algorithm + """ + self.logger.debug('Begin Algorithm augmentation.') + # TODO DEFAULT VALUE + name = self._get_from_list('Observation.algorithm.name', index=0, + current=obs.algorithm.name) + if name is not None and name == 'exposure' and isinstance(obs, caom2.DerivedObservation): + # stop the raising of a ValueError when adding a Plane representing a SimpleObservation to a + # DerivedObservation under construction. It results in attempting to change Algorithm.name value to + # 'exposure' otherwise. + result = obs.algorithm + else: + result = caom2.Algorithm(str(name)) if name else None + self.logger.debug('End Algorithm augmentation.') + return result + + def _get_energy_transition(self, current): + """ + Create an EnergyTransition instance populated with available content + information. + :return: EnergyTransition + """ + self.logger.debug('Begin EnergyTransition augmentation.') + species = self._get_from_list( + 'Chunk.energy.transition.species', index=0, + current=None if current is None else current.species) + transition = self._get_from_list( + 'Chunk.energy.transition.transition', index=0, + current=None if current is None else current.transition) + result = None + if species is not None and transition is not None: + result = caom2.EnergyTransition(species, transition) + self.logger.debug('End EnergyTransition augmentation.') + return result + + def _get_environment(self, current): + """ + Create an Environment instance populated with available content + information. + :current Environment instance, if one already exists in the + Observation + :return: Environment + """ + self.logger.debug('Begin Environment augmentation.') + seeing = self._get_from_list( + 'Observation.environment.seeing', index=0, + current=None if current is None else current.seeing) + humidity = _to_float( + self._get_from_list( + 'Observation.environment.humidity', index=0, + current=None if current is None else current.humidity)) + elevation = self._get_from_list( + 'Observation.environment.elevation', index=0, + current=None if current is None else current.elevation) + tau = self._get_from_list( + 'Observation.environment.tau', index=0, + current=None if current is None else current.tau) + wavelength_tau = self._get_from_list( + 'Observation.environment.wavelengthTau', index=0, + current=None if current is None else current.wavelength_tau) + ambient = _to_float( + self._get_from_list( + 'Observation.environment.ambientTemp', index=0, + current=None if current is None else current.ambient_temp)) + photometric = self._cast_as_bool(self._get_from_list( + 'Observation.environment.photometric', index=0, + current=None if current is None else current.photometric)) + enviro = None + if seeing or humidity or elevation or tau or wavelength_tau or ambient: + enviro = caom2.Environment() + enviro.seeing = seeing + enviro.humidity = humidity + enviro.elevation = elevation + enviro.tau = tau + enviro.wavelength_tau = wavelength_tau + enviro.ambient_temp = ambient + enviro.photometric = photometric + self.logger.debug('End Environment augmentation.') + return enviro + + def _get_instrument(self, current): + """ + Create an Instrument instance populated with available content + information. 
+ :return: Instrument + """ + self.logger.debug('Begin Instrument augmentation.') + name = self._get_from_list( + 'Observation.instrument.name', index=0, + current=None if current is None else current.name) + keywords = self._get_set_from_list( + 'Observation.instrument.keywords', index=0) + instr = None + if name: + instr = caom2.Instrument(str(name)) + ContentParser._add_keywords(keywords, current, instr) + self.logger.debug('End Instrument augmentation.') + return instr + + def _get_members(self, obs): + """ + Returns the members of a derived observation (if specified) + :param obs: observation to augment + :return: members value + """ + members = None + self.logger.debug('Begin Members augmentation.') + if (isinstance(obs, caom2.SimpleObservation) and + (self.blueprint._get('DerivedObservation.members') or + self.blueprint._get('CompositeObservation.members'))): + raise TypeError( + 'Cannot apply blueprint for DerivedObservation to a ' + 'simple observation') + elif isinstance(obs, caom2.DerivedObservation): + lookup = self.blueprint._get('DerivedObservation.members', + extension=1) + if ObsBlueprint.is_table(lookup) and len(self.headers) > 1: + member_list = self._get_from_table( + 'DerivedObservation.members', 1) + # ensure the members are good little ObservationURIs + if member_list.startswith('caom:'): + members = member_list + else: + members = ' '.join(['caom:{}/{}'.format( + obs.collection, i) if not i.startswith('caom') else i + for i in member_list.split()]) + else: + if obs.members is None: + members = self._get_from_list( + 'DerivedObservation.members', index=0) + else: + members = self._get_from_list( + 'DerivedObservation.members', index=0, + current=obs.members) + elif isinstance(obs, caom2.CompositeObservation): + lookup = self.blueprint._get('CompositeObservation.members', + extension=1) + if ObsBlueprint.is_table(lookup) and len(self.headers) > 1: + member_list = self._get_from_table( + 'CompositeObservation.members', 1) + # ensure the members are good little ObservationURIs + if member_list.startswith('caom:'): + members = member_list + else: + members = ' '.join(['caom:{}/{}'.format( + obs.collection, i) if not i.startswith('caom') else i + for i in member_list.split()]) + else: + if obs.members is None: + members = self._get_from_list( + 'CompositeObservation.members', index=0) + else: + members = self._get_from_list( + 'CompositeObservation.members', index=0, + current=obs.members) + self.logger.debug('End Members augmentation.') + return members + + def _get_metrics(self, current): + """ + Create a Metrics instance populated with available content information. 
+ :return: Metrics + """ + self.logger.debug('Begin Metrics augmentation.') + source_number_density = self._get_from_list( + 'Plane.metrics.sourceNumberDensity', index=0, + current=None if current is None else current.source_number_density) + background = self._get_from_list( + 'Plane.metrics.background', index=0, + current=None if current is None else current.background) + background_stddev = self._get_from_list( + 'Plane.metrics.backgroundStddev', index=0, + current=None if current is None else current.background_std_dev) + flux_density_limit = self._get_from_list( + 'Plane.metrics.fluxDensityLimit', index=0, + current=None if current is None else current.flux_density_limit) + mag_limit = self._get_from_list( + 'Plane.metrics.magLimit', index=0, + current=None if current is None else current.mag_limit) + sample_snr = self._get_from_list( + 'Plane.metrics.sampleSNR', index=0, + current=None if current is None else current.sample_snr) + + metrics = None + if (source_number_density or background or background_stddev or + flux_density_limit or mag_limit or sample_snr): + metrics = caom2.Metrics() + metrics.source_number_density = source_number_density + metrics.background = background + metrics.background_std_dev = background_stddev + metrics.flux_density_limit = flux_density_limit + metrics.mag_limit = mag_limit + metrics.sample_snr = sample_snr + self.logger.debug('End Metrics augmentation.') + return metrics + + def _get_axis_wcs(self, label, wcs, index): + """Helper function to construct a CoordAxis1D instance, with all + it's members, from the blueprint. + + :param label: axis name - must be one of 'custom', 'energy', 'time', or 'polarization', as it's used for the + blueprint lookup. + :param index: which blueprint index to find a value in + :return an instance of CoordAxis1D + """ + self.logger.debug(f'Begin {label} axis construction from blueprint.') + + aug_axis = None + aug_error = None + if wcs is not None and wcs.axis is not None and wcs.axis.axis is not None: + aug_axis = wcs.axis.axis + aug_error = wcs.axis.error + else: + aug_axis_ctype = self._get_from_list(f'Chunk.{label}.axis.axis.ctype', index) + aug_axis_cunit = self._get_from_list(f'Chunk.{label}.axis.axis.cunit', index) + if aug_axis_ctype is not None: + aug_axis = caom2.Axis(aug_axis_ctype, aug_axis_cunit) + self.logger.debug(f'Creating {label} Axis for {self.uri} from blueprint') + + aug_error = self._two_param_constructor( + f'Chunk.{label}.axis.error.syser', + f'Chunk.{label}.axis.error.rnder', + index, _to_float, caom2.CoordError) + + aug_naxis = None + aug_range = self._try_range(index, label) + aug_naxis_index = None + if aug_axis is not None: + if aug_range is None: + if wcs is None or wcs.axis is None or wcs.axis.function is None: + aug_ref_coord = self._two_param_constructor( + f'Chunk.{label}.axis.function.refCoord.pix', + f'Chunk.{label}.axis.function.refCoord.val', + index, _to_float, caom2.RefCoord) + aug_delta = _to_float(self._get_from_list(f'Chunk.{label}.axis.function.delta', index)) + aug_length = _to_int(self._get_from_list(f'Chunk.{label}.axis.function.naxis', index)) + aug_function = None + if aug_length is not None and aug_delta is not None and aug_ref_coord is not None: + aug_function = caom2.CoordFunction1D(aug_length, aug_delta, aug_ref_coord) + aug_naxis = caom2.CoordAxis1D(aug_axis, aug_error, None, None, aug_function) + if aug_function is not None: + # if the WCS is described with a Function, cutouts can be supported, so specify an axis + aug_naxis_index = 
_to_int(self._get_from_list(f'Chunk.{label}Axis', index)) + self.logger.debug(f'Creating function {label} CoordAxis1D for {self.uri} from blueprint') + else: + aug_naxis = caom2.CoordAxis1D(axis=aug_axis, error=aug_error, range=aug_range) + self.logger.debug(f'Creating range {label} CoordAxis1D for {self.uri} from blueprint') + + self.logger.debug(f'End {label} axis construction from blueprint.') + return aug_naxis, aug_naxis_index + + def _get_observable(self, current): + """ + Create a Observable instance populated with available content + information. + :return: Observable + """ + self.logger.debug('Begin Observable augmentation.') + ucd = self._get_from_list( + 'Plane.observable.ucd', index=0, + current=None if current is None else current.ucd) + observable = caom2.Observable(ucd) if ucd else None + self.logger.debug('End Observable augmentation.') + return observable + + def _get_proposal(self, current): + """ + Create a Proposal instance populated with available content + information. + :return: Proposal + """ + self.logger.debug('Begin Proposal augmentation.') + prop_id = self._get_from_list( + 'Observation.proposal.id', index=0, + current=None if current is None else current.id) + pi = self._get_from_list( + 'Observation.proposal.pi', index=0, + current=None if current is None else current.pi_name) + project = self._get_from_list( + 'Observation.proposal.project', index=0, + current=None if current is None else current.project) + title = self._get_from_list( + 'Observation.proposal.title', index=0, + current=None if current is None else current.title) + keywords = self._get_set_from_list( + 'Observation.proposal.keywords', index=0) + proposal = current + if prop_id: + proposal = caom2.Proposal(str(prop_id), pi, project, title) + ContentParser._add_keywords(keywords, current, proposal) + self.logger.debug(f'End Proposal augmentation {prop_id}.') + return proposal + + def _get_provenance(self, current): + """ + Create a Provenance instance populated with available Content + information. 
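The function-versus-range decision in _get_axis_wcs above is easiest to see through the objects it builds; the TIME axis and numbers below are invented, and only the shape of the two alternatives matters (a range describes bounds only, while a function describes a regular sampling and so also allows a chunk-level axis index to be recorded):

    from caom2 import Axis, CoordAxis1D, CoordFunction1D, CoordRange1D, RefCoord

    time_axis = Axis('TIME', 'd')

    # range form: start/end reference coordinates, no cutout support
    by_range = CoordAxis1D(axis=time_axis,
                           range=CoordRange1D(RefCoord(0.5, 59000.0), RefCoord(1.5, 59000.02)))

    # function form: length, delta and a reference coordinate
    by_function = CoordAxis1D(time_axis, None, None, None,
                              CoordFunction1D(100, 0.0002, RefCoord(0.5, 59000.0)))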
+ :return: Provenance + """ + self.logger.debug('Begin Provenance augmentation.') + name = _to_str( + self._get_from_list( + 'Plane.provenance.name', index=0, + current=None if current is None else current.name)) + p_version = _to_str(self._get_from_list( + 'Plane.provenance.version', index=0, + current=None if current is None else current.version)) + project = _to_str( + self._get_from_list( + 'Plane.provenance.project', index=0, + current=None if current is None else current.project)) + producer = _to_str( + self._get_from_list( + 'Plane.provenance.producer', index=0, + current=None if current is None else current.producer)) + run_id = _to_str( + self._get_from_list( + 'Plane.provenance.runID', index=0, + current=None if current is None else current.run_id)) + reference = _to_str( + self._get_from_list( + 'Plane.provenance.reference', index=0, + current=None if current is None else current.reference)) + last_executed = self._get_datetime( + self._get_from_list( + 'Plane.provenance.lastExecuted', index=0, + current=None if current is None else current.last_executed)) + keywords = self._get_set_from_list( + 'Plane.provenance.keywords', index=0) + inputs = self._get_set_from_list('Plane.provenance.inputs', index=0) + prov = None + if name: + prov = caom2.Provenance(name, p_version, project, producer, run_id, + reference, last_executed) + ContentParser._add_keywords(keywords, current, prov) + if inputs: + if isinstance(inputs, caom2.TypedSet): + for i in inputs: + prov.inputs.add(i) + else: + for i in inputs.split(): + prov.inputs.add(caom2.PlaneURI(str(i))) + else: + if current is not None and len(current.inputs) > 0: + # preserve the original value + prov.inputs.update(current.inputs) + self.logger.debug('End Provenance augmentation.') + return prov + + def _get_quality(self, current): + """ + Create a Quality instance populated with available content information. + :return: Quality + """ + self.logger.debug('Begin Quality augmentation.') + flag = self._get_from_list( + 'Plane.dataQuality', index=0, + current=None if current is None else current.flag) + quality = caom2.DataQuality(flag) if flag else None + self.logger.debug('End Quality augmentation.') + return quality + + def _get_requirements(self, current): + """ + Create a Requirements instance populated with available content + information. + :return: Requirements + """ + self.logger.debug('Begin Requirement augmentation.') + flag = self._get_from_list( + 'Observation.requirements.flag', index=0, + current=None if current is None else current.flag) + reqts = caom2.Requirements(flag) if flag else None + self.logger.debug('End Requirement augmentation.') + return reqts + + def _get_target(self, current): + """ + Create a Target instance populated with available content information. 
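Provenance inputs above are accepted either as a TypedSet or as a whitespace-separated string of plane URIs; a sketch of the string case with invented values:

    from caom2 import PlaneURI, Provenance

    prov = Provenance('photometry-pipeline', version='1.0')      # hypothetical name and version
    for uri in 'caom:TEST/obs-1/raw caom:TEST/obs-2/raw'.split():
        prov.inputs.add(PlaneURI(uri))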
+ :return: Target + """ + self.logger.debug('Begin Target augmentation.') + name = self._get_from_list( + 'Observation.target.name', index=0, + current=None if current is None else current.name) + target_type = self._get_from_list( + 'Observation.target.type', index=0, + current=None if current is None else current.target_type) + standard = self._cast_as_bool(self._get_from_list( + 'Observation.target.standard', index=0, + current=None if current is None else current.standard)) + redshift = self._get_from_list( + 'Observation.target.redshift', index=0, + current=None if current is None else current.redshift) + keywords = self._get_set_from_list( + 'Observation.target.keywords', index=0) + moving = self._cast_as_bool( + self._get_from_list( + 'Observation.target.moving', index=0, + current=None if current is None else current.moving)) + target_id = _to_str(self._get_from_list( + 'Observation.target.targetID', index=0, + current=None if current is None else current.target_id)) + target = None + if name: + target = caom2.Target(str(name), target_type, standard, redshift, + moving=moving, target_id=target_id) + ContentParser._add_keywords(keywords, current, target) + self.logger.debug('End Target augmentation.') + return target + + def _get_target_position(self, current): + """ + Create a Target Position instance populated with available content + information. + :return: Target Position + """ + self.logger.debug('Begin CAOM2 TargetPosition augmentation.') + x = self._get_from_list( + 'Observation.target_position.point.cval1', index=0, + current=None if current is None else current.coordinates.cval1) + y = self._get_from_list( + 'Observation.target_position.point.cval2', index=0, + current=None if current is None else current.coordinates.cval2) + coordsys = self._get_from_list( + 'Observation.target_position.coordsys', index=0, + current=None if current is None else current.coordsys) + equinox = self._get_from_list( + 'Observation.target_position.equinox', index=0, + current=None if current is None else current.equinox) + aug_target_position = None + if x and y: + aug_point = caom2.Point(x, y) + aug_target_position = caom2.TargetPosition(aug_point, coordsys) + aug_target_position.equinox = _to_float(equinox) + self.logger.debug('End CAOM2 TargetPosition augmentation.') + return aug_target_position + + def _get_telescope(self, current): + """ + Create a Telescope instance populated with available content + information. + :return: Telescope + """ + self.logger.debug('Begin Telescope augmentation.') + name = self._get_from_list( + 'Observation.telescope.name', index=0, + current=None if current is None else current.name) + geo_x = _to_float( + self._get_from_list( + 'Observation.telescope.geoLocationX', index=0, + current=None if current is None else current.geo_location_x)) + geo_y = _to_float( + self._get_from_list( + 'Observation.telescope.geoLocationY', index=0, + current=None if current is None else current.geo_location_y)) + geo_z = _to_float( + self._get_from_list( + 'Observation.telescope.geoLocationZ', index=0, + current=None if current is None else current.geo_location_z)) + keywords = self._get_set_from_list( + 'Observation.telescope.keywords', index=0) + aug_tel = None + if name: + aug_tel = caom2.Telescope(str(name), geo_x, geo_y, geo_z) + ContentParser._add_keywords(keywords, current, aug_tel) + self.logger.debug('End Telescope augmentation.') + return aug_tel + + def _cast_as_bool(self, from_value): + """ + Make lower case Java booleans into capitalized python booleans. 
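A minimal sketch of the TargetPosition assembly in _get_target_position above, with invented coordinates; as in the patch, nothing is built unless both coordinate values are truthy:

    from caom2 import Point, TargetPosition

    cval1, cval2 = 150.1, 2.2          # hypothetical blueprint values
    target_position = None
    if cval1 and cval2:
        target_position = TargetPosition(Point(cval1, cval2), 'ICRS')
        target_position.equinox = 2000.0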
+ :param from_value: Something that represents a boolean value + :return: a python boolean value + """ + if isinstance(from_value, bool): + return from_value + result = None + # so far, these are the only options that are coming in from the + # config files - may need to add more as more types are experienced + if from_value == 'false': + result = False + elif from_value == 'true': + result = True + return result + + def _try_custom_with_blueprint(self, chunk, index): + """ + A mechanism to augment the Custom WCS completely from the blueprint. Do nothing if the WCS information cannot + be correctly created. + + :param chunk: The chunk to modify with the addition of custom information. + :param index: The index in the blueprint for looking up plan information. + """ + self.logger.debug('Begin augmentation with blueprint for custom.') + aug_naxis, aug_naxis_index = self._get_axis_wcs('custom', chunk.custom, index) + if aug_naxis is None: + self.logger.debug('No blueprint custom information.') + else: + # always create a new CustomWCS instance because there's no setter for 'axis' parameter + chunk.custom = caom2.CustomWCS(aug_naxis) + chunk.custom_axis = aug_naxis_index + self.logger.debug(f'Updating CustomWCS for {self.uri}.') + self.logger.debug('End augmentation with blueprint for custom.') + + def _try_energy_with_blueprint(self, chunk, index): + """ + A mechanism to augment the Energy WCS completely from the blueprint. + Do nothing if the WCS information cannot be correctly created. + + :param chunk: The chunk to modify with the addition of energy + information. + :param index: The index in the blueprint for looking up plan + information. + """ + self.logger.debug('Begin augmentation with blueprint for energy.') + aug_axis, aug_naxis_index = self._get_axis_wcs('energy', chunk.energy, index) + specsys = _to_str(self._get_from_list('Chunk.energy.specsys', index)) + if aug_axis is None: + self.logger.debug('No blueprint energy information.') + else: + if chunk.energy: + chunk.energy.axis = aug_axis + chunk.energy.specsys = specsys + else: + chunk.energy = caom2.SpectralWCS(aug_axis, specsys) + self.logger.debug(f'Creating SpectralWCS for {self.uri} from blueprint') + chunk.energy_axis = aug_naxis_index + + if chunk.energy: + chunk.energy.ssysobs = self._get_from_list('Chunk.energy.ssysobs', index, chunk.energy.ssysobs) + chunk.energy.restfrq = self._get_from_list('Chunk.energy.restfrq', index, chunk.energy.restfrq) + chunk.energy.restwav = self._get_from_list('Chunk.energy.restwav', index, chunk.energy.restwav) + chunk.energy.velosys = self._get_from_list('Chunk.energy.velosys', index, chunk.energy.velosys) + chunk.energy.zsource = self._get_from_list('Chunk.energy.zsource', index, chunk.energy.zsource) + chunk.energy.ssyssrc = self._get_from_list('Chunk.energy.ssyssrc', index, chunk.energy.ssyssrc) + chunk.energy.velang = self._get_from_list('Chunk.energy.velang', index, chunk.energy.velang) + chunk.energy.bandpass_name = self._get_from_list( + 'Chunk.energy.bandpassName', index, chunk.energy.bandpass_name) + chunk.energy.transition = self._get_energy_transition(chunk.energy.transition) + chunk.energy.resolving_power = _to_float( + self._get_from_list('Chunk.energy.resolvingPower', index, chunk.energy.resolving_power)) + self.logger.debug('End augmentation with blueprint for energy.') + + def _try_observable_with_blueprint(self, chunk, index): + """ + A mechanism to augment the Observable WCS completely from the + blueprint. Do nothing if the WCS information cannot be correctly + created. 
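When _try_energy_with_blueprint above does build an axis, it is wrapped in a SpectralWCS and the optional attributes are filled in afterwards; a sketch with an invented dispersion solution:

    from caom2 import Axis, CoordAxis1D, CoordFunction1D, RefCoord, SpectralWCS

    energy_axis = CoordAxis1D(
        Axis('WAVE', 'm'),
        function=CoordFunction1D(1024, 1.0e-10, RefCoord(0.5, 4.0e-7)))   # hypothetical values
    energy = SpectralWCS(energy_axis, 'TOPOCENT')
    energy.resolving_power = 5000.0      # optional attributes set from the blueprint afterwards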
+ + :param chunk: The chunk to modify with the addition of observable + information. + :param index: The index in the blueprint for looking up plan + information. + """ + self.logger.debug('Begin augmentation with blueprint for ' + 'observable.') + aug_axis = self._two_param_constructor( + 'Chunk.observable.dependent.axis.ctype', + 'Chunk.observable.dependent.axis.cunit', index, _to_str, caom2.Axis) + aug_bin = _to_int( + self._get_from_list('Chunk.observable.dependent.bin', index)) + if aug_axis is not None and aug_bin is not None: + chunk.observable = caom2.ObservableAxis(caom2.Slice(aug_axis, aug_bin)) + chunk.observable_axis = _to_int(self._get_from_list('Chunk.observableAxis', index)) + self.logger.debug('End augmentation with blueprint for polarization.') + + def _try_polarization_with_blueprint(self, chunk, index): + """ + A mechanism to augment the Polarization WCS completely from the + blueprint. Do nothing if the WCS information cannot be correctly + created. + + :param chunk: The chunk to modify with the addition of polarization + information. + :param index: The index in the blueprint for looking up plan + information. + """ + self.logger.debug('Begin augmentation with blueprint for ' + 'polarization.') + aug_axis, aug_naxis_index = self._get_axis_wcs('polarization', chunk.polarization, index) + if aug_axis is not None: + if chunk.polarization: + chunk.polarization.axis = aug_axis + else: + chunk.polarization = caom2.PolarizationWCS(aug_axis) + self.logger.debug(f'Creating PolarizationWCS for {self.uri} from blueprint') + chunk.polarization_axis = aug_naxis_index + + self.logger.debug('End augmentation with blueprint for polarization.') + + def _try_position_range(self, index): + self.logger.debug('Try to set the range for position from blueprint, since there is no function') + aug_range = None + aug_range_c1_start = self._two_param_constructor( + 'Chunk.position.axis.range.start.coord1.pix', + 'Chunk.position.axis.range.start.coord1.val', + index, _to_float, caom2.RefCoord) + aug_range_c1_end = self._two_param_constructor( + 'Chunk.position.axis.range.end.coord1.pix', + 'Chunk.position.axis.range.end.coord1.val', + index, _to_float, caom2.RefCoord) + aug_range_c2_start = self._two_param_constructor( + 'Chunk.position.axis.range.start.coord2.pix', + 'Chunk.position.axis.range.start.coord2.val', + index, _to_float, caom2.RefCoord) + aug_range_c2_end = self._two_param_constructor( + 'Chunk.position.axis.range.end.coord2.pix', + 'Chunk.position.axis.range.end.coord2.val', + index, _to_float, caom2.RefCoord) + if (aug_range_c1_start and aug_range_c1_end and aug_range_c2_start + and aug_range_c2_end): + aug_range = caom2.CoordRange2D( + caom2.Coord2D(aug_range_c1_start, aug_range_c1_end), + caom2.Coord2D(aug_range_c2_start, aug_range_c2_end)) + self.logger.debug('Completed setting range for position') + return aug_range + + def _try_position_with_blueprint(self, chunk, index): + """ + A mechanism to augment the Position WCS completely from the blueprint. + Do nothing if the WCS information cannot be correctly created. + + :param chunk: The chunk to modify with the addition of position + information. + :param index: The index in the blueprint for looking up plan + information. 
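A rough sketch of building the two-corner position range that _try_position_range above assembles from the blueprint; the corner values are invented, and this is simply one way of pairing the four RefCoords into the two Coord2D arguments:

    from caom2 import Coord2D, CoordRange2D, RefCoord

    start = Coord2D(RefCoord(0.5, 150.05), RefCoord(0.5, 2.15))
    end = Coord2D(RefCoord(1.5, 150.15), RefCoord(1.5, 2.25))
    position_range = CoordRange2D(start, end)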
+ """ + self.logger.debug('Begin augmentation with blueprint for position.') + aug_axis = None + if (chunk.position is not None and chunk.position.axis is not None and chunk.position.axis.axis1 is not None + and chunk.position.axis.axis2 is not None): + # preserve the values obtained from file data + aug_x_axis = chunk.position.axis.axis1 + aug_y_axis = chunk.position.axis.axis2 + aug_x_error = chunk.position.axis.error1 + aug_y_error = chunk.position.axis.error2 + else: + aug_x_axis = self._two_param_constructor( + 'Chunk.position.axis.axis1.ctype', + 'Chunk.position.axis.axis1.cunit', index, _to_str, caom2.Axis) + aug_y_axis = self._two_param_constructor( + 'Chunk.position.axis.axis2.ctype', + 'Chunk.position.axis.axis2.cunit', index, _to_str, caom2.Axis) + aug_x_error = self._two_param_constructor( + 'Chunk.position.axis.error1.syser', + 'Chunk.position.axis.error1.rnder', index, _to_float, caom2.CoordError) + aug_y_error = self._two_param_constructor( + 'Chunk.position.axis.error2.syser', + 'Chunk.position.axis.error2.rnder', index, _to_float, caom2.CoordError) + aug_range = self._try_position_range(index) + if aug_range is None: + if chunk.position is None or chunk.position.axis is None or chunk.position.axis.function is None: + aug_dimension = self._two_param_constructor( + 'Chunk.position.axis.function.dimension.naxis1', + 'Chunk.position.axis.function.dimension.naxis2', + index, _to_int, caom2.Dimension2D) + aug_x_ref_coord = self._two_param_constructor( + 'Chunk.position.axis.function.refCoord.coord1.pix', + 'Chunk.position.axis.function.refCoord.coord1.val', + index, _to_float, caom2.RefCoord) + aug_y_ref_coord = self._two_param_constructor( + 'Chunk.position.axis.function.refCoord.coord2.pix', + 'Chunk.position.axis.function.refCoord.coord2.val', + index, _to_float, caom2.RefCoord) + aug_cd11 = _to_float(self._get_from_list('Chunk.position.axis.function.cd11', index)) + aug_cd12 = _to_float(self._get_from_list('Chunk.position.axis.function.cd12', index)) + aug_cd21 = _to_float(self._get_from_list('Chunk.position.axis.function.cd21', index)) + aug_cd22 = _to_float(self._get_from_list('Chunk.position.axis.function.cd22', index)) + + aug_ref_coord = None + if aug_x_ref_coord is not None and aug_y_ref_coord is not None: + aug_ref_coord = caom2.Coord2D(aug_x_ref_coord, aug_y_ref_coord) + self.logger.debug(f'Creating position Coord2D for {self.uri}') + + aug_function = None + if (aug_dimension is not None and aug_ref_coord is not None and + aug_cd11 is not None and aug_cd12 is not None and + aug_cd21 is not None and aug_cd22 is not None): + aug_function = caom2.CoordFunction2D(aug_dimension, aug_ref_coord, aug_cd11, aug_cd12, aug_cd21, + aug_cd22) + self.logger.debug(f'Creating position CoordFunction2D for {self.uri}') + + if (aug_x_axis is not None and aug_y_axis is not None and + aug_function is not None): + aug_axis = caom2.CoordAxis2D(aug_x_axis, aug_y_axis, aug_x_error, + aug_y_error, None, None, aug_function) + self.logger.debug(f'Creating position CoordAxis2D for {self.uri}') + + chunk.position_axis_1 = _to_int(self._get_from_list('Chunk.positionAxis1', index)) + chunk.position_axis_2 = _to_int(self._get_from_list('Chunk.positionAxis2', index)) + else: + aug_axis = caom2.CoordAxis2D(aug_x_axis, aug_y_axis, aug_x_error, aug_y_error, range=aug_range) + + if aug_axis is not None: + if chunk.position: + chunk.position.axis = aug_axis + else: + chunk.position = caom2.SpatialWCS(aug_axis) + self.logger.debug(f'Creating SpatialWCS for {self.uri} from blueprint') + + if chunk.position: 
+ chunk.position.coordsys = self._get_from_list('Chunk.position.coordsys', index, chunk.position.coordsys) + chunk.position.equinox = _to_float(self._get_from_list( + 'Chunk.position.equinox', index, chunk.position.equinox)) + chunk.position.resolution = self._get_from_list( + 'Chunk.position.resolution', index, chunk.position.resolution) + self.logger.debug('End augmentation with blueprint for position.') + + def _try_range(self, index, lookup): + self.logger.debug(f'Try to set the range for {lookup}') + result = None + aug_range_start = self._two_param_constructor( + f'Chunk.{lookup}.axis.range.start.pix', + f'Chunk.{lookup}.axis.range.start.val', + index, _to_float, caom2.RefCoord) + aug_range_end = self._two_param_constructor( + f'Chunk.{lookup}.axis.range.end.pix', + f'Chunk.{lookup}.axis.range.end.val', + index, _to_float, caom2.RefCoord) + if aug_range_start and aug_range_end: + result = caom2.CoordRange1D(aug_range_start, aug_range_end) + self.logger.debug(f'Completed setting range with return for {lookup}') + return result + + def _try_time_with_blueprint(self, chunk, index): + """ + A mechanism to augment the Time WCS completely from the blueprint. + Do nothing if the WCS information cannot be correctly created. + + :param chunk: The chunk to modify with the addition of time + information. + :param index: The index in the blueprint for looking up plan + information. + """ + self.logger.debug('Begin augmentation with blueprint for temporal.') + + aug_axis, aug_axis_index = self._get_axis_wcs('time', chunk.time, index) + if aug_axis is not None: + if chunk.time: + chunk.time.axis = aug_axis + else: + chunk.time = caom2.TemporalWCS(aug_axis) + self.logger.debug(f'Creating TemporalWCS for {self.uri} from blueprint') + chunk.time_axis = aug_axis_index + + if chunk.time: + chunk.time.exposure = _to_float(self._get_from_list('Chunk.time.exposure', index, chunk.time.exposure)) + chunk.time.resolution = _to_float( + self._get_from_list('Chunk.time.resolution', index, chunk.time.resolution)) + chunk.time.timesys = _to_str(self._get_from_list('Chunk.time.timesys', index, chunk.time.timesys)) + chunk.time.trefpos = self._get_from_list('Chunk.time.trefpos', index, chunk.time.trefpos) + chunk.time.mjdref = self._get_from_list('Chunk.time.mjdref', index, chunk.time.mjdref) + + self.logger.debug('End augmentation with blueprint for temporal.') + + def _two_param_constructor(self, lookup1, lookup2, index, to_type, ctor): + """ + Helper function to build from the blueprint, a CAOM2 entity that + has two required parameters. + + :param lookup1: Blueprint lookup text for the first constructor + parameter. + :param lookup2: Blueprint lookup text for the second constructor + parameter. + :param index: Which index in the blueprint to do the lookup on. + :param to_type: Function to cast the blueprint value to a particular + type. + :param ctor: The constructor that has two parameters to build. + :return: The instance returned by the constructor, or None if any of + the values are undefined. + """ + param1 = to_type(self._get_from_list(lookup1, index)) + param2 = to_type(self._get_from_list(lookup2, index)) + new_object = None + if param1 is not None and param2 is not None: + new_object = ctor(param1, param2) + return new_object + + # TODO - is this the right implementation? 
+ def add_parts(self, artifact, index=0): + result = False + if self.blueprint.has_chunk(index): + artifact.parts.add(caom2.Part(str(index))) + result = True + return result + + @staticmethod + def _add_keywords(keywords, current, to_set): + """ + Common code for adding keywords to a CAOM2 entity, capturing all + the weird metadata cases that happen at CADC. + + :param keywords: Keywords to add to a CAOM2 set. + :param current: Existing CAOM2 entity with a keywords attribute. + :param to_set: A CAOM2 entity with a keywords attribute. + """ + if keywords: + if isinstance(keywords, set): + to_set.keywords.update(keywords) + else: + for k in keywords.split(): + to_set.keywords.add(k) + else: + if current is not None: + # preserve the original value + to_set.keywords.update(current.keywords) + if to_set.keywords is not None and None in to_set.keywords: + to_set.keywords.remove(None) + if to_set.keywords is not None and 'none' in to_set.keywords: + to_set.keywords.remove('none') + + +class FitsParser(ContentParser): + """ + Parses a FITS file and extracts the CAOM2 related information which can + be used to augment an existing CAOM2 observation, plane or artifact. The + constructor takes either a FITS file as argument or a list of dictionaries + (FITS keyword=value) corresponding to each extension. + + The WCS-related keywords of the FITS file are consumed by the astropy.wcs + package which might display warnings with regards to compliance. + + Example 1: + parser = FitsParser(input = '/staging/700000o.fits.gz') + ... + # customize parser.headers by deleting, changing or adding attributes + + obs = Observation(collection='TEST', observation_id='700000', + algorithm='exposure') + plane = Plane(plane_id='700000-1') + obs.plane.add(plane) + + artifact = Artifact(uri='ad:CFHT/700000o.fits.gz', product_type='science', + release_type='data') + plane.artifacts.add(artifact) + + parser.augment_observation(obs) + + # further update obs + + + Example 2: + + headers = [] # list of dictionaries headers + # populate headers + parser = FitsParser(input=headers) + + parser.augment_observation(obs) + ... + + """ + + def __init__(self, src, obs_blueprint=None, uri=None): + """ + Ctor + :param src: List of headers (dictionary of FITS keywords:value) with + one header for each extension or a FITS input file. + :param obs_blueprint: externally provided blueprint + :param uri: which artifact augmentation is based on + """ + self.logger = logging.getLogger(__name__) + self._headers = [] + self.parts = 0 + self.file = '' + if isinstance(src, list): + # assume this is the list of headers + self._headers = src + else: + # assume file + self.file = src + self._headers = data_util.get_local_headers_from_fits(self.file) + if obs_blueprint: + self._blueprint = obs_blueprint + else: + self._blueprint = ObsBlueprint() + self._errors = [] + # for command-line parameter to module execution + self.uri = uri + self.apply_blueprint() + + @property + def headers(self): + """ + List of headers where each header should allow dictionary like + access to the FITS attribute in that header + :return: + """ + return self._headers + + def add_parts(self, artifact, index): + # there is one Part per extension, the name is the extension number + if ( + FitsParser._has_data_array(self._headers[index]) + and self.blueprint.has_chunk(index) + ): + if str(index) not in artifact.parts.keys(): + # TODO use extension name? 
+ artifact.parts.add(caom2.Part(str(index))) + self.logger.debug(f'Part created for HDU {index}.') + result = True + else: + artifact.parts.add(caom2.Part(str(index))) + self.logger.debug(f'Create empty part for HDU {index}') + result = False + return result + + def apply_blueprint(self): + + # pointers that are short to type + exts = self.blueprint._extensions + wcs_std = self.blueprint._wcs_std + plan = self.blueprint._plan + + # firstly, apply the functions + if (self.blueprint._module is not None or + self.blueprint._module_instance is not None): + for key, value in plan.items(): + if ObsBlueprint.is_function(value): + if self._blueprint._module_instance is None: + plan[key] = self._execute_external(value, key, 0) + else: + plan[key] = self._execute_external_instance( + value, key, 0) + for extension in exts: + for key, value in exts[extension].items(): + if ObsBlueprint.is_function(value): + if self._blueprint._module_instance is None: + exts[extension][key] = self._execute_external( + value, key, extension) + else: + exts[extension][key] = \ + self._execute_external_instance( + value, key, extension) + + # apply overrides from blueprint to all extensions + for key, value in plan.items(): + if key in wcs_std: + if ObsBlueprint.needs_lookup(value): + # alternative attributes provided for standard wcs attrib. + for header in self.headers: + for v in value[0]: + if v in header and \ + v not in wcs_std[key].split(','): + keywords = wcs_std[key].split(',') + for keyword in keywords: + _set_by_type(header, keyword, + str(header[v])) + elif ObsBlueprint.is_function(value): + continue + elif ObsBlueprint.has_no_value(value): + continue + else: + # value provided for standard wcs attribute + if ObsBlueprint.needs_lookup(wcs_std[key]): + keywords = wcs_std[key][0] + elif ObsBlueprint.is_function(wcs_std[key]): + continue + else: + keywords = wcs_std[key].split(',') + for keyword in keywords: + for header in self.headers: + _set_by_type(header, keyword, str(value)) + + # apply overrides to the remaining extensions + for extension in exts: + if extension >= len(self.headers): + logging.error('More extensions configured {} than headers ' + '{}'.format(extension, len(self.headers))) + continue + hdr = self.headers[extension] + for key, value in exts[extension].items(): + if ObsBlueprint.is_table(value): + continue + keywords = wcs_std[key].split(',') + for keyword in keywords: + _set_by_type(hdr, keyword, value) + logging.debug( + '{}: set to {} in extension {}'.format(keyword, value, + extension)) + # apply defaults to all extensions + for key, value in plan.items(): + if ObsBlueprint.has_default_value(value): + for index, header in enumerate(self.headers): + for keywords in value[0]: + for keyword in keywords.split(','): + if (not header.get(keyword.strip()) and + keyword == keywords and # checking a string + keywords == value[0][-1]): # last item + # apply a default if a value does not already + # exist, and all possible values of + # keywords have been checked + _set_by_type(header, keyword.strip(), value[1]) + logging.debug( + '{}: set default value of {} in HDU {}.'. 
+ format(keyword, value[1], index))
+
+ # TODO wcs in astropy ignores cdelt attributes when it finds a cd
+ # attribute even if it's in a different axis
+ for header in self.headers:
+ cd_present = False
+ for i in range(1, 6):
+ if 'CD{0}_{0}'.format(i) in header:
+ cd_present = True
+ break
+ if cd_present:
+ for i in range(1, 6):
+ if f'CDELT{i}' in header and \
+ 'CD{0}_{0}'.format(i) not in header:
+ header['CD{0}_{0}'.format(i)] = \
+ header[f'CDELT{i}']
+
+ # TODO When a projection is specified, wcslib expects corresponding
+ # DP arguments with NAXES attributes. Normally, omitting the attribute
+ # signals no distortion which is the assumption in caom2blueprint for
+ # energy and polarization axes. Following is a workaround for
+ # SIP projections.
+ # For more details see:
+ # http://www.atnf.csiro.au/people/mcalabre/WCS/dcs_20040422.pdf
+ for header in self.headers:
+ sip = False
+ for i in range(1, 6):
+ if ((f'CTYPE{i}' in header) and
+ isinstance(header[f'CTYPE{i}'], str) and
+ ('-SIP' in header[f'CTYPE{i}'])):
+ sip = True
+ break
+ if sip:
+ for i in range(1, 6):
+ if (f'CTYPE{i}' in header) and \
+ ('-SIP' not in header[f'CTYPE{i}']) and \
+ (f'DP{i}' not in header):
+ header[f'DP{i}'] = 'NAXES: 1'
+
+ return
+
+ def augment_artifact(self, artifact, index=0):
+ """
+ Augments a given CAOM2 artifact with available FITS information
+ :param artifact: existing CAOM2 artifact to be augmented
+ """
+ self.logger.debug(
+ 'Begin artifact augmentation for {} with {} HDUs.'.format(
+ artifact.uri, len(self.headers)))
+
+ if self.blueprint.get_configed_axes_count() == 0:
+ raise TypeError(
+ 'No WCS Data. End artifact augmentation for {}.'.format(
+ artifact.uri))
+
+ for i, header in enumerate(self.headers):
+ if not self.add_parts(artifact, i):
+ # artifact-level attributes still require updating
+ BlueprintParser.augment_artifact(self, artifact, 0)
+ continue
+ self._wcs_parser = FitsWcsParser(header, self.file, str(i))
+ super().augment_artifact(artifact, i)
+
+ self.logger.debug(
+ f'End artifact augmentation for {artifact.uri}.')
+
+ def _get_chunk_naxis(self, chunk, index=None):
+ # NOTE: astropy.wcs does not distinguish between WCS axes and
+ # data array axes. naxis in astropy.wcs in fact represents the
+ # number of WCS axes, whereas chunk.axis represents the naxis
+ # of the data array. 
Solution is to determine it directly from + # the header + if 'ZNAXIS' in self._headers[index]: + chunk.naxis = _to_int(self._headers[index]['ZNAXIS']) + elif 'NAXIS' in self._headers[index]: + chunk.naxis = _to_int(self._headers[index]['NAXIS']) + else: + super()._get_chunk_naxis(chunk) + + def _get_from_list(self, lookup, index, current=None): + value = None + try: + keys = self.blueprint._get(lookup) + except KeyError: + self.add_error(lookup, sys.exc_info()[1]) + self.logger.debug( + f'Could not find {lookup!r} in caom2blueprint configuration.') + if current: + self.logger.debug( + f'{lookup}: using current value of {current!r}.') + value = current + return value + + if ObsBlueprint.needs_lookup(keys): + for ii in keys[0]: + try: + value = self.headers[index].get(ii) + if value: + self.logger.debug( + f'{lookup}: assigned value {value} based on ' + f'keyword {ii}.') + break + except (KeyError, IndexError): + if keys[0].index(ii) == len(keys[0]) - 1: + self.add_error(lookup, sys.exc_info()[1]) + # assign a default value, if one exists + if keys[1]: + if current is None: + value = keys[1] + self.logger.debug( + f'{lookup}: assigned default value {value}.') + else: + value = current + if value is None: + # checking current does not work in the general case, + # because current might legitimately be 'None' + if self._blueprint.update: + if ( + current is not None + or (current is None and isinstance(value, bool)) + ): + value = current + self.logger.debug( + f'{lookup}: used current value {value}.') + else: + # assign a default value, if one exists + if keys[1]: + if current is None: + value = keys[1] + self.logger.debug( + f'{lookup}: assigned default value {value}.') + else: + value = current + + elif (keys is not None) and (keys != ''): + if keys == 'None': + value = None + else: + value = keys + elif current: + value = current + + self.logger.debug(f'{lookup}: value is {value}') + return value + + def _get_from_table(self, lookup, extension): + """ + Return a space-delimited list of all the row values from a column. + + This is a straight FITS BINTABLE lookup. There is no support for + default values. Unless someone provides a compelling use case. + + :param lookup: where to find the column name + :param extension: which extension + :return: A string, which is a space-delimited list of all the values. 
+ """ + value = '' + try: + keywords = self.blueprint._get(lookup, extension) + except KeyError as e: + self.add_error(lookup, sys.exc_info()[1]) + self.logger.debug( + 'Could not find {!r} in fits2caom2 configuration.'.format( + lookup)) + raise e + + if isinstance(keywords, tuple) and keywords[0] == 'BINTABLE': + + # BINTABLE, so need to retrieve the data from the file + if self.file is not None and self.file != '': + with fits.open(self.file) as fits_data: + if fits_data[extension].header['XTENSION'] != 'BINTABLE': + raise ValueError( + 'Got {} when looking for a BINTABLE ' + 'extension.'.format( + fits_data[extension].header['XTENSION'])) + for ii in keywords[1]: + for jj in fits_data[extension].data[keywords[2]][ii]: + value = f'{jj} {value}' + + self.logger.debug(f'{lookup}: value is {value}') + return value + + def _get_set_from_list(self, lookup, index): + value = None + keywords = None + try: + keywords = self.blueprint._get(lookup) + except KeyError: + self.add_error(lookup, sys.exc_info()[1]) + self.logger.debug(f'Could not find \'{lookup}\' in caom2blueprint ' + f'configuration.') + + if isinstance(keywords, tuple): + for ii in keywords[0]: + try: + value = self.headers[index].get(ii) + break + except KeyError: + self.add_error(lookup, sys.exc_info()[1]) + if keywords[1]: + value = keywords[1] + self.logger.debug( + '{}: assigned default value {}.'.format(lookup, + value)) + elif keywords: + value = keywords + self.logger.debug(f'{lookup}: assigned value {value}.') + + return value + + @staticmethod + def _has_data_array(header): + """ + + :param header: + :return: + """ + naxis = 0 + if 'ZNAXIS' in header: + naxis = _to_int(header['ZNAXIS']) + elif 'NAXIS' in header: + naxis = _to_int(header['NAXIS']) + if not naxis: + return False + + data_axes = 0 + for i in range(1, naxis + 1): + axis = f'NAXIS{i}' + if axis in header: + data_axis = _to_int(header[axis]) + if not data_axes: + data_axes = data_axis + else: + data_axes = data_axes * data_axis + if not data_axes: + return False + + bitpix = 0 + if 'BITPIX' in header: + bitpix = _to_int(header['BITPIX']) + if not bitpix: + return False + return True + + +class Hdf5Parser(ContentParser): + """ + Parses an HDF5 file and extracts the CAOM2 related information which + can be used to augment an existing CAOM2 observation, plane, or artifact. + + If there is per-Chunk metadata in the file, the constructor parameter + 'find_roots_here' is the address location in the file where the N Chunk + metadata starts. + + The WCS-related keywords of the HDF5 files are used to create instances of + astropy.wcs.WCS so that verify might be called. + + There is no CADC support for the equivalent of the FITS --fhead parameter + for HDF5 files, which is why the name of the file on a local disk is + required. + + How the classes work together for HDF5 files: + - build an HDF5ObsBlueprint, with _CAOM2_ELEMENT keys, and HDF5 metadata + path names as keys + - cache the metadata from an HDF5 file in the HDF5ObsBlueprint. This + caching is done in the "apply_blueprint_from_file" method in the + Hdf5Parser class, and replaces the path names in the blueprint with + the values from the HDF5 file. The caching is done so that all HDF5 + file access is isolated to one point in time. + - use the cached metadata to build astropy.wcs instances for verification + in Hdf5WcsParser. + - use the astropy.wcs instance and other blueprint metadata to fill the + CAOM2 record. 
+ """ + + def __init__( + self, obs_blueprint, uri, h5_file, find_roots_here='sitedata' + ): + """ + :param obs_blueprint: Hdf5ObsBlueprint instance + :param uri: which artifact augmentation is based on + :param h5_file: h5py file handle + :param find_roots_here: str location where Chunk metadata starts + """ + self._file = h5_file + # where N Chunk metadata starts + self._find_roots_here = find_roots_here + # the length of the array is the number of Parts in an HDF5 file, + # and the values are HDF5 lookup path names. + self._extension_names = [] + super().__init__(obs_blueprint, uri) + # used to set the astropy wcs info, resulting in a validated wcs + # that can be used to construct a valid CAOM2 record + self._wcs_parser = None + + def apply_blueprint_from_file(self): + """ + Retrieve metadata from file, cache in the blueprint. + """ + self.logger.debug('Begin apply_blueprint_from_file') + # h5py is an extra in this package since most collections do not + # require it + import h5py + individual, multi, attributes = self._extract_path_names_from_blueprint() + filtered_individual = [ii for ii in individual.keys() if '(' in ii] + + def _extract_from_item(name, object): + """ + Function signature dictated by h5py visititems implementation. + Executed for each dataset/group in an HDF5 file. + + :param name: fully-qualified HDF5 path name + :param object: what the HDF5 path name points to + """ + if name == self._find_roots_here: + for ii, path_name in enumerate(object.keys()): + # store the names and locations of the Part/Chunk metadata + temp = f'{name}/{path_name}' + self.logger.debug(f'Adding extension {temp}') + self._extension_names.append(temp) + self._blueprint._extensions[ii] = {} + + # If it's the Part/Chunk metadata, capture it to extensions. + # Syntax of the keys described in Hdf5ObsBlueprint class. 
+ for part_index, part_name in enumerate(self._extension_names): + if ( + name.startswith(part_name) + and isinstance(object, h5py.Dataset) + and object.dtype.names is not None + ): + for d_name in object.dtype.names: + temp_path = f'{name.replace(part_name, "")}/{d_name}' + for path_name in multi.keys(): + if path_name == temp_path: + for jj in multi.get(path_name): + self._blueprint.set( + jj, object[d_name], part_index + ) + elif (path_name.startswith(temp_path) + and '(' in path_name): + z = path_name.split('(') + if ':' in z[1]: + a = z[1].split(')')[0].split(':') + if len(a) > 2: + raise NotImplementedError + for jj in multi.get(path_name): + self._blueprint.set( + jj, + object[d_name][int(a[0])][ + int(a[1])], + part_index, + ) + else: + index = int(z[1].split(')')[0]) + for jj in multi.get(path_name): + self._blueprint.set( + jj, + object[d_name][index], + part_index, + ) + + # if it's Observation/Plane/Artifact metadata, capture it to + # the base blueprint + if isinstance(object, h5py.Dataset): + if object.dtype.names is not None: + for d_name in object.dtype.names: + temp = f'//{name}/{d_name}' + if temp in individual.keys(): + for jj in individual.get(temp): + self._blueprint.set(jj, object[d_name], 0) + else: + for ind_path in filtered_individual: + if ind_path.startswith(temp): + z = ind_path.split('(') + index = int(z[1].split(')')[0]) + for jj in individual.get(ind_path): + self._blueprint.set(jj, object[d_name][index], 0) + + if len(individual) == 0 and len(multi) == 0: + # CFHT SITELLE + self.logger.debug(f'attrs for {self.uri}') + self._extract_from_attrs(attributes) + else: + # TAOSII + self.logger.debug(f'visititems for {self.uri}') + self._file.visititems(_extract_from_item) + self.logger.debug('Done apply_blueprint_from_file') + + def _extract_from_attrs(self, attributes): + # I don't currently see any way to have more than one Part, if relying on + # attrs for metadata + part_index = 0 + # v == list of blueprint keys + for k, v in attributes.items(): + if k in self._file.attrs: + value = self._file.attrs[k] + for entry in v: + self._blueprint.set(entry, value, part_index) + + def _extract_path_names_from_blueprint(self): + """ + :return: individual - a dictionary of lists, keys are unique path names for finding metadata once per file. + Values are _CAOM2_ELEMENT strings. + multiple - a dictionary of lists, keys are unique path names for finding metadata N times per file. Values + are _CAOM2_ELEMENT strings. + attributes - a dictionary of lists, keys reference expected content from the h5py.File().attrs data + structure and its keys. 
+ """ + individual = defaultdict(list) + multi = defaultdict(list) + attributes = defaultdict(list) + for key, value in self._blueprint._plan.items(): + if ObsBlueprint.needs_lookup(value): + for ii in value[0]: + if ii.startswith('//'): + individual[ii].append(key) + elif ii.startswith('/'): + multi[ii].append(key) + else: + attributes[ii].append(key) + return individual, multi, attributes + + def apply_blueprint(self): + self.logger.debug('Begin apply_blueprint') + self.apply_blueprint_from_file() + + # after the apply_blueprint_from_file call, all the metadata from the + # file has been applied to the blueprint, so now do the bits that + # require no access to file content + + # pointers that are short to type + exts = self._blueprint._extensions + plan = self._blueprint._plan + + # apply the functions + if (self._blueprint._module is not None or + self._blueprint._module_instance is not None): + for key, value in plan.items(): + if ObsBlueprint.is_function(value): + if self._blueprint._module_instance is None: + plan[key] = self._execute_external(value, key, 0) + else: + plan[key] = self._execute_external_instance( + value, key, 0) + for extension in exts: + for key, value in exts[extension].items(): + if ObsBlueprint.is_function(value): + if self._blueprint._module_instance is None: + exts[extension][key] = self._execute_external( + value, key, extension) + else: + exts[extension][key] = \ + self._execute_external_instance( + value, key, extension) + + # blueprint already contains all the overrides, only need to make + # sure the overrides get applied to all the extensions + for extension in exts: + for key, value in exts[extension].items(): + if ( + ObsBlueprint.is_table(value) + # already been looked up + or ObsBlueprint.needs_lookup(value) + # already been executed + or ObsBlueprint.is_function(value) + # nothing to assign + or ObsBlueprint.has_no_value(value) + ): + continue + exts[extension][key] = value + self.logger.debug( + f'{key}: set to {value} in extension {extension}') + + # if no values have been set by file lookups, function execution, + # or applying overrides, apply defaults, including to all extensions + for key, value in plan.items(): + if ObsBlueprint.needs_lookup(value) and value[1]: + # there is a default value in the blueprint that can be used + for extension in exts: + q = exts[extension].get(key) + if q is None: + exts[extension][key] = value[1] + self.logger.debug( + f'Add {key} and assign default value of ' + f'{value[1]} in extension {extension}.') + elif ObsBlueprint.needs_lookup(value): + exts[extension][key] = value[1] + self.logger.debug( + f'{key}: set value to default of {value[1]} in ' + f'extension {extension}.') + plan[key] = value[1] + self.logger.debug(f'{key}: set value to default of {value[1]}') + + self.logger.debug('Done apply_blueprint') + return + + def augment_artifact(self, artifact, index=0): + self._wcs_parser = Hdf5WcsParser(self._blueprint, 0) + super().augment_artifact(artifact, 0) + for ii in range(1, len(self._blueprint._extensions)): + self._wcs_parser = Hdf5WcsParser(self._blueprint, ii) + super().augment_artifact(artifact, ii) + + def _get_chunk_naxis(self, chunk, index): + chunk.naxis = self._get_from_list('Chunk.naxis', index, chunk.naxis) + + def add_parts(self, artifact, index=0): + artifact.parts.add(caom2.Part(str(index))) + return True + + +def _set_by_type(header, keyword, value): + """astropy documentation says that the type of the second + parameter in the 'set' call is 'str', and then warns of expectations + for 
floating-point values when the code does that, so make float values + into floats, and int values into ints.""" + float_value = None + int_value = None + + try: + float_value = float(value) + except ValueError: + pass + + try: + int_value = int(value) + except ValueError: + pass + + if (float_value and not str(value).isdecimal() or + re.match(r'0\.0*', str(value))): + header.set(keyword, float_value) + elif int_value: + header.set(keyword, int_value) + else: + header.set(keyword, value) + + +def _to_checksum_uri(value): + if value is None: + return None + elif isinstance(value, caom2.ChecksumURI): + return value + else: + return caom2.ChecksumURI(value) diff --git a/caom2utils/caom2utils/tests/test_convert_from_java.py b/caom2utils/caom2utils/tests/test_convert_from_java.py index 8dd10cfd..96bed86f 100644 --- a/caom2utils/caom2utils/tests/test_convert_from_java.py +++ b/caom2utils/caom2utils/tests/test_convert_from_java.py @@ -66,7 +66,7 @@ # *********************************************************************** # -from caom2utils import ObsBlueprint +from caom2utils.blueprints import ObsBlueprint from caom2utils.legacy import ConvertFromJava, load_config, apply_java_config from caom2utils.legacy import _JAVA_CAOM2_CONFIG diff --git a/caom2utils/caom2utils/tests/test_fits2caom2.py b/caom2utils/caom2utils/tests/test_fits2caom2.py index 9eaacbc0..e248f3d7 100755 --- a/caom2utils/caom2utils/tests/test_fits2caom2.py +++ b/caom2utils/caom2utils/tests/test_fits2caom2.py @@ -70,14 +70,14 @@ from astropy.wcs import WCS as awcs from cadcutils import net from cadcdata import FileInfo -from caom2utils import FitsParser, FitsWcsParser, main_app, update_blueprint -from caom2utils import Hdf5Parser, Hdf5WcsParser, ContentParser -from caom2utils import Hdf5ObsBlueprint -from caom2utils import ObsBlueprint, BlueprintParser, gen_proc -from caom2utils import get_gen_proc_arg_parser, augment +from caom2utils import main_app, update_blueprint +from caom2utils.parsers import BlueprintParser, FitsParser, Hdf5Parser +from caom2utils.blueprints import Hdf5ObsBlueprint, ObsBlueprint +from caom2utils import augment, gen_proc, get_gen_proc_arg_parser from caom2utils.legacy import load_config from caom2utils.caom2blueprint import _visit, _load_plugin from caom2utils.caom2blueprint import _get_and_update_artifact_meta +from caom2utils.wcs_parsers import FitsWcsParser, Hdf5WcsParser from caom2 import ObservationWriter, SimpleObservation, Algorithm, Artifact, ProductType, ReleaseType, DataProductType from caom2 import get_differences, obs_reader_writer, ObservationReader, Chunk, ObservationIntentType, ChecksumURI diff --git a/caom2utils/caom2utils/tests/test_obs_blueprint.py b/caom2utils/caom2utils/tests/test_obs_blueprint.py index 2a1f8a62..bfe05d6c 100644 --- a/caom2utils/caom2utils/tests/test_obs_blueprint.py +++ b/caom2utils/caom2utils/tests/test_obs_blueprint.py @@ -66,7 +66,7 @@ # *********************************************************************** # -from caom2utils import ObsBlueprint +from caom2utils.blueprints import ObsBlueprint import pytest import sys diff --git a/caom2utils/caom2utils/wcs_parsers.py b/caom2utils/caom2utils/wcs_parsers.py new file mode 100644 index 00000000..11bc1d20 --- /dev/null +++ b/caom2utils/caom2utils/wcs_parsers.py @@ -0,0 +1,923 @@ +# *********************************************************************** +# ****************** CANADIAN ASTRONOMY DATA CENTRE ******************* +# ************* CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +# +# (c) 2024. (c) 2024. 
+# Government of Canada Gouvernement du Canada +# National Research Council Conseil national de recherches +# Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +# All rights reserved Tous droits réservés +# +# NRC disclaims any warranties, Le CNRC dénie toute garantie +# expressed, implied, or énoncée, implicite ou légale, +# statutory, of any kind with de quelque nature que ce +# respect to the software, soit, concernant le logiciel, +# including without limitation y compris sans restriction +# any warranty of merchantability toute garantie de valeur +# or fitness for a particular marchande ou de pertinence +# purpose. NRC shall not be pour un usage particulier. +# liable in any event for any Le CNRC ne pourra en aucun cas +# damages, whether direct or être tenu responsable de tout +# indirect, special or general, dommage, direct ou indirect, +# consequential or incidental, particulier ou général, +# arising from the use of the accessoire ou fortuit, résultant +# software. Neither the name de l'utilisation du logiciel. Ni +# of the National Research le nom du Conseil National de +# Council of Canada nor the Recherches du Canada ni les noms +# names of its contributors may de ses participants ne peuvent +# be used to endorse or promote être utilisés pour approuver ou +# products derived from this promouvoir les produits dérivés +# software without specific prior de ce logiciel sans autorisation +# written permission. préalable et particulière +# par écrit. +# +# This file is part of the Ce fichier fait partie du projet +# OpenCADC project. OpenCADC. +# +# OpenCADC is free software: OpenCADC est un logiciel libre ; +# you can redistribute it and/or vous pouvez le redistribuer ou le +# modify it under the terms of modifier suivant les termes de +# the GNU Affero General Public la “GNU Affero General Public +# License as published by the License” telle que publiée +# Free Software Foundation, par la Free Software Foundation +# either version 3 of the : soit la version 3 de cette +# License, or (at your option) licence, soit (à votre gré) +# any later version. toute version ultérieure. +# +# OpenCADC is distributed in the OpenCADC est distribué +# hope that it will be useful, dans l’espoir qu’il vous +# but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +# without even the implied GARANTIE : sans même la garantie +# warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +# or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +# PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +# General Public License for Générale Publique GNU Affero +# more details. pour plus de détails. +# +# You should have received Vous devriez avoir reçu une +# a copy of the GNU Affero copie de la Licence Générale +# General Public License along Publique GNU Affero avec +# with OpenCADC. If not, see OpenCADC ; si ce n’est +# . pas le cas, consultez : +# . 
+# +# Revision: 4 +# +# *********************************************************************** +# + +import logging +import math +import sys + +from astropy.wcs import SingularMatrixError, utils, Wcsprm, WCS +from caom2 import ( + Axis, Chunk, Coord2D, CoordAxis1D, CoordAxis2D, CoordError, CoordFunction1D, CoordFunction2D, CustomWCS, + Dimension2D, + ObservableAxis, PolarizationWCS, RefCoord, Slice, + SpatialWCS, SpectralWCS, + TemporalWCS +) +from caom2utils.blueprints import ObsBlueprint, _to_float, _to_int, _to_str + + +CUSTOM_CTYPES = [ + 'RM', + 'FDEP' +] + +POSITION_CTYPES = [ + ['RA', + 'GLON', + 'ELON', + 'HLON', + 'SLON'], + ['DEC', + 'GLAT', + 'ELAT', + 'HLAT', + 'SLAT'] +] + +ENERGY_CTYPES = [ + 'FREQ', + 'ENER', + 'WAVN', + 'VRAD', + 'WAVE', + 'VOPT', + 'ZOPT', + 'AWAV', + 'VELO', + 'BETA'] + +# From http://hea-www.cfa.harvard.edu/~arots/TimeWCS/ +TIME_KEYWORDS = [ + 'TIME', + 'TAI', + 'TT', + 'TDT', + 'ET', + 'IAT', + 'UT1', + 'UTC', + 'GMT', + 'GPS', + 'TCG', + 'TCB', + 'TDB', + 'LOCAL'] + +POLARIZATION_CTYPES = ['STOKES'] + +OBSERVABLE_CTYPES = [ + 'observable', + 'FLUX'] + + +class HDULoggingFilter(logging.Filter): + """Add the HDU number to logging messages as a default.""" + + def __init__(self): + super().__init__() + self._extension = -1 + + def filter(self, record): + record.hdu = self._extension + return True + + def extension(self, value): + self._extension = value + + +class WcsParser: + """ + WCS axes methods. + """ + + ENERGY_AXIS = 'energy' + POLARIZATION_AXIS = 'polarization' + TIME_AXIS = 'time' + + def __init__(self, blueprint, extension): + self._wcs = None + self.wcs = None + self._blueprint = blueprint + self._axes = { + 'ra': [0, False], + 'dec': [0, False], + 'time': [0, False], + 'energy': [0, False], + 'polarization': [0, False], + 'observable': [0, False], + 'custom': [0, False], + } + # int - index into blueprint._plan extensions + self._extension = extension + self.logger = logging.getLogger(self.__class__.__name__) + self._set_wcs() + + def _assign_cd(self, key, cd, count): + x = self._blueprint._get(key, self._extension) + if x is not None: + if ObsBlueprint.needs_lookup(x): + cd[count][count] = 1.0 + else: + cd[count][count] = x + + def assign_sanitize(self, assignee, index, key, sanitize=True): + """ + Do not want to blindly assign None to astropy.wcs attributes, so + use this method for conditional assignment. + + The current implementation is that if there is a legitimate need to + assign None to a value, either use 'set' in the Hdf5ObsBlueprint, and + specifically assign None, or execute a function to set it to None + conditionally. There will be no support for a Default value of None + with HDF5 files. + + By the time this method is called, if the value still passes the "ObsBlueprint.needs_lookup" + check, the value should be ignored for fulfilling the WCS needs of the record under construction. 
+ """ + x = self._blueprint._get(key, self._extension) + if sanitize: + x = self._sanitize(x) + if x is not None and not ObsBlueprint.needs_lookup(x): + assignee[index] = x + + def _set_wcs(self): + num_axes = self._blueprint.get_configed_axes_count() + self._wcs = WCS(naxis=num_axes) + self.wcs = self._wcs.wcs + array_shape, crder, crpix, crval, csyer, ctype, cunit, temp = [[0] * num_axes for _ in range(8)] + cd = [temp.copy() for _ in range(num_axes)] + count = 0 + if self._blueprint._pos_axes_configed: + self._axes['ra'][1] = True + self._axes['dec'][1] = True + self._axes['ra'][0] = count + self._axes['dec'][0] = count + 1 + self.assign_sanitize(ctype, count, 'Chunk.position.axis.axis1.ctype') + self.assign_sanitize(ctype, count + 1, 'Chunk.position.axis.axis2.ctype') + self.assign_sanitize(cunit, count, 'Chunk.position.axis.axis1.cunit') + self.assign_sanitize(cunit, count + 1, 'Chunk.position.axis.axis2.cunit') + self.assign_sanitize(array_shape, count, 'Chunk.position.axis.function.dimension.naxis1') + self.assign_sanitize(array_shape, count + 1, 'Chunk.position.axis.function.dimension.naxis2') + self.assign_sanitize(crpix, count, 'Chunk.position.axis.function.refCoord.coord1.pix') + self.assign_sanitize(crpix, count + 1, 'Chunk.position.axis.function.refCoord.coord2.pix') + self.assign_sanitize(crval, count, 'Chunk.position.axis.function.refCoord.coord1.val') + self.assign_sanitize(crval, count + 1, 'Chunk.position.axis.function.refCoord.coord2.val') + x = self._blueprint._get('Chunk.position.axis.function.cd11', + self._extension) + if x is not None and not ObsBlueprint.needs_lookup(x): + cd[count][0] = x + x = self._blueprint._get('Chunk.position.axis.function.cd12', + self._extension) + if x is not None and not ObsBlueprint.needs_lookup(x): + cd[count][1] = x + x = self._blueprint._get('Chunk.position.axis.function.cd21', + self._extension) + if x is not None and not ObsBlueprint.needs_lookup(x): + cd[count + 1][0] = x + x = self._blueprint._get('Chunk.position.axis.function.cd22', + self._extension) + if x is not None and not ObsBlueprint.needs_lookup(x): + cd[count + 1][1] = x + self.assign_sanitize(crder, count, 'Chunk.position.axis.error1.rnder') + self.assign_sanitize(crder, count + 1, 'Chunk.position.axis.error2.rnder') + self.assign_sanitize(csyer, count, 'Chunk.position.axis.error1.syser') + self.assign_sanitize(csyer, count + 1, 'Chunk.position.axis.error2.syser') + count += 2 + if self._blueprint._time_axis_configed: + self._axes['time'][1] = True + self._axes['time'][0] = count + self.assign_sanitize(ctype, count, 'Chunk.time.axis.axis.ctype', False) + self.assign_sanitize(cunit, count, 'Chunk.time.axis.axis.cunit', False) + self.assign_sanitize(array_shape, count, 'Chunk.time.axis.function.naxis', False) + self.assign_sanitize(crpix, count, 'Chunk.time.axis.function.refCoord.pix', False) + self.assign_sanitize(crval, count, 'Chunk.time.axis.function.refCoord.val', False) + self.assign_sanitize(crder, count, 'Chunk.time.axis.error.rnder') + self.assign_sanitize(csyer, count, 'Chunk.time.axis.error.syser') + self._assign_cd('Chunk.time.axis.function.delta', cd, count) + count += 1 + if self._blueprint._energy_axis_configed: + self._axes['energy'][1] = True + self._axes['energy'][0] = count + self.assign_sanitize(ctype, count, 'Chunk.energy.axis.axis.ctype', False) + self.assign_sanitize(cunit, count, 'Chunk.energy.axis.axis.cunit', False) + self.assign_sanitize(array_shape, count, 'Chunk.energy.axis.function.naxis', False) + self.assign_sanitize(crpix, count, 
'Chunk.energy.axis.function.refCoord.pix', False) + self.assign_sanitize(crval, count, 'Chunk.energy.axis.function.refCoord.val', False) + self.assign_sanitize(crder, count, 'Chunk.energy.axis.error.rnder') + self.assign_sanitize(csyer, count, 'Chunk.energy.axis.error.syser') + self._assign_cd('Chunk.energy.axis.function.delta', cd, count) + count += 1 + if self._blueprint._polarization_axis_configed: + self._axes['polarization'][1] = True + self._axes['polarization'][0] = count + self.assign_sanitize(ctype, count, 'Chunk.polarization.axis.axis.ctype', False) + self.assign_sanitize(cunit, count, 'Chunk.polarization.axis.axis.cunit', False) + self.assign_sanitize(array_shape, count, 'Chunk.polarization.axis.function.naxis', False) + self.assign_sanitize(crpix, count, 'Chunk.polarization.axis.function.refCoord.pix', False) + self.assign_sanitize(crval, count, 'Chunk.polarization.axis.function.refCoord.val', False) + self._assign_cd('Chunk.polarization.axis.function.delta', cd, count) + count += 1 + if self._blueprint._obs_axis_configed: + self._axes['observable'][1] = True + self._axes['observable'][0] = count + self.assign_sanitize(ctype, count, 'Chunk.observable.axis.axis.ctype', False) + self.assign_sanitize(cunit, count, 'Chunk.observable.axis.axis.cunit', False) + array_shape[count] = 1.0 + self.assign_sanitize(crpix, count, 'Chunk.observable.axis.function.refCoord.pix', False) + crval[count] = 0.0 + cd[count][count] = 1.0 + count += 1 + if self._blueprint._custom_axis_configed: + self._axes['custom'][1] = True + self._axes['custom'][0] = count + self.assign_sanitize(ctype, count, 'Chunk.custom.axis.axis.ctype', False) + self.assign_sanitize(cunit, count, 'Chunk.custom.axis.axis.cunit', False) + self.assign_sanitize(array_shape, count, 'Chunk.custom.axis.function.naxis', False) + self.assign_sanitize(crpix, count, 'Chunk.custom.axis.function.refCoord.pix', False) + self.assign_sanitize(crval, count, 'Chunk.custom.axis.function.refCoord.val', False) + self._assign_cd('Chunk.custom.axis.function.delta', cd, count) + count += 1 + + if not all(val == 0 for val in array_shape): + self._wcs.array_shape = array_shape + if not all(val == 0 for val in cunit): + self._wcs.wcs.cunit = cunit + if not all(val == 0 for val in ctype): + self._wcs.wcs.ctype = ctype + if not all(val == 0 for val in crpix): + self._wcs.wcs.crpix = crpix + if not all(val == 0 for val in crval): + self._wcs.wcs.crval = crval + if not all(val == 0 for val in crder): + self._wcs.wcs.crder = crder + if not all(val == 0 for val in csyer): + self._wcs.wcs.csyer = csyer + self._wcs.wcs.cd = cd + self._finish_position() + self._finish_time() + self._finish_energy() + + def augment_custom(self, chunk): + """ + Augments a chunk with custom WCS information + :param chunk: + :return: + """ + self.logger.debug('Begin Custom WCS augmentation.') + if chunk is None or not isinstance(chunk, Chunk): + raise ValueError(f'Chunk type mis-match for {chunk}.') + + custom_axis_index = self._get_axis_index(CUSTOM_CTYPES) + if custom_axis_index is None: + self.logger.debug('No WCS Custom info') + return + try: + custom_axis_length = self._get_axis_length(custom_axis_index + 1) + except ValueError: + self.logger.debug('No WCS Custom axis.function') + return + + if custom_axis_length: + chunk.custom_axis = custom_axis_index + 1 + naxis = CoordAxis1D(self._get_axis(custom_axis_index)) + if self.wcs.has_cd(): + delta = self.wcs.cd[custom_axis_index][custom_axis_index] + else: + delta = self.wcs.cdelt[custom_axis_index] + ref_coord = 
self._get_ref_coord(custom_axis_index) + if delta and ref_coord: + naxis.function = CoordFunction1D(custom_axis_length, delta, ref_coord) + if not chunk.custom: + chunk.custom = CustomWCS(naxis) + else: + chunk.custom.axis = naxis + + self.logger.debug('End Custom WCS augmentation.') + + def augment_energy(self, chunk): + """ + Augments the energy information in a chunk + :param chunk: + """ + self.logger.debug('Begin Energy WCS augmentation.') + if chunk is None or not isinstance(chunk, Chunk): + raise ValueError(f'Chunk type mis-match for {chunk}.') + + # get the energy axis + energy_axis_index = self._get_axis_index(ENERGY_CTYPES) + + if energy_axis_index is None: + self.logger.debug('No WCS Energy info.') + return + try: + energy_axis_length = self._get_axis_length(energy_axis_index + 1) + except ValueError: + self.logger.debug('No WCS Energy axis.function') + return + + if energy_axis_length: + chunk.energy_axis = energy_axis_index + 1 + naxis = CoordAxis1D(self._get_axis(energy_axis_index)) + naxis.error = self._get_coord_error(energy_axis_index) + if self.wcs.has_cd(): + delta = self.wcs.cd[energy_axis_index][energy_axis_index] + else: + delta = self.wcs.cdelt[energy_axis_index] + ref_coord = self._get_ref_coord(energy_axis_index) + if delta and ref_coord: + naxis.function = CoordFunction1D(energy_axis_length, delta, ref_coord) + + specsys = _to_str(self.wcs.specsys) + if not chunk.energy: + chunk.energy = SpectralWCS(naxis, specsys) + else: + chunk.energy.axis = naxis + chunk.energy.specsys = specsys + + chunk.energy.ssysobs = _to_str(self._sanitize(self.wcs.ssysobs)) + # wcs returns 0.0 by default + if self._sanitize(self.wcs.restfrq) != 0: + chunk.energy.restfrq = self._sanitize(self.wcs.restfrq) + if self._sanitize(self.wcs.restwav) != 0: + chunk.energy.restwav = self._sanitize(self.wcs.restwav) + chunk.energy.velosys = self._sanitize(self.wcs.velosys) + chunk.energy.zsource = self._sanitize(self.wcs.zsource) + chunk.energy.ssyssrc = _to_str(self._sanitize(self.wcs.ssyssrc)) + chunk.energy.velang = self._sanitize(self.wcs.velangl) + self.logger.debug('End Energy WCS augmentation.') + + def augment_position(self, chunk): + """ + Augments a chunk with spatial WCS information + :param chunk: + :return: + """ + self.logger.debug('Begin Spatial WCS augmentation.') + if chunk is None or not isinstance(chunk, Chunk): + raise ValueError(f'Chunk type mis-match for {chunk}.') + + position_axes_indices = self._get_position_axis() + if not position_axes_indices: + self.logger.debug('No Spatial WCS found') + return + + chunk.position_axis_1 = position_axes_indices[0] + chunk.position_axis_2 = position_axes_indices[1] + axis = self._get_spatial_axis(chunk.position_axis_1 - 1, + chunk.position_axis_2 - 1) + + if axis is None: + self.logger.debug('No WCS Position axis.function') + return + + if chunk.position: + chunk.position.axis = axis + else: + chunk.position = SpatialWCS(axis) + + chunk.position.coordsys = _to_str(self._sanitize(self.wcs.radesys)) + temp = self._sanitize(self.wcs.equinox) + if (temp is not None and 1800.0 <= temp <= 2500) or temp is None: + chunk.position.equinox = temp + + self._finish_chunk_position(chunk) + self.logger.debug('End Spatial WCS augmentation.') + + def augment_temporal(self, chunk): + """ + Augments a chunk with temporal WCS information + + :param chunk: + :return: + """ + self.logger.debug('Begin TemporalWCS augmentation.') + if chunk is None or not isinstance(chunk, Chunk): + raise ValueError(f'Chunk type mis-match for {chunk}.') + + time_axis_index = 
self._get_axis_index(TIME_KEYWORDS) + + if time_axis_index is None: + self.logger.debug('No WCS Time info.') + return + + chunk.time_axis = time_axis_index + 1 + # set chunk.time + self.logger.debug('Begin temporal axis augmentation.') + + try: + axis_length = self._get_axis_length(time_axis_index + 1) + except ValueError: + self.logger.debug('No WCS Temporal axis.function') + return + + if axis_length: + aug_naxis = self._get_axis(time_axis_index) + aug_error = self._get_coord_error(time_axis_index) + aug_ref_coord = self._get_ref_coord(time_axis_index) + if self.wcs.has_cd(): + delta = self.wcs.cd[time_axis_index][time_axis_index] + else: + delta = self.wcs.cdelt[time_axis_index] + if aug_ref_coord is not None: + aug_function = CoordFunction1D(axis_length, delta, aug_ref_coord) + naxis = CoordAxis1D(aug_naxis, aug_error, None, None, aug_function) + if not chunk.time: + chunk.time = TemporalWCS(naxis) + else: + chunk.time.axis = naxis + + self._finish_chunk_time(chunk) + self.logger.debug('End TemporalWCS augmentation.') + + def augment_polarization(self, chunk): + """ + Augments a chunk with polarization WCS information + :param chunk: + :return: + """ + self.logger.debug('Begin Polarization WCS augmentation.') + if chunk is None or not isinstance(chunk, Chunk): + raise ValueError(f'Chunk type mis-match for {chunk}.') + + polarization_axis_index = self._get_axis_index(POLARIZATION_CTYPES) + if polarization_axis_index is None: + self.logger.debug('No WCS Polarization info') + return + + try: + axis_length = self._get_axis_length(polarization_axis_index + 1) + except ValueError: + self.logger.debug('No WCS Polarization axis.function') + return + + if axis_length: + chunk.polarization_axis = polarization_axis_index + 1 + + naxis = CoordAxis1D(self._get_axis(polarization_axis_index)) + if self.wcs.has_cd(): + delta = self.wcs.cd[polarization_axis_index][polarization_axis_index] + else: + delta = self.wcs.cdelt[polarization_axis_index] + ref_coord = self._get_ref_coord(polarization_axis_index) + if delta and ref_coord: + naxis.function = CoordFunction1D(axis_length, delta, ref_coord) + if not chunk.polarization: + chunk.polarization = PolarizationWCS(naxis) + else: + chunk.polarization.axis = naxis + + self.logger.debug('End Polarization WCS augmentation.') + + def augment_observable(self, chunk): + """ + Augments a chunk with an observable axis. 
+ + :param chunk: + :return: + """ + self.logger.debug('Begin Observable WCS augmentation.') + if chunk is None or not isinstance(chunk, Chunk): + raise ValueError(f'Chunk type mis-match for {chunk}.') + + observable_axis_index = self._get_axis_index(OBSERVABLE_CTYPES) + if observable_axis_index is None: + self.logger.debug('No Observable axis info') + return + + chunk.observable_axis = observable_axis_index + 1 + self._finish_chunk_observable(chunk) + self.logger.debug('End Observable WCS augmentation.') + + def _finish_chunk_observable(self, chunk): + self.logger.debug('Begin _finish_chunk_observable') + ctype = self._wcs.wcs.ctype[chunk.observable_axis-1] + cunit = self._wcs.wcs.ctype[chunk.observable_axis-1] + pix_bin = _to_int(self._wcs.wcs.crpix[chunk.observable_axis-1]) + if ctype is not None and cunit is not None and pix_bin is not None: + chunk.observable = ObservableAxis( + Slice(self._get_axis(0, ctype, cunit), pix_bin)) + self.logger.debug('End _finish_chunk_observable') + + def _finish_chunk_position(self, chunk): + self.logger.debug('Begin _finish_chunk_position') + if chunk.position.resolution is None: + try: + # JJK 30-01-23 + # In a spatial data chunk the resolution is 2 times the pixel size. We can get the pixel size from + # the wcs + temp = utils.proj_plane_pixel_scales(self._wcs) + chunk.position.resolution = temp[0] + except SingularMatrixError as e: + # cannot calculate position.resolution, ignore and continue on + self.logger.warning(f'Not calculating resolution due to {e}') + self.logger.debug('End _finish_chunk_position') + + def _finish_chunk_time(self, chunk): + self.logger.debug('Begin _finish_chunk_time') + if not math.isnan(self._wcs.wcs.xposure): + chunk.time.exposure = self._wcs.wcs.xposure + if self._wcs.wcs.timesys is not None and self._wcs.wcs.timesys != '': + chunk.time.timesys = self._wcs.wcs.timesys + if self._wcs.wcs.trefpos is not None and self._wcs.wcs.trefpos != '': + chunk.time.trefpos = self._wcs.wcs.trefpos + if self._wcs.wcs.mjdref is not None and self._wcs.wcs.mjdref[0] != '' and self._wcs.wcs.mjdref[0] != 0.0: + # the astropy value is an array of length 2, use the first value + chunk.time.mjdref = self._wcs.wcs.mjdref[0] + self.logger.debug('End _finish_chunk_time') + + def _finish_energy(self): + self.logger.debug('Begin _finish_energy') + if self._blueprint._energy_axis_configed: + x = self._blueprint._get('Chunk.energy.specsys', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.specsys = x + x = self._blueprint._get('Chunk.energy.ssysobs', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.ssysobs = x + x = self._blueprint._get('Chunk.energy.restfrq', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.restfrq = _to_float(x) + x = self._blueprint._get('Chunk.energy.restwav', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.restwav = x + x = self._blueprint._get('Chunk.energy.velosys', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.velosys = x + x = self._blueprint._get('Chunk.energy.zsource', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.zsource = x + x = self._blueprint._get('Chunk.energy.ssyssrc', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.ssyssrc = x + x = self._blueprint._get('Chunk.energy.velang', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.velangl = x + self.logger.debug('End _finish_energy') + 
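# A minimal illustrative sketch, not taken from the patch: the _finish_* helpers
# here copy resolved blueprint scalars onto the underlying astropy WCS and skip
# anything ObsBlueprint.needs_lookup() still flags as an unresolved keyword tuple.
# For the energy case above, the effect is roughly (values are examples only):
#
#     from astropy.wcs import WCS
#     w = WCS(naxis=1)
#     w.wcs.specsys = 'TOPOCENT'   # resolved Chunk.energy.specsys
#     w.wcs.ssysobs = 'TOPOCENT'   # resolved Chunk.energy.ssysobs
#     w.wcs.restwav = 6.563e-07    # resolved Chunk.energy.restwav (hypothetical)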
+ def _finish_position(self): + self.logger.debug('Begin _finish_position') + if self._blueprint._pos_axes_configed: + x = self._blueprint._get('Chunk.position.coordsys', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.radesys = x + x = self._blueprint._get('Chunk.position.equinox', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.equinox = _to_float(x) + self.logger.debug('End _finish_position') + + def _finish_time(self): + self.logger.debug('Begin _finish_time') + if self._blueprint._time_axis_configed: + x = self._blueprint._get('Chunk.time.exposure', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.xposure = _to_float(x) + x = self._blueprint._get('Chunk.time.timesys', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.timesys = x + x = self._blueprint._get('Chunk.time.trefpos', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.trefpos = x + x = self._blueprint._get('Chunk.time.mjdref', self._extension) + if x and not ObsBlueprint.needs_lookup(x): + self._wcs.wcs.mjdref = [x, x] + self.logger.debug('End _finish_time') + + def _get_axis(self, index, over_ctype=None, over_cunit=None): + """ Assemble a generic axis """ + aug_ctype = str(self.wcs.ctype[index]) if over_ctype is None \ + else over_ctype + aug_cunit = str(self.wcs.cunit[index]) if over_cunit is None \ + else over_cunit + if aug_cunit is not None and len(aug_cunit) == 0: + aug_cunit = None + aug_axis = Axis(aug_ctype, aug_cunit) + return aug_axis + + def _get_axis_index(self, keywords): + """ + Return the index of a specific axis type or None of it doesn't exist + :param keywords: + :return: + """ + axis = None + for i, elem in enumerate(self.wcs.ctype): + elem = elem.split('-')[0] + if elem in keywords: + axis = i + break + elif len(elem) == 0: + check = self.wcs.ctype[i] + if check in keywords: + axis = i + break + return axis + + def _get_axis_length(self, for_axis): + if self._wcs.array_shape is None: + return 0 + else: + if len(self._wcs.array_shape) == 1: + result = self._wcs.array_shape[0] + else: + result = self._wcs.array_shape[for_axis-1] + if isinstance(result, tuple): + # the blueprint is incompletely configured + raise ValueError(f'Could not find axis length for axis {for_axis}') + return _to_int(result) + + def _get_cd(self, x_index, y_index): + """ returns cd info""" + + try: + if self.wcs.has_cd(): + cd11 = self.wcs.cd[x_index][x_index] + cd12 = self.wcs.cd[x_index][y_index] + cd21 = self.wcs.cd[y_index][x_index] + cd22 = self.wcs.cd[y_index][y_index] + else: + cd11 = self.wcs.cdelt[x_index] + cd12 = self.wcs.crota[x_index] + cd21 = self.wcs.crota[y_index] + cd22 = self.wcs.cdelt[y_index] + except AttributeError: + self.logger.debug( + f'Error searching for CD* values {sys.exc_info()[1]}') + cd11 = None + cd12 = None + cd21 = None + cd22 = None + + return cd11, cd12, cd21, cd22 + + def _get_coord_error(self, index): + aug_coord_error = None + aug_csyer = self._sanitize(self.wcs.csyer[index]) + aug_crder = self._sanitize(self.wcs.crder[index]) + if aug_csyer is not None and aug_crder is not None: + aug_coord_error = CoordError(aug_csyer, aug_crder) + return aug_coord_error + + def _get_dimension(self, xindex, yindex): + aug_dimension = None + try: + xindex_axis_length = self._get_axis_length(xindex + 1) + yindex_axis_length = self._get_axis_length(yindex + 1) + except ValueError: + self.logger.debug('No WCS Energy axis.function') + return None + + if xindex_axis_length > 
0 and yindex_axis_length > 0: + aug_dim1 = _to_int(xindex_axis_length) + aug_dim2 = _to_int(yindex_axis_length) + if aug_dim1 and aug_dim2: + aug_dimension = Dimension2D(aug_dim1, aug_dim2) + self.logger.debug('End 2D dimension augmentation.') + return aug_dimension + + def _get_position_axis(self): + # there are two celestial axes, get the applicable indices from + # the axis_types + xindex = self._get_axis_index(POSITION_CTYPES[0]) + yindex = self._get_axis_index(POSITION_CTYPES[1]) + + if (xindex is not None) and (yindex is not None): + return xindex + 1, yindex + 1 + elif (xindex is None) and (yindex is None): + return None + else: + raise ValueError('Found only one position axis ra/dec: {}/{} in ' + '{}'. + format(xindex, yindex, self.file)) + + def _get_ref_coord(self, index): + aug_crpix = _to_float(self._sanitize(self.wcs.crpix[index])) + aug_crval = _to_float(self._sanitize(self.wcs.crval[index])) + aug_ref_coord = None + if aug_crpix is not None and aug_crval is not None: + aug_ref_coord = RefCoord(aug_crpix, aug_crval) + return aug_ref_coord + + def _get_spatial_axis(self, xindex, yindex): + """Assemble the bits to make the axis parameter needed for + SpatialWCS construction.""" + aug_dimension = self._get_dimension(xindex, yindex) + if aug_dimension is None: + return None + + x_ref_coord = self._get_ref_coord(xindex) + y_ref_coord = self._get_ref_coord(yindex) + aug_ref_coord = None + if x_ref_coord and y_ref_coord: + aug_ref_coord = Coord2D(x_ref_coord, y_ref_coord) + + aug_cd11, aug_cd12, aug_cd21, aug_cd22 = \ + self._get_cd(xindex, yindex) + + if aug_dimension is not None and \ + aug_ref_coord is not None and \ + aug_cd11 is not None and \ + aug_cd12 is not None and \ + aug_cd21 is not None and \ + aug_cd22 is not None: + aug_function = CoordFunction2D(aug_dimension, aug_ref_coord, + aug_cd11, aug_cd12, + aug_cd21, aug_cd22) + self.logger.debug('End CoordFunction2D augmentation.') + else: + aug_function = None + + aug_axis = CoordAxis2D(self._get_axis(xindex), + self._get_axis(yindex), + self._get_coord_error(xindex), + self._get_coord_error(yindex), + None, None, aug_function) + self.logger.debug('End CoordAxis2D augmentation.') + return aug_axis + + def _sanitize(self, value): + """ + Sanitizes values from content to caom2 + :param value: + :return: + """ + if value is None: + return None + elif isinstance(value, float) and math.isnan(value): + return None + elif not str(value): + return None # empty string + else: + return value + + +class FitsWcsParser(WcsParser): + """ + Parser to augment chunks with positional, temporal, energy and polarization + information based on the WCS keywords in an extension of a FITS header. + + Note: Under the hood, this class uses the astropy.wcs package to parse the + header and any inconsistencies or missing keywords are reported back as + warnings. + """ + + def __init__(self, header, file, extension): + """ + + :param header: FITS extension header + :param file: name of FITS file + :param extension: which HDU + WCS axes methods of this class. 
+ """ + self.logger = logging.getLogger(self.__class__.__name__) + self.log_filter = HDULoggingFilter() + self.log_filter.extension(extension) + self.logger.addFilter(self.log_filter) + logastro = logging.getLogger('astropy') + logastro.addFilter(self.log_filter) + logastro.propagate = False + header_string = header.tostring().rstrip() + header_string = header_string.replace('END' + ' ' * 77, '') + self.wcs = Wcsprm(header_string.encode('ascii')) + self.wcs.fix() + self.header = header + self.file = file + self.extension = extension + + def _finish_chunk_observable(self, chunk): + self.logger.debug('Begin _finish_chunk_observable') + ctype = self.header.get(f'CTYPE{chunk.observable_axis}') + cunit = self.header.get(f'CUNIT{chunk.observable_axis}') + pix_bin = self.header.get(f'CRPIX{chunk.observable_axis}') + if ctype is not None and cunit is not None and pix_bin is not None: + chunk.observable = ObservableAxis( + Slice(self._get_axis(0, ctype, cunit), pix_bin)) + self.logger.debug('End _finish_chunk_observable') + + def _finish_chunk_position(self, chunk): + pass + + def _finish_chunk_time(self, chunk): + """ + The expected caom2 - FITS keywords mapping is: + + time.exposure = EXPTIME + time.resolution = TIMEDEL + time.timesys = TIMESYS default UTC + time.trefpos = TREFPOS + time.mjdref = MJDREF | MJDDATE + """ + self.logger.debug('Begin _finish_chunk_time') + chunk.time.exposure = _to_float(self.header.get('EXPTIME')) + chunk.time.resolution = _to_float(self.header.get('TIMEDEL')) + chunk.time.timesys = str(self.header.get('TIMESYS', 'UTC')) + chunk.time.trefpos = self.header.get('TREFPOS', None) + chunk.time.mjdref = self.header.get('MJDREF', + self.header.get('MJDDATE')) + self.logger.debug('End _finish_chunk_time') + + def _get_axis_length(self, for_axis): + # try ZNAXIS first in order to get the size of the original + # image in case it was FITS compressed + result = _to_int(self._sanitize( + self.header.get(f'ZNAXIS{for_axis}'))) + if result is None: + result = _to_int(self._sanitize( + self.header.get(f'NAXIS{for_axis}'))) + if result is None: + msg = f'Could not find axis length for axis {for_axis}' + raise ValueError(msg) + return result + + +class Hdf5WcsParser(WcsParser): + """ + This class initializes an astropy.wcs instance with metadata from an + Hdf5ObsBlueprint populated using an Hdf5Parser. + """ + + def __init__(self, blueprint, extension): + """ + :param blueprint: ObsBlueprint + """ + super().__init__(blueprint, extension) + + def _get_axis_index(self, keywords): + result = self._axes['custom'][0] + if 'RA' in keywords: + result = self._axes['ra'][0] + elif 'DEC' in keywords: + result = self._axes['dec'][0] + elif 'TIME' in keywords: + result = self._axes['time'][0] + elif 'FREQ' in keywords: + result = self._axes['energy'][0] + elif 'STOKES' in keywords: + result = self._axes['polarization'][0] + elif 'FLUX' in keywords: + result = self._axes['observable'][0] + return result From 142cfca558c4c6dad866531c91484e29c9444bcb Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Tue, 16 Jan 2024 16:22:33 -0800 Subject: [PATCH 20/36] refactor_caom2utils - black. 
--- caom2utils/caom2utils/__init__.py | 9 +- caom2utils/caom2utils/blueprints.py | 718 ++++++--------- caom2utils/caom2utils/caom2blueprint.py | 480 +++++----- caom2utils/caom2utils/caomvalidator.py | 15 +- caom2utils/caom2utils/data_util.py | 24 +- caom2utils/caom2utils/legacy.py | 144 +-- caom2utils/caom2utils/parsers.py | 865 +++++++++--------- caom2utils/caom2utils/polygonvalidator.py | 41 +- caom2utils/caom2utils/tests/conftest.py | 1 + .../caom2utils/tests/test_caomvalidator.py | 21 +- .../caom2utils/tests/test_collections.py | 93 +- .../tests/test_convert_from_java.py | 46 +- .../caom2utils/tests/test_custom_axis_util.py | 148 ++- caom2utils/caom2utils/tests/test_data_util.py | 52 +- .../caom2utils/tests/test_fits2caom2.py | 805 ++++++++-------- .../caom2utils/tests/test_obs_blueprint.py | 104 +-- .../caom2utils/tests/test_polygonvalidator.py | 152 ++- caom2utils/caom2utils/tests/test_si_uris.py | 15 +- .../caom2utils/tests/test_wcsvalidator.py | 110 ++- caom2utils/caom2utils/wcs_parsers.py | 166 ++-- caom2utils/caom2utils/wcs_util.py | 164 ++-- caom2utils/caom2utils/wcsvalidator.py | 80 +- 22 files changed, 1942 insertions(+), 2311 deletions(-) diff --git a/caom2utils/caom2utils/__init__.py b/caom2utils/caom2utils/__init__.py index e6fdda70..ff0143ff 100755 --- a/caom2utils/caom2utils/__init__.py +++ b/caom2utils/caom2utils/__init__.py @@ -7,11 +7,12 @@ from .data_util import * # noqa from .caom2blueprint import * # noqa from .legacy import * # noqa -from .wcs_util import * # noqa -from .wcsvalidator import * # noqa -from .caomvalidator import * # noqa -from .polygonvalidator import * # noqa +from .wcs_util import * # noqa +from .wcsvalidator import * # noqa +from .caomvalidator import * # noqa +from .polygonvalidator import * # noqa import logging + logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/caom2utils/caom2utils/blueprints.py b/caom2utils/caom2utils/blueprints.py index f4dd3a7d..952e551c 100644 --- a/caom2utils/caom2utils/blueprints.py +++ b/caom2utils/caom2utils/blueprints.py @@ -86,6 +86,7 @@ class classproperty: """ Class property used for CAOM2_ELEMENTS in ObsBleprint """ + def __init__(self, f): self.f = f @@ -129,6 +130,7 @@ class ObsBlueprint: print(ob) """ + _CAOM2_ELEMENTS = [ 'CompositeObservation.members', 'DerivedObservation.members', @@ -140,18 +142,14 @@ class ObsBlueprint: 'Observation.metaReadGroups', 'Observation.metaProducer', 'Observation.requirements.flag', - 'Observation.algorithm.name', - 'Observation.instrument.name', 'Observation.instrument.keywords', - 'Observation.proposal.id', 'Observation.proposal.pi', 'Observation.proposal.project', 'Observation.proposal.title', 'Observation.proposal.keywords', - 'Observation.target.name', 'Observation.target.type', 'Observation.target.standard', @@ -159,18 +157,15 @@ class ObsBlueprint: 'Observation.target.keywords', 'Observation.target.moving', 'Observation.target.targetID', - 'Observation.target_position.point.cval1', 'Observation.target_position.point.cval2', 'Observation.target_position.coordsys', 'Observation.target_position.equinox', - 'Observation.telescope.name', 'Observation.telescope.geoLocationX', 'Observation.telescope.geoLocationY', 'Observation.telescope.geoLocationZ', 'Observation.telescope.keywords', - 'Observation.environment.seeing', 'Observation.environment.humidity', 'Observation.environment.elevation', @@ -178,7 +173,6 @@ class ObsBlueprint: 'Observation.environment.wavelengthTau', 'Observation.environment.ambientTemp', 'Observation.environment.photometric', - 
'Plane.productID', 'Plane.metaRelease', 'Plane.dataRelease', @@ -188,7 +182,6 @@ class ObsBlueprint: 'Plane.metaReadGroups', 'Plane.dataReadGroups', 'Plane.metaProducer', - 'Plane.provenance.name', 'Plane.provenance.version', 'Plane.provenance.project', @@ -198,16 +191,13 @@ class ObsBlueprint: 'Plane.provenance.lastExecuted', 'Plane.provenance.keywords', 'Plane.provenance.inputs', - 'Plane.metrics.sourceNumberDensity', 'Plane.metrics.background', 'Plane.metrics.backgroundStddev', 'Plane.metrics.fluxDensityLimit', 'Plane.metrics.magLimit', 'Plane.metrics.sampleSNR', - 'Plane.observable.ucd', - 'Artifact.productType', 'Artifact.releaseType', 'Artifact.contentChecksum', @@ -217,11 +207,9 @@ class ObsBlueprint: 'Artifact.contentReadGroups', 'Artifact.uri', 'Artifact.metaProducer', - 'Part.name', 'Part.productType', 'Part.metaProducer', - 'Chunk', 'Chunk.naxis', 'Chunk.observableAxis', @@ -231,14 +219,12 @@ class ObsBlueprint: 'Chunk.timeAxis', 'Chunk.polarizationAxis', 'Chunk.metaProducer', - 'Chunk.observable.dependent.bin', 'Chunk.observable.dependent.axis.ctype', 'Chunk.observable.dependent.axis.cunit', 'Chunk.observable.independent.bin', 'Chunk.observable.independent.axis.ctype', 'Chunk.observable.independent.axis.cunit', - 'Chunk.position.coordsys', 'Chunk.position.equinox', 'Chunk.position.resolution', @@ -268,7 +254,6 @@ class ObsBlueprint: 'Chunk.position.axis.range.end.coord1.val', 'Chunk.position.axis.range.end.coord2.pix', 'Chunk.position.axis.range.end.coord2.val', - 'Chunk.energy.specsys', 'Chunk.energy.ssysobs', 'Chunk.energy.restfrq', @@ -295,7 +280,6 @@ class ObsBlueprint: 'Chunk.energy.axis.range.start.val', 'Chunk.energy.axis.range.end.pix', 'Chunk.energy.axis.range.end.val', - 'Chunk.polarization.axis.axis.ctype', 'Chunk.polarization.axis.axis.cunit', 'Chunk.polarization.axis.bounds.samples', @@ -309,7 +293,6 @@ class ObsBlueprint: 'Chunk.polarization.axis.range.start.val', 'Chunk.polarization.axis.range.end.pix', 'Chunk.polarization.axis.range.end.val', - 'Chunk.time.exposure', 'Chunk.time.resolution', 'Chunk.time.timesys', @@ -328,11 +311,9 @@ class ObsBlueprint: 'Chunk.time.axis.range.start.val', 'Chunk.time.axis.range.end.pix', 'Chunk.time.axis.range.end.val', - 'Chunk.observable.axis.axis.ctype', 'Chunk.observable.axis.axis.cunit', 'Chunk.observable.axis.function.refCoord.pix', - 'Chunk.custom.axis.axis.ctype', 'Chunk.custom.axis.axis.cunit', 'Chunk.custom.axis.bounds.samples', @@ -345,17 +326,24 @@ class ObsBlueprint: 'Chunk.custom.axis.range.start.pix', 'Chunk.custom.axis.range.start.val', 'Chunk.custom.axis.range.end.pix', - 'Chunk.custom.axis.range.end.val' - ] + 'Chunk.custom.axis.range.end.val', + ] # replace _CAOM2_ELEMENTS in __doc__ with the real elements - __doc__ = __doc__.replace('_CAOM2_ELEMENTS', '\n'.join(['\t\t{}'.format( - elem) for elem in _CAOM2_ELEMENTS])) - - def __init__(self, position_axes=None, energy_axis=None, - polarization_axis=None, time_axis=None, - obs_axis=None, custom_axis=None, module=None, - update=True, instantiated_class=None): + __doc__ = __doc__.replace('_CAOM2_ELEMENTS', '\n'.join(['\t\t{}'.format(elem) for elem in _CAOM2_ELEMENTS])) + + def __init__( + self, + position_axes=None, + energy_axis=None, + polarization_axis=None, + time_axis=None, + obs_axis=None, + custom_axis=None, + module=None, + update=True, + instantiated_class=None, + ): """ Ctor :param position_axes: tuple of form (int, int) indicating the indexes @@ -369,49 +357,44 @@ def __init__(self, position_axes=None, energy_axis=None, importlib.import_module if a value 
is provided. """ - if position_axes and isinstance(position_axes, tuple) and\ - (len(position_axes) != 2): - raise ValueError( - 'Invalid position axis: {}. Must be tuple with 2 elements'. - format(str(position_axes))) + if position_axes and isinstance(position_axes, tuple) and (len(position_axes) != 2): + raise ValueError('Invalid position axis: {}. Must be tuple with 2 elements'.format(str(position_axes))) self.logger = logging.getLogger(__name__) # this is the default blueprint self._plan = {} - tmp = {'Observation.metaRelease': - (['DATE', 'DATE-OBS', 'UTCOBS', 'UTCDATE', - 'UTC-DATE', 'MJDOBS', 'MJD_OBS'], None), - 'Observation.instrument.name': (['INSTRUME'], None), - 'Observation.type': (['OBSTYPE'], None), - 'Observation.environment.ambientTemp': (['TEMPERAT'], - None), - # set the default for SimpleObservation construction - 'Observation.algorithm.name': (['PROCNAME'], 'exposure'), - 'Observation.instrument.keywords': (['INSTMODE'], None), - 'Observation.proposal.id': (['RUNID'], None), - 'Observation.target.name': (['OBJECT'], None), - 'Observation.telescope.name': (['TELESCOP'], None), - 'Observation.telescope.geoLocationX': (['OBSGEO-X'], - None), - 'Observation.telescope.geoLocationY': (['OBSGEO-Y'], - None), - 'Observation.telescope.geoLocationZ': (['OBSGEO-Z'], - None), - 'Observation.observationID': (['OBSID'], None), - 'Plane.calibrationLevel': ([], CalibrationLevel.RAW_STANDARD), - 'Plane.dataProductType': ([], DataProductType.IMAGE), - 'Plane.metaRelease': (['RELEASE', 'REL_DATE'], None), - 'Plane.dataRelease': (['RELEASE', 'REL_DATE'], None), - 'Plane.productID': (['RUNID'], None), - 'Plane.provenance.name': (['XPRVNAME'], None), - 'Plane.provenance.project': (['ADC_ARCH'], None), - 'Plane.provenance.producer': (['ORIGIN'], None), - 'Plane.provenance.reference': (['XREFER'], None), - 'Plane.provenance.lastExecuted': (['DATE-FTS'], None), - 'Artifact.releaseType': ([], ReleaseType.DATA), - 'Chunk': 'include' - } + tmp = { + 'Observation.metaRelease': ( + ['DATE', 'DATE-OBS', 'UTCOBS', 'UTCDATE', 'UTC-DATE', 'MJDOBS', 'MJD_OBS'], + None, + ), + 'Observation.instrument.name': (['INSTRUME'], None), + 'Observation.type': (['OBSTYPE'], None), + 'Observation.environment.ambientTemp': (['TEMPERAT'], None), + # set the default for SimpleObservation construction + 'Observation.algorithm.name': (['PROCNAME'], 'exposure'), + 'Observation.instrument.keywords': (['INSTMODE'], None), + 'Observation.proposal.id': (['RUNID'], None), + 'Observation.target.name': (['OBJECT'], None), + 'Observation.telescope.name': (['TELESCOP'], None), + 'Observation.telescope.geoLocationX': (['OBSGEO-X'], None), + 'Observation.telescope.geoLocationY': (['OBSGEO-Y'], None), + 'Observation.telescope.geoLocationZ': (['OBSGEO-Z'], None), + 'Observation.observationID': (['OBSID'], None), + 'Plane.calibrationLevel': ([], CalibrationLevel.RAW_STANDARD), + 'Plane.dataProductType': ([], DataProductType.IMAGE), + 'Plane.metaRelease': (['RELEASE', 'REL_DATE'], None), + 'Plane.dataRelease': (['RELEASE', 'REL_DATE'], None), + 'Plane.productID': (['RUNID'], None), + 'Plane.provenance.name': (['XPRVNAME'], None), + 'Plane.provenance.project': (['ADC_ARCH'], None), + 'Plane.provenance.producer': (['ORIGIN'], None), + 'Plane.provenance.reference': (['XREFER'], None), + 'Plane.provenance.lastExecuted': (['DATE-FTS'], None), + 'Artifact.releaseType': ([], ReleaseType.DATA), + 'Chunk': 'include', + } # using the tmp to make sure that the keywords are valid for key in tmp: self.set(key, tmp[key]) @@ -420,9 +403,7 @@ def 
__init__(self, position_axes=None, energy_axis=None, # contains the standard WCS keywords in the FITS file expected by the # astropy.WCS package. - self._wcs_std = { - 'Chunk.naxis': 'ZNAXIS,NAXIS' - } + self._wcs_std = {'Chunk.naxis': 'ZNAXIS,NAXIS'} self._pos_axes_configed = False self._energy_axis_configed = False self._time_axis_configed = False @@ -465,7 +446,8 @@ def __init__(self, position_axes=None, energy_axis=None, 'obs': (0, False), 'polarization': (0, False), 'ra': (0, False), - 'time': (0, False)} + 'time': (0, False), + } def configure_custom_axis(self, axis, override=True): """ @@ -477,32 +459,23 @@ def configure_custom_axis(self, axis, override=True): :return: """ if self._custom_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured custom axis.') + self.logger.debug('Attempt to configure already-configured custom axis.') return if override: - self.set('Chunk.custom.axis.axis.ctype', - ([f'CTYPE{axis}'], None)) - self.set('Chunk.custom.axis.axis.cunit', - ([f'CUNIT{axis}'], None)) - self.set('Chunk.custom.axis.function.naxis', - ([f'NAXIS{axis}'], None)) - self.set('Chunk.custom.axis.function.delta', - ([f'CDELT{axis}'], None)) - self.set('Chunk.custom.axis.function.refCoord.pix', - ([f'CRPIX{axis}'], None)) - self.set('Chunk.custom.axis.function.refCoord.val', - ([f'CRVAL{axis}'], None)) + self.set('Chunk.custom.axis.axis.ctype', ([f'CTYPE{axis}'], None)) + self.set('Chunk.custom.axis.axis.cunit', ([f'CUNIT{axis}'], None)) + self.set('Chunk.custom.axis.function.naxis', ([f'NAXIS{axis}'], None)) + self.set('Chunk.custom.axis.function.delta', ([f'CDELT{axis}'], None)) + self.set('Chunk.custom.axis.function.refCoord.pix', ([f'CRPIX{axis}'], None)) + self.set('Chunk.custom.axis.function.refCoord.val', ([f'CRVAL{axis}'], None)) self._wcs_std['Chunk.custom.axis.axis.ctype'] = f'CTYPE{axis}' self._wcs_std['Chunk.custom.axis.axis.cunit'] = f'CUNIT{axis}' self._wcs_std['Chunk.custom.axis.function.naxis'] = f'NAXIS{axis}' self._wcs_std['Chunk.custom.axis.function.delta'] = f'CDELT{axis}' - self._wcs_std['Chunk.custom.axis.function.refCoord.pix'] = \ - f'CRPIX{axis}' - self._wcs_std['Chunk.custom.axis.function.refCoord.val'] = \ - f'CRVAL{axis}' + self._wcs_std['Chunk.custom.axis.function.refCoord.pix'] = f'CRPIX{axis}' + self._wcs_std['Chunk.custom.axis.function.refCoord.val'] = f'CRVAL{axis}' self._custom_axis_configed = True @@ -515,91 +488,52 @@ def configure_position_axes(self, axes, override=True): :return: """ if self._pos_axes_configed: - self.logger.debug( - 'Attempt to configure already-configured position axes.') + self.logger.debug('Attempt to configure already-configured position axes.') return if override: self.set('Chunk.position.coordsys', (['RADESYS'], None)) self.set('Chunk.position.equinox', (['EQUINOX', 'EPOCH'], None)) - self.set('Chunk.position.axis.axis1.ctype', - ([f'CTYPE{axes[0]}'], None)) - self.set('Chunk.position.axis.axis1.cunit', - ([f'CUNIT{axes[0]}'], None)) - self.set('Chunk.position.axis.axis2.ctype', - ([f'CTYPE{axes[1]}'], None)) - self.set('Chunk.position.axis.axis2.cunit', - ([f'CUNIT{axes[1]}'], None)) - self.set('Chunk.position.axis.error1.syser', - ([f'CSYER{axes[0]}'], None)) - self.set('Chunk.position.axis.error1.rnder', - ([f'CRDER{axes[0]}'], None)) - self.set('Chunk.position.axis.error2.syser', - ([f'CSYER{axes[1]}'], None)) - self.set('Chunk.position.axis.error2.rnder', - ([f'CRDER{axes[1]}'], None)) - self.set('Chunk.position.axis.function.cd11', - ([f'CD{axes[0]}_{axes[0]}'], None)) - 
self.set('Chunk.position.axis.function.cd12', - ([f'CD{axes[0]}_{axes[1]}'], None)) - self.set('Chunk.position.axis.function.cd21', - ([f'CD{axes[1]}_{axes[0]}'], None)) - self.set('Chunk.position.axis.function.cd22', - ([f'CD{axes[1]}_{axes[1]}'], None)) - self.set('Chunk.position.axis.function.dimension.naxis1', - ([f'ZNAXIS{axes[0]}', - f'NAXIS{axes[0]}'], None)) - self.set('Chunk.position.axis.function.dimension.naxis2', - ([f'ZNAXIS{axes[1]}', - f'NAXIS{axes[1]}'], None)) - self.set('Chunk.position.axis.function.refCoord.coord1.pix', - ([f'CRPIX{axes[0]}'], None)) - self.set('Chunk.position.axis.function.refCoord.coord1.val', - ([f'CRVAL{axes[0]}'], None)) - self.set('Chunk.position.axis.function.refCoord.coord2.pix', - ([f'CRPIX{axes[1]}'], None)) - self.set('Chunk.position.axis.function.refCoord.coord2.val', - ([f'CRVAL{axes[1]}'], None)) + self.set('Chunk.position.axis.axis1.ctype', ([f'CTYPE{axes[0]}'], None)) + self.set('Chunk.position.axis.axis1.cunit', ([f'CUNIT{axes[0]}'], None)) + self.set('Chunk.position.axis.axis2.ctype', ([f'CTYPE{axes[1]}'], None)) + self.set('Chunk.position.axis.axis2.cunit', ([f'CUNIT{axes[1]}'], None)) + self.set('Chunk.position.axis.error1.syser', ([f'CSYER{axes[0]}'], None)) + self.set('Chunk.position.axis.error1.rnder', ([f'CRDER{axes[0]}'], None)) + self.set('Chunk.position.axis.error2.syser', ([f'CSYER{axes[1]}'], None)) + self.set('Chunk.position.axis.error2.rnder', ([f'CRDER{axes[1]}'], None)) + self.set('Chunk.position.axis.function.cd11', ([f'CD{axes[0]}_{axes[0]}'], None)) + self.set('Chunk.position.axis.function.cd12', ([f'CD{axes[0]}_{axes[1]}'], None)) + self.set('Chunk.position.axis.function.cd21', ([f'CD{axes[1]}_{axes[0]}'], None)) + self.set('Chunk.position.axis.function.cd22', ([f'CD{axes[1]}_{axes[1]}'], None)) + self.set('Chunk.position.axis.function.dimension.naxis1', ([f'ZNAXIS{axes[0]}', f'NAXIS{axes[0]}'], None)) + self.set('Chunk.position.axis.function.dimension.naxis2', ([f'ZNAXIS{axes[1]}', f'NAXIS{axes[1]}'], None)) + self.set('Chunk.position.axis.function.refCoord.coord1.pix', ([f'CRPIX{axes[0]}'], None)) + self.set('Chunk.position.axis.function.refCoord.coord1.val', ([f'CRVAL{axes[0]}'], None)) + self.set('Chunk.position.axis.function.refCoord.coord2.pix', ([f'CRPIX{axes[1]}'], None)) + self.set('Chunk.position.axis.function.refCoord.coord2.val', ([f'CRVAL{axes[1]}'], None)) self._wcs_std['Chunk.position.coordsys'] = 'RADESYS' self._wcs_std['Chunk.position.equinox'] = 'EQUINOX' - self._wcs_std['Chunk.position.axis.axis1.ctype'] = \ - f'CTYPE{axes[0]}' - self._wcs_std['Chunk.position.axis.axis1.cunit'] = \ - f'CUNIT{axes[0]}' - self._wcs_std['Chunk.position.axis.axis2.ctype'] = \ - f'CTYPE{axes[1]}' - self._wcs_std['Chunk.position.axis.axis2.cunit'] = \ - f'CUNIT{axes[1]}' - self._wcs_std['Chunk.position.axis.error1.syser'] = \ - f'CSYER{axes[0]}' - self._wcs_std['Chunk.position.axis.error1.rnder'] = \ - f'CRDER{axes[0]}' - self._wcs_std['Chunk.position.axis.error2.syser'] = \ - f'CSYER{axes[1]}' - self._wcs_std['Chunk.position.axis.error2.rnder'] = \ - f'CRDER{axes[1]}' - self._wcs_std['Chunk.position.axis.function.cd11'] = \ - f'CD{axes[0]}_{axes[0]}' - self._wcs_std['Chunk.position.axis.function.cd12'] = \ - f'CD{axes[0]}_{axes[1]}' - self._wcs_std['Chunk.position.axis.function.cd21'] = \ - f'CD{axes[1]}_{axes[0]}' - self._wcs_std['Chunk.position.axis.function.cd22'] = \ - f'CD{axes[1]}_{axes[1]}' - self._wcs_std['Chunk.position.axis.function.dimension.naxis1'] = \ - f'NAXIS{axes[0]}' - 
self._wcs_std['Chunk.position.axis.function.dimension.naxis2'] = \ - f'NAXIS{axes[1]}' - self._wcs_std['Chunk.position.axis.function.refCoord.coord1.pix'] \ - = f'CRPIX{axes[0]}' - self._wcs_std['Chunk.position.axis.function.refCoord.coord1.val'] \ - = f'CRVAL{axes[0]}' - self._wcs_std['Chunk.position.axis.function.refCoord.coord2.pix'] \ - = f'CRPIX{axes[1]}' - self._wcs_std['Chunk.position.axis.function.refCoord.coord2.val'] \ - = f'CRVAL{axes[1]}' + self._wcs_std['Chunk.position.axis.axis1.ctype'] = f'CTYPE{axes[0]}' + self._wcs_std['Chunk.position.axis.axis1.cunit'] = f'CUNIT{axes[0]}' + self._wcs_std['Chunk.position.axis.axis2.ctype'] = f'CTYPE{axes[1]}' + self._wcs_std['Chunk.position.axis.axis2.cunit'] = f'CUNIT{axes[1]}' + self._wcs_std['Chunk.position.axis.error1.syser'] = f'CSYER{axes[0]}' + self._wcs_std['Chunk.position.axis.error1.rnder'] = f'CRDER{axes[0]}' + self._wcs_std['Chunk.position.axis.error2.syser'] = f'CSYER{axes[1]}' + self._wcs_std['Chunk.position.axis.error2.rnder'] = f'CRDER{axes[1]}' + self._wcs_std['Chunk.position.axis.function.cd11'] = f'CD{axes[0]}_{axes[0]}' + self._wcs_std['Chunk.position.axis.function.cd12'] = f'CD{axes[0]}_{axes[1]}' + self._wcs_std['Chunk.position.axis.function.cd21'] = f'CD{axes[1]}_{axes[0]}' + self._wcs_std['Chunk.position.axis.function.cd22'] = f'CD{axes[1]}_{axes[1]}' + self._wcs_std['Chunk.position.axis.function.dimension.naxis1'] = f'NAXIS{axes[0]}' + self._wcs_std['Chunk.position.axis.function.dimension.naxis2'] = f'NAXIS{axes[1]}' + self._wcs_std['Chunk.position.axis.function.refCoord.coord1.pix'] = f'CRPIX{axes[0]}' + self._wcs_std['Chunk.position.axis.function.refCoord.coord1.val'] = f'CRVAL{axes[0]}' + self._wcs_std['Chunk.position.axis.function.refCoord.coord2.pix'] = f'CRPIX{axes[1]}' + self._wcs_std['Chunk.position.axis.function.refCoord.coord2.val'] = f'CRVAL{axes[1]}' self._pos_axes_configed = True @@ -613,8 +547,7 @@ def configure_energy_axis(self, axis, override=True): :return: """ if self._energy_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured energy axis.') + self.logger.debug('Attempt to configure already-configured energy axis.') return if override: @@ -630,22 +563,14 @@ def configure_energy_axis(self, axis, override=True): self.set('Chunk.energy.bandpassName', ([], None)) self.set('Chunk.energy.resolvingPower', ([], None)) - self.set('Chunk.energy.axis.axis.ctype', - ([f'CTYPE{axis}'], None)) - self.set('Chunk.energy.axis.axis.cunit', - ([f'CUNIT{axis}'], None)) - self.set('Chunk.energy.axis.error.syser', - ([f'CSYER{axis}'], None)) - self.set('Chunk.energy.axis.error.rnder', - ([f'CRDER{axis}'], None)) - self.set('Chunk.energy.axis.function.naxis', - ([f'NAXIS{axis}'], None)) - self.set('Chunk.energy.axis.function.delta', - ([f'CDELT{axis}'], None)) - self.set('Chunk.energy.axis.function.refCoord.pix', - ([f'CRPIX{axis}'], None)) - self.set('Chunk.energy.axis.function.refCoord.val', - ([f'CRVAL{axis}'], None)) + self.set('Chunk.energy.axis.axis.ctype', ([f'CTYPE{axis}'], None)) + self.set('Chunk.energy.axis.axis.cunit', ([f'CUNIT{axis}'], None)) + self.set('Chunk.energy.axis.error.syser', ([f'CSYER{axis}'], None)) + self.set('Chunk.energy.axis.error.rnder', ([f'CRDER{axis}'], None)) + self.set('Chunk.energy.axis.function.naxis', ([f'NAXIS{axis}'], None)) + self.set('Chunk.energy.axis.function.delta', ([f'CDELT{axis}'], None)) + self.set('Chunk.energy.axis.function.refCoord.pix', ([f'CRPIX{axis}'], None)) + self.set('Chunk.energy.axis.function.refCoord.val', ([f'CRVAL{axis}'], None)) 
self._wcs_std['Chunk.energy.specsys'] = 'SPECSYS' self._wcs_std['Chunk.energy.ssysobs'] = 'SSYSOBS' @@ -656,22 +581,14 @@ def configure_energy_axis(self, axis, override=True): self._wcs_std['Chunk.energy.ssyssrc'] = 'SSYSSRC' self._wcs_std['Chunk.energy.velang'] = 'VELANG' - self._wcs_std['Chunk.energy.axis.axis.ctype'] = \ - f'CTYPE{axis}' - self._wcs_std['Chunk.energy.axis.axis.cunit'] = \ - f'CUNIT{axis}' - self._wcs_std['Chunk.energy.axis.error.syser'] = \ - f'CSYER{axis}' - self._wcs_std['Chunk.energy.axis.error.rnder'] = \ - f'CRDER{axis}' - self._wcs_std['Chunk.energy.axis.function.naxis'] = \ - f'NAXIS{axis}' - self._wcs_std['Chunk.energy.axis.function.delta'] = \ - f'CDELT{axis}' - self._wcs_std['Chunk.energy.axis.function.refCoord.pix'] = \ - f'CRPIX{axis}' - self._wcs_std['Chunk.energy.axis.function.refCoord.val'] = \ - f'CRVAL{axis}' + self._wcs_std['Chunk.energy.axis.axis.ctype'] = f'CTYPE{axis}' + self._wcs_std['Chunk.energy.axis.axis.cunit'] = f'CUNIT{axis}' + self._wcs_std['Chunk.energy.axis.error.syser'] = f'CSYER{axis}' + self._wcs_std['Chunk.energy.axis.error.rnder'] = f'CRDER{axis}' + self._wcs_std['Chunk.energy.axis.function.naxis'] = f'NAXIS{axis}' + self._wcs_std['Chunk.energy.axis.function.delta'] = f'CDELT{axis}' + self._wcs_std['Chunk.energy.axis.function.refCoord.pix'] = f'CRPIX{axis}' + self._wcs_std['Chunk.energy.axis.function.refCoord.val'] = f'CRVAL{axis}' self._energy_axis_configed = True @@ -685,36 +602,23 @@ def configure_polarization_axis(self, axis, override=True): :return: """ if self._polarization_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured polarization axis.') + self.logger.debug('Attempt to configure already-configured polarization axis.') return if override: - self.set('Chunk.polarization.axis.axis.ctype', - ([f'CTYPE{axis}'], None)) - self.set('Chunk.polarization.axis.axis.cunit', - ([f'CUNIT{axis}'], None)) - self.set('Chunk.polarization.axis.function.naxis', - ([f'NAXIS{axis}'], None)) - self.set('Chunk.polarization.axis.function.delta', - ([f'CDELT{axis}'], None)) - self.set('Chunk.polarization.axis.function.refCoord.pix', - ([f'CRPIX{axis}'], None)) - self.set('Chunk.polarization.axis.function.refCoord.val', - ([f'CRVAL{axis}'], None)) - - self._wcs_std['Chunk.polarization.axis.axis.ctype'] = \ - f'CTYPE{axis}' - self._wcs_std['Chunk.polarization.axis.axis.cunit'] = \ - f'CUNIT{axis}' - self._wcs_std['Chunk.polarization.axis.function.naxis'] = \ - f'NAXIS{axis}' - self._wcs_std['Chunk.polarization.axis.function.delta'] = \ - f'CDELT{axis}' - self._wcs_std['Chunk.polarization.axis.function.refCoord.pix'] = \ - f'CRPIX{axis}' - self._wcs_std['Chunk.polarization.axis.function.refCoord.val'] = \ - f'CRVAL{axis}' + self.set('Chunk.polarization.axis.axis.ctype', ([f'CTYPE{axis}'], None)) + self.set('Chunk.polarization.axis.axis.cunit', ([f'CUNIT{axis}'], None)) + self.set('Chunk.polarization.axis.function.naxis', ([f'NAXIS{axis}'], None)) + self.set('Chunk.polarization.axis.function.delta', ([f'CDELT{axis}'], None)) + self.set('Chunk.polarization.axis.function.refCoord.pix', ([f'CRPIX{axis}'], None)) + self.set('Chunk.polarization.axis.function.refCoord.val', ([f'CRVAL{axis}'], None)) + + self._wcs_std['Chunk.polarization.axis.axis.ctype'] = f'CTYPE{axis}' + self._wcs_std['Chunk.polarization.axis.axis.cunit'] = f'CUNIT{axis}' + self._wcs_std['Chunk.polarization.axis.function.naxis'] = f'NAXIS{axis}' + self._wcs_std['Chunk.polarization.axis.function.delta'] = f'CDELT{axis}' + 
self._wcs_std['Chunk.polarization.axis.function.refCoord.pix'] = f'CRPIX{axis}' + self._wcs_std['Chunk.polarization.axis.function.refCoord.val'] = f'CRVAL{axis}' self._polarization_axis_configed = True @@ -730,24 +634,17 @@ def configure_observable_axis(self, axis, override=True): :return: """ if self._obs_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured observable axis.') + self.logger.debug('Attempt to configure already-configured observable axis.') return if override: - self.set('Chunk.observable.axis.axis.ctype', - ([f'CTYPE{axis}'], None)) - self.set('Chunk.observable.axis.axis.cunit', - ([f'CUNIT{axis}'], None)) - self.set('Chunk.observable.axis.function.refCoord.pix', - ([f'CRPIX{axis}'], None)) - - self._wcs_std['Chunk.observable.axis.axis.ctype'] = \ - f'CTYPE{axis}' - self._wcs_std['Chunk.observable.axis.axis.cunit'] = \ - f'CUNIT{axis}' - self._wcs_std['Chunk.observable.axis.function.refCoord.pix'] = \ - f'CRPIX{axis}' + self.set('Chunk.observable.axis.axis.ctype', ([f'CTYPE{axis}'], None)) + self.set('Chunk.observable.axis.axis.cunit', ([f'CUNIT{axis}'], None)) + self.set('Chunk.observable.axis.function.refCoord.pix', ([f'CRPIX{axis}'], None)) + + self._wcs_std['Chunk.observable.axis.axis.ctype'] = f'CTYPE{axis}' + self._wcs_std['Chunk.observable.axis.axis.cunit'] = f'CUNIT{axis}' + self._wcs_std['Chunk.observable.axis.function.refCoord.pix'] = f'CRPIX{axis}' self._obs_axis_configed = True @@ -761,8 +658,7 @@ def configure_time_axis(self, axis, override=True): :return: """ if self._time_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured time axis.') + self.logger.debug('Attempt to configure already-configured time axis.') return if override: @@ -771,22 +667,14 @@ def configure_time_axis(self, axis, override=True): self.set('Chunk.time.trefpos', (['TREFPOS'], None)) self.set('Chunk.time.mjdref', (['MJDREF'], None)) self.set('Chunk.time.resolution', (['TIMEDEL'], None)) - self.set('Chunk.time.axis.axis.ctype', - ([f'CTYPE{axis}'], None)) - self.set('Chunk.time.axis.axis.cunit', - ([f'CUNIT{axis}'], None)) - self.set('Chunk.time.axis.error.syser', - ([f'CSYER{axis}'], None)) - self.set('Chunk.time.axis.error.rnder', - ([f'CRDER{axis}'], None)) - self.set('Chunk.time.axis.function.naxis', - ([f'NAXIS{axis}'], None)) - self.set('Chunk.time.axis.function.delta', - ([f'CDELT{axis}'], None)) - self.set('Chunk.time.axis.function.refCoord.pix', - ([f'CRPIX{axis}'], None)) - self.set('Chunk.time.axis.function.refCoord.val', - ([f'CRVAL{axis}'], None)) + self.set('Chunk.time.axis.axis.ctype', ([f'CTYPE{axis}'], None)) + self.set('Chunk.time.axis.axis.cunit', ([f'CUNIT{axis}'], None)) + self.set('Chunk.time.axis.error.syser', ([f'CSYER{axis}'], None)) + self.set('Chunk.time.axis.error.rnder', ([f'CRDER{axis}'], None)) + self.set('Chunk.time.axis.function.naxis', ([f'NAXIS{axis}'], None)) + self.set('Chunk.time.axis.function.delta', ([f'CDELT{axis}'], None)) + self.set('Chunk.time.axis.function.refCoord.pix', ([f'CRPIX{axis}'], None)) + self.set('Chunk.time.axis.function.refCoord.val', ([f'CRVAL{axis}'], None)) self._wcs_std['Chunk.time.exposure'] = 'EXPTIME' self._wcs_std['Chunk.time.resolution'] = 'TIMEDEL' @@ -794,22 +682,14 @@ def configure_time_axis(self, axis, override=True): self._wcs_std['Chunk.time.trefpos'] = 'TREFPOS' self._wcs_std['Chunk.time.mjdref'] = 'MJDREF' - self._wcs_std['Chunk.time.axis.axis.ctype'] = \ - f'CTYPE{axis}' - self._wcs_std['Chunk.time.axis.axis.cunit'] = \ - f'CUNIT{axis}' - 
self._wcs_std['Chunk.time.axis.error.syser'] = \ - f'CSYER{axis}' - self._wcs_std['Chunk.time.axis.error.rnder'] = \ - f'CRDER{axis}' - self._wcs_std['Chunk.time.axis.function.naxis'] = \ - f'NAXIS{axis}' - self._wcs_std['Chunk.time.axis.function.delta'] = \ - f'CDELT{axis}' - self._wcs_std['Chunk.time.axis.function.refCoord.pix'] = \ - f'CRPIX{axis}' - self._wcs_std['Chunk.time.axis.function.refCoord.val'] = \ - f'CRVAL{axis}' + self._wcs_std['Chunk.time.axis.axis.ctype'] = f'CTYPE{axis}' + self._wcs_std['Chunk.time.axis.axis.cunit'] = f'CUNIT{axis}' + self._wcs_std['Chunk.time.axis.error.syser'] = f'CSYER{axis}' + self._wcs_std['Chunk.time.axis.error.rnder'] = f'CRDER{axis}' + self._wcs_std['Chunk.time.axis.function.naxis'] = f'NAXIS{axis}' + self._wcs_std['Chunk.time.axis.function.delta'] = f'CDELT{axis}' + self._wcs_std['Chunk.time.axis.function.refCoord.pix'] = f'CRPIX{axis}' + self._wcs_std['Chunk.time.axis.function.refCoord.val'] = f'CRVAL{axis}' self._time_axis_configed = True @@ -835,58 +715,44 @@ def _guess_axis_info(self): def _guess_axis_info_from_plan(self): for ii in self._plan: - if ii.startswith('Chunk.position') and ii.endswith('axis1.ctype') \ - and not self._axis_info['ra'][1]: - configured_index = self._get_configured_index( - self._axis_info, 'ra') + if ii.startswith('Chunk.position') and ii.endswith('axis1.ctype') and not self._axis_info['ra'][1]: + configured_index = self._get_configured_index(self._axis_info, 'ra') self._axis_info['ra'] = (configured_index, True) - elif ii.startswith('Chunk.position') and \ - ii.endswith('axis2.ctype') and not \ - self._axis_info['dec'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'dec') + elif ii.startswith('Chunk.position') and ii.endswith('axis2.ctype') and not self._axis_info['dec'][1]: + configured_index = self._get_configured_index(self._axis_info, 'dec') self._axis_info['dec'] = (configured_index, True) - elif ii.startswith('Chunk.energy') and not \ - self._axis_info['energy'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'energy') + elif ii.startswith('Chunk.energy') and not self._axis_info['energy'][1]: + configured_index = self._get_configured_index(self._axis_info, 'energy') self._axis_info['energy'] = (configured_index, True) - elif ii.startswith('Chunk.time') and not \ - self._axis_info['time'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'time') + elif ii.startswith('Chunk.time') and not self._axis_info['time'][1]: + configured_index = self._get_configured_index(self._axis_info, 'time') self._axis_info['time'] = (configured_index, True) - elif ii.startswith('Chunk.polarization') \ - and not self._axis_info['polarization'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'polarization') + elif ii.startswith('Chunk.polarization') and not self._axis_info['polarization'][1]: + configured_index = self._get_configured_index(self._axis_info, 'polarization') self._axis_info['polarization'] = (configured_index, True) - elif ii.startswith('Chunk.observable') and not \ - self._axis_info['obs'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'obs') + elif ii.startswith('Chunk.observable') and not self._axis_info['obs'][1]: + configured_index = self._get_configured_index(self._axis_info, 'obs') self._axis_info['obs'] = (configured_index, True) - elif ii.startswith('Chunk.custom') and not \ - self._axis_info['custom'][1]: - configured_index = self._get_configured_index(self._axis_info, - 'custom') + elif 
ii.startswith('Chunk.custom') and not self._axis_info['custom'][1]: + configured_index = self._get_configured_index(self._axis_info, 'custom') self._axis_info['custom'] = (configured_index, True) if self._axis_info['ra'][1] and self._axis_info['dec'][1]: - self.configure_position_axes( - (self._axis_info['ra'][0], self._axis_info['dec'][0]), False) + self.configure_position_axes((self._axis_info['ra'][0], self._axis_info['dec'][0]), False) elif self._axis_info['ra'][1] or self._axis_info['dec'][1]: - raise ValueError('Only one positional axis found ' - '(ra/dec): {}/{}'. - format(self._axis_info['ra'][0], - self._axis_info['dec'][0])) + raise ValueError( + 'Only one positional axis found ' + '(ra/dec): {}/{}'.format(self._axis_info['ra'][0], self._axis_info['dec'][0]) + ) else: # assume that positional axis are 1 and 2 by default - if (self._axis_info['time'][0] in [1, 2] or - self._axis_info['energy'][0] in [1, 2] or - self._axis_info['polarization'][0] in [1, 2] or - self._axis_info['obs'][0] in [1, 2] or - self._axis_info['custom'][0] in [1, 2]): + if ( + self._axis_info['time'][0] in [1, 2] + or self._axis_info['energy'][0] in [1, 2] + or self._axis_info['polarization'][0] in [1, 2] + or self._axis_info['obs'][0] in [1, 2] + or self._axis_info['custom'][0] in [1, 2] + ): raise ValueError('Cannot determine the positional axis') else: self.configure_position_axes((1, 2), False) @@ -896,8 +762,7 @@ def _guess_axis_info_from_plan(self): if self._axis_info['energy'][1]: self.configure_energy_axis(self._axis_info['energy'][0], False) if self._axis_info['polarization'][1]: - self.configure_polarization_axis( - self._axis_info['polarization'][0], False) + self.configure_polarization_axis(self._axis_info['polarization'][0], False) if self._axis_info['obs'][1]: self.configure_observable_axis(self._axis_info['obs'][0], False) if self._axis_info['custom'][1]: @@ -919,32 +784,23 @@ def _guess_axis_info_from_ctypes(self, lookup, counter): self._axis_info['polarization'] = (counter, True) elif lookup.startswith('Chunk.time'): self._axis_info['time'] = (counter, True) - elif lookup.startswith('Chunk.position') and lookup.endswith( - 'axis1.ctype'): + elif lookup.startswith('Chunk.position') and lookup.endswith('axis1.ctype'): self._axis_info['ra'] = (counter, True) - elif lookup.startswith('Chunk.position') and lookup.endswith( - 'axis2.ctype'): + elif lookup.startswith('Chunk.position') and lookup.endswith('axis2.ctype'): self._axis_info['dec'] = (counter, True) elif lookup.startswith('Chunk.observable'): self._axis_info['obs'] = (counter, True) elif lookup.startswith('Chunk.custom'): self._axis_info['custom'] = (counter, True) else: - raise ValueError( - f'Unrecognized axis type: {lookup}') + raise ValueError(f'Unrecognized axis type: {lookup}') def _get_configured_index(self, axis_info, lookup): """Find the next available index value among those that are not set. :param axis_info: local data structure to pass around what is configured, and what is it's value.""" - DEFAULT_INDICES = {'ra': 1, - 'dec': 2, - 'energy': 3, - 'time': 4, - 'polarization': 5, - 'obs': 6, - 'custom': 7} + DEFAULT_INDICES = {'ra': 1, 'dec': 2, 'energy': 3, 'time': 4, 'polarization': 5, 'obs': 6, 'custom': 7} # the logic - if the default index is already used, assign the lowest # index that is unused, otherwise use the default index @@ -995,11 +851,9 @@ def load_from_file(self, file_name): line = line.split('#')[0] key, value = line.split('=', 1) if 'default' in value: - temp = value.replace('default', ''). 
\ - replace('=', '').strip('\n').strip() + temp = value.replace('default', '').replace('=', '').strip('\n').strip() default = temp.rsplit(',')[1] - temp_list = temp.rsplit(',')[0].replace('[', ''). \ - replace(']', '').replace('\'', '').split(',') + temp_list = temp.rsplit(',')[0].replace('[', '').replace(']', '').replace('\'', '').split(',') if 'None' in default: default = None else: @@ -1007,8 +861,7 @@ def load_from_file(self, file_name): cleaned_up_value = (temp_list, default) else: if '[' in value: - temp_list = value.replace('[', ''). \ - replace(']', '').replace('\'', '').split(',') + temp_list = value.replace('[', '').replace(']', '').replace('\'', '').split(',') temp_list_2 = [] for ii in temp_list: temp_list_2.append(ii.strip().strip('\n')) @@ -1037,9 +890,7 @@ def check_caom2_element(cls, caom2_element): :raises KeyError """ if caom2_element not in cls._CAOM2_ELEMENTS: - raise KeyError( - '{} not a valid CAOM2 element name (mispelling?).'. - format(caom2_element)) + raise KeyError('{} not a valid CAOM2 element name (mispelling?).'.format(caom2_element)) @staticmethod def check_chunk(caom2_element): @@ -1049,14 +900,12 @@ def check_chunk(caom2_element): :raises ValueError """ if not caom2_element.startswith('Chunk'): - raise ValueError( - "Extension number refers to Chunk elements only") + raise ValueError("Extension number refers to Chunk elements only") @staticmethod def check_extension(extension): if extension is not None and extension < 0: - raise ValueError( - f'Extension count failure. {extension} should be >= 0') + raise ValueError(f'Extension count failure. {extension} should be >= 0') def __str__(self): plan = self._serialize(self._plan) @@ -1064,18 +913,20 @@ def __str__(self): extensions = '' if self._extensions: for key in sorted(self._extensions): - extensions = extensions + f'\nextension {key}:\n' +\ - self._serialize(self._extensions[key]) + extensions = extensions + f'\nextension {key}:\n' + self._serialize(self._extensions[key]) return plan + extensions def _serialize(self, src): return '\n'.join( - ['{} = {}'.format(key, '{}, default = {}'.format(src[key][0], - src[key][1]) - if isinstance(src[key], tuple) - else src[key]) - for key in ObsBlueprint._CAOM2_ELEMENTS - if key in src]) + [ + '{} = {}'.format( + key, + '{}, default = {}'.format(src[key][0], src[key][1]) if isinstance(src[key], tuple) else src[key], + ) + for key in ObsBlueprint._CAOM2_ELEMENTS + if key in src + ] + ) def set(self, caom2_element, value, extension=0): """ @@ -1112,36 +963,29 @@ def add_attribute(self, caom2_element, attribute, extension=0): if extension: ObsBlueprint.check_chunk(caom2_element) if extension not in self._extensions: - raise AttributeError( - f'No extension {extension} in the blueprint') + raise AttributeError(f'No extension {extension} in the blueprint') else: if caom2_element in self._extensions[extension]: - if (isinstance(self._extensions[extension][caom2_element], - tuple)): - if (attribute not in - self._extensions[extension][caom2_element][0]): - self._extensions[extension][caom2_element][0].\ - insert(0, attribute) + if isinstance(self._extensions[extension][caom2_element], tuple): + if attribute not in self._extensions[extension][caom2_element][0]: + self._extensions[extension][caom2_element][0].insert(0, attribute) else: raise AttributeError( - (f'No attributes in extension {extension} ' - f'associated with keyword {caom2_element}')) + (f'No attributes in extension {extension} ' f'associated with keyword {caom2_element}') + ) else: - 
self._extensions[extension][caom2_element] = \ - ([attribute], None) + self._extensions[extension][caom2_element] = ([attribute], None) else: if caom2_element in self._plan: if isinstance(self._plan[caom2_element], tuple): if attribute not in self._plan[caom2_element][0]: self._plan[caom2_element][0].insert(0, attribute) else: - raise AttributeError(f'No attributes associated with ' - f'keyword {caom2_element}') + raise AttributeError(f'No attributes associated with ' f'keyword {caom2_element}') else: self._plan[caom2_element] = ([attribute], None) - def add_table_attribute(self, caom2_element, ttype_attribute, extension=0, - index=0): + def add_table_attribute(self, caom2_element, ttype_attribute, extension=0, index=0): """ Adds a FITS BINTABLE TTYPE* lookup, to a list of other FITS attributes associated with an caom2 element. This does not co-exist with @@ -1163,36 +1007,29 @@ def add_table_attribute(self, caom2_element, ttype_attribute, extension=0, if extension: if extension in self._extensions: if caom2_element in self._extensions[extension]: - if (ObsBlueprint.is_table( - self._extensions[extension][caom2_element])): - if (ttype_attribute not in - self._extensions[extension][caom2_element][1]): - self._extensions[extension][caom2_element][1]. \ - insert(0, ttype_attribute) + if ObsBlueprint.is_table(self._extensions[extension][caom2_element]): + if ttype_attribute not in self._extensions[extension][caom2_element][1]: + self._extensions[extension][caom2_element][1].insert(0, ttype_attribute) else: raise AttributeError( - ('No TTYPE attributes in extension {} associated ' - 'with keyword {}').format(extension, - caom2_element)) + ('No TTYPE attributes in extension {} associated ' 'with keyword {}').format( + extension, caom2_element + ) + ) else: - self._extensions[extension][caom2_element] = \ - ('BINTABLE', [ttype_attribute], index) + self._extensions[extension][caom2_element] = ('BINTABLE', [ttype_attribute], index) else: self._extensions[extension] = {} - self._extensions[extension][caom2_element] = \ - ('BINTABLE', [ttype_attribute], index) + self._extensions[extension][caom2_element] = ('BINTABLE', [ttype_attribute], index) else: if caom2_element in self._plan: if ObsBlueprint.is_table(self._plan[caom2_element]): if ttype_attribute not in self._plan[caom2_element][1]: self._plan[caom2_element][1].insert(0, ttype_attribute) else: - raise AttributeError('No TTYPE attributes associated ' - 'with keyword {}'.format( - caom2_element)) + raise AttributeError('No TTYPE attributes associated ' 'with keyword {}'.format(caom2_element)) else: - self._plan[caom2_element] = ( - 'BINTABLE', [ttype_attribute], None) + self._plan[caom2_element] = ('BINTABLE', [ttype_attribute], None) def set_default(self, caom2_element, default, extension=0): """ @@ -1215,18 +1052,16 @@ def set_default(self, caom2_element, default, extension=0): ObsBlueprint.check_chunk(caom2_element) if extension not in self._extensions: self._extensions[extension] = {} - if caom2_element in self._extensions[extension] and \ - isinstance(self._extensions[extension][caom2_element], tuple): - self._extensions[extension][caom2_element] = \ - (self._extensions[extension][caom2_element][0], default) + if caom2_element in self._extensions[extension] and isinstance( + self._extensions[extension][caom2_element], tuple + ): + self._extensions[extension][caom2_element] = (self._extensions[extension][caom2_element][0], default) else: # default is the only value self._extensions[extension][caom2_element] = default else: - if (caom2_element 
in self._plan) and \ - isinstance(self._plan[caom2_element], tuple): - self._plan[caom2_element] = (self._plan[caom2_element][0], - default) + if (caom2_element in self._plan) and isinstance(self._plan[caom2_element], tuple): + self._plan[caom2_element] = (self._plan[caom2_element][0], default) else: # override the value self._plan[caom2_element] = default @@ -1244,8 +1079,7 @@ def delete(self, caom2_element, extension=0): if extension: ObsBlueprint.check_chunk(caom2_element) if extension not in self._extensions: - raise ValueError('Extension {} not configured in blueprint'. - format(extension)) + raise ValueError('Extension {} not configured in blueprint'.format(extension)) if caom2_element in self._extensions[extension]: del self._extensions[extension][caom2_element] if len(self._extensions[extension]) == 0: @@ -1269,8 +1103,7 @@ def clear(self, caom2_element, extension=0): if extension: ObsBlueprint.check_chunk(caom2_element) if extension not in self._extensions: - raise ValueError('Extension {} not configured in blueprint'. - format(extension)) + raise ValueError('Extension {} not configured in blueprint'.format(extension)) if caom2_element in self._extensions[extension]: self._extensions[extension][caom2_element] = ([], None) else: @@ -1289,8 +1122,7 @@ def _get(self, caom2_element, extension=0): ObsBlueprint.check_caom2_element(caom2_element) ObsBlueprint.check_extension(extension) if extension: - if (extension in self._extensions) and \ - (caom2_element in self._extensions[extension]): + if (extension in self._extensions) and (caom2_element in self._extensions[extension]): return self._extensions[extension][caom2_element] # look in the minimal plan @@ -1310,8 +1142,7 @@ def has_chunk(self, extension): if 'Chunk' in self._extensions[extension]: value = self._extensions[extension]['Chunk'] elif 'Chunk' in self._plan: - if ((extension is not None and extension == 0) or ( - extension is None)): + if (extension is not None and extension == 0) or (extension is None): value = self._plan['Chunk'] return not value == '{ignore}' @@ -1331,9 +1162,14 @@ def is_function(value): :return: True if the value is the name of a function to be executed, False, otherwise """ - return (not ObsBlueprint.needs_lookup(value) and isinstance(value, str) - and isinstance(value, str) and '(' in value and ')' in value - and '/' not in value) + return ( + not ObsBlueprint.needs_lookup(value) + and isinstance(value, str) + and isinstance(value, str) + and '(' in value + and ')' in value + and '/' not in value + ) @staticmethod def has_default_value(value): @@ -1344,8 +1180,7 @@ def has_default_value(value): def has_no_value(value): """If functions return None, try not to update the WCS with this value.""" - return value is None or ( - isinstance(value, str) and 'None' in value.strip()) + return value is None or (isinstance(value, str) and 'None' in value.strip()) @staticmethod def needs_lookup(value): @@ -1413,10 +1248,19 @@ class Hdf5ObsBlueprint(ObsBlueprint): print(ob) """ - def __init__(self, position_axes=None, energy_axis=None, - polarization_axis=None, time_axis=None, - obs_axis=None, custom_axis=None, module=None, - update=True, instantiated_class=None): + + def __init__( + self, + position_axes=None, + energy_axis=None, + polarization_axis=None, + time_axis=None, + obs_axis=None, + custom_axis=None, + module=None, + update=True, + instantiated_class=None, + ): """ There are no sensible/known HDF5 defaults for WCS construction, so default to ensuring the blueprint executes with mostly values of None. 
@@ -1436,11 +1280,11 @@ def __init__(self, position_axes=None, energy_axis=None, instantiated_class, ) tmp = { - 'Observation.algorithm.name': ([], 'exposure'), - 'Plane.calibrationLevel': ([], CalibrationLevel.RAW_STANDARD), - 'Plane.dataProductType': ([], DataProductType.IMAGE), - 'Artifact.releaseType': ([], ReleaseType.DATA), - 'Chunk': 'include' + 'Observation.algorithm.name': ([], 'exposure'), + 'Plane.calibrationLevel': ([], CalibrationLevel.RAW_STANDARD), + 'Plane.dataProductType': ([], DataProductType.IMAGE), + 'Artifact.releaseType': ([], ReleaseType.DATA), + 'Chunk': 'include', } # using the tmp to make sure that the keywords are valid for key in tmp: @@ -1455,8 +1299,7 @@ def configure_custom_axis(self, axis, override=True): :param override: Set to False when reading from a file. """ if self._custom_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured custom axis.') + self.logger.debug('Attempt to configure already-configured custom axis.') return if override: @@ -1484,8 +1327,7 @@ def configure_position_axes(self, axes, override=True): :param override: Set to False when reading from a file. """ if self._pos_axes_configed: - self.logger.debug( - 'Attempt to configure already-configured position axes.') + self.logger.debug('Attempt to configure already-configured position axes.') return if override: @@ -1503,18 +1345,12 @@ def configure_position_axes(self, axes, override=True): self.set('Chunk.position.axis.function.cd12', ([], None)) self.set('Chunk.position.axis.function.cd21', ([], None)) self.set('Chunk.position.axis.function.cd22', ([], None)) - self.set('Chunk.position.axis.function.dimension.naxis1', - ([], 1)) - self.set('Chunk.position.axis.function.dimension.naxis2', - ([], 1)) - self.set('Chunk.position.axis.function.refCoord.coord1.pix', - ([], None)) - self.set('Chunk.position.axis.function.refCoord.coord1.val', - ([], None)) - self.set('Chunk.position.axis.function.refCoord.coord2.pix', - ([], None)) - self.set('Chunk.position.axis.function.refCoord.coord2.val', - ([], None)) + self.set('Chunk.position.axis.function.dimension.naxis1', ([], 1)) + self.set('Chunk.position.axis.function.dimension.naxis2', ([], 1)) + self.set('Chunk.position.axis.function.refCoord.coord1.pix', ([], None)) + self.set('Chunk.position.axis.function.refCoord.coord1.val', ([], None)) + self.set('Chunk.position.axis.function.refCoord.coord2.pix', ([], None)) + self.set('Chunk.position.axis.function.refCoord.coord2.val', ([], None)) self._wcs_std['Chunk.position.coordsys'] = '' self._wcs_std['Chunk.position.equinox'] = '' @@ -1547,8 +1383,7 @@ def configure_energy_axis(self, axis, override=True): :return: """ if self._energy_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured energy axis.') + self.logger.debug('Attempt to configure already-configured energy axis.') return if override: @@ -1602,8 +1437,7 @@ def configure_polarization_axis(self, axis, override=True): :return: """ if self._polarization_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured polarization axis.') + self.logger.debug('Attempt to configure already-configured polarization axis.') return if override: @@ -1612,10 +1446,8 @@ def configure_polarization_axis(self, axis, override=True): self.set('Chunk.polarization.axis.axis.cunit', ([], None)) self.set('Chunk.polarization.axis.function.naxis', ([], 1)) self.set('Chunk.polarization.axis.function.delta', ([], None)) - self.set('Chunk.polarization.axis.function.refCoord.pix', - ([], None)) - 
self.set('Chunk.polarization.axis.function.refCoord.val', - ([], None)) + self.set('Chunk.polarization.axis.function.refCoord.pix', ([], None)) + self.set('Chunk.polarization.axis.function.refCoord.val', ([], None)) self._wcs_std['Chunk.polarization.axis.axis.ctype'] = '' self._wcs_std['Chunk.polarization.axis.axis.cunit'] = '' @@ -1638,8 +1470,7 @@ def configure_observable_axis(self, axis, override=True): :return: """ if self._obs_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured observable axis.') + self.logger.debug('Attempt to configure already-configured observable axis.') return if override: @@ -1663,8 +1494,7 @@ def configure_time_axis(self, axis, override=True): :return: """ if self._time_axis_configed: - self.logger.debug( - 'Attempt to configure already-configured time axis.') + self.logger.debug('Attempt to configure already-configured time axis.') return if override: diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index 6f74cceb..feb6d62b 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -172,8 +172,7 @@ def get_external_headers(external_url): try: session = requests.Session() retries = 10 - retry = Retry(total=retries, read=retries, connect=retries, - backoff_factor=0.5) + retry = Retry(total=retries, read=retries, connect=retries, backoff_factor=0.5) adapter = HTTPAdapter(max_retries=retry) session.mount('http://', adapter) session.mount('https://', adapter) @@ -182,8 +181,7 @@ def get_external_headers(external_url): headers = data_util.make_headers_from_string(r.text) else: headers = None - logging.warning('Error {} when retrieving {} headers.'.format( - r.status_code, external_url)) + logging.warning('Error {} when retrieving {} headers.'.format(r.status_code, external_url)) r.close() return headers except Exception as e: @@ -207,16 +205,13 @@ def get_vos_headers(uri, subject=None): temp_filename = tempfile.NamedTemporaryFile() client.copy(uri, temp_filename.name, head=True) - return data_util.get_local_file_headers( - f'file://{temp_filename.name}' - ) + return data_util.get_local_file_headers(f'file://{temp_filename.name}') else: # this should be a programming error by now raise NotImplementedError('Only vos type URIs supported') -def _get_and_update_artifact_meta(uri, artifact, subject=None, connected=True, - client=None): +def _get_and_update_artifact_meta(uri, artifact, subject=None, connected=True, client=None): """ Updates contentType, contentLength and contentChecksum of an artifact :param artifact: @@ -235,25 +230,21 @@ def _get_and_update_artifact_meta(uri, artifact, subject=None, connected=True, elif file_url.scheme == 'vos': metadata = _get_vos_meta(subject, uri) elif file_url.scheme == 'file': - if (file_url.path.endswith('.header') and subject is not None and - connected): + if file_url.path.endswith('.header') and subject is not None and connected: if artifact.uri.startswith('vos'): metadata = _get_vos_meta(subject, artifact.uri) else: # if header is on disk, get the content_* from CADC metadata = client.info(artifact.uri) if metadata is None: - logging.info( - 'Could not find {} at CADC. No Artifact ' - 'metadata.'.format(artifact.uri)) + logging.info('Could not find {} at CADC. No Artifact ' 'metadata.'.format(artifact.uri)) return else: metadata = data_util.get_local_file_info(file_url.path) else: metadata = client.info(uri) if metadata is None: - logging.info('Could not find {} at CADC. 
No Artifact ' - 'metadata.'.format(artifact.uri)) + logging.info('Could not find {} at CADC. No Artifact ' 'metadata.'.format(artifact.uri)) return update_artifact_meta(artifact, metadata) @@ -266,12 +257,12 @@ def update_artifact_meta(artifact, file_info): :param file_info :return: """ - logging.debug('old artifact metadata - ' - 'uri({}), encoding({}), size({}), type({})'. - format(artifact.uri, - artifact.content_checksum, - artifact.content_length, - artifact.content_type)) + logging.debug( + 'old artifact metadata - ' + 'uri({}), encoding({}), size({}), type({})'.format( + artifact.uri, artifact.content_checksum, artifact.content_length, artifact.content_type + ) + ) if file_info.md5sum is not None: if file_info.md5sum.startswith('md5:'): checksum = ChecksumURI(file_info.md5sum) @@ -280,12 +271,12 @@ def update_artifact_meta(artifact, file_info): artifact.content_checksum = checksum artifact.content_length = _to_int(file_info.size) artifact.content_type = _to_str(file_info.file_type) - logging.debug('updated artifact metadata - ' - 'uri({}), encoding({}), size({}), type({})'. - format(artifact.uri, - artifact.content_checksum, - artifact.content_length, - artifact.content_type)) + logging.debug( + 'updated artifact metadata - ' + 'uri({}), encoding({}), size({}), type({})'.format( + artifact.uri, artifact.content_checksum, artifact.content_length, artifact.content_type + ) + ) def _get_vos_meta(subject, uri): @@ -300,9 +291,9 @@ def _get_vos_meta(subject, uri): else: client = Client() node = client.get_node(uri, limit=None, force=False) - return FileInfo(id=uri, size=node.props['length'], - md5sum=node.props['MD5'], - file_type=data_util.get_file_type(uri)) + return FileInfo( + id=uri, size=node.props['length'], md5sum=node.props['MD5'], file_type=data_util.get_file_type(uri) + ) def _lookup_blueprint(blueprints, uri): @@ -330,10 +321,22 @@ def _extract_ids(cardinality): return cardinality.split('/', 1) -def _augment(obs, product_id, uri, blueprint, subject, dumpconfig=False, - validate_wcs=True, plugin=None, local=None, - external_url=None, connected=True, use_blueprint_parser=False, - client=None, **kwargs): +def _augment( + obs, + product_id, + uri, + blueprint, + subject, + dumpconfig=False, + validate_wcs=True, + plugin=None, + local=None, + external_url=None, + connected=True, + use_blueprint_parser=False, + client=None, + **kwargs, +): """ Find or construct a plane and an artifact to go with the observation under augmentation. 
@@ -363,37 +366,29 @@ def _augment(obs, product_id, uri, blueprint, subject, dumpconfig=False, plane = obs.planes[product_id] if uri not in plane.artifacts.keys(): - plane.artifacts.add( - Artifact(uri=str(uri), - product_type=ProductType.SCIENCE, - release_type=ReleaseType.DATA)) + plane.artifacts.add(Artifact(uri=str(uri), product_type=ProductType.SCIENCE, release_type=ReleaseType.DATA)) meta_uri = uri visit_local = None if use_blueprint_parser: - logging.debug( - f'Using a BlueprintParser as requested for {uri}') + logging.debug(f'Using a BlueprintParser as requested for {uri}') parser = BlueprintParser(blueprint, uri=uri) elif local: if uri.startswith('vos'): if '.fits' in local or '.fits.gz' in local: meta_uri = f'file://{local}' - logging.debug( - f'Using a FitsParser for vos local {local}') + logging.debug(f'Using a FitsParser for vos local {local}') headers = data_util.get_local_file_headers(local) parser = FitsParser(headers, blueprint, uri=uri) elif '.csv' in local: - logging.debug( - f'Using a BlueprintParser for vos local {local}') + logging.debug(f'Using a BlueprintParser for vos local {local}') parser = BlueprintParser(blueprint, uri=uri) else: raise ValueError(f'Unexpected file type {local}') else: meta_uri = f'file://{local}' visit_local = local - if ('.header' in local or - data_util.get_file_type(local) == - 'application/fits'): + if '.header' in local or data_util.get_file_type(local) == 'application/fits': if uri.startswith('cadc'): logging.debug(f'Using a FitsParser for local file {local}') parser = FitsParser(local, blueprint, uri=uri) @@ -401,11 +396,11 @@ def _augment(obs, product_id, uri, blueprint, subject, dumpconfig=False, logging.debug(f'Using a ContentParser for local file {local}') parser = ContentParser(blueprint, uri) elif '.h5' in local: - logging.debug( - f'Using an Hdf5Parser for local file {local}') + logging.debug(f'Using an Hdf5Parser for local file {local}') # h5py is an extra in this package since most collections do # not require it import h5py + temp = h5py.File(local) parser = Hdf5Parser(blueprint, uri, temp) else: @@ -415,14 +410,10 @@ def _augment(obs, product_id, uri, blueprint, subject, dumpconfig=False, elif external_url: headers = get_external_headers(external_url) if headers is None: - logging.debug( - 'Using a BlueprintParser for un-retrievable remote headers ' - '{}'.format(uri) - ) + logging.debug('Using a BlueprintParser for un-retrievable remote headers ' '{}'.format(uri)) parser = BlueprintParser(blueprint, uri=uri) else: - logging.debug( - f'Using a FitsParser for remote headers {uri}') + logging.debug(f'Using a FitsParser for remote headers {uri}') parser = FitsParser(headers, blueprint, uri=uri) else: if '.fits' in uri: @@ -436,20 +427,16 @@ def _augment(obs, product_id, uri, blueprint, subject, dumpconfig=False, parser = FitsParser(headers, blueprint, uri=uri) else: # explicitly ignore headers for txt and image files - logging.debug( - f'Using a BlueprintParser for remote file {uri}') + logging.debug(f'Using a BlueprintParser for remote file {uri}') parser = BlueprintParser(blueprint, uri=uri) if parser is None: result = None else: - _get_and_update_artifact_meta( - meta_uri, plane.artifacts[uri], subject, connected, client) - parser.augment_observation(observation=obs, artifact_uri=uri, - product_id=plane.product_id) + _get_and_update_artifact_meta(meta_uri, plane.artifacts[uri], subject, connected, client) + parser.augment_observation(observation=obs, artifact_uri=uri, product_id=plane.product_id) - result = _visit(plugin, 
parser, obs, visit_local, product_id, uri, - subject, **kwargs) + result = _visit(plugin, parser, obs, visit_local, product_id, uri, subject, **kwargs) if result is not None: if validate_wcs: @@ -462,9 +449,7 @@ def _augment(obs, product_id, uri, blueprint, subject, dumpconfig=False, raise e if len(parser._errors) > 0: - logging.debug( - '{} errors encountered while processing {!r}.'.format( - len(parser._errors), uri)) + logging.debug('{} errors encountered while processing {!r}.'.format(len(parser._errors), uri)) logging.debug(f'{parser._errors}') return result @@ -492,13 +477,19 @@ def _load_module(module): def caom2gen(): parser = get_gen_proc_arg_parser() - parser.add_argument('--blueprint', nargs='+', required=True, - help=('list of files with blueprints for CAOM2 ' - 'construction, in serialized format. If the ' - 'list is of length 1, the same blueprint will ' - 'be applied to all lineage entries. Otherwise, ' - 'there must be a blueprint file per lineage ' - 'entry.')) + parser.add_argument( + '--blueprint', + nargs='+', + required=True, + help=( + 'list of files with blueprints for CAOM2 ' + 'construction, in serialized format. If the ' + 'list is of length 1, the same blueprint will ' + 'be applied to all lineage entries. Otherwise, ' + 'there must be a blueprint file per lineage ' + 'entry.' + ), + ) if len(sys.argv) < 2: parser.print_usage(file=sys.stderr) @@ -532,14 +523,13 @@ def caom2gen(): logging.debug(f'Blueprints: {args.blueprint}') sys.stderr.write( '{}: error: different number of blueprints ' - '{} and files {}.'.format(APP_NAME, len(args.blueprint), - len(args.lineage))) + '{} and files {}.'.format(APP_NAME, len(args.blueprint), len(args.lineage)) + ) sys.exit(-1) for i, cardinality in enumerate(args.lineage): product_id, uri = _extract_ids(cardinality) - logging.debug('Loading blueprint for {} from {}'.format( - uri, args.blueprint[i])) + logging.debug('Loading blueprint for {} from {}'.format(uri, args.blueprint[i])) if '.h5' in uri: blueprint = Hdf5ObsBlueprint(module=module) else: @@ -586,25 +576,19 @@ def _gen_obs(obs_blueprints, in_obs_xml, collection=None, obs_id=None): if bp._get('DerivedObservation.members') is not None: logging.debug('Build a DerivedObservation') obs = DerivedObservation( - collection=collection, - observation_id=obs_id, - algorithm=Algorithm('composite')) + collection=collection, observation_id=obs_id, algorithm=Algorithm('composite') + ) break elif bp._get('CompositeObservation.members') is not None: - logging.debug( - 'Build a CompositeObservation with obs_id {}'.format( - obs_id)) + logging.debug('Build a CompositeObservation with obs_id {}'.format(obs_id)) obs = CompositeObservation( - collection=collection, observation_id=obs_id, - algorithm=Algorithm('composite')) + collection=collection, observation_id=obs_id, algorithm=Algorithm('composite') + ) break if not obs: # build a simple observation - logging.debug( - f'Build a SimpleObservation with obs_id {obs_id}') - obs = SimpleObservation(collection=collection, - observation_id=obs_id, - algorithm=Algorithm('exposure')) + logging.debug(f'Build a SimpleObservation with obs_id {obs_id}') + obs = SimpleObservation(collection=collection, observation_id=obs_id, algorithm=Algorithm('exposure')) return obs @@ -617,17 +601,19 @@ def _set_logging(verbose, debug, quiet): logger.removeHandler(handler) handler = logging.StreamHandler() - handler.setFormatter(DispatchingFormatter({ - 'caom2utils.fits2caom2.FitsWcsParser': logging.Formatter( - '%(asctime)s:%(levelname)s:%(name)-12s:HDU:%(hdu)-2s:' 
- '%(lineno)d:%(message)s'), - 'astropy': logging.Formatter( - '%(asctime)s:%(levelname)s:%(name)-12s:HDU:%(hdu)-2s:' - '%(lineno)d:%(message)s') - }, - logging.Formatter('%(asctime)s:%(levelname)s:%(name)-12s:' - '%(lineno)d:%(message)s') - )) + handler.setFormatter( + DispatchingFormatter( + { + 'caom2utils.fits2caom2.FitsWcsParser': logging.Formatter( + '%(asctime)s:%(levelname)s:%(name)-12s:HDU:%(hdu)-2s:' '%(lineno)d:%(message)s' + ), + 'astropy': logging.Formatter( + '%(asctime)s:%(levelname)s:%(name)-12s:HDU:%(hdu)-2s:' '%(lineno)d:%(message)s' + ), + }, + logging.Formatter('%(asctime)s:%(levelname)s:%(name)-12s:' '%(lineno)d:%(message)s'), + ) + ) logger.addHandler(handler) if verbose: logger.setLevel(logging.INFO) @@ -649,40 +635,48 @@ def _get_common_arg_parser(): fits2caom2 and caom2gen :return: args parser """ - parser = util.get_base_parser(subparsers=False, - version=version.version, - default_resource_id=GLOBAL_STORAGE_RESOURCE_ID) - - parser.description = ( - 'Augments an observation with information in one or more fits files.') - - parser.add_argument('--dumpconfig', action='store_true', - help=('output the utype to keyword mapping to ' - 'the console')) - - parser.add_argument('--not_connected', action='store_true', - help=('if set, there is no internet connection, so ' - 'skip service invocations.')) - - parser.add_argument('--no_validate', action='store_true', - help=('by default, the application will validate the ' - 'WCS information for an observation. ' - 'Specifying this flag skips that step.')) - - parser.add_argument('-o', '--out', dest='out_obs_xml', - help='output of augmented observation in XML', - required=False) + parser = util.get_base_parser( + subparsers=False, version=version.version, default_resource_id=GLOBAL_STORAGE_RESOURCE_ID + ) + + parser.description = 'Augments an observation with information in one or more fits files.' + + parser.add_argument( + '--dumpconfig', action='store_true', help=('output the utype to keyword mapping to ' 'the console') + ) + + parser.add_argument( + '--not_connected', + action='store_true', + help=('if set, there is no internet connection, so ' 'skip service invocations.'), + ) + + parser.add_argument( + '--no_validate', + action='store_true', + help=( + 'by default, the application will validate the ' + 'WCS information for an observation. ' + 'Specifying this flag skips that step.' 
+ ), + ) + + parser.add_argument( + '-o', '--out', dest='out_obs_xml', help='output of augmented observation in XML', required=False + ) in_group = parser.add_mutually_exclusive_group(required=True) - in_group.add_argument('-i', '--in', dest='in_obs_xml', - type=argparse.FileType('r'), - help='input of observation to be augmented in XML') - in_group.add_argument('--observation', nargs=2, - help='observation in a collection', - metavar=('collection', 'observationID')) - parser.add_argument('--local', nargs='+', - help=('list of files in local filesystem (same order ' - 'as uri)')) + in_group.add_argument( + '-i', + '--in', + dest='in_obs_xml', + type=argparse.FileType('r'), + help='input of observation to be augmented in XML', + ) + in_group.add_argument( + '--observation', nargs=2, help='observation in a collection', metavar=('collection', 'observationID') + ) + parser.add_argument('--local', nargs='+', help=('list of files in local filesystem (same order ' 'as uri)')) return parser @@ -693,9 +687,7 @@ def get_arg_parser(): :return: args parser """ parser = _get_common_arg_parser() - parser.add_argument('--productID', - help='product ID of the plane in the observation', - required=False) + parser.add_argument('--productID', help='product ID of the plane in the observation', required=False) parser.add_argument('fileURI', help='URI of a fits file', nargs='+') return parser @@ -723,22 +715,23 @@ def proc(args, obs_blueprints): _set_logging(args.verbose, args.debug, args.quiet) if args.local and (len(args.local) != len(args.fileURI)): - msg = ('number of local arguments not the same with file ' - 'URIs ({} vs {})').format(len(args.local), args.fileURI) + msg = ('number of local arguments not the same with file ' 'URIs ({} vs {})').format( + len(args.local), args.fileURI + ) raise RuntimeError(msg) if args.in_obs_xml: obs = _gen_obs(obs_blueprints, args.in_obs_xml) else: - obs = _gen_obs(obs_blueprints, None, args.observation[0], - args.observation[1]) + obs = _gen_obs(obs_blueprints, None, args.observation[0], args.observation[1]) if args.in_obs_xml and len(obs.planes) != 1: if not args.productID: msg = '{}{}{}'.format( 'A productID parameter is required if ', 'there are zero or more than one planes ', - 'in the input observation.') + 'in the input observation.', + ) raise RuntimeError(msg) subject = net.Subject.from_cmd_line_args(args) @@ -756,17 +749,26 @@ def proc(args, obs_blueprints): product_id = args.productID else: msg = '{}{}'.format( - 'A productID parameter is required if one is not ', - 'identified in the blueprint.') + 'A productID parameter is required if one is not ', 'identified in the blueprint.' + ) raise RuntimeError(msg) file_name = None if args.local: file_name = args.local[i] - obs = _augment(obs, product_id, uri, blueprint, subject, - args.dumpconfig, validate_wcs, plugin=None, - local=file_name, client=client) + obs = _augment( + obs, + product_id, + uri, + blueprint, + subject, + args.dumpconfig, + validate_wcs, + plugin=None, + local=file_name, + client=client, + ) _write_observation(obs, args) @@ -780,24 +782,25 @@ def _load_plugin(plugin_name): plgin = getattr(plgin, 'ObservationUpdater')() if not hasattr(plgin, 'update'): - msg = 'The plugin {} is not correct. It must provide one ' \ - 'of:\n' \ - '1 - a function named update, or\n' \ - '2 - a class ObservationUpdater with a function named ' \ - 'update.\n In either case, the update signature needs ' \ - 'to be (Observation, **kwargs).'.format(plugin_name) + msg = ( + 'The plugin {} is not correct. 
It must provide one ' + 'of:\n' + '1 - a function named update, or\n' + '2 - a class ObservationUpdater with a function named ' + 'update.\n In either case, the update signature needs ' + 'to be (Observation, **kwargs).'.format(plugin_name) + ) raise ImportError(msg) return plgin -def _visit(plugin_name, parser, obs, visit_local, product_id=None, uri=None, - subject=None, **kwargs): +def _visit(plugin_name, parser, obs, visit_local, product_id=None, uri=None, subject=None, **kwargs): result = obs if plugin_name is not None and len(plugin_name) > 0: # TODO make a check that's necessary under both calling conditions here logging.debug( - 'Begin plugin execution {!r} update method on ' - 'observation {!r}'.format(plugin_name, obs.observation_id)) + 'Begin plugin execution {!r} update method on ' 'observation {!r}'.format(plugin_name, obs.observation_id) + ) plgin = _load_plugin(plugin_name) if isinstance(parser, FitsParser): kwargs['headers'] = parser.headers @@ -814,8 +817,8 @@ def _visit(plugin_name, parser, obs, visit_local, product_id=None, uri=None, if result is not None: logging.debug( 'Finished executing plugin {!r} update ' - 'method on observation {!r}'.format( - plugin_name, obs.observation_id)) + 'method on observation {!r}'.format(plugin_name, obs.observation_id) + ) except Exception as e: logging.error(e) tb = traceback.format_exc() @@ -844,8 +847,7 @@ def gen_proc(args, blueprints, **kwargs): if args.in_obs_xml: obs = _gen_obs(blueprints, args.in_obs_xml) else: - obs = _gen_obs(blueprints, None, args.observation[0], - args.observation[1]) + obs = _gen_obs(blueprints, None, args.observation[0], args.observation[1]) validate_wcs = True if args.no_validate: @@ -859,21 +861,17 @@ def gen_proc(args, blueprints, **kwargs): subject = net.Subject.from_cmd_line_args(args) if args.resource_id is None: # if the resource_id is Undefined, using CadcDataClient - client = data_util.StorageClientWrapper( - subject, using_storage_inventory=False) + client = data_util.StorageClientWrapper(subject, using_storage_inventory=False) else: # if the resource_id is defined, assume that the caller intends to # use the Storage Inventory system, as it's the CADC storage # client that depends on a resource_id - client = data_util.StorageClientWrapper( - subject, resource_id=args.resource_id) + client = data_util.StorageClientWrapper(subject, resource_id=args.resource_id) for ii, cardinality in enumerate(args.lineage): product_id, uri = _extract_ids(cardinality) blueprint = _lookup_blueprint(blueprints, uri) - logging.debug( - 'Begin augmentation for product_id {}, uri {}'.format(product_id, - uri)) + logging.debug('Begin augmentation for product_id {}, uri {}'.format(product_id, uri)) file_name = None if args.local: @@ -887,10 +885,22 @@ def gen_proc(args, blueprints, **kwargs): if args.use_blueprint_parser: use_blueprint_parser = uri in args.use_blueprint_parser - obs = _augment(obs, product_id, uri, blueprint, subject, - args.dumpconfig, validate_wcs, args.plugin, file_name, - external_url, connected, use_blueprint_parser, client, - **kwargs) + obs = _augment( + obs, + product_id, + uri, + blueprint, + subject, + args.dumpconfig, + validate_wcs, + args.plugin, + file_name, + external_url, + connected, + use_blueprint_parser, + client, + **kwargs, + ) if obs is None: logging.warning('No observation. 
Stop processing.')
@@ -915,52 +925,89 @@ def get_gen_proc_arg_parser():
     :return: args parser
     """
     parser = _get_common_arg_parser()
-    parser.add_argument('--external_url', nargs='+',
-                        help=('service endpoint(s) that '
-                              'return(s) a string that can be '
-                              'made into FITS headers. Cardinality should'
-                              'be consistent with lineage.'))
-    parser.add_argument('--module', help=('if the blueprint contains function '
-                                          'calls, call '
-                                          'importlib.import_module '
-                                          'for the named module. Provide a '
-                                          'fully qualified name. Parameter '
-                                          'choices are the artifact URI (uri) '
-                                          'or a list of astropy Header '
-                                          'instances (header). This will '
-                                          'allow the update of a single '
-                                          'blueprint entry with a single '
-                                          'call.'))
-    parser.add_argument('--plugin', help=('if this parameter is specified, '
-                                          'call importlib.import_module '
-                                          'for the named module. Then '
-                                          'execute the method "update", '
-                                          'with the signature '
-                                          '(Observation, **kwargs). '
-                                          'This will allow '
-                                          'for the update of multiple '
-                                          'observation data members with one '
-                                          'call.'))
-    parser.add_argument('--lineage', nargs='+',
-                        help=('productID/artifactURI. List of plane/artifact '
-                              'identifiers that will be'
-                              'created for the identified observation.'))
-    parser.add_argument('--use_blueprint_parser', nargs='+',
-                        help=('productID/artifactURI. List of lineage entries '
-                              'that will be processed with a BlueprintParser. '
-                              'Good for files with no metadata in the '
-                              'content.'))
+    parser.add_argument(
+        '--external_url',
+        nargs='+',
+        help=(
+            'service endpoint(s) that '
+            'return(s) a string that can be '
+            'made into FITS headers. Cardinality should '
+            'be consistent with lineage.'
+        ),
+    )
+    parser.add_argument(
+        '--module',
+        help=(
+            'if the blueprint contains function '
+            'calls, call '
+            'importlib.import_module '
+            'for the named module. Provide a '
+            'fully qualified name. Parameter '
+            'choices are the artifact URI (uri) '
+            'or a list of astropy Header '
+            'instances (header). This will '
+            'allow the update of a single '
+            'blueprint entry with a single '
+            'call.'
+        ),
+    )
+    parser.add_argument(
+        '--plugin',
+        help=(
+            'if this parameter is specified, '
+            'call importlib.import_module '
+            'for the named module. Then '
+            'execute the method "update", '
+            'with the signature '
+            '(Observation, **kwargs). '
+            'This will allow '
+            'for the update of multiple '
+            'observation data members with one '
+            'call.'
+        ),
+    )
+    parser.add_argument(
+        '--lineage',
+        nargs='+',
+        help=(
+            'productID/artifactURI. List of plane/artifact '
+            'identifiers that will be '
+            'created for the identified observation.'
+        ),
+    )
+    parser.add_argument(
+        '--use_blueprint_parser',
+        nargs='+',
+        help=(
+            'productID/artifactURI. List of lineage entries '
+            'that will be processed with a BlueprintParser. '
+            'Good for files with no metadata in the '
+            'content.'
+ ), + ) return parser -def augment(blueprints, no_validate=False, dump_config=False, plugin=None, - out_obs_xml=None, in_obs_xml=None, collection=None, - observation=None, product_id=None, uri=None, netrc=False, - file_name=None, verbose=False, debug=False, quiet=False, **kwargs): +def augment( + blueprints, + no_validate=False, + dump_config=False, + plugin=None, + out_obs_xml=None, + in_obs_xml=None, + collection=None, + observation=None, + product_id=None, + uri=None, + netrc=False, + file_name=None, + verbose=False, + debug=False, + quiet=False, + **kwargs, +): _set_logging(verbose, debug, quiet) - logging.debug( - 'Begin augmentation for product_id {}, uri {}'.format(product_id, - uri)) + logging.debug('Begin augmentation for product_id {}, uri {}'.format(product_id, uri)) # The 'visit_args' are a dictionary within the 'params' dictionary. # They are set by the collection-specific implementation, as they are @@ -979,8 +1026,9 @@ def augment(blueprints, no_validate=False, dump_config=False, plugin=None, validate_wcs = not no_validate for ii in blueprints: - obs = _augment(obs, product_id, uri, blueprints[ii], subject, - dump_config, validate_wcs, plugin, file_name, **kwargs) + obs = _augment( + obs, product_id, uri, blueprints[ii], subject, dump_config, validate_wcs, plugin, file_name, **kwargs + ) writer = ObservationWriter() if out_obs_xml: diff --git a/caom2utils/caom2utils/caomvalidator.py b/caom2utils/caom2utils/caomvalidator.py index 91e6a3c4..64a81f68 100644 --- a/caom2utils/caom2utils/caomvalidator.py +++ b/caom2utils/caom2utils/caomvalidator.py @@ -127,11 +127,9 @@ def _validate_observation(caom2_entity, deep=True): if caom2_entity.target: _validate_keyword('target.keywords', caom2_entity.target.keywords) if caom2_entity.telescope: - _validate_keyword('telescope.keywords', - caom2_entity.telescope.keywords) + _validate_keyword('telescope.keywords', caom2_entity.telescope.keywords) if caom2_entity.instrument: - _validate_keyword('telescope.instrument', - caom2_entity.instrument.keywords) + _validate_keyword('telescope.instrument', caom2_entity.instrument.keywords) if deep: for plane in caom2_entity.planes.values(): _validate_plane(plane) @@ -149,8 +147,7 @@ def _validate_plane(caom2_entity, deep=True): """ _check_param(caom2_entity, Plane) if caom2_entity.provenance: - _validate_keyword('provenance.keywords', - caom2_entity.provenance.keywords) + _validate_keyword('provenance.keywords', caom2_entity.provenance.keywords) if caom2_entity.position: validate_polygon(caom2_entity.position.bounds) @@ -213,11 +210,9 @@ def _validate_keyword(name, keywords): return for keyword in keywords: if keyword is not None and keyword.find('|') != -1: - raise AssertionError( - f'invalid {name}: may not contain pipe (|)') + raise AssertionError(f'invalid {name}: may not contain pipe (|)') def _check_param(param, param_type): if param is None or not isinstance(param, param_type): - raise ValueError( - f'{param} must be a valid {param_type.__name__}.') + raise ValueError(f'{param} must be a valid {param_type.__name__}.') diff --git a/caom2utils/caom2utils/data_util.py b/caom2utils/caom2utils/data_util.py index d1fd23c3..884c214c 100644 --- a/caom2utils/caom2utils/data_util.py +++ b/caom2utils/caom2utils/data_util.py @@ -135,9 +135,7 @@ def get(self, working_directory, uri): except Exception as e: self._add_fail_metric('get', uri) self._logger.debug(traceback.format_exc()) - raise exceptions.UnexpectedException( - f'Did not retrieve {uri} because {e}' - ) + raise exceptions.UnexpectedException(f'Did 
not retrieve {uri} because {e}') self._add_metric('get', uri, start, stat(fqn).st_size) self._logger.debug('End get') @@ -164,9 +162,7 @@ def get_head(self, uri): self._add_fail_metric('get_header', uri) self._logger.debug(traceback.format_exc()) self._logger.error(e) - raise exceptions.UnexpectedException( - f'Did not retrieve {uri} header because {e}' - ) + raise exceptions.UnexpectedException(f'Did not retrieve {uri} header because {e}') def info(self, uri): """ @@ -223,9 +219,7 @@ def put(self, working_directory, uri): self._add_fail_metric('put', uri) self._logger.debug(traceback.format_exc()) self._logger.error(e) - raise exceptions.UnexpectedException( - f'Failed to store data with {e}' - ) + raise exceptions.UnexpectedException(f'Failed to store data with {e}') finally: chdir(cwd) self._add_metric('put', uri, start, local_meta.size) @@ -245,9 +239,7 @@ def remove(self, uri): self._add_fail_metric('remove', uri) self._logger.debug(traceback.format_exc()) self._logger.error(e) - raise exceptions.UnexpectedException( - f'Did not remove {uri} because {e}' - ) + raise exceptions.UnexpectedException(f'Did not remove {uri} because {e}') self._add_metric('remove', uri, start, value=None) self._logger.debug('End remove') @@ -284,8 +276,7 @@ def _clean_headers(fits_header): new_header.append('END\n') elif line.strip() == 'END': new_header.append('END\n') - elif '=' not in line and not (line.startswith('COMMENT') or - line.startswith('HISTORY')): + elif '=' not in line and not (line.startswith('COMMENT') or line.startswith('HISTORY')): pass else: new_header.append(f'{line}\n') @@ -355,7 +346,7 @@ def get_file_encoding(fqn): def get_file_type(fqn): """Basic header extension to content_type lookup.""" lower_fqn = fqn.lower() - if (lower_fqn.endswith('.fits') or lower_fqn.endswith('.fits.fz')): + if lower_fqn.endswith('.fits') or lower_fqn.endswith('.fits.fz'): return 'application/fits' elif lower_fqn.endswith('.gif'): return 'image/gif' @@ -378,7 +369,6 @@ def make_headers_from_string(fits_header): ":param fits_header a string of keyword/value pairs""" fits_header = _clean_headers(fits_header) delim = 'END\n' - extensions = \ - [e + delim for e in fits_header.split(delim) if e.strip()] + extensions = [e + delim for e in fits_header.split(delim) if e.strip()] headers = [fits.Header.fromstring(e, sep='\n') for e in extensions] return headers diff --git a/caom2utils/caom2utils/legacy.py b/caom2utils/caom2utils/legacy.py index 338132ca..5de7a4ce 100755 --- a/caom2utils/caom2utils/legacy.py +++ b/caom2utils/caom2utils/legacy.py @@ -113,9 +113,7 @@ def get_caom2_elements(self, lookup): elif lookup in self._inverse_plan.keys(): return self._inverse_plan[lookup] else: - raise ValueError( - '{} caom2 element not found in the plan (spelling?).'. - format(lookup)) + raise ValueError('{} caom2 element not found in the plan (spelling?).'.format(lookup)) # Mimic the default java fits2caom2.config file content, to support the @@ -128,36 +126,29 @@ def get_caom2_elements(self, lookup): # going forward. 
_JAVA_CAOM2_CONFIG = { 'DerivedObservation.members': 'members', - 'Observation.type': 'OBSTYPE', 'Observation.intent': 'obs.intent', 'Observation.sequenceNumber': 'obs.sequenceNumber', 'Observation.metaRelease': 'obs.metaRelease', - 'Observation.algorithm.name': 'algorithm.name', - 'Observation.instrument.name': 'instrument.name', 'Observation.instrument.keywords': 'instrument.keywords', - 'Observation.proposal.id': 'proposal.id', 'Observation.proposal.pi': 'proposal.pi', 'Observation.proposal.project': 'proposal.project', 'Observation.proposal.title': 'proposal.title', 'Observation.proposal.keywords': 'proposal.keywords', - 'Observation.target.name': 'target.name', 'Observation.target.type': 'target.type', 'Observation.target.standard': 'target.standard', 'Observation.target.redshift': 'target.redshift', 'Observation.target.keywords': 'target.keywords', 'Observation.target.moving': 'target.moving', - 'Observation.telescope.name': 'telescope.name', 'Observation.telescope.geoLocationX': 'telescope.geoLocationX', 'Observation.telescope.geoLocationY': 'telescope.geoLocationY', 'Observation.telescope.geoLocationZ': 'telescope.geoLocationZ', 'Observation.telescope.keywords': 'telescope.keywords', - 'Observation.environment.seeing': 'environment.seeing', 'Observation.environment.humidity': 'environment.humidity', 'Observation.environment.elevation': 'environment.elevation', @@ -165,12 +156,10 @@ def get_caom2_elements(self, lookup): 'Observation.environment.wavelengthTau': 'environment.wavelengthTau', 'Observation.environment.ambientTemp': 'environment.ambientTemp', 'Observation.environment.photometric': 'environment.photometric', - 'Plane.metaRelease': 'plane.metaRelease', 'Plane.dataRelease': 'plane.dataRelease', 'Plane.dataProductType': 'plane.dataProductType', 'Plane.calibrationLevel': 'plane.calibrationLevel', - 'Plane.provenance.name': 'provenance.name', 'Plane.provenance.version': 'provenance.version', 'Plane.provenance.project': 'provenance.project', @@ -180,19 +169,15 @@ def get_caom2_elements(self, lookup): 'Plane.provenance.lastExecuted': 'provenance.lastExecuted', 'Plane.provenance.keywords': 'provenance.keywords', 'Plane.provenance.inputs': 'provenance.inputs', - 'Plane.metrics.sourceNumberDensity': 'metrics.sourceNumberDensity', 'Plane.metrics.background': 'metrics.background', 'Plane.metrics.backgroundStddev': 'metrics.backgroundStddev', 'Plane.metrics.fluxDensityLimit': 'metrics.fluxDensityLimit', 'Plane.metrics.magLimit': 'metrics.magLimit', - 'Artifact.productType': 'artifact.productType', 'Artifact.releaseType': 'artifact.releaseType', - 'Part.name': 'part.name', 'Part.productType': 'part.productType', - 'Chunk.naxis': 'ZNAXIS,NAXIS', 'Chunk.observableAxis': 'chunk.observableAxis', 'Chunk.positionAxis1': 'getPositionAxis()', @@ -200,14 +185,12 @@ def get_caom2_elements(self, lookup): 'Chunk.energyAxis': 'getEnergyAxis()', 'Chunk.timeAxis': 'getTimeAxis()', 'Chunk.polarizationAxis': 'getPolarizationAxis()', - 'Chunk.observable.dependent.bin': 'observable.dependent.bin', 'Chunk.observable.dependent.axis.ctype': 'observable.dependent.ctype', 'Chunk.observable.dependent.axis.cunit': 'observable.dependent.cunit', 'Chunk.observable.independent.bin': 'observable.independent.bin', 'Chunk.observable.independent.axis.ctype': 'observable.independent.ctype', 'Chunk.observable.independent.axis.cunit': 'observable.independent.cunit', - 'Chunk.position.coordsys': 'RADECSYS,RADESYS', 'Chunk.position.equinox': 'EQUINOX,EPOCH', 'Chunk.position.resolution': 'position.resolution', @@ -223,31 
+206,20 @@ def get_caom2_elements(self, lookup): 'Chunk.position.axis.function.cd12': 'CD{positionAxis1}_{positionAxis2}', 'Chunk.position.axis.function.cd21': 'CD{positionAxis2}_{positionAxis1}', 'Chunk.position.axis.function.cd22': 'CD{positionAxis2}_{positionAxis2}', - 'Chunk.position.axis.function.dimension.naxis1': - 'ZNAXIS{positionAxis1},NAXIS{positionAxis1}', - 'Chunk.position.axis.function.dimension.naxis2': - 'ZNAXIS{positionAxis2},NAXIS{positionAxis2}', + 'Chunk.position.axis.function.dimension.naxis1': 'ZNAXIS{positionAxis1},NAXIS{positionAxis1}', + 'Chunk.position.axis.function.dimension.naxis2': 'ZNAXIS{positionAxis2},NAXIS{positionAxis2}', 'Chunk.position.axis.function.refCoord.coord1.pix': 'CRPIX{positionAxis1}', 'Chunk.position.axis.function.refCoord.coord1.val': 'CRVAL{positionAxis1}', 'Chunk.position.axis.function.refCoord.coord2.pix': 'CRPIX{positionAxis2}', 'Chunk.position.axis.function.refCoord.coord2.val': 'CRVAL{positionAxis2}', - 'Chunk.position.axis.range.start.coord1.pix': - 'position.range.start.coord1.pix', - 'Chunk.position.axis.range.start.coord1.val': - 'position.range.start.coord1.val', - 'Chunk.position.axis.range.start.coord2.pix': - 'position.range.start.coord2.pix', - 'Chunk.position.axis.range.start.coord2.val': - 'position.range.start.coord2.val', - 'Chunk.position.axis.range.end.coord1.pix': - 'position.range.end.coord1.pix', - 'Chunk.position.axis.range.end.coord1.val': - 'position.range.end.coord1.val', - 'Chunk.position.axis.range.end.coord2.pix': - 'position.range.end.coord2.pix', - 'Chunk.position.axis.range.end.coord2.val': - 'position.range.end.coord2.val', - + 'Chunk.position.axis.range.start.coord1.pix': 'position.range.start.coord1.pix', + 'Chunk.position.axis.range.start.coord1.val': 'position.range.start.coord1.val', + 'Chunk.position.axis.range.start.coord2.pix': 'position.range.start.coord2.pix', + 'Chunk.position.axis.range.start.coord2.val': 'position.range.start.coord2.val', + 'Chunk.position.axis.range.end.coord1.pix': 'position.range.end.coord1.pix', + 'Chunk.position.axis.range.end.coord1.val': 'position.range.end.coord1.val', + 'Chunk.position.axis.range.end.coord2.pix': 'position.range.end.coord2.pix', + 'Chunk.position.axis.range.end.coord2.val': 'position.range.end.coord2.val', 'Chunk.energy.specsys': 'SPECSYS', 'Chunk.energy.ssysobs': 'SSYSOBS', 'Chunk.energy.restfrq': 'RESTFRQ', @@ -273,7 +245,6 @@ def get_caom2_elements(self, lookup): 'Chunk.energy.axis.range.start.val': 'energy.range.start.val', 'Chunk.energy.axis.range.end.pix': 'energy.range.end.pix', 'Chunk.energy.axis.range.end.val': 'energy.range.end.val', - 'Chunk.polarization.axis.axis.ctype': 'CTYPE{polarizationAxis}', 'Chunk.polarization.axis.axis.cunit': 'CUNIT{polarizationAxis}', 'Chunk.polarization.axis.bounds.samples': 'polarization.samples', @@ -287,7 +258,6 @@ def get_caom2_elements(self, lookup): 'Chunk.polarization.axis.range.start.val': 'polarization.range.start.val', 'Chunk.polarization.axis.range.end.pix': 'polarization.range.end.pix', 'Chunk.polarization.axis.range.end.val': 'polarization.range.end.val', - 'Chunk.time.exposure': 'time.exposure', 'Chunk.time.resolution': 'time.resolution', } @@ -340,11 +310,8 @@ def load_config(file_name): d['artifacts'] = {} else: artifact = line.split('?')[1].split('#[')[0].strip() - extension = \ - int(line.split('#[')[1].split(']')[0].strip()) - logging.debug( - 'Adding overrides for artifact {} in extension {}'. 
- format(artifact, extension)) + extension = int(line.split('#[')[1].split(']')[0].strip()) + logging.debug('Adding overrides for artifact {} in extension {}'.format(artifact, extension)) if artifact not in d['artifacts'].keys(): d['artifacts'][artifact] = {} if extension not in d['artifacts'][artifact].keys(): @@ -388,40 +355,37 @@ def _update_axis_info(parser, defaults, overrides, config): raise ValueError(f'Unrecognized CTYPE: {value}') ignore = '{ignore}' - if ('Chunk.position' not in config) or \ - (config['Chunk.position'] != ignore): + if ('Chunk.position' not in config) or (config['Chunk.position'] != ignore): if ra_axis and dec_axis: parser.configure_position_axes((ra_axis, dec_axis)) elif ra_axis or dec_axis: - raise ValueError('Only one positional axis found (ra/dec): {}/{}'. - format(ra_axis, dec_axis)) + raise ValueError('Only one positional axis found (ra/dec): {}/{}'.format(ra_axis, dec_axis)) else: # assume that positional axis are 1 and 2 by default - if time_axis in ['1', '2'] or energy_axis in ['1', '2'] or \ - polarization_axis in ['1', '2'] or obs_axis in ['1', '2']: + if ( + time_axis in ['1', '2'] + or energy_axis in ['1', '2'] + or polarization_axis in ['1', '2'] + or obs_axis in ['1', '2'] + ): raise ValueError('Cannot determine the positional axis') else: parser.configure_position_axes(('1', '2')) - if time_axis and (('Chunk.time' not in config) or - (config['Chunk.time'] != ignore)): + if time_axis and (('Chunk.time' not in config) or (config['Chunk.time'] != ignore)): parser.configure_time_axis(time_axis) - if energy_axis and (('Chunk.energy' not in config) or - (config['Chunk.energy'] != ignore)): + if energy_axis and (('Chunk.energy' not in config) or (config['Chunk.energy'] != ignore)): parser.configure_energy_axis(energy_axis) - if polarization_axis and (('Chunk.polarization' not in config) or - (config['Chunk.polarization'] != ignore)): + if polarization_axis and (('Chunk.polarization' not in config) or (config['Chunk.polarization'] != ignore)): parser.configure_polarization_axis(polarization_axis) - if obs_axis and (('Chunk.observable' not in config) or - (config['Chunk.observable'] != ignore)): + if obs_axis and (('Chunk.observable' not in config) or (config['Chunk.observable'] != ignore)): parser.configure_observable_axis(obs_axis) -def update_blueprint(obs_blueprint, artifact_uri=None, config=None, - defaults=None, overrides=None): +def update_blueprint(obs_blueprint, artifact_uri=None, config=None, defaults=None, overrides=None): """ Update an observation blueprint according to defaults and/or overrides as configured by the user. 
@@ -441,8 +405,7 @@ def update_blueprint(obs_blueprint, artifact_uri=None, config=None, convert = ConvertFromJava(obs_blueprint, config) errors = [] if config: - logging.debug( - f'Setting user-supplied configuration for {artifact_uri}.') + logging.debug(f'Setting user-supplied configuration for {artifact_uri}.') for key, value in config.items(): try: if value.isupper() and value.find('.') == -1: @@ -452,8 +415,7 @@ def update_blueprint(obs_blueprint, artifact_uri=None, config=None, obs_blueprint.add_attribute(caom2_key, value) except ValueError: errors.append(f'{key}: {sys.exc_info()[1]}') - logging.debug( - f'User-supplied configuration applied for {artifact_uri}.') + logging.debug(f'User-supplied configuration applied for {artifact_uri}.') if defaults: logging.debug(f'Setting defaults for {artifact_uri}') @@ -461,9 +423,7 @@ def update_blueprint(obs_blueprint, artifact_uri=None, config=None, try: for caom2_key in convert.get_caom2_elements(key): obs_blueprint.set_default(caom2_key, value) - logging.debug( - '{} setting default value to {}'.format( - caom2_key, value)) + logging.debug('{} setting default value to {}'.format(caom2_key, value)) except ValueError: errors.append(f'{key}: {sys.exc_info()[1]}') logging.debug(f'Defaults set for {artifact_uri}.') @@ -475,26 +435,22 @@ def update_blueprint(obs_blueprint, artifact_uri=None, config=None, logging.debug(f'01/11/18 Chris said ignore {key!r}.') continue if key == 'artifacts' and artifact_uri in overrides['artifacts']: - logging.debug('Found extension overrides for URI {}.'.format( - artifact_uri)) + logging.debug('Found extension overrides for URI {}.'.format(artifact_uri)) for extension in overrides['artifacts'][artifact_uri].keys(): - for ext_key, ext_value in \ - overrides['artifacts'][artifact_uri][extension].items(): + for ext_key, ext_value in overrides['artifacts'][artifact_uri][extension].items(): if ext_key == 'BITPIX': - logging.debug( - f'01/11/18 Chris said ignore {key!r}.') + logging.debug(f'01/11/18 Chris said ignore {key!r}.') continue try: - for caom2_key in \ - convert.get_caom2_elements(ext_key): - obs_blueprint.set(caom2_key, ext_value, - extension) - logging.debug(('{} set override value to {} ' - 'in extension {}.').format( - caom2_key, ext_value, extension)) + for caom2_key in convert.get_caom2_elements(ext_key): + obs_blueprint.set(caom2_key, ext_value, extension) + logging.debug( + ('{} set override value to {} ' 'in extension {}.').format( + caom2_key, ext_value, extension + ) + ) except ValueError: - errors.append('{}: ext {} {}'.format( - key, extension, sys.exc_info()[1])) + errors.append('{}: ext {} {}'.format(key, extension, sys.exc_info()[1])) else: try: for caom2_key in convert.get_caom2_elements(key): @@ -513,14 +469,14 @@ def main_app(): parser = caom2blueprint.get_arg_parser() # add legacy fits2caom2 arguments - parser.add_argument('--config', required=False, - help=('optional CAOM2 utype to keyword config file to ' - 'merge with the internal configuration')) + parser.add_argument( + '--config', + required=False, + help=('optional CAOM2 utype to keyword config file to ' 'merge with the internal configuration'), + ) - parser.add_argument('--default', - help='file with default values for keywords') - parser.add_argument('--override', - help='file with override values for keywords') + parser.add_argument('--default', help='file with default values for keywords') + parser.add_argument('--override', help='file with override values for keywords') if len(sys.argv) < 2: # correct error message when running 
python3 @@ -552,11 +508,9 @@ def main_app(): else: obs_blueprint[uri] = blueprints.ObsBlueprint() if config: - result = update_blueprint(obs_blueprint[uri], uri, - config, defaults, overrides) + result = update_blueprint(obs_blueprint[uri], uri, config, defaults, overrides) if result: - logging.debug( - f'Errors parsing the config files: {result}') + logging.debug(f'Errors parsing the config files: {result}') try: caom2blueprint.proc(args, obs_blueprint) diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index d140015a..dde9b9bb 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -86,6 +86,7 @@ class Caom2Exception(Exception): """Exception raised when an attempt to create or update a CAOM2 record fails for some reason.""" + pass @@ -93,6 +94,7 @@ class BlueprintParser: """ Extract CAOM2 metadata from files with no WCS information. """ + def __init__(self, obs_blueprint=None, uri=None): if obs_blueprint: self._blueprint = obs_blueprint @@ -116,15 +118,13 @@ def apply_blueprint(self): plan = self.blueprint._plan # first apply the functions - if (self.blueprint._module is not None or - self.blueprint._module_instance is not None): + if self.blueprint._module is not None or self.blueprint._module_instance is not None: for key, value in plan.items(): if ObsBlueprint.is_function(value): if self._blueprint._module_instance is None: plan[key] = self._execute_external(value, key, 0) else: - plan[key] = self._execute_external_instance( - value, key, 0) + plan[key] = self._execute_external_instance(value, key, 0) # apply defaults for key, value in plan.items(): @@ -140,21 +140,19 @@ def augment_observation(self, observation, artifact_uri, product_id=None): :param artifact_uri: the key for finding the artifact to augment :param product_id: the key for finding for the plane to augment """ - self.logger.debug( - f'Begin CAOM2 observation augmentation for URI {artifact_uri}.') + self.logger.debug(f'Begin CAOM2 observation augmentation for URI {artifact_uri}.') if observation is None or not isinstance(observation, caom2.Observation): - raise ValueError( - f'Observation type mis-match for {observation}.') + raise ValueError(f'Observation type mis-match for {observation}.') - observation.meta_release = self._get_datetime(self._get_from_list( - 'Observation.metaRelease', index=0, - current=observation.meta_release)) + observation.meta_release = self._get_datetime( + self._get_from_list('Observation.metaRelease', index=0, current=observation.meta_release) + ) observation.meta_read_groups = self._get_from_list( - 'Observation.metaReadGroups', index=0, - current=observation.meta_read_groups) + 'Observation.metaReadGroups', index=0, current=observation.meta_read_groups + ) observation.meta_producer = self._get_from_list( - 'Observation.metaProducer', index=0, - current=observation.meta_producer) + 'Observation.metaProducer', index=0, current=observation.meta_producer + ) plane = None if not product_id: @@ -170,8 +168,7 @@ def augment_observation(self, observation, artifact_uri, product_id=None): plane = caom2.Plane(product_id=product_id) observation.planes[product_id] = plane self.augment_plane(plane, artifact_uri) - self.logger.debug( - f'End CAOM2 observation augmentation for {artifact_uri}.') + self.logger.debug(f'End CAOM2 observation augmentation for {artifact_uri}.') def augment_plane(self, plane, artifact_uri): """ @@ -179,23 +176,23 @@ def augment_plane(self, plane, artifact_uri): :param plane: existing CAOM2 plane to be augmented. 
:param artifact_uri: """ - self.logger.debug( - f'Begin CAOM2 plane augmentation for {artifact_uri}.') + self.logger.debug(f'Begin CAOM2 plane augmentation for {artifact_uri}.') if plane is None or not isinstance(plane, caom2.Plane): raise ValueError(f'Plane type mis-match for {plane}') - plane.meta_release = self._get_datetime(self._get_from_list( - 'Plane.metaRelease', index=0, current=plane.meta_release)) - plane.data_release = self._get_datetime(self._get_from_list( - 'Plane.dataRelease', index=0, current=plane.data_release)) + plane.meta_release = self._get_datetime( + self._get_from_list('Plane.metaRelease', index=0, current=plane.meta_release) + ) + plane.data_release = self._get_datetime( + self._get_from_list('Plane.dataRelease', index=0, current=plane.data_release) + ) plane.data_product_type = self._to_data_product_type( - self._get_from_list('Plane.dataProductType', index=0, - current=plane.data_product_type)) - plane.calibration_level = self._to_calibration_level(_to_int_32( - self._get_from_list('Plane.calibrationLevel', index=0, - current=plane.calibration_level))) - plane.meta_producer = self._get_from_list( - 'Plane.metaProducer', index=0, current=plane.meta_producer) + self._get_from_list('Plane.dataProductType', index=0, current=plane.data_product_type) + ) + plane.calibration_level = self._to_calibration_level( + _to_int_32(self._get_from_list('Plane.calibrationLevel', index=0, current=plane.calibration_level)) + ) + plane.meta_producer = self._get_from_list('Plane.metaProducer', index=0, current=plane.meta_producer) artifact = None for ii in plane.artifacts: @@ -203,14 +200,14 @@ def augment_plane(self, plane, artifact_uri): if artifact.uri == artifact_uri: break if artifact is None or artifact.uri != artifact_uri: - artifact = caom2.Artifact(artifact_uri, self._to_product_type( - self._get_from_list('Artifact.productType', index=0)), - self._to_release_type(self._get_from_list( - 'Artifact.releaseType', index=0))) + artifact = caom2.Artifact( + artifact_uri, + self._to_product_type(self._get_from_list('Artifact.productType', index=0)), + self._to_release_type(self._get_from_list('Artifact.releaseType', index=0)), + ) plane.artifacts[artifact_uri] = artifact self.augment_artifact(artifact, 0) - self.logger.debug( - f'End CAOM2 plane augmentation for {artifact_uri}.') + self.logger.debug(f'End CAOM2 plane augmentation for {artifact_uri}.') def augment_artifact(self, artifact, index): """ @@ -220,28 +217,28 @@ def augment_artifact(self, artifact, index): """ self.logger.debug(f'Begin CAOM2 artifact augmentation for {self.uri}.') if artifact is None or not isinstance(artifact, caom2.Artifact): - raise ValueError( - f'Artifact type mis-match for {artifact}') - - artifact.product_type = self._to_product_type(self._get_from_list( - 'Artifact.productType', index=0, current=artifact.product_type)) - artifact.release_type = self._to_release_type(self._get_from_list( - 'Artifact.releaseType', index=0, current=artifact.release_type)) - artifact.content_type = self._get_from_list( - 'Artifact.contentType', index=0, current=artifact.content_type) + raise ValueError(f'Artifact type mis-match for {artifact}') + + artifact.product_type = self._to_product_type( + self._get_from_list('Artifact.productType', index=0, current=artifact.product_type) + ) + artifact.release_type = self._to_release_type( + self._get_from_list('Artifact.releaseType', index=0, current=artifact.release_type) + ) + artifact.content_type = self._get_from_list('Artifact.contentType', index=0, 
current=artifact.content_type) artifact.content_length = self._get_from_list( - 'Artifact.contentLength', index=0, current=artifact.content_length) - artifact.content_checksum = _to_checksum_uri(self._get_from_list( - 'Artifact.contentChecksum', index=0, - current=artifact.content_checksum)) + 'Artifact.contentLength', index=0, current=artifact.content_length + ) + artifact.content_checksum = _to_checksum_uri( + self._get_from_list('Artifact.contentChecksum', index=0, current=artifact.content_checksum) + ) artifact.content_release = self._get_from_list( - 'Artifact.contentRelease', index=0, - current=artifact.content_release) + 'Artifact.contentRelease', index=0, current=artifact.content_release + ) artifact.content_read_groups = self._get_from_list( - 'Artifact.contentReadGroups', index=0, - current=artifact.content_read_groups) - artifact.meta_producer = self._get_from_list( - 'Artifact.metaProducer', index=0, current=artifact.meta_producer) + 'Artifact.contentReadGroups', index=0, current=artifact.content_read_groups + ) + artifact.meta_producer = self._get_from_list('Artifact.metaProducer', index=0, current=artifact.meta_producer) self.logger.debug(f'End CAOM2 artifact augmentation for {self.uri}.') def _get_from_list(self, lookup, index, current=None): @@ -250,15 +247,16 @@ def _get_from_list(self, lookup, index, current=None): keywords = self.blueprint._get(lookup) except KeyError: self.add_error(lookup, sys.exc_info()[1]) - self.logger.debug( - f'Could not find {lookup} in configuration.') + self.logger.debug(f'Could not find {lookup} in configuration.') if current: - self.logger.debug( - f'{lookup}: using current value of {current!r}.') + self.logger.debug(f'{lookup}: using current value of {current!r}.') value = current return value - if (keywords is not None and not ObsBlueprint.needs_lookup(keywords) - and not ObsBlueprint.is_function(keywords)): + if ( + keywords is not None + and not ObsBlueprint.needs_lookup(keywords) + and not ObsBlueprint.is_function(keywords) + ): value = keywords elif self._blueprint.update: # The first clause: boolean attributes are used to represent @@ -284,8 +282,7 @@ def _get_set_from_list(self, lookup, index): keywords = self.blueprint._get(lookup) except KeyError: self.add_error(lookup, sys.exc_info()[1]) - self.logger.debug(f'Could not find \'{lookup}\' in caom2blueprint ' - f'configuration.') + self.logger.debug(f'Could not find \'{lookup}\' in caom2blueprint ' f'configuration.') # if there's something useful as a value in the keywords, # extract it @@ -294,16 +291,14 @@ def _get_set_from_list(self, lookup, index): # if there's a default value use it if keywords[1]: value = keywords[1] - self.logger.debug( - f'{lookup}: assigned default value {value}.') + self.logger.debug(f'{lookup}: assigned default value {value}.') elif not ObsBlueprint.is_function(keywords): value = keywords self.logger.debug(f'{lookup}: assigned value {value}.') return value def add_error(self, key, message): - self._errors.append('{} {} {}'.format( - datetime.now().strftime('%Y-%m-%dT%H:%M:%S'), key, message)) + self._errors.append('{} {} {}'.format(datetime.now().strftime('%Y-%m-%dT%H:%M:%S'), key, message)) def _to_data_product_type(self, value): return self._to_enum_type(value, caom2.DataProductType) @@ -319,9 +314,7 @@ def _to_release_type(self, value): def _to_enum_type(self, value, to_enum_type): if value is None: - raise ValueError( - f'Must set a value of {to_enum_type.__name__} for ' - f'{self.uri}.') + raise ValueError(f'Must set a value of {to_enum_type.__name__} 
for ' f'{self.uri}.') elif isinstance(value, to_enum_type): return value else: @@ -343,22 +336,19 @@ def _execute_external(self, value, key, extension): elif 'header' in value and isinstance(self, FitsParser): parameter = self._headers[extension] elif isinstance(self, FitsParser): - parameter = {'uri': self.uri, - 'header': self._headers[extension]} + parameter = {'uri': self.uri, 'header': self._headers[extension]} else: if hasattr(self, '_file'): parameter = {'base': self._file} else: - parameter = {'uri': self.uri, - 'header': None} + parameter = {'uri': self.uri, 'header': None} result = '' execute = None try: execute = getattr(self.blueprint._module, value.split('(')[0]) except Exception as e: - msg = 'Failed to find {}.{} for {}'.format( - self.blueprint._module.__name__, value.split('(')[0], key) + msg = 'Failed to find {}.{} for {}'.format(self.blueprint._module.__name__, value.split('(')[0], key) self.logger.error(msg) self._errors.append(msg) tb = traceback.format_exc() @@ -368,11 +358,9 @@ def _execute_external(self, value, key, extension): result = execute(parameter) self.logger.debug(f'Key {key} calculated value of {result} using {value} type {type(result)}') except Exception as e: - msg = 'Failed to execute {} for {} in {}'.format( - execute.__name__, key, self.uri) + msg = 'Failed to execute {} for {} in {}'.format(execute.__name__, key, self.uri) self.logger.error(msg) - self.logger.debug('Input parameter was {}, value was {}'.format( - parameter, value)) + self.logger.debug('Input parameter was {}, value was {}'.format(parameter, value)) self._errors.append(msg) tb = traceback.format_exc() self.logger.debug(tb) @@ -395,12 +383,11 @@ def _execute_external_instance(self, value, key, extension): """ result = '' try: - execute = getattr( - self.blueprint._module_instance, value.split('(')[0]) + execute = getattr(self.blueprint._module_instance, value.split('(')[0]) except Exception as e: msg = 'Failed to find {}.{} for {}'.format( - self.blueprint._module_instance.__class__.__name__, - value.split('(')[0], key) + self.blueprint._module_instance.__class__.__name__, value.split('(')[0], key + ) self.logger.error(msg) self._errors.append(msg) tb = traceback.format_exc() @@ -418,8 +405,7 @@ def _execute_external_instance(self, value, key, extension): # for a SkyCoord construction failure. 
raise Caom2Exception(e2) except Exception as e: - msg = 'Failed to execute {} for {} in {}'.format( - execute, key, self.uri) + msg = 'Failed to execute {} for {} in {}'.format(execute, key, self.uri) self.logger.error(msg) self.logger.debug('Input value was {}'.format(value)) self._errors.append(msg) @@ -444,19 +430,26 @@ def _get_datetime(self, from_value): # CFHT 2003/03/29,01:34:54 # CFHT 2003/03/29 # DDO 12/02/95 - for dt_format in ['%Y-%m-%dT%H:%M:%S', '%Y-%m-%dT%H:%M:%S.%f', - '%Y-%m-%d %H:%M:%S.%f', '%Y-%m-%d', - '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S', - '%Y/%m/%d,%H:%M:%S', '%Y/%m/%d', - '%d/%m/%y', '%d/%m/%y %H:%M:%S', '%d-%m-%Y']: + for dt_format in [ + '%Y-%m-%dT%H:%M:%S', + '%Y-%m-%dT%H:%M:%S.%f', + '%Y-%m-%d %H:%M:%S.%f', + '%Y-%m-%d', + '%Y/%m/%d %H:%M:%S', + '%Y-%m-%d %H:%M:%S', + '%Y/%m/%d,%H:%M:%S', + '%Y/%m/%d', + '%d/%m/%y', + '%d/%m/%y %H:%M:%S', + '%d-%m-%Y', + ]: try: result = datetime.strptime(from_value, dt_format) except ValueError: pass if result is None: - self.logger.error('Cannot parse datetime {}'.format( - from_value)) + self.logger.error('Cannot parse datetime {}'.format(from_value)) self.add_error('get_datetime', sys.exc_info()[1]) return result else: @@ -464,14 +457,12 @@ def _get_datetime(self, from_value): class ContentParser(BlueprintParser): - def __init__(self, obs_blueprint=None, uri=None): super().__init__(obs_blueprint, uri) self._wcs_parser = WcsParser(obs_blueprint, extension=0) def _get_chunk_naxis(self, chunk, index): - chunk.naxis = self._get_from_list( - 'Chunk.naxis', index, self.blueprint.get_configed_axes_count()) + chunk.naxis = self._get_from_list('Chunk.naxis', index, self.blueprint.get_configed_axes_count()) def augment_artifact(self, artifact, index): """ @@ -481,13 +472,10 @@ def augment_artifact(self, artifact, index): """ super().augment_artifact(artifact, index) - self.logger.debug( - f'Begin content artifact augmentation for {artifact.uri}') + self.logger.debug(f'Begin content artifact augmentation for {artifact.uri}') if self.blueprint.get_configed_axes_count() == 0: - raise TypeError( - f'No WCS Data. End content artifact augmentation for ' - f'{artifact.uri}.') + raise TypeError(f'No WCS Data. 
End content artifact augmentation for ' f'{artifact.uri}.') if self.add_parts(artifact, index): part = artifact.parts[str(index)] @@ -531,8 +519,7 @@ def augment_artifact(self, artifact, index): self._wcs_parser.augment_custom(chunk) self._try_custom_with_blueprint(chunk, index) - self.logger.debug( - f'End content artifact augmentation for {artifact.uri}.') + self.logger.debug(f'End content artifact augmentation for {artifact.uri}.') def augment_observation(self, observation, artifact_uri, product_id=None): """ @@ -542,8 +529,7 @@ def augment_observation(self, observation, artifact_uri, product_id=None): :param product_id: the key for finding for the plane to augment """ super().augment_observation(observation, artifact_uri, product_id) - self.logger.debug( - f'Begin content observation augmentation for URI {artifact_uri}.') + self.logger.debug(f'Begin content observation augmentation for URI {artifact_uri}.') members = self._get_members(observation) if members: if isinstance(members, caom2.TypedSet): @@ -554,33 +540,28 @@ def augment_observation(self, observation, artifact_uri, product_id=None): observation.members.add(caom2.ObservationURI(m)) observation.algorithm = self._get_algorithm(observation) - observation.sequence_number = _to_int(self._get_from_list( - 'Observation.sequenceNumber', index=0)) + observation.sequence_number = _to_int(self._get_from_list('Observation.sequenceNumber', index=0)) observation.intent = self._get_from_list( - 'Observation.intent', 0, (caom2.ObservationIntentType.SCIENCE if - observation.intent is None else - observation.intent)) - observation.type = self._get_from_list('Observation.type', 0, - current=observation.type) + 'Observation.intent', + 0, + (caom2.ObservationIntentType.SCIENCE if observation.intent is None else observation.intent), + ) + observation.type = self._get_from_list('Observation.type', 0, current=observation.type) observation.meta_release = self._get_datetime( - self._get_from_list('Observation.metaRelease', 0, - current=observation.meta_release)) - observation.meta_read_groups = self._get_from_list( - 'Observation.metaReadGroups', 0) + self._get_from_list('Observation.metaRelease', 0, current=observation.meta_release) + ) + observation.meta_read_groups = self._get_from_list('Observation.metaReadGroups', 0) observation.meta_producer = self._get_from_list( - 'Observation.metaProducer', 0, current=observation.meta_producer) - observation.requirements = self._get_requirements( - observation.requirements) + 'Observation.metaProducer', 0, current=observation.meta_producer + ) + observation.requirements = self._get_requirements(observation.requirements) observation.instrument = self._get_instrument(observation.instrument) observation.proposal = self._get_proposal(observation.proposal) observation.target = self._get_target(observation.target) - observation.target_position = self._get_target_position( - observation.target_position) + observation.target_position = self._get_target_position(observation.target_position) observation.telescope = self._get_telescope(observation.telescope) - observation.environment = self._get_environment( - observation.environment) - self.logger.debug( - f'End content observation augmentation for {artifact_uri}.') + observation.environment = self._get_environment(observation.environment) + self.logger.debug(f'End content observation augmentation for {artifact_uri}.') def augment_plane(self, plane, artifact_uri): """ @@ -589,28 +570,25 @@ def augment_plane(self, plane, artifact_uri): :param artifact_uri: """ 
super().augment_plane(plane, artifact_uri) - self.logger.debug( - f'Begin content plane augmentation for {artifact_uri}.') + self.logger.debug(f'Begin content plane augmentation for {artifact_uri}.') - plane.meta_release = self._get_datetime(self._get_from_list( - 'Plane.metaRelease', index=0, current=plane.meta_release)) - plane.data_release = self._get_datetime(self._get_from_list( - 'Plane.dataRelease', index=0)) + plane.meta_release = self._get_datetime( + self._get_from_list('Plane.metaRelease', index=0, current=plane.meta_release) + ) + plane.data_release = self._get_datetime(self._get_from_list('Plane.dataRelease', index=0)) plane.data_product_type = self._to_data_product_type( - self._get_from_list('Plane.dataProductType', index=0, - current=plane.data_product_type)) - plane.calibration_level = self._to_calibration_level(_to_int_32( - self._get_from_list('Plane.calibrationLevel', index=0, - current=plane.calibration_level))) - plane.meta_producer = self._get_from_list( - 'Plane.metaProducer', index=0, current=plane.meta_producer) + self._get_from_list('Plane.dataProductType', index=0, current=plane.data_product_type) + ) + plane.calibration_level = self._to_calibration_level( + _to_int_32(self._get_from_list('Plane.calibrationLevel', index=0, current=plane.calibration_level)) + ) + plane.meta_producer = self._get_from_list('Plane.metaProducer', index=0, current=plane.meta_producer) plane.observable = self._get_observable(current=plane.observable) plane.provenance = self._get_provenance(plane.provenance) plane.metrics = self._get_metrics(current=plane.metrics) plane.quality = self._get_quality(current=plane.quality) - self.logger.debug( - f'End content plane augmentation for {artifact_uri}.') + self.logger.debug(f'End content plane augmentation for {artifact_uri}.') def _get_algorithm(self, obs): """ @@ -620,8 +598,7 @@ def _get_algorithm(self, obs): """ self.logger.debug('Begin Algorithm augmentation.') # TODO DEFAULT VALUE - name = self._get_from_list('Observation.algorithm.name', index=0, - current=obs.algorithm.name) + name = self._get_from_list('Observation.algorithm.name', index=0, current=obs.algorithm.name) if name is not None and name == 'exposure' and isinstance(obs, caom2.DerivedObservation): # stop the raising of a ValueError when adding a Plane representing a SimpleObservation to a # DerivedObservation under construction. 
It results in attempting to change Algorithm.name value to @@ -640,11 +617,11 @@ def _get_energy_transition(self, current): """ self.logger.debug('Begin EnergyTransition augmentation.') species = self._get_from_list( - 'Chunk.energy.transition.species', index=0, - current=None if current is None else current.species) + 'Chunk.energy.transition.species', index=0, current=None if current is None else current.species + ) transition = self._get_from_list( - 'Chunk.energy.transition.transition', index=0, - current=None if current is None else current.transition) + 'Chunk.energy.transition.transition', index=0, current=None if current is None else current.transition + ) result = None if species is not None and transition is not None: result = caom2.EnergyTransition(species, transition) @@ -661,28 +638,38 @@ def _get_environment(self, current): """ self.logger.debug('Begin Environment augmentation.') seeing = self._get_from_list( - 'Observation.environment.seeing', index=0, - current=None if current is None else current.seeing) + 'Observation.environment.seeing', index=0, current=None if current is None else current.seeing + ) humidity = _to_float( self._get_from_list( - 'Observation.environment.humidity', index=0, - current=None if current is None else current.humidity)) + 'Observation.environment.humidity', index=0, current=None if current is None else current.humidity + ) + ) elevation = self._get_from_list( - 'Observation.environment.elevation', index=0, - current=None if current is None else current.elevation) + 'Observation.environment.elevation', index=0, current=None if current is None else current.elevation + ) tau = self._get_from_list( - 'Observation.environment.tau', index=0, - current=None if current is None else current.tau) + 'Observation.environment.tau', index=0, current=None if current is None else current.tau + ) wavelength_tau = self._get_from_list( - 'Observation.environment.wavelengthTau', index=0, - current=None if current is None else current.wavelength_tau) + 'Observation.environment.wavelengthTau', + index=0, + current=None if current is None else current.wavelength_tau, + ) ambient = _to_float( self._get_from_list( - 'Observation.environment.ambientTemp', index=0, - current=None if current is None else current.ambient_temp)) - photometric = self._cast_as_bool(self._get_from_list( - 'Observation.environment.photometric', index=0, - current=None if current is None else current.photometric)) + 'Observation.environment.ambientTemp', + index=0, + current=None if current is None else current.ambient_temp, + ) + ) + photometric = self._cast_as_bool( + self._get_from_list( + 'Observation.environment.photometric', + index=0, + current=None if current is None else current.photometric, + ) + ) enviro = None if seeing or humidity or elevation or tau or wavelength_tau or ambient: enviro = caom2.Environment() @@ -704,10 +691,9 @@ def _get_instrument(self, current): """ self.logger.debug('Begin Instrument augmentation.') name = self._get_from_list( - 'Observation.instrument.name', index=0, - current=None if current is None else current.name) - keywords = self._get_set_from_list( - 'Observation.instrument.keywords', index=0) + 'Observation.instrument.name', index=0, current=None if current is None else current.name + ) + keywords = self._get_set_from_list('Observation.instrument.keywords', index=0) instr = None if name: instr = caom2.Instrument(str(name)) @@ -723,54 +709,48 @@ def _get_members(self, obs): """ members = None self.logger.debug('Begin Members augmentation.') - if 
(isinstance(obs, caom2.SimpleObservation) and - (self.blueprint._get('DerivedObservation.members') or - self.blueprint._get('CompositeObservation.members'))): - raise TypeError( - 'Cannot apply blueprint for DerivedObservation to a ' - 'simple observation') + if isinstance(obs, caom2.SimpleObservation) and ( + self.blueprint._get('DerivedObservation.members') or self.blueprint._get('CompositeObservation.members') + ): + raise TypeError('Cannot apply blueprint for DerivedObservation to a ' 'simple observation') elif isinstance(obs, caom2.DerivedObservation): - lookup = self.blueprint._get('DerivedObservation.members', - extension=1) + lookup = self.blueprint._get('DerivedObservation.members', extension=1) if ObsBlueprint.is_table(lookup) and len(self.headers) > 1: - member_list = self._get_from_table( - 'DerivedObservation.members', 1) + member_list = self._get_from_table('DerivedObservation.members', 1) # ensure the members are good little ObservationURIs if member_list.startswith('caom:'): members = member_list else: - members = ' '.join(['caom:{}/{}'.format( - obs.collection, i) if not i.startswith('caom') else i - for i in member_list.split()]) + members = ' '.join( + [ + 'caom:{}/{}'.format(obs.collection, i) if not i.startswith('caom') else i + for i in member_list.split() + ] + ) else: if obs.members is None: - members = self._get_from_list( - 'DerivedObservation.members', index=0) + members = self._get_from_list('DerivedObservation.members', index=0) else: - members = self._get_from_list( - 'DerivedObservation.members', index=0, - current=obs.members) + members = self._get_from_list('DerivedObservation.members', index=0, current=obs.members) elif isinstance(obs, caom2.CompositeObservation): - lookup = self.blueprint._get('CompositeObservation.members', - extension=1) + lookup = self.blueprint._get('CompositeObservation.members', extension=1) if ObsBlueprint.is_table(lookup) and len(self.headers) > 1: - member_list = self._get_from_table( - 'CompositeObservation.members', 1) + member_list = self._get_from_table('CompositeObservation.members', 1) # ensure the members are good little ObservationURIs if member_list.startswith('caom:'): members = member_list else: - members = ' '.join(['caom:{}/{}'.format( - obs.collection, i) if not i.startswith('caom') else i - for i in member_list.split()]) + members = ' '.join( + [ + 'caom:{}/{}'.format(obs.collection, i) if not i.startswith('caom') else i + for i in member_list.split() + ] + ) else: if obs.members is None: - members = self._get_from_list( - 'CompositeObservation.members', index=0) + members = self._get_from_list('CompositeObservation.members', index=0) else: - members = self._get_from_list( - 'CompositeObservation.members', index=0, - current=obs.members) + members = self._get_from_list('CompositeObservation.members', index=0, current=obs.members) self.logger.debug('End Members augmentation.') return members @@ -781,27 +761,28 @@ def _get_metrics(self, current): """ self.logger.debug('Begin Metrics augmentation.') source_number_density = self._get_from_list( - 'Plane.metrics.sourceNumberDensity', index=0, - current=None if current is None else current.source_number_density) + 'Plane.metrics.sourceNumberDensity', + index=0, + current=None if current is None else current.source_number_density, + ) background = self._get_from_list( - 'Plane.metrics.background', index=0, - current=None if current is None else current.background) + 'Plane.metrics.background', index=0, current=None if current is None else current.background + ) 
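# --- Illustrative sketch, not part of the patch ---------------------------------
# Every lookup in _get_metrics (and its sibling _get_* helpers) has the same
# shape: ask the blueprint for a value, fall back to whatever is already set on
# the existing CAOM2 entity when the blueprint is silent, and only build the
# containing object when at least one field resolved (the
# `if source_number_density or background or ...` guard below). A library-free
# sketch of that fallback, with hypothetical names:
def lookup_with_current(blueprint_value, current_value):
    """Prefer the blueprint value; keep the existing value when the blueprint is silent."""
    return blueprint_value if blueprint_value is not None else current_value

# e.g. lookup_with_current(None, 0.03) -> 0.03, lookup_with_current(0.05, 0.03) -> 0.05
# --------------------------------------------------------------------------------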
background_stddev = self._get_from_list( - 'Plane.metrics.backgroundStddev', index=0, - current=None if current is None else current.background_std_dev) + 'Plane.metrics.backgroundStddev', index=0, current=None if current is None else current.background_std_dev + ) flux_density_limit = self._get_from_list( - 'Plane.metrics.fluxDensityLimit', index=0, - current=None if current is None else current.flux_density_limit) + 'Plane.metrics.fluxDensityLimit', index=0, current=None if current is None else current.flux_density_limit + ) mag_limit = self._get_from_list( - 'Plane.metrics.magLimit', index=0, - current=None if current is None else current.mag_limit) + 'Plane.metrics.magLimit', index=0, current=None if current is None else current.mag_limit + ) sample_snr = self._get_from_list( - 'Plane.metrics.sampleSNR', index=0, - current=None if current is None else current.sample_snr) + 'Plane.metrics.sampleSNR', index=0, current=None if current is None else current.sample_snr + ) metrics = None - if (source_number_density or background or background_stddev or - flux_density_limit or mag_limit or sample_snr): + if source_number_density or background or background_stddev or flux_density_limit or mag_limit or sample_snr: metrics = caom2.Metrics() metrics.source_number_density = source_number_density metrics.background = background @@ -838,7 +819,10 @@ def _get_axis_wcs(self, label, wcs, index): aug_error = self._two_param_constructor( f'Chunk.{label}.axis.error.syser', f'Chunk.{label}.axis.error.rnder', - index, _to_float, caom2.CoordError) + index, + _to_float, + caom2.CoordError, + ) aug_naxis = None aug_range = self._try_range(index, label) @@ -849,7 +833,10 @@ def _get_axis_wcs(self, label, wcs, index): aug_ref_coord = self._two_param_constructor( f'Chunk.{label}.axis.function.refCoord.pix', f'Chunk.{label}.axis.function.refCoord.val', - index, _to_float, caom2.RefCoord) + index, + _to_float, + caom2.RefCoord, + ) aug_delta = _to_float(self._get_from_list(f'Chunk.{label}.axis.function.delta', index)) aug_length = _to_int(self._get_from_list(f'Chunk.{label}.axis.function.naxis', index)) aug_function = None @@ -874,9 +861,7 @@ def _get_observable(self, current): :return: Observable """ self.logger.debug('Begin Observable augmentation.') - ucd = self._get_from_list( - 'Plane.observable.ucd', index=0, - current=None if current is None else current.ucd) + ucd = self._get_from_list('Plane.observable.ucd', index=0, current=None if current is None else current.ucd) observable = caom2.Observable(ucd) if ucd else None self.logger.debug('End Observable augmentation.') return observable @@ -889,19 +874,18 @@ def _get_proposal(self, current): """ self.logger.debug('Begin Proposal augmentation.') prop_id = self._get_from_list( - 'Observation.proposal.id', index=0, - current=None if current is None else current.id) + 'Observation.proposal.id', index=0, current=None if current is None else current.id + ) pi = self._get_from_list( - 'Observation.proposal.pi', index=0, - current=None if current is None else current.pi_name) + 'Observation.proposal.pi', index=0, current=None if current is None else current.pi_name + ) project = self._get_from_list( - 'Observation.proposal.project', index=0, - current=None if current is None else current.project) + 'Observation.proposal.project', index=0, current=None if current is None else current.project + ) title = self._get_from_list( - 'Observation.proposal.title', index=0, - current=None if current is None else current.title) - keywords = self._get_set_from_list( - 
'Observation.proposal.keywords', index=0) + 'Observation.proposal.title', index=0, current=None if current is None else current.title + ) + keywords = self._get_set_from_list('Observation.proposal.keywords', index=0) proposal = current if prop_id: proposal = caom2.Proposal(str(prop_id), pi, project, title) @@ -917,39 +901,43 @@ def _get_provenance(self, current): """ self.logger.debug('Begin Provenance augmentation.') name = _to_str( + self._get_from_list('Plane.provenance.name', index=0, current=None if current is None else current.name) + ) + p_version = _to_str( self._get_from_list( - 'Plane.provenance.name', index=0, - current=None if current is None else current.name)) - p_version = _to_str(self._get_from_list( - 'Plane.provenance.version', index=0, - current=None if current is None else current.version)) + 'Plane.provenance.version', index=0, current=None if current is None else current.version + ) + ) project = _to_str( self._get_from_list( - 'Plane.provenance.project', index=0, - current=None if current is None else current.project)) + 'Plane.provenance.project', index=0, current=None if current is None else current.project + ) + ) producer = _to_str( self._get_from_list( - 'Plane.provenance.producer', index=0, - current=None if current is None else current.producer)) + 'Plane.provenance.producer', index=0, current=None if current is None else current.producer + ) + ) run_id = _to_str( self._get_from_list( - 'Plane.provenance.runID', index=0, - current=None if current is None else current.run_id)) + 'Plane.provenance.runID', index=0, current=None if current is None else current.run_id + ) + ) reference = _to_str( self._get_from_list( - 'Plane.provenance.reference', index=0, - current=None if current is None else current.reference)) + 'Plane.provenance.reference', index=0, current=None if current is None else current.reference + ) + ) last_executed = self._get_datetime( self._get_from_list( - 'Plane.provenance.lastExecuted', index=0, - current=None if current is None else current.last_executed)) - keywords = self._get_set_from_list( - 'Plane.provenance.keywords', index=0) + 'Plane.provenance.lastExecuted', index=0, current=None if current is None else current.last_executed + ) + ) + keywords = self._get_set_from_list('Plane.provenance.keywords', index=0) inputs = self._get_set_from_list('Plane.provenance.inputs', index=0) prov = None if name: - prov = caom2.Provenance(name, p_version, project, producer, run_id, - reference, last_executed) + prov = caom2.Provenance(name, p_version, project, producer, run_id, reference, last_executed) ContentParser._add_keywords(keywords, current, prov) if inputs: if isinstance(inputs, caom2.TypedSet): @@ -971,9 +959,7 @@ def _get_quality(self, current): :return: Quality """ self.logger.debug('Begin Quality augmentation.') - flag = self._get_from_list( - 'Plane.dataQuality', index=0, - current=None if current is None else current.flag) + flag = self._get_from_list('Plane.dataQuality', index=0, current=None if current is None else current.flag) quality = caom2.DataQuality(flag) if flag else None self.logger.debug('End Quality augmentation.') return quality @@ -986,8 +972,8 @@ def _get_requirements(self, current): """ self.logger.debug('Begin Requirement augmentation.') flag = self._get_from_list( - 'Observation.requirements.flag', index=0, - current=None if current is None else current.flag) + 'Observation.requirements.flag', index=0, current=None if current is None else current.flag + ) reqts = caom2.Requirements(flag) if flag else None 
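# --- Illustrative sketch, not part of the patch ---------------------------------
# _get_provenance (just above) only instantiates caom2.Provenance when the
# blueprint yields a name; the remaining fields are passed positionally, and any
# resolved inputs are added to the provenance afterwards. A condensed sketch,
# assuming `lookups` is a dict of already-resolved blueprint values and that the
# inputs are already caom2.PlaneURI instances:
import caom2

def build_provenance(lookups):
    name = lookups.get('name')
    if not name:  # no name means no Provenance at all
        return None
    prov = caom2.Provenance(
        name,
        lookups.get('version'),
        lookups.get('project'),
        lookups.get('producer'),
        lookups.get('runID'),
        lookups.get('reference'),
        lookups.get('lastExecuted'),
    )
    for uri in lookups.get('inputs', []):
        prov.inputs.add(uri)  # prov.inputs is a TypedSet, so items must be PlaneURI
    return prov
# --------------------------------------------------------------------------------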
self.logger.debug('End Requirement augmentation.') return reqts @@ -999,30 +985,33 @@ def _get_target(self, current): """ self.logger.debug('Begin Target augmentation.') name = self._get_from_list( - 'Observation.target.name', index=0, - current=None if current is None else current.name) + 'Observation.target.name', index=0, current=None if current is None else current.name + ) target_type = self._get_from_list( - 'Observation.target.type', index=0, - current=None if current is None else current.target_type) - standard = self._cast_as_bool(self._get_from_list( - 'Observation.target.standard', index=0, - current=None if current is None else current.standard)) + 'Observation.target.type', index=0, current=None if current is None else current.target_type + ) + standard = self._cast_as_bool( + self._get_from_list( + 'Observation.target.standard', index=0, current=None if current is None else current.standard + ) + ) redshift = self._get_from_list( - 'Observation.target.redshift', index=0, - current=None if current is None else current.redshift) - keywords = self._get_set_from_list( - 'Observation.target.keywords', index=0) + 'Observation.target.redshift', index=0, current=None if current is None else current.redshift + ) + keywords = self._get_set_from_list('Observation.target.keywords', index=0) moving = self._cast_as_bool( self._get_from_list( - 'Observation.target.moving', index=0, - current=None if current is None else current.moving)) - target_id = _to_str(self._get_from_list( - 'Observation.target.targetID', index=0, - current=None if current is None else current.target_id)) + 'Observation.target.moving', index=0, current=None if current is None else current.moving + ) + ) + target_id = _to_str( + self._get_from_list( + 'Observation.target.targetID', index=0, current=None if current is None else current.target_id + ) + ) target = None if name: - target = caom2.Target(str(name), target_type, standard, redshift, - moving=moving, target_id=target_id) + target = caom2.Target(str(name), target_type, standard, redshift, moving=moving, target_id=target_id) ContentParser._add_keywords(keywords, current, target) self.logger.debug('End Target augmentation.') return target @@ -1035,17 +1024,21 @@ def _get_target_position(self, current): """ self.logger.debug('Begin CAOM2 TargetPosition augmentation.') x = self._get_from_list( - 'Observation.target_position.point.cval1', index=0, - current=None if current is None else current.coordinates.cval1) + 'Observation.target_position.point.cval1', + index=0, + current=None if current is None else current.coordinates.cval1, + ) y = self._get_from_list( - 'Observation.target_position.point.cval2', index=0, - current=None if current is None else current.coordinates.cval2) + 'Observation.target_position.point.cval2', + index=0, + current=None if current is None else current.coordinates.cval2, + ) coordsys = self._get_from_list( - 'Observation.target_position.coordsys', index=0, - current=None if current is None else current.coordsys) + 'Observation.target_position.coordsys', index=0, current=None if current is None else current.coordsys + ) equinox = self._get_from_list( - 'Observation.target_position.equinox', index=0, - current=None if current is None else current.equinox) + 'Observation.target_position.equinox', index=0, current=None if current is None else current.equinox + ) aug_target_position = None if x and y: aug_point = caom2.Point(x, y) @@ -1062,22 +1055,30 @@ def _get_telescope(self, current): """ self.logger.debug('Begin Telescope augmentation.') 
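# --- Illustrative sketch, not part of the patch ---------------------------------
# _get_telescope, which continues just below, only instantiates caom2.Telescope
# when a name resolves, after coercing the geocentric coordinates to float. A
# condensed standalone sketch of that guard (the argument values shown are
# hypothetical):
import caom2

def _maybe_float(value):
    return None if value is None else float(value)

def build_telescope(name, geo_x=None, geo_y=None, geo_z=None):
    if not name:  # nothing to build without a telescope name
        return None
    return caom2.Telescope(str(name), _maybe_float(geo_x), _maybe_float(geo_y), _maybe_float(geo_z))

# e.g. build_telescope('example-telescope', '-2100330.0', None, None)
# --------------------------------------------------------------------------------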
name = self._get_from_list( - 'Observation.telescope.name', index=0, - current=None if current is None else current.name) + 'Observation.telescope.name', index=0, current=None if current is None else current.name + ) geo_x = _to_float( self._get_from_list( - 'Observation.telescope.geoLocationX', index=0, - current=None if current is None else current.geo_location_x)) + 'Observation.telescope.geoLocationX', + index=0, + current=None if current is None else current.geo_location_x, + ) + ) geo_y = _to_float( self._get_from_list( - 'Observation.telescope.geoLocationY', index=0, - current=None if current is None else current.geo_location_y)) + 'Observation.telescope.geoLocationY', + index=0, + current=None if current is None else current.geo_location_y, + ) + ) geo_z = _to_float( self._get_from_list( - 'Observation.telescope.geoLocationZ', index=0, - current=None if current is None else current.geo_location_z)) - keywords = self._get_set_from_list( - 'Observation.telescope.keywords', index=0) + 'Observation.telescope.geoLocationZ', + index=0, + current=None if current is None else current.geo_location_z, + ) + ) + keywords = self._get_set_from_list('Observation.telescope.keywords', index=0) aug_tel = None if name: aug_tel = caom2.Telescope(str(name), geo_x, geo_y, geo_z) @@ -1154,10 +1155,12 @@ def _try_energy_with_blueprint(self, chunk, index): chunk.energy.ssyssrc = self._get_from_list('Chunk.energy.ssyssrc', index, chunk.energy.ssyssrc) chunk.energy.velang = self._get_from_list('Chunk.energy.velang', index, chunk.energy.velang) chunk.energy.bandpass_name = self._get_from_list( - 'Chunk.energy.bandpassName', index, chunk.energy.bandpass_name) + 'Chunk.energy.bandpassName', index, chunk.energy.bandpass_name + ) chunk.energy.transition = self._get_energy_transition(chunk.energy.transition) chunk.energy.resolving_power = _to_float( - self._get_from_list('Chunk.energy.resolvingPower', index, chunk.energy.resolving_power)) + self._get_from_list('Chunk.energy.resolvingPower', index, chunk.energy.resolving_power) + ) self.logger.debug('End augmentation with blueprint for energy.') def _try_observable_with_blueprint(self, chunk, index): @@ -1171,13 +1174,15 @@ def _try_observable_with_blueprint(self, chunk, index): :param index: The index in the blueprint for looking up plan information. """ - self.logger.debug('Begin augmentation with blueprint for ' - 'observable.') + self.logger.debug('Begin augmentation with blueprint for ' 'observable.') aug_axis = self._two_param_constructor( 'Chunk.observable.dependent.axis.ctype', - 'Chunk.observable.dependent.axis.cunit', index, _to_str, caom2.Axis) - aug_bin = _to_int( - self._get_from_list('Chunk.observable.dependent.bin', index)) + 'Chunk.observable.dependent.axis.cunit', + index, + _to_str, + caom2.Axis, + ) + aug_bin = _to_int(self._get_from_list('Chunk.observable.dependent.bin', index)) if aug_axis is not None and aug_bin is not None: chunk.observable = caom2.ObservableAxis(caom2.Slice(aug_axis, aug_bin)) chunk.observable_axis = _to_int(self._get_from_list('Chunk.observableAxis', index)) @@ -1194,8 +1199,7 @@ def _try_polarization_with_blueprint(self, chunk, index): :param index: The index in the blueprint for looking up plan information. 
""" - self.logger.debug('Begin augmentation with blueprint for ' - 'polarization.') + self.logger.debug('Begin augmentation with blueprint for ' 'polarization.') aug_axis, aug_naxis_index = self._get_axis_wcs('polarization', chunk.polarization, index) if aug_axis is not None: if chunk.polarization: @@ -1213,24 +1217,36 @@ def _try_position_range(self, index): aug_range_c1_start = self._two_param_constructor( 'Chunk.position.axis.range.start.coord1.pix', 'Chunk.position.axis.range.start.coord1.val', - index, _to_float, caom2.RefCoord) + index, + _to_float, + caom2.RefCoord, + ) aug_range_c1_end = self._two_param_constructor( 'Chunk.position.axis.range.end.coord1.pix', 'Chunk.position.axis.range.end.coord1.val', - index, _to_float, caom2.RefCoord) + index, + _to_float, + caom2.RefCoord, + ) aug_range_c2_start = self._two_param_constructor( 'Chunk.position.axis.range.start.coord2.pix', 'Chunk.position.axis.range.start.coord2.val', - index, _to_float, caom2.RefCoord) + index, + _to_float, + caom2.RefCoord, + ) aug_range_c2_end = self._two_param_constructor( 'Chunk.position.axis.range.end.coord2.pix', 'Chunk.position.axis.range.end.coord2.val', - index, _to_float, caom2.RefCoord) - if (aug_range_c1_start and aug_range_c1_end and aug_range_c2_start - and aug_range_c2_end): + index, + _to_float, + caom2.RefCoord, + ) + if aug_range_c1_start and aug_range_c1_end and aug_range_c2_start and aug_range_c2_end: aug_range = caom2.CoordRange2D( caom2.Coord2D(aug_range_c1_start, aug_range_c1_end), - caom2.Coord2D(aug_range_c2_start, aug_range_c2_end)) + caom2.Coord2D(aug_range_c2_start, aug_range_c2_end), + ) self.logger.debug('Completed setting range for position') return aug_range @@ -1246,8 +1262,12 @@ def _try_position_with_blueprint(self, chunk, index): """ self.logger.debug('Begin augmentation with blueprint for position.') aug_axis = None - if (chunk.position is not None and chunk.position.axis is not None and chunk.position.axis.axis1 is not None - and chunk.position.axis.axis2 is not None): + if ( + chunk.position is not None + and chunk.position.axis is not None + and chunk.position.axis.axis1 is not None + and chunk.position.axis.axis2 is not None + ): # preserve the values obtained from file data aug_x_axis = chunk.position.axis.axis1 aug_y_axis = chunk.position.axis.axis2 @@ -1255,32 +1275,49 @@ def _try_position_with_blueprint(self, chunk, index): aug_y_error = chunk.position.axis.error2 else: aug_x_axis = self._two_param_constructor( - 'Chunk.position.axis.axis1.ctype', - 'Chunk.position.axis.axis1.cunit', index, _to_str, caom2.Axis) + 'Chunk.position.axis.axis1.ctype', 'Chunk.position.axis.axis1.cunit', index, _to_str, caom2.Axis + ) aug_y_axis = self._two_param_constructor( - 'Chunk.position.axis.axis2.ctype', - 'Chunk.position.axis.axis2.cunit', index, _to_str, caom2.Axis) + 'Chunk.position.axis.axis2.ctype', 'Chunk.position.axis.axis2.cunit', index, _to_str, caom2.Axis + ) aug_x_error = self._two_param_constructor( 'Chunk.position.axis.error1.syser', - 'Chunk.position.axis.error1.rnder', index, _to_float, caom2.CoordError) + 'Chunk.position.axis.error1.rnder', + index, + _to_float, + caom2.CoordError, + ) aug_y_error = self._two_param_constructor( 'Chunk.position.axis.error2.syser', - 'Chunk.position.axis.error2.rnder', index, _to_float, caom2.CoordError) + 'Chunk.position.axis.error2.rnder', + index, + _to_float, + caom2.CoordError, + ) aug_range = self._try_position_range(index) if aug_range is None: if chunk.position is None or chunk.position.axis is None or 
chunk.position.axis.function is None: aug_dimension = self._two_param_constructor( 'Chunk.position.axis.function.dimension.naxis1', 'Chunk.position.axis.function.dimension.naxis2', - index, _to_int, caom2.Dimension2D) + index, + _to_int, + caom2.Dimension2D, + ) aug_x_ref_coord = self._two_param_constructor( 'Chunk.position.axis.function.refCoord.coord1.pix', 'Chunk.position.axis.function.refCoord.coord1.val', - index, _to_float, caom2.RefCoord) + index, + _to_float, + caom2.RefCoord, + ) aug_y_ref_coord = self._two_param_constructor( 'Chunk.position.axis.function.refCoord.coord2.pix', 'Chunk.position.axis.function.refCoord.coord2.val', - index, _to_float, caom2.RefCoord) + index, + _to_float, + caom2.RefCoord, + ) aug_cd11 = _to_float(self._get_from_list('Chunk.position.axis.function.cd11', index)) aug_cd12 = _to_float(self._get_from_list('Chunk.position.axis.function.cd12', index)) aug_cd21 = _to_float(self._get_from_list('Chunk.position.axis.function.cd21', index)) @@ -1292,17 +1329,23 @@ def _try_position_with_blueprint(self, chunk, index): self.logger.debug(f'Creating position Coord2D for {self.uri}') aug_function = None - if (aug_dimension is not None and aug_ref_coord is not None and - aug_cd11 is not None and aug_cd12 is not None and - aug_cd21 is not None and aug_cd22 is not None): - aug_function = caom2.CoordFunction2D(aug_dimension, aug_ref_coord, aug_cd11, aug_cd12, aug_cd21, - aug_cd22) + if ( + aug_dimension is not None + and aug_ref_coord is not None + and aug_cd11 is not None + and aug_cd12 is not None + and aug_cd21 is not None + and aug_cd22 is not None + ): + aug_function = caom2.CoordFunction2D( + aug_dimension, aug_ref_coord, aug_cd11, aug_cd12, aug_cd21, aug_cd22 + ) self.logger.debug(f'Creating position CoordFunction2D for {self.uri}') - if (aug_x_axis is not None and aug_y_axis is not None and - aug_function is not None): - aug_axis = caom2.CoordAxis2D(aug_x_axis, aug_y_axis, aug_x_error, - aug_y_error, None, None, aug_function) + if aug_x_axis is not None and aug_y_axis is not None and aug_function is not None: + aug_axis = caom2.CoordAxis2D( + aug_x_axis, aug_y_axis, aug_x_error, aug_y_error, None, None, aug_function + ) self.logger.debug(f'Creating position CoordAxis2D for {self.uri}') chunk.position_axis_1 = _to_int(self._get_from_list('Chunk.positionAxis1', index)) @@ -1319,10 +1362,12 @@ def _try_position_with_blueprint(self, chunk, index): if chunk.position: chunk.position.coordsys = self._get_from_list('Chunk.position.coordsys', index, chunk.position.coordsys) - chunk.position.equinox = _to_float(self._get_from_list( - 'Chunk.position.equinox', index, chunk.position.equinox)) + chunk.position.equinox = _to_float( + self._get_from_list('Chunk.position.equinox', index, chunk.position.equinox) + ) chunk.position.resolution = self._get_from_list( - 'Chunk.position.resolution', index, chunk.position.resolution) + 'Chunk.position.resolution', index, chunk.position.resolution + ) self.logger.debug('End augmentation with blueprint for position.') def _try_range(self, index, lookup): @@ -1331,11 +1376,17 @@ def _try_range(self, index, lookup): aug_range_start = self._two_param_constructor( f'Chunk.{lookup}.axis.range.start.pix', f'Chunk.{lookup}.axis.range.start.val', - index, _to_float, caom2.RefCoord) + index, + _to_float, + caom2.RefCoord, + ) aug_range_end = self._two_param_constructor( f'Chunk.{lookup}.axis.range.end.pix', f'Chunk.{lookup}.axis.range.end.val', - index, _to_float, caom2.RefCoord) + index, + _to_float, + caom2.RefCoord, + ) if aug_range_start and 
aug_range_end: result = caom2.CoordRange1D(aug_range_start, aug_range_end) self.logger.debug(f'Completed setting range with return for {lookup}') @@ -1365,7 +1416,8 @@ def _try_time_with_blueprint(self, chunk, index): if chunk.time: chunk.time.exposure = _to_float(self._get_from_list('Chunk.time.exposure', index, chunk.time.exposure)) chunk.time.resolution = _to_float( - self._get_from_list('Chunk.time.resolution', index, chunk.time.resolution)) + self._get_from_list('Chunk.time.resolution', index, chunk.time.resolution) + ) chunk.time.timesys = _to_str(self._get_from_list('Chunk.time.timesys', index, chunk.time.timesys)) chunk.time.trefpos = self._get_from_list('Chunk.time.trefpos', index, chunk.time.trefpos) chunk.time.mjdref = self._get_from_list('Chunk.time.mjdref', index, chunk.time.mjdref) @@ -1508,10 +1560,7 @@ def headers(self): def add_parts(self, artifact, index): # there is one Part per extension, the name is the extension number - if ( - FitsParser._has_data_array(self._headers[index]) - and self.blueprint.has_chunk(index) - ): + if FitsParser._has_data_array(self._headers[index]) and self.blueprint.has_chunk(index): if str(index) not in artifact.parts.keys(): # TODO use extension name? artifact.parts.add(caom2.Part(str(index))) @@ -1524,32 +1573,26 @@ def add_parts(self, artifact, index): return result def apply_blueprint(self): - # pointers that are short to type exts = self.blueprint._extensions wcs_std = self.blueprint._wcs_std plan = self.blueprint._plan # firstly, apply the functions - if (self.blueprint._module is not None or - self.blueprint._module_instance is not None): + if self.blueprint._module is not None or self.blueprint._module_instance is not None: for key, value in plan.items(): if ObsBlueprint.is_function(value): if self._blueprint._module_instance is None: plan[key] = self._execute_external(value, key, 0) else: - plan[key] = self._execute_external_instance( - value, key, 0) + plan[key] = self._execute_external_instance(value, key, 0) for extension in exts: for key, value in exts[extension].items(): if ObsBlueprint.is_function(value): if self._blueprint._module_instance is None: - exts[extension][key] = self._execute_external( - value, key, extension) + exts[extension][key] = self._execute_external(value, key, extension) else: - exts[extension][key] = \ - self._execute_external_instance( - value, key, extension) + exts[extension][key] = self._execute_external_instance(value, key, extension) # apply overrides from blueprint to all extensions for key, value in plan.items(): @@ -1558,12 +1601,10 @@ def apply_blueprint(self): # alternative attributes provided for standard wcs attrib. 
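# --- Illustrative sketch, not part of the patch ---------------------------------
# The loop below copies the value of an "alternative" FITS keyword named in the
# blueprint onto the standard WCS keyword(s) it stands in for, so that
# astropy.wcs picks it up. A standalone sketch of that substitution using
# astropy, with hypothetical keyword names (the real code also preserves the
# value's type via _set_by_type):
from astropy.io import fits

def copy_alternative_keyword(header, alternative, standards):
    """Copy header[alternative] onto each standard keyword it substitutes for."""
    if alternative in header and alternative not in standards:
        for keyword in standards:
            header[keyword] = header[alternative]

hdr = fits.Header()
hdr['MJDSTART'] = 59000.0                       # hypothetical alternative keyword
copy_alternative_keyword(hdr, 'MJDSTART', ['MJD-OBS'])
assert hdr['MJD-OBS'] == 59000.0                # standard keyword now visible to WCS code
# --------------------------------------------------------------------------------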
for header in self.headers: for v in value[0]: - if v in header and \ - v not in wcs_std[key].split(','): + if v in header and v not in wcs_std[key].split(','): keywords = wcs_std[key].split(',') for keyword in keywords: - _set_by_type(header, keyword, - str(header[v])) + _set_by_type(header, keyword, str(header[v])) elif ObsBlueprint.is_function(value): continue elif ObsBlueprint.has_no_value(value): @@ -1583,8 +1624,7 @@ def apply_blueprint(self): # apply overrides to the remaining extensions for extension in exts: if extension >= len(self.headers): - logging.error('More extensions configured {} than headers ' - '{}'.format(extension, len(self.headers))) + logging.error('More extensions configured {} than headers ' '{}'.format(extension, len(self.headers))) continue hdr = self.headers[extension] for key, value in exts[extension].items(): @@ -1593,25 +1633,25 @@ def apply_blueprint(self): keywords = wcs_std[key].split(',') for keyword in keywords: _set_by_type(hdr, keyword, value) - logging.debug( - '{}: set to {} in extension {}'.format(keyword, value, - extension)) + logging.debug('{}: set to {} in extension {}'.format(keyword, value, extension)) # apply defaults to all extensions for key, value in plan.items(): if ObsBlueprint.has_default_value(value): for index, header in enumerate(self.headers): for keywords in value[0]: for keyword in keywords.split(','): - if (not header.get(keyword.strip()) and - keyword == keywords and # checking a string - keywords == value[0][-1]): # last item + if ( + not header.get(keyword.strip()) + and keyword == keywords + and keywords == value[0][-1] # checking a string + ): # last item # apply a default if a value does not already # exist, and all possible values of # keywords have been checked _set_by_type(header, keyword.strip(), value[1]) logging.debug( - '{}: set default value of {} in HDU {}.'. - format(keyword, value[1], index)) + '{}: set default value of {} in HDU {}.'.format(keyword, value[1], index) + ) # TODO wcs in astropy ignores cdelt attributes when it finds a cd # attribute even if it's in a different axis @@ -1623,10 +1663,8 @@ def apply_blueprint(self): break if cd_present: for i in range(1, 6): - if f'CDELT{i}' in header and \ - 'CD{0}_{0}'.format(i) not in header: - header['CD{0}_{0}'.format(i)] = \ - header[f'CDELT{i}'] + if f'CDELT{i}' in header and 'CD{0}_{0}'.format(i) not in header: + header['CD{0}_{0}'.format(i)] = header[f'CDELT{i}'] # TODO When a projection is specified, wcslib expects corresponding # DP arguments with NAXES attributes. 
Normally, omitting the attribute @@ -1638,16 +1676,16 @@ def apply_blueprint(self): for header in self.headers: sip = False for i in range(1, 6): - if ((f'CTYPE{i}' in header) and - isinstance(header[f'CTYPE{i}'], str) and - ('-SIP' in header[f'CTYPE{i}'])): + if ( + (f'CTYPE{i}' in header) + and isinstance(header[f'CTYPE{i}'], str) + and ('-SIP' in header[f'CTYPE{i}']) + ): sip = True break if sip: for i in range(1, 6): - if (f'CTYPE{i}' in header) and \ - ('-SIP' not in header[f'CTYPE{i}']) and \ - (f'DP{i}' not in header): + if (f'CTYPE{i}' in header) and ('-SIP' not in header[f'CTYPE{i}']) and (f'DP{i}' not in header): header[f'DP{i}'] = 'NAXES: 1' return @@ -1657,14 +1695,10 @@ def augment_artifact(self, artifact, index=0): Augments a given CAOM2 artifact with available FITS information :param artifact: existing CAOM2 artifact to be augmented """ - self.logger.debug( - 'Begin artifact augmentation for {} with {} HDUs.'.format( - artifact.uri, len(self.headers))) + self.logger.debug('Begin artifact augmentation for {} with {} HDUs.'.format(artifact.uri, len(self.headers))) if self.blueprint.get_configed_axes_count() == 0: - raise TypeError( - 'No WCS Data. End artifact augmentation for {}.'.format( - artifact.uri)) + raise TypeError('No WCS Data. End artifact augmentation for {}.'.format(artifact.uri)) for i, header in enumerate(self.headers): if not self.add_parts(artifact, i): @@ -1674,8 +1708,7 @@ def augment_artifact(self, artifact, index=0): self._wcs_parser = FitsWcsParser(header, self.file, str(i)) super().augment_artifact(artifact, i) - self.logger.debug( - f'End artifact augmentation for {artifact.uri}.') + self.logger.debug(f'End artifact augmentation for {artifact.uri}.') def _get_chunk_naxis(self, chunk, index=None): # NOTE: astropy.wcs does not distinguished between WCS axes and @@ -1696,11 +1729,9 @@ def _get_from_list(self, lookup, index, current=None): keys = self.blueprint._get(lookup) except KeyError: self.add_error(lookup, sys.exc_info()[1]) - self.logger.debug( - f'Could not find {lookup!r} in caom2blueprint configuration.') + self.logger.debug(f'Could not find {lookup!r} in caom2blueprint configuration.') if current: - self.logger.debug( - f'{lookup}: using current value of {current!r}.') + self.logger.debug(f'{lookup}: using current value of {current!r}.') value = current return value @@ -1709,9 +1740,7 @@ def _get_from_list(self, lookup, index, current=None): try: value = self.headers[index].get(ii) if value: - self.logger.debug( - f'{lookup}: assigned value {value} based on ' - f'keyword {ii}.') + self.logger.debug(f'{lookup}: assigned value {value} based on ' f'keyword {ii}.') break except (KeyError, IndexError): if keys[0].index(ii) == len(keys[0]) - 1: @@ -1720,28 +1749,22 @@ def _get_from_list(self, lookup, index, current=None): if keys[1]: if current is None: value = keys[1] - self.logger.debug( - f'{lookup}: assigned default value {value}.') + self.logger.debug(f'{lookup}: assigned default value {value}.') else: value = current if value is None: # checking current does not work in the general case, # because current might legitimately be 'None' if self._blueprint.update: - if ( - current is not None - or (current is None and isinstance(value, bool)) - ): + if current is not None or (current is None and isinstance(value, bool)): value = current - self.logger.debug( - f'{lookup}: used current value {value}.') + self.logger.debug(f'{lookup}: used current value {value}.') else: # assign a default value, if one exists if keys[1]: if current is None: value = 
keys[1] - self.logger.debug( - f'{lookup}: assigned default value {value}.') + self.logger.debug(f'{lookup}: assigned default value {value}.') else: value = current @@ -1772,21 +1795,18 @@ def _get_from_table(self, lookup, extension): keywords = self.blueprint._get(lookup, extension) except KeyError as e: self.add_error(lookup, sys.exc_info()[1]) - self.logger.debug( - 'Could not find {!r} in fits2caom2 configuration.'.format( - lookup)) + self.logger.debug('Could not find {!r} in fits2caom2 configuration.'.format(lookup)) raise e if isinstance(keywords, tuple) and keywords[0] == 'BINTABLE': - # BINTABLE, so need to retrieve the data from the file if self.file is not None and self.file != '': with fits.open(self.file) as fits_data: if fits_data[extension].header['XTENSION'] != 'BINTABLE': raise ValueError( 'Got {} when looking for a BINTABLE ' - 'extension.'.format( - fits_data[extension].header['XTENSION'])) + 'extension.'.format(fits_data[extension].header['XTENSION']) + ) for ii in keywords[1]: for jj in fits_data[extension].data[keywords[2]][ii]: value = f'{jj} {value}' @@ -1801,8 +1821,7 @@ def _get_set_from_list(self, lookup, index): keywords = self.blueprint._get(lookup) except KeyError: self.add_error(lookup, sys.exc_info()[1]) - self.logger.debug(f'Could not find \'{lookup}\' in caom2blueprint ' - f'configuration.') + self.logger.debug(f'Could not find \'{lookup}\' in caom2blueprint ' f'configuration.') if isinstance(keywords, tuple): for ii in keywords[0]: @@ -1813,9 +1832,7 @@ def _get_set_from_list(self, lookup, index): self.add_error(lookup, sys.exc_info()[1]) if keywords[1]: value = keywords[1] - self.logger.debug( - '{}: assigned default value {}.'.format(lookup, - value)) + self.logger.debug('{}: assigned default value {}.'.format(lookup, value)) elif keywords: value = keywords self.logger.debug(f'{lookup}: assigned value {value}.') @@ -1887,9 +1904,7 @@ class Hdf5Parser(ContentParser): CAOM2 record. """ - def __init__( - self, obs_blueprint, uri, h5_file, find_roots_here='sitedata' - ): + def __init__(self, obs_blueprint, uri, h5_file, find_roots_here='sitedata'): """ :param obs_blueprint: Hdf5ObsBlueprint instance :param uri: which artifact augmentation is based on @@ -1915,6 +1930,7 @@ def apply_blueprint_from_file(self): # h5py is an extra in this package since most collections do not # require it import h5py + individual, multi, attributes = self._extract_path_names_from_blueprint() filtered_individual = [ii for ii in individual.keys() if '(' in ii] @@ -1937,21 +1953,14 @@ def _extract_from_item(name, object): # If it's the Part/Chunk metadata, capture it to extensions. # Syntax of the keys described in Hdf5ObsBlueprint class. 
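# --- Illustrative sketch, not part of the patch ---------------------------------
# _extract_from_item (below) is used as an h5py visit callback: the file is
# walked once, and compound Datasets whose paths match blueprint entries have
# the named fields read out and pushed into the blueprint. A minimal standalone
# version of that walk; the file path and field name are hypothetical:
import h5py

def collect_field(h5_path, wanted_field):
    """Map dataset path -> values of `wanted_field` for every compound dataset that has it."""
    found = {}

    def _visit(name, obj):
        # only compound datasets expose named columns through dtype.names
        if isinstance(obj, h5py.Dataset) and obj.dtype.names and wanted_field in obj.dtype.names:
            found[name] = obj[wanted_field]

    with h5py.File(h5_path, 'r') as f:
        f.visititems(_visit)
    return found

# e.g. collect_field('/tmp/example.h5', 'obs_ra')
# --------------------------------------------------------------------------------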
for part_index, part_name in enumerate(self._extension_names): - if ( - name.startswith(part_name) - and isinstance(object, h5py.Dataset) - and object.dtype.names is not None - ): + if name.startswith(part_name) and isinstance(object, h5py.Dataset) and object.dtype.names is not None: for d_name in object.dtype.names: temp_path = f'{name.replace(part_name, "")}/{d_name}' for path_name in multi.keys(): if path_name == temp_path: for jj in multi.get(path_name): - self._blueprint.set( - jj, object[d_name], part_index - ) - elif (path_name.startswith(temp_path) - and '(' in path_name): + self._blueprint.set(jj, object[d_name], part_index) + elif path_name.startswith(temp_path) and '(' in path_name: z = path_name.split('(') if ':' in z[1]: a = z[1].split(')')[0].split(':') @@ -1960,8 +1969,7 @@ def _extract_from_item(name, object): for jj in multi.get(path_name): self._blueprint.set( jj, - object[d_name][int(a[0])][ - int(a[1])], + object[d_name][int(a[0])][int(a[1])], part_index, ) else: @@ -2047,25 +2055,20 @@ def apply_blueprint(self): plan = self._blueprint._plan # apply the functions - if (self._blueprint._module is not None or - self._blueprint._module_instance is not None): + if self._blueprint._module is not None or self._blueprint._module_instance is not None: for key, value in plan.items(): if ObsBlueprint.is_function(value): if self._blueprint._module_instance is None: plan[key] = self._execute_external(value, key, 0) else: - plan[key] = self._execute_external_instance( - value, key, 0) + plan[key] = self._execute_external_instance(value, key, 0) for extension in exts: for key, value in exts[extension].items(): if ObsBlueprint.is_function(value): if self._blueprint._module_instance is None: - exts[extension][key] = self._execute_external( - value, key, extension) + exts[extension][key] = self._execute_external(value, key, extension) else: - exts[extension][key] = \ - self._execute_external_instance( - value, key, extension) + exts[extension][key] = self._execute_external_instance(value, key, extension) # blueprint already contains all the overrides, only need to make # sure the overrides get applied to all the extensions @@ -2082,8 +2085,7 @@ def apply_blueprint(self): ): continue exts[extension][key] = value - self.logger.debug( - f'{key}: set to {value} in extension {extension}') + self.logger.debug(f'{key}: set to {value} in extension {extension}') # if no values have been set by file lookups, function execution, # or applying overrides, apply defaults, including to all extensions @@ -2095,13 +2097,11 @@ def apply_blueprint(self): if q is None: exts[extension][key] = value[1] self.logger.debug( - f'Add {key} and assign default value of ' - f'{value[1]} in extension {extension}.') + f'Add {key} and assign default value of ' f'{value[1]} in extension {extension}.' 
+ ) elif ObsBlueprint.needs_lookup(value): exts[extension][key] = value[1] - self.logger.debug( - f'{key}: set value to default of {value[1]} in ' - f'extension {extension}.') + self.logger.debug(f'{key}: set value to default of {value[1]} in ' f'extension {extension}.') plan[key] = value[1] self.logger.debug(f'{key}: set value to default of {value[1]}') @@ -2141,8 +2141,7 @@ def _set_by_type(header, keyword, value): except ValueError: pass - if (float_value and not str(value).isdecimal() or - re.match(r'0\.0*', str(value))): + if float_value and not str(value).isdecimal() or re.match(r'0\.0*', str(value)): header.set(keyword, float_value) elif int_value: header.set(keyword, int_value) diff --git a/caom2utils/caom2utils/polygonvalidator.py b/caom2utils/caom2utils/polygonvalidator.py index e64bc184..9f5e4767 100644 --- a/caom2utils/caom2utils/polygonvalidator.py +++ b/caom2utils/caom2utils/polygonvalidator.py @@ -97,8 +97,7 @@ def validate_polygon(poly): if len(points) < 3: # points in a polygon is not required to form a closed polygon, # hence min 3 points - raise AssertionError('invalid polygon: {} points (min 3)'.format( - len(points))) + raise AssertionError('invalid polygon: {} points (min 3)'.format(len(points))) cval1s = [] cval2s = [] @@ -140,13 +139,10 @@ def _validate_is_clockwise(orig_lon, lon): if not np.isclose(lon[1], orig_lon[1]): if not np.isclose(lon[1] - 360, orig_lon[1]): rlon = lon[::-1] - if np.isclose(rlon[1], orig_lon[1]) or \ - np.isclose(rlon[1] - 360, orig_lon[1]): - raise AssertionError( - 'invalid polygon: clockwise winding direction') + if np.isclose(rlon[1], orig_lon[1]) or np.isclose(rlon[1] - 360, orig_lon[1]): + raise AssertionError('invalid polygon: clockwise winding direction') else: - raise AssertionError( - 'software error: compared wrong values') + raise AssertionError('software error: compared wrong values') def _validate_self_intersection_and_direction(ras, decs): @@ -161,8 +157,7 @@ def _validate_self_intersection_and_direction(ras, decs): x, y, z = vector.lonlat_to_vector(ras, decs) points = np.dstack((x, y, z))[0] if polygon.SphericalPolygon.self_intersect(points): - raise AssertionError( - 'Polygon contains self intersecting segments') + raise AssertionError('Polygon contains self intersecting segments') spolygon = polygon.SphericalPolygon.from_radec(ras, decs) lon, lat = next(spolygon.to_lonlat()) @@ -180,8 +175,7 @@ def validate_multipolygon(mp): if not mp: return if not isinstance(mp, MultiPolygon): - raise ValueError( - f'MultiPoligon expected in validation received {type(mp)}') + raise ValueError(f'MultiPoligon expected in validation received {type(mp)}') _validate_size_and_end_vertices(mp) @@ -194,19 +188,16 @@ def validate_multipolygon(mp): def _validate_size_and_end_vertices(mp): if len(mp.vertices) < 4: # triangle - raise AssertionError('invalid polygon: {} vertices (min 4)'.format( - len(mp.vertices))) + raise AssertionError('invalid polygon: {} vertices (min 4)'.format(len(mp.vertices))) if mp.vertices[0].type != SegmentType.MOVE: - raise AssertionError( - 'invalid polygon: first vertex is not a MOVE vertex') + raise AssertionError('invalid polygon: first vertex is not a MOVE vertex') if mp.vertices[-1].type != SegmentType.CLOSE: - raise AssertionError( - 'invalid polygon: last vertex is not a CLOSE vertex') + raise AssertionError('invalid polygon: last vertex is not a CLOSE vertex') -class MultiPolygonValidator(): +class MultiPolygonValidator: """ A class to validate the sequencing of vertices in a polygon, as well as constructing and 
validating the polygon. @@ -229,8 +220,7 @@ def validate(self, vertex): def _validate_move(self, vertex): if self._open_loop: - raise AssertionError( - 'invalid polygon: MOVE vertex when loop open') + raise AssertionError('invalid polygon: MOVE vertex when loop open') self._lines = 0 self._open_loop = True self._polygon.points.append(Point(vertex.cval1, vertex.cval2)) @@ -238,11 +228,9 @@ def _validate_move(self, vertex): def _validate_close(self, vertex): # close the polygon if not self._open_loop: - raise AssertionError( - 'invalid polygon: CLOSE vertex when loop close') + raise AssertionError('invalid polygon: CLOSE vertex when loop close') if self._lines < 2: - raise AssertionError( - 'invalid polygon: minimum 2 lines required') + raise AssertionError('invalid polygon: minimum 2 lines required') self._open_loop = False # SphericalPolygon requires point[0] == point[-1] point = self._polygon.points[0] @@ -254,7 +242,6 @@ def _validate_close(self, vertex): def _validate_line(self, vertex): if not self._open_loop: - raise AssertionError( - 'invalid polygon: LINE vertex when loop close') + raise AssertionError('invalid polygon: LINE vertex when loop close') self._lines += 1 self._polygon.points.append(Point(vertex.cval1, vertex.cval2)) diff --git a/caom2utils/caom2utils/tests/conftest.py b/caom2utils/caom2utils/tests/conftest.py index e0a9d956..df7b0ab3 100644 --- a/caom2utils/caom2utils/tests/conftest.py +++ b/caom2utils/caom2utils/tests/conftest.py @@ -67,6 +67,7 @@ # import os import glob + THIS_DIR = os.path.dirname(os.path.realpath(__file__)) TESTDATA_DIR = os.path.join(THIS_DIR, 'data') diff --git a/caom2utils/caom2utils/tests/test_caomvalidator.py b/caom2utils/caom2utils/tests/test_caomvalidator.py index 3bb85c90..f01afb59 100644 --- a/caom2utils/caom2utils/tests/test_caomvalidator.py +++ b/caom2utils/caom2utils/tests/test_caomvalidator.py @@ -99,15 +99,17 @@ def test_assert_validate_keyword(): def test_validate_observation(): - obs = SimpleObservation('test_collection', 'test_obs_id', - Algorithm('test_name')) + obs = SimpleObservation('test_collection', 'test_obs_id', Algorithm('test_name')) validate(obs) - obs = DerivedObservation('test_collection', 'test_obs_id', - Algorithm('test_name'), - proposal=Proposal('test_proposal'), - telescope=Telescope('test_telescope'), - instrument=Instrument('test_instrument'), - target=Target('test_targets')) + obs = DerivedObservation( + 'test_collection', + 'test_obs_id', + Algorithm('test_name'), + proposal=Proposal('test_proposal'), + telescope=Telescope('test_telescope'), + instrument=Instrument('test_instrument'), + target=Target('test_targets'), + ) obs.algorithm.keywords = 'foo' obs.proposal.keywords = set('foo=42') obs.telescope.keywords = set('foo:42') @@ -125,8 +127,7 @@ def test_compatibility(): # tests a previously generated observation and validates the # entities, and the entities with children - source_file_path = os.path.join(THIS_DIR, TEST_DATA, - 'SampleComposite-CAOM-2.3.xml') + source_file_path = os.path.join(THIS_DIR, TEST_DATA, 'SampleComposite-CAOM-2.3.xml') reader = ObservationReader(True) with open(source_file_path): obs = reader.read(source_file_path) diff --git a/caom2utils/caom2utils/tests/test_collections.py b/caom2utils/caom2utils/tests/test_collections.py index 8ba21bee..47f0f078 100644 --- a/caom2utils/caom2utils/tests/test_collections.py +++ b/caom2utils/caom2utils/tests/test_collections.py @@ -106,8 +106,7 @@ def test_differences(directory): prod_id = [p.product_id for p in expected.planes.values()][0] product_id 
= f'--productID {prod_id}' collection_id = expected.collection - data_files = _get_files( - ['header', 'png', 'gif', 'cat', 'fits', 'h5', 'orig'], directory) + data_files = _get_files(['header', 'png', 'gif', 'cat', 'fits', 'h5', 'orig'], directory) assert data_files file_meta = _get_uris(collection_id, data_files, expected) @@ -139,14 +138,14 @@ def test_differences(directory): cardinality = f'{product_id} {temp}' # return # TODO shorter testing cycle - with patch('caom2utils.data_util.StorageInventoryClient') as \ - swc_si_mock,\ - patch('cadcutils.net.ws.WsCapabilities.get_access_url', - autospec=True) as cap_mock,\ - patch('caom2utils.caom2blueprint.get_vos_headers') as gvh_mock, \ - patch('caom2utils.caom2blueprint._get_vos_meta') as gvm_mock, \ - patch('caom2utils.data_util.get_local_headers_from_fits') as \ - header_mock: + with patch('caom2utils.data_util.StorageInventoryClient') as swc_si_mock, patch( + 'cadcutils.net.ws.WsCapabilities.get_access_url', autospec=True + ) as cap_mock, patch('caom2utils.caom2blueprint.get_vos_headers') as gvh_mock, patch( + 'caom2utils.caom2blueprint._get_vos_meta' + ) as gvm_mock, patch( + 'caom2utils.data_util.get_local_headers_from_fits' + ) as header_mock: + def info_mock(uri): if uri.startswith('vos'): archive = uri.split('/')[-2] @@ -167,10 +166,9 @@ def _get_vos_headers(uri, subject=None): return None def _vos_client_meta(subject, uri): - return FileInfo(id=uri, - md5sum='5b00b00d4b06aba986c3663d09aa581f', - size=682560, - file_type='application/fits') + return FileInfo( + id=uri, md5sum='5b00b00d4b06aba986c3663d09aa581f', size=682560, file_type='application/fits' + ) def _header(fqn): if '.fits' in fqn: @@ -208,11 +206,12 @@ def _header(fqn): header_mock.side_effect = _header temp = tempfile.NamedTemporaryFile() - sys.argv = ('{} -o {} --no_validate --observation {} {} {} {} ' - '--resource-id ivo://cadc.nrc.ca/test'.format( - application, temp.name, - expected.collection, expected.observation_id, - inputs, cardinality)).split() + sys.argv = ( + '{} -o {} --no_validate --observation {} {} {} {} ' + '--resource-id ivo://cadc.nrc.ca/test'.format( + application, temp.name, expected.collection, expected.observation_id, inputs, cardinality + ) + ).split() print(sys.argv) app_cmd() actual = _read_observation(temp.name) # actual observation @@ -225,23 +224,22 @@ def _get_cardinality(directory): # The blueprints are named to reverse sort so that this # alignment of product id / artifact URI works if '/cfhtsg/' in directory: - return '--lineage ' \ - 'MegaPipe.080.156.Z.MP9801/cadc:CFHTSG/' \ - 'MegaPipe.080.156.Z.MP9801.weight.fits ' \ - 'MegaPipe.080.156.Z.MP9801/cadc:CFHTSG/' \ - 'MegaPipe.080.156.Z.MP9801.fits ' \ - 'MegaPipe.080.156.Z.MP9801/cadc:CFHTSG/' \ - 'MegaPipe.080.156.Z.MP9801.fits.gif' + return ( + '--lineage ' + 'MegaPipe.080.156.Z.MP9801/cadc:CFHTSG/' + 'MegaPipe.080.156.Z.MP9801.weight.fits ' + 'MegaPipe.080.156.Z.MP9801/cadc:CFHTSG/' + 'MegaPipe.080.156.Z.MP9801.fits ' + 'MegaPipe.080.156.Z.MP9801/cadc:CFHTSG/' + 'MegaPipe.080.156.Z.MP9801.fits.gif' + ) elif '/omm/' in directory: if 'SCIRED' in directory: - return '--lineage Cdemo_ext2_SCIRED/cadc:OMM/' \ - 'Cdemo_ext2_SCIRED.fits.gz' + return '--lineage Cdemo_ext2_SCIRED/cadc:OMM/' 'Cdemo_ext2_SCIRED.fits.gz' else: - return '--lineage C190531_0432_SCI/cadc:OMM/' \ - 'C190531_0432_SCI.fits.gz' + return '--lineage C190531_0432_SCI/cadc:OMM/' 'C190531_0432_SCI.fits.gz' elif 'apass/catalog' in directory: - return '--lineage catalog/vos://cadc.nrc.ca!vospace/CAOMworkshop/' \ - 
'Examples/DAO/dao_c122_2016_012725.fits' + return '--lineage catalog/vos://cadc.nrc.ca!vospace/CAOMworkshop/' 'Examples/DAO/dao_c122_2016_012725.fits' elif 'taos_' in directory: if 'def' in directory: return '--lineage def/cadc:def/def.h5' @@ -275,8 +273,7 @@ def _get_common(fnames): def _get_subdirs(dir_name): - return [name for name in os.listdir(dir_name) if - os.path.isdir(os.path.join(dir_name, name))] + return [name for name in os.listdir(dir_name) if os.path.isdir(os.path.join(dir_name, name))] def _get_parameter(extension, dir_name): @@ -320,14 +317,18 @@ def _get_uris(collection, fnames, obs): f = os.path.basename(fname).replace('.header', '') for p in obs.planes.values(): for a in p.artifacts.values(): - if (f'cadc:{collection}/{f}' in a.uri or - (a.uri.startswith('vos') and f in a.uri) or - (a.uri == 'astron:LOTSS/P124+62/mosaic.fits')): + if ( + f'cadc:{collection}/{f}' in a.uri + or (a.uri.startswith('vos') and f in a.uri) + or (a.uri == 'astron:LOTSS/P124+62/mosaic.fits') + ): uris.append(a.uri) - meta = FileInfo(id=a.uri, - file_type=a.content_type, - size=a.content_length, - md5sum=a.content_checksum.checksum) + meta = FileInfo( + id=a.uri, + file_type=a.content_type, + size=a.content_length, + md5sum=a.content_checksum.checksum, + ) file_url = urlparse(a.uri) file_id = file_url.path.split('/')[-1] archive = file_url.path.split('/')[0] @@ -356,17 +357,14 @@ def _get_files(patterns, dir_name): def _compare_observations(expected, actual, output_dir): - result = get_differences(expected, actual, 'Observation') if result: tmp = '\n'.join([r for r in result]) - msg = f'Differences found observation {expected.observation_id} in ' \ - f'{output_dir}\n{tmp}' + msg = f'Differences found observation {expected.observation_id} in ' f'{output_dir}\n{tmp}' _write_observation(actual) raise AssertionError(msg) else: - logging.info('Observation {} in {} match'.format( - expected.observation_id, output_dir)) + logging.info('Observation {} in {} match'.format(expected.observation_id, output_dir)) def _read_observation(fname): @@ -376,6 +374,5 @@ def _read_observation(fname): def _write_observation(obs): - writer = ObservationWriter(True, False, 'caom2', - 'http://www.opencadc.org/caom2/xml/v2.4') + writer = ObservationWriter(True, False, 'caom2', 'http://www.opencadc.org/caom2/xml/v2.4') writer.write(obs, './x.xml') diff --git a/caom2utils/caom2utils/tests/test_convert_from_java.py b/caom2utils/caom2utils/tests/test_convert_from_java.py index 96bed86f..8b07e710 100644 --- a/caom2utils/caom2utils/tests/test_convert_from_java.py +++ b/caom2utils/caom2utils/tests/test_convert_from_java.py @@ -80,29 +80,29 @@ @pytest.mark.parametrize('override_file', [cfhtwircam_override]) def test_class_apply_defaults(override_file): - ob = ObsBlueprint(position_axes=(1, 2), energy_axis=3, - polarization_axis=4, time_axis=5) - usc = {'Plane.dataProductType': 'plane.dataProductType', - 'Plane.provenance.producer': 'provenance.producer', - 'Plane.provenance.project': 'provenance.project', - 'Plane.metaRelease': 'plane.metaRelease', - 'Plane.dataRelease': 'plane.dataRelease', - 'Plane.calibrationLevel': 'plane.calibrationLevel', - 'Observation.metaRelease': 'obs.metaRelease', - 'Observation.intent': 'obs.intent', - 'Observation.type': 'obs.type', - 'Observation.proposal.pi': 'proposal.pi', - 'Observation.proposal.project': 'proposal.project', - 'Observation.proposal.title': 'proposal.title', - 'Observation.sequenceNumber': 'obs.sequenceNumber', - 'Observation.target.standard': 'target.standard', - 
'Artifact.productType': 'artifact.productType',
-           'Chunk.time.resolution': 'time.resolution',
-           'Chunk.time.exposure': 'time.exposure',
-           'Chunk.energy.resolvingPower': 'resolvingPower',
-           'Chunk.energy.bandpassName': 'filtername',
-           'Artifact.contentChecksum': 'artifact.contentChecksum'
-           }
+    ob = ObsBlueprint(position_axes=(1, 2), energy_axis=3, polarization_axis=4, time_axis=5)
+    usc = {
+        'Plane.dataProductType': 'plane.dataProductType',
+        'Plane.provenance.producer': 'provenance.producer',
+        'Plane.provenance.project': 'provenance.project',
+        'Plane.metaRelease': 'plane.metaRelease',
+        'Plane.dataRelease': 'plane.dataRelease',
+        'Plane.calibrationLevel': 'plane.calibrationLevel',
+        'Observation.metaRelease': 'obs.metaRelease',
+        'Observation.intent': 'obs.intent',
+        'Observation.type': 'obs.type',
+        'Observation.proposal.pi': 'proposal.pi',
+        'Observation.proposal.project': 'proposal.project',
+        'Observation.proposal.title': 'proposal.title',
+        'Observation.sequenceNumber': 'obs.sequenceNumber',
+        'Observation.target.standard': 'target.standard',
+        'Artifact.productType': 'artifact.productType',
+        'Chunk.time.resolution': 'time.resolution',
+        'Chunk.time.exposure': 'time.exposure',
+        'Chunk.energy.resolvingPower': 'resolvingPower',
+        'Chunk.energy.bandpassName': 'filtername',
+        'Artifact.contentChecksum': 'artifact.contentChecksum',
+    }
     convert = ConvertFromJava(ob, usc)
     test_overrides = load_config(override_file)
diff --git a/caom2utils/caom2utils/tests/test_custom_axis_util.py b/caom2utils/caom2utils/tests/test_custom_axis_util.py
index a7fbf469..bb2a2705 100644
--- a/caom2utils/caom2utils/tests/test_custom_axis_util.py
+++ b/caom2utils/caom2utils/tests/test_custom_axis_util.py
@@ -67,9 +67,22 @@
 # from caom2utils import wcs_util
-from caom2 import ReleaseType, Artifact, Part, Chunk, plane, caom_util, \
-    chunk, CoordAxis1D, CoordBounds1D, CoordFunction1D, CoordRange1D, \
-    Interval, RefCoord, wcs
+from caom2 import (
+    ReleaseType,
+    Artifact,
+    Part,
+    Chunk,
+    plane,
+    caom_util,
+    chunk,
+    CoordAxis1D,
+    CoordBounds1D,
+    CoordFunction1D,
+    CoordRange1D,
+    Interval,
+    RefCoord,
+    wcs,
+)
 from caom2.caom_util import TypedList, TypedOrderedDict
 import pytest
 import unittest
@@ -88,12 +101,12 @@ def test_function1d_to_interval(self):
         wcs = CustomTestUtil.bad_ctype_wcs()
         with pytest.raises(ValueError) as ex:
             wcs_util.CustomAxisUtil.validate_wcs(wcs)
-        assert ('Invalid CTYPE:' in str(ex.value))
+        assert 'Invalid CTYPE:' in str(ex.value)
         # bad cunit
         wcs = CustomTestUtil.bad_cunit_wcs()
         with pytest.raises(ValueError) as ex:
             wcs_util.CustomAxisUtil.validate_wcs(wcs)
-        assert ('Invalid CUNIT for CTYPE:' in str(ex.value))
+        assert 'Invalid CUNIT for CTYPE:' in str(ex.value)

     def test_val2pix(self):
         # happy path
@@ -114,8 +127,7 @@ def test_function1d_to_interval_happy_path(self):
         delta = -0.2
         ref_coord = RefCoord(0.0, 0.0)
         function_1d = CoordFunction1D(naxis, delta, ref_coord)
-        actual_interval = wcs_util.CustomAxisUtil.function1d_to_interval(
-            wcs, function_1d)
+        actual_interval = wcs_util.CustomAxisUtil.function1d_to_interval(wcs, function_1d)
         expected_interval = Interval(-502.5, -2.5)
         self.assertEqual(expected_interval.lower, actual_interval.lower)
         self.assertEqual(expected_interval.upper, actual_interval.upper)
@@ -127,7 +139,7 @@ def test_function1d_to_interval_happy_path(self):
         function_1d = CoordFunction1D(naxis, delta, ref_coord)
         with pytest.raises(ValueError) as ex:
             wcs_util.CustomAxisUtil.function1d_to_interval(wcs, function_1d)
-        assert ('Invalid CoordFunction1D:' in str(ex.value))
+        assert 'Invalid CoordFunction1D:' in str(ex.value)

     def test_range1d_to_interval(self):
         # happy path
@@ -135,8 +147,7 @@ def test_range1d_to_interval(self):
         wcs = CustomTestUtil.good_wcs()
         start = RefCoord(float(0.9), float(1.1))
         end = RefCoord(float(10.9), float(11.1))
         range_1d = CoordRange1D(start, end)
-        actual_interval = wcs_util.CustomAxisUtil.range1d_to_interval(
-            wcs, range_1d)
+        actual_interval = wcs_util.CustomAxisUtil.range1d_to_interval(wcs, range_1d)
         expected_interval = Interval(1.1, 11.1)
         self.assertEqual(expected_interval.lower, actual_interval.lower)
         self.assertEqual(expected_interval.upper, actual_interval.upper)
@@ -147,7 +158,7 @@ def test_range1d_to_interval(self):
         range_1d = CoordRange1D(start, end)
         with pytest.raises(ValueError) as ex:
             wcs_util.CustomAxisUtil.range1d_to_interval(wcs, range_1d)
-        assert ('Invalid CoordRange1D:' in str(ex.value))
+        assert 'Invalid CoordRange1D:' in str(ex.value)

     def test_compute_dimension_from_range_bounds(self):
         # user_chunk = False, matches is None
@@ -166,9 +177,9 @@ def test_compute_dimension_from_range_bounds(self):
         artifacts = TypedList(Artifact, artifact)
         product_type = None
         expected_ctype = "RM"
-        actual_num_pixels = \
-            wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds(
-                artifacts, product_type, expected_ctype)
+        actual_num_pixels = wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds(
+            artifacts, product_type, expected_ctype
+        )
         expected_num_pixels = None
         self.assertEqual(expected_num_pixels, actual_num_pixels)
         # user_chunk = False, ctype not match
@@ -187,9 +198,9 @@ def test_compute_dimension_from_range_bounds(self):
         artifacts = TypedList(Artifact, artifact)
         product_type = chunk.ProductType.CALIBRATION
         expected_ctype = "RM"
-        actual_num_pixels = \
-            wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds(
-                artifacts, product_type, expected_ctype)
+        actual_num_pixels = wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds(
+            artifacts, product_type, expected_ctype
+        )
         expected_num_pixels = None
         self.assertEqual(expected_num_pixels, actual_num_pixels)
         # user_chunk = False, ptype not match
@@ -208,9 +219,9 @@ def test_compute_dimension_from_range_bounds(self):
         artifacts = TypedList(Artifact, artifact)
         product_type = chunk.ProductType.CALIBRATION
         expected_ctype = "RM"
-        actual_num_pixels = \
-            wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds(
-                artifacts, product_type, expected_ctype)
+        actual_num_pixels = wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds(
+            artifacts, product_type, expected_ctype
+        )
         expected_num_pixels = None
         self.assertEqual(expected_num_pixels, actual_num_pixels)
         # user_chunk = False, atype not match
@@ -229,9 +240,9 @@ def test_compute_dimension_from_range_bounds(self):
         artifacts = TypedList(Artifact, artifact)
         product_type = chunk.ProductType.CALIBRATION
         expected_ctype = "RM"
-        actual_num_pixels = \
-            wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds(
-                artifacts, product_type, expected_ctype)
+        actual_num_pixels = wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds(
+            artifacts, product_type, expected_ctype
+        )
         expected_num_pixels = None
         self.assertEqual(expected_num_pixels, actual_num_pixels)
         # user_chunk = True, current_type != expected_ctype
@@ -251,9 +262,8 @@ def test_compute_dimension_from_range_bounds(self):
         product_type = chunk.ProductType.CALIBRATION
         expected_ctype = "FARADAY"
         with pytest.raises(ValueError) as ex:
-            wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds(
-                artifacts, product_type, expected_ctype)
-        assert ('CTYPE must be
the same across all Artifacts' in str(ex.value)) + wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds(artifacts, product_type, expected_ctype) + assert 'CTYPE must be the same across all Artifacts' in str(ex.value) # user_chunk = True, get_num_pixels: range is not None test_chunk = Chunk() test_chunk.product_type = chunk.ProductType.CALIBRATION @@ -270,9 +280,9 @@ def test_compute_dimension_from_range_bounds(self): artifacts = TypedList(Artifact, artifact) product_type = chunk.ProductType.CALIBRATION expected_ctype = "RM" - actual_num_pixels = \ - wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds( - artifacts, product_type, expected_ctype) + actual_num_pixels = wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds( + artifacts, product_type, expected_ctype + ) expected_num_pixels = 10 self.assertEqual(expected_num_pixels, actual_num_pixels) # user_chunk = True, get_num_pixels: bounds with 3 samples that @@ -292,9 +302,9 @@ def test_compute_dimension_from_range_bounds(self): artifacts = TypedList(Artifact, artifact) product_type = chunk.ProductType.CALIBRATION expected_ctype = "RM" - actual_num_pixels = \ - wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds( - artifacts, product_type, expected_ctype) + actual_num_pixels = wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds( + artifacts, product_type, expected_ctype + ) expected_num_pixels = 11 self.assertEqual(expected_num_pixels, actual_num_pixels) # user_chunk = True, range = None, bounds = None, use_func and @@ -314,9 +324,9 @@ def test_compute_dimension_from_range_bounds(self): artifacts = TypedList(Artifact, artifact) product_type = chunk.ProductType.CALIBRATION expected_ctype = "RM" - actual_num_pixels = \ - wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds( - artifacts, product_type, expected_ctype) + actual_num_pixels = wcs_util.CustomAxisUtil.compute_dimension_from_range_bounds( + artifacts, product_type, expected_ctype + ) expected_num_pixels = None self.assertEqual(expected_num_pixels, actual_num_pixels) @@ -327,7 +337,8 @@ def test_compute_dimension_from_wcs(self): product_type = None expected_ctype = None actual_dimension = wcs_util.CustomAxisUtil.compute_dimension_from_wcs( - bounds, artifacts, product_type, expected_ctype) + bounds, artifacts, product_type, expected_ctype + ) expected_dimension = None self.assertEqual(expected_dimension, actual_dimension) # bounds is not None, user_chunk = True, current_type != expected_ctype @@ -348,9 +359,8 @@ def test_compute_dimension_from_wcs(self): product_type = chunk.ProductType.CALIBRATION expected_ctype = "FARADAY" with pytest.raises(ValueError) as ex: - wcs_util.CustomAxisUtil.compute_dimension_from_wcs( - bounds, artifacts, product_type, expected_ctype) - assert ('CTYPE must be the same across all Artifacts' in str(ex.value)) + wcs_util.CustomAxisUtil.compute_dimension_from_wcs(bounds, artifacts, product_type, expected_ctype) + assert 'CTYPE must be the same across all Artifacts' in str(ex.value) # bounds is not None, user_chunk = True, current_type is not None and # current_type == expected_ctype, ss >= scale, num = 1 bounds = Interval(1.1, 11.1) @@ -370,7 +380,8 @@ def test_compute_dimension_from_wcs(self): product_type = chunk.ProductType.CALIBRATION expected_ctype = "RM" actual_dimension = wcs_util.CustomAxisUtil.compute_dimension_from_wcs( - bounds, artifacts, product_type, expected_ctype) + bounds, artifacts, product_type, expected_ctype + ) expected_dimension = 200 self.assertEqual(expected_dimension, actual_dimension) # 
bounds is not None, user_chunk = False, sw = None @@ -390,7 +401,8 @@ def test_compute_dimension_from_wcs(self): product_type = None expected_ctype = "RM" actual_dimension = wcs_util.CustomAxisUtil.compute_dimension_from_wcs( - bounds, artifacts, product_type, expected_ctype) + bounds, artifacts, product_type, expected_ctype + ) expected_dimension = None self.assertEqual(expected_dimension, actual_dimension) # bounds is not None, user_chunk = True, current_type is not None and @@ -415,7 +427,8 @@ def test_compute_dimension_from_wcs(self): product_type = chunk.ProductType.CALIBRATION expected_ctype = "RM" actual_dimension = wcs_util.CustomAxisUtil.compute_dimension_from_wcs( - bounds, artifacts, product_type, expected_ctype) + bounds, artifacts, product_type, expected_ctype + ) expected_dimension = 500 self.assertEqual(expected_dimension, actual_dimension) @@ -436,8 +449,7 @@ def test_compute_bounds(self): artifacts = TypedList(Artifact, artifact) product_type = None expected_ctype = "RM" - actual_bounds = wcs_util.CustomAxisUtil.compute_bounds( - artifacts, product_type, expected_ctype) + actual_bounds = wcs_util.CustomAxisUtil.compute_bounds(artifacts, product_type, expected_ctype) expected_bounds = None self.assertEqual(expected_bounds, actual_bounds) # user_chunk = True, current_type != expected_ctype @@ -457,9 +469,8 @@ def test_compute_bounds(self): product_type = chunk.ProductType.CALIBRATION expected_ctype = "FARADAY" with pytest.raises(ValueError) as ex: - wcs_util.CustomAxisUtil.compute_bounds( - artifacts, product_type, expected_ctype) - assert ('CTYPE must be the same across all Artifacts' in str(ex.value)) + wcs_util.CustomAxisUtil.compute_bounds(artifacts, product_type, expected_ctype) + assert 'CTYPE must be the same across all Artifacts' in str(ex.value) # user_chunk = True, range is not None test_chunk = Chunk() test_chunk.product_type = chunk.ProductType.CALIBRATION @@ -476,8 +487,7 @@ def test_compute_bounds(self): artifacts = TypedList(Artifact, artifact) product_type = chunk.ProductType.CALIBRATION expected_ctype = "RM" - actual_interval = wcs_util.CustomAxisUtil.compute_bounds( - artifacts, product_type, expected_ctype) + actual_interval = wcs_util.CustomAxisUtil.compute_bounds(artifacts, product_type, expected_ctype) expected_interval = Interval(1.1, 11.1) self.assertEqual(expected_interval.lower, actual_interval.lower) self.assertEqual(expected_interval.upper, actual_interval.upper) @@ -498,8 +508,7 @@ def test_compute_bounds(self): artifacts = TypedList(Artifact, artifact) product_type = chunk.ProductType.CALIBRATION expected_ctype = "RM" - actual_interval = wcs_util.CustomAxisUtil.compute_bounds( - artifacts, product_type, expected_ctype) + actual_interval = wcs_util.CustomAxisUtil.compute_bounds(artifacts, product_type, expected_ctype) expected_interval = Interval(-1.2, 11.2) self.assertEqual(expected_interval.lower, actual_interval.lower) self.assertEqual(expected_interval.upper, actual_interval.upper) @@ -519,8 +528,7 @@ def test_compute_bounds(self): artifacts = TypedList(Artifact, artifact) product_type = chunk.ProductType.CALIBRATION expected_ctype = "RM" - actual_interval = wcs_util.CustomAxisUtil.compute_bounds( - artifacts, product_type, expected_ctype) + actual_interval = wcs_util.CustomAxisUtil.compute_bounds(artifacts, product_type, expected_ctype) expected_interval = Interval(-49.5, 19950.5) self.assertEqual(expected_interval.lower, actual_interval.lower) self.assertEqual(expected_interval.upper, actual_interval.upper) @@ -686,7 +694,7 @@ def 
test_compute(self): artifacts = TypedList(Artifact, artifact) with pytest.raises(ValueError) as ex: actual_axis = wcs_util.CustomAxisUtil.compute(artifacts) - assert ('Unsupported CTYPE:' in str(ex.value)) + assert 'Unsupported CTYPE:' in str(ex.value) # _choose_product returns Artifact.product (SCIENCE), # user_chunk = True, Chunk.custom is not None # first_ctype == Chunk.custom.axis.axis.ctype @@ -708,16 +716,13 @@ def test_compute(self): expected_samples = [expected_sample] expected_bounds = Interval(-49.5, 19950.5, expected_samples) expected_dimension = 200 - expected_axis = plane.CustomAxis(expected_ctype, expected_bounds, - expected_dimension) + expected_axis = plane.CustomAxis(expected_ctype, expected_bounds, expected_dimension) actual_axis = wcs_util.CustomAxisUtil.compute(artifacts) self.assertEqual(expected_axis.ctype, actual_axis.ctype) self.assertEqual(expected_axis.bounds.lower, actual_axis.bounds.lower) self.assertEqual(expected_axis.bounds.upper, actual_axis.bounds.upper) - self.assertEqual(expected_axis.bounds.samples[0].lower, - actual_axis.bounds.samples[0].lower) - self.assertEqual(expected_axis.bounds.samples[0].upper, - actual_axis.bounds.samples[0].upper) + self.assertEqual(expected_axis.bounds.samples[0].lower, actual_axis.bounds.samples[0].lower) + self.assertEqual(expected_axis.bounds.samples[0].upper, actual_axis.bounds.samples[0].upper) self.assertEqual(expected_axis.dimension, actual_axis.dimension) # _choose_product returns Artifact.product (SCIENCE), # user_chunk = True, Chunk.custom is not None @@ -744,11 +749,10 @@ def test_compute(self): expected_samples = [expected_sample] expected_bounds = Interval(-49.5, 19950.5, expected_samples) expected_dimension = 200 - expected_axis = plane.CustomAxis(expected_ctype, expected_bounds, - expected_dimension) + expected_axis = plane.CustomAxis(expected_ctype, expected_bounds, expected_dimension) with pytest.raises(ValueError) as ex: actual_axis = wcs_util.CustomAxisUtil.compute(artifacts) - assert ('CTYPE must be the same across all Artifacts' in str(ex.value)) + assert 'CTYPE must be the same across all Artifacts' in str(ex.value) # Supporting Classes for generating test data @@ -784,8 +788,7 @@ def good_wcs_with_range(): sx = float(54321.0) nx = 200 ds = float(0.01) - return CustomTestUtil.get_test_function_with_range( - ctype, unit, px, sx, nx, ds) + return CustomTestUtil.get_test_function_with_range(ctype, unit, px, sx, nx, ds) @staticmethod def good_wcs_with_bounds_3_samples(): @@ -795,8 +798,7 @@ def good_wcs_with_bounds_3_samples(): sx = float(54321.0) nx = 200 ds = float(0.01) - return CustomTestUtil.get_test_function_with_bounds_3_samples( - ctype, unit, px, sx, nx, ds) + return CustomTestUtil.get_test_function_with_bounds_3_samples(ctype, unit, px, sx, nx, ds) @staticmethod def good_wcs_with_function(): @@ -806,8 +808,7 @@ def good_wcs_with_function(): sx = float(1.0) nx = 200 ds = float(0.01) - return CustomTestUtil.get_test_function_with_function( - ctype, unit, px, sx, nx, ds) + return CustomTestUtil.get_test_function_with_function(ctype, unit, px, sx, nx, ds) @staticmethod def bad_ctype_wcs(): @@ -835,8 +836,7 @@ def bad_delta(): axis_1d = CoordAxis1D(wcs.Axis("RM", "rad/m**2")) # delta < 0.0 is bad ref_coord = RefCoord(float(1.0), float(2.0)) - axis_1d.function = CoordFunction1D( - int(100), -0.01, ref_coord) + axis_1d.function = CoordFunction1D(int(100), -0.01, ref_coord) return chunk.CustomWCS(axis_1d) @@ -892,8 +892,7 @@ def get_test_function_with_bounds_3_samples(ctype, unit, px, sx, nx, ds): start = 
RefCoord(float(-0.9), float(-1.2)) end = RefCoord(float(0.6), float(0.2)) b_range_3 = CoordRange1D(start, end) - samples = caom_util.TypedList(CoordRange1D, b_range_1, b_range_2, - b_range_3) + samples = caom_util.TypedList(CoordRange1D, b_range_1, b_range_2, b_range_3) bounds = CoordBounds1D(samples) axis_1d = wcs.CoordAxis1D(wcs.Axis(ctype, unit), error, range, bounds) ref_coord = wcs.RefCoord(px, sx) @@ -910,8 +909,7 @@ def get_test_function_with_function(ctype, unit, px, sx, nx, ds): delta = float(2.5) ref_coord = wcs.RefCoord(float(1.0), float(2.0)) function = CoordFunction1D(naxis, delta, ref_coord) - axis_1d = CoordAxis1D(wcs.Axis(ctype, unit), error, range, bounds, - function) + axis_1d = CoordAxis1D(wcs.Axis(ctype, unit), error, range, bounds, function) ref_coord = RefCoord(px, sx) axis_1d.function = CoordFunction1D(nx, ds, ref_coord) custom_wcs = chunk.CustomWCS(axis_1d) diff --git a/caom2utils/caom2utils/tests/test_data_util.py b/caom2utils/caom2utils/tests/test_data_util.py index 14533d95..68748a6c 100644 --- a/caom2utils/caom2utils/tests/test_data_util.py +++ b/caom2utils/caom2utils/tests/test_data_util.py @@ -100,9 +100,7 @@ def test_get_file_type(): 'abc.fits': 'application/fits', } for key, value in vals.items(): - assert ( - data_util.get_file_type(key) == value - ), f'wrong type {data_util.get_file_type(key)} for {key}' + assert data_util.get_file_type(key) == value, f'wrong type {data_util.get_file_type(key)} for {key}' @patch('caom2utils.data_util.StorageInventoryClient') @@ -115,8 +113,7 @@ def test_storage_inventory_client(cadc_client_mock): test_fqn.unlink() def info_si_mock(ignore): - return FileInfo(id=test_uri, file_type='application/fits', - md5sum='abc', size=42) + return FileInfo(id=test_uri, file_type='application/fits', md5sum='abc', size=42) def get_si_mock(ignore2, dest, **kwargs): fhead = kwargs.get('fhead') @@ -152,30 +149,18 @@ def get_si_mock(ignore2, dest, **kwargs): # delete test_wrapper.remove(test_uri) assert cadc_client_mock.return_value.cadcremove.called, 'remove call' - cadc_client_mock.return_value.cadcremove.assert_called_with( - test_uri - ), 'wrong remove args' - - cadc_client_mock.return_value.cadcinfo.side_effect = ( - exceptions.UnexpectedException('cadcinfo') - ) - cadc_client_mock.return_value.cadcget.side_effect = ( - exceptions.UnexpectedException('cadcget') - ) - cadc_client_mock.return_value.cadcput.side_effect = ( - exceptions.UnexpectedException('cadcput') - ) + cadc_client_mock.return_value.cadcremove.assert_called_with(test_uri), 'wrong remove args' + + cadc_client_mock.return_value.cadcinfo.side_effect = exceptions.UnexpectedException('cadcinfo') + cadc_client_mock.return_value.cadcget.side_effect = exceptions.UnexpectedException('cadcget') + cadc_client_mock.return_value.cadcput.side_effect = exceptions.UnexpectedException('cadcput') _fail_mock(test_wrapper, test_uri, test_working_directory) - cadc_client_mock.return_value.cadcremove.side_effect = ( - exceptions.UnexpectedException('cadcremove') - ) + cadc_client_mock.return_value.cadcremove.side_effect = exceptions.UnexpectedException('cadcremove') with pytest.raises(exceptions.UnexpectedException): test_wrapper.remove(test_uri) - cadc_client_mock.return_value.cadcinfo.side_effect = ( - exceptions.NotFoundException('cadcinfo') - ) + cadc_client_mock.return_value.cadcinfo.side_effect = exceptions.NotFoundException('cadcinfo') test_result = test_wrapper.info(test_uri) assert test_result is None, 'expected when not found' @@ -187,6 +172,7 @@ def test_si_tracking(client_mock): def 
_get(working_directory, uri): raise exceptions.UnexpectedException + client_mock.return_value.cadcget.side_effect = _get client_mock.return_value.cadcremove.side_effect = Mock() @@ -197,16 +183,12 @@ def _get(working_directory, uri): with pytest.raises(exceptions.UnexpectedException): test_wrapper.get('/tmp', 'cadc:TEST/abc.fits') assert test_metrics.observe_failure.called, 'expect observe_failure call' - test_metrics.observe_failure.assert_called_with( - 'get', 'si', 'cadc:TEST/abc.fits' - ) + test_metrics.observe_failure.assert_called_with('get', 'si', 'cadc:TEST/abc.fits') # test metrics success test_wrapper.remove('cadc:TEST/abc.fits') assert test_metrics.observe.called, 'expect observe call' - test_metrics.observe.assert_called_with( - ANY, ANY, None, 'remove', 'si', 'cadc:TEST/abc.fits' - ) + test_metrics.observe.assert_called_with(ANY, ANY, None, 'remove', 'si', 'cadc:TEST/abc.fits') def test_clean_headers(): @@ -243,16 +225,10 @@ def test_unicode_decode_error(): def test_get_file_encoding(): - test_subjects = { - 'abc.fits': None, - 'abc.fits.gz': 'gzip', - 'abc.fits.fz': 'x-fits' - } + test_subjects = {'abc.fits': None, 'abc.fits.gz': 'gzip', 'abc.fits.fz': 'x-fits'} for test_subject in test_subjects.keys(): test_result = data_util.get_file_encoding(test_subject) - assert ( - test_result == test_subjects.get(test_subject) - ), f'got wrong extension {test_result} for {test_subject}' + assert test_result == test_subjects.get(test_subject), f'got wrong extension {test_result} for {test_subject}' def _check_get_result(test_fqn): diff --git a/caom2utils/caom2utils/tests/test_fits2caom2.py b/caom2utils/caom2utils/tests/test_fits2caom2.py index e248f3d7..42390ccd 100755 --- a/caom2utils/caom2utils/tests/test_fits2caom2.py +++ b/caom2utils/caom2utils/tests/test_fits2caom2.py @@ -120,8 +120,7 @@ class MyExitError(Exception): pass -EXPECTED_ENERGY_XML = \ - ''' +EXPECTED_ENERGY_XML = ''' @@ -147,12 +146,10 @@ class MyExitError(Exception): def test_augment_energy(): bp = ObsBlueprint(energy_axis=1) test_fitsparser = FitsParser(sample_file_4axes, bp) - artifact = Artifact('ad:{}/{}'.format('TEST', sample_file_4axes), - ProductType.SCIENCE, ReleaseType.DATA) + artifact = Artifact('ad:{}/{}'.format('TEST', sample_file_4axes), ProductType.SCIENCE, ReleaseType.DATA) test_fitsparser.augment_artifact(artifact) energy = artifact.parts['0'].chunks[0].energy - ex = _get_from_str_xml(EXPECTED_ENERGY_XML, - ObservationReader()._get_spectral_wcs, 'energy') + ex = _get_from_str_xml(EXPECTED_ENERGY_XML, ObservationReader()._get_spectral_wcs, 'energy') result = get_differences(ex, energy) assert result is None, result @@ -187,6 +184,7 @@ def test_hdf5_wcs_parser_set_wcs(): ]: # limit the cases where h5py needs to be installed import h5py + temp = h5py.File(test_fqn) test_subject = Hdf5Parser(bp, test_uri, temp) assert test_subject is not None, 'expect a result' @@ -200,8 +198,7 @@ def test_hdf5_wcs_parser_set_wcs(): def test_augment_failure(): bp = ObsBlueprint() test_fitsparser = FitsParser(sample_file_4axes, bp) - artifact = Artifact('ad:{}/{}'.format('TEST', sample_file_4axes), - ProductType.SCIENCE, ReleaseType.DATA) + artifact = Artifact('ad:{}/{}'.format('TEST', sample_file_4axes), ProductType.SCIENCE, ReleaseType.DATA) with pytest.raises(TypeError): test_fitsparser.augment_artifact(artifact) @@ -216,19 +213,15 @@ def test_augment_artifact_energy_from_blueprint(): test_blueprint.set('Chunk.energy.axis.function.refCoord.val', '-60000.0') test_blueprint.set('Chunk.energy.axis.function.delta', 
'-824.46002') test_blueprint.set('Chunk.energy.axis.function.naxis', '1') - test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, - uri='ad:TEST/test_blueprint') + test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, uri='ad:TEST/test_blueprint') test_chunk = Chunk() test_fitsparser._try_energy_with_blueprint(test_chunk, 0) - ex = _get_from_str_xml(EXPECTED_ENERGY_XML, - ObservationReader()._get_spectral_wcs, - 'energy') + ex = _get_from_str_xml(EXPECTED_ENERGY_XML, ObservationReader()._get_spectral_wcs, 'energy') result = get_differences(ex, test_chunk.energy) assert result is None, result -EXPECTED_POLARIZATION_XML = \ - ''' +EXPECTED_POLARIZATION_XML = ''' @@ -249,15 +242,11 @@ def test_augment_artifact_energy_from_blueprint(): def test_augment_polarization(): - test_fitsparser = FitsParser(sample_file_4axes, - ObsBlueprint(polarization_axis=1)) - artifact = Artifact('ad:{}/{}'.format('TEST', sample_file_4axes), - ProductType.SCIENCE, ReleaseType.DATA) + test_fitsparser = FitsParser(sample_file_4axes, ObsBlueprint(polarization_axis=1)) + artifact = Artifact('ad:{}/{}'.format('TEST', sample_file_4axes), ProductType.SCIENCE, ReleaseType.DATA) test_fitsparser.augment_artifact(artifact) polarization = artifact.parts['0'].chunks[0].polarization - ex = _get_from_str_xml(EXPECTED_POLARIZATION_XML, - ObservationReader()._get_polarization_wcs, - 'polarization') + ex = _get_from_str_xml(EXPECTED_POLARIZATION_XML, ObservationReader()._get_polarization_wcs, 'polarization') result = get_differences(ex, polarization) assert result is None, result @@ -270,19 +259,15 @@ def test_augment_artifact_polarization_from_blueprint(): test_blueprint.set('Chunk.polarization.axis.function.refCoord.val', '1.0') test_blueprint.set('Chunk.polarization.axis.function.delta', '1.0') test_blueprint.set('Chunk.polarization.axis.function.naxis', '1') - test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, - uri='test_parser') + test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, uri='test_parser') test_chunk = Chunk() test_fitsparser._try_polarization_with_blueprint(test_chunk, 0) - ex = _get_from_str_xml(EXPECTED_POLARIZATION_XML, - ObservationReader()._get_polarization_wcs, - 'polarization') + ex = _get_from_str_xml(EXPECTED_POLARIZATION_XML, ObservationReader()._get_polarization_wcs, 'polarization') result = get_differences(ex, test_chunk.polarization) assert result is None -EXPECTED_POSITION_XML = \ - ''' +EXPECTED_POSITION_XML = ''' @@ -322,8 +307,7 @@ def test_augment_artifact_polarization_from_blueprint(): def test_augment_artifact(): test_blueprint = ObsBlueprint(position_axes=(1, 2)) test_fitsparser = FitsParser(sample_file_4axes, test_blueprint) - artifact = Artifact('ad:{}/{}'.format('TEST', sample_file_4axes), - ProductType.SCIENCE, ReleaseType.DATA) + artifact = Artifact('ad:{}/{}'.format('TEST', sample_file_4axes), ProductType.SCIENCE, ReleaseType.DATA) test_fitsparser.augment_artifact(artifact) assert artifact.parts is not None assert len(artifact.parts) == 1 @@ -331,8 +315,7 @@ def test_augment_artifact(): test_chunk = test_part.chunks.pop() assert test_chunk is not None assert test_chunk.position is not None - ex = _get_from_str_xml(EXPECTED_POSITION_XML, - ObservationReader()._get_spatial_wcs, 'position') + ex = _get_from_str_xml(EXPECTED_POSITION_XML, ObservationReader()._get_spatial_wcs, 'position') result = get_differences(ex, test_chunk.position) assert result is None @@ -352,23 +335,18 @@ def test_augment_artifact_position_from_blueprint(): 
test_blueprint.set('Chunk.position.axis.function.dimension.naxis1', '1') test_blueprint.set('Chunk.position.axis.function.dimension.naxis2', '1') test_blueprint.set('Chunk.position.axis.range.start.coord1.pix', '513.0') - test_blueprint.set('Chunk.position.axis.range.start.coord1.val', - '128.7499990027') + test_blueprint.set('Chunk.position.axis.range.start.coord1.val', '128.7499990027') test_blueprint.set('Chunk.position.axis.range.start.coord2.pix', '513.0') - test_blueprint.set('Chunk.position.axis.range.start.coord2.val', - '-0.9999999922536') - test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, - uri='test_parser') + test_blueprint.set('Chunk.position.axis.range.start.coord2.val', '-0.9999999922536') + test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, uri='test_parser') test_chunk = Chunk() test_fitsparser._try_position_with_blueprint(test_chunk, 0) - ex = _get_from_str_xml(EXPECTED_POSITION_XML, - ObservationReader()._get_spatial_wcs, 'position') + ex = _get_from_str_xml(EXPECTED_POSITION_XML, ObservationReader()._get_spatial_wcs, 'position') result = get_differences(ex, test_chunk.position) assert result is None -EXPECTED_CFHT_WIRCAM_RAW_GUIDE_CUBE_TIME = \ - ''' +EXPECTED_CFHT_WIRCAM_RAW_GUIDE_CUBE_TIME = ''' @@ -397,10 +375,8 @@ def test_augment_artifact_position_from_blueprint(): def test_augment_artifact_time(): - test_fitsparser = FitsParser(sample_file_time_axes, - ObsBlueprint(time_axis=1)) - artifact = Artifact('ad:{}/{}'.format('TEST', sample_file_time_axes), - ProductType.SCIENCE, ReleaseType.DATA) + test_fitsparser = FitsParser(sample_file_time_axes, ObsBlueprint(time_axis=1)) + artifact = Artifact('ad:{}/{}'.format('TEST', sample_file_time_axes), ProductType.SCIENCE, ReleaseType.DATA) test_fitsparser.augment_artifact(artifact) assert artifact.parts is not None assert len(artifact.parts) == 6 @@ -408,8 +384,7 @@ def test_augment_artifact_time(): test_chunk = test_part.chunks.pop() assert test_chunk is not None assert test_chunk.time is not None - ex = _get_from_str_xml(EXPECTED_CFHT_WIRCAM_RAW_GUIDE_CUBE_TIME, - ObservationReader()._get_temporal_wcs, 'time') + ex = _get_from_str_xml(EXPECTED_CFHT_WIRCAM_RAW_GUIDE_CUBE_TIME, ObservationReader()._get_temporal_wcs, 'time') result = get_differences(ex, test_chunk.time) assert result is None @@ -425,24 +400,20 @@ def test_augment_artifact_time_from_blueprint(): test_blueprint.set('Chunk.time.axis.error.syser', '1e-07') test_blueprint.set('Chunk.time.axis.error.rnder', '1e-07') test_blueprint.set('Chunk.time.axis.function.refCoord.pix', '0.5') - test_blueprint.set('Chunk.time.axis.function.refCoord.val', - '56789.4298069') + test_blueprint.set('Chunk.time.axis.function.refCoord.val', '56789.4298069') test_blueprint.set('Chunk.time.axis.function.delta', '2.31481e-07') test_blueprint.set('Chunk.time.axis.function.naxis', '1') - test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, - uri='ad:TEST/test_blueprint') + test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, uri='ad:TEST/test_blueprint') test_chunk = Chunk() test_fitsparser._try_time_with_blueprint(test_chunk, 0) - ex = _get_from_str_xml(EXPECTED_CFHT_WIRCAM_RAW_GUIDE_CUBE_TIME, - ObservationReader()._get_temporal_wcs, 'time') + ex = _get_from_str_xml(EXPECTED_CFHT_WIRCAM_RAW_GUIDE_CUBE_TIME, ObservationReader()._get_temporal_wcs, 'time') result = get_differences(ex, test_chunk.time) assert result is None def test_get_wcs_values(): w = get_test_wcs(sample_file_4axes) - test_parser = 
FitsWcsParser(get_test_header(sample_file_4axes)[0].header, - sample_file_4axes, 0) + test_parser = FitsWcsParser(get_test_header(sample_file_4axes)[0].header, sample_file_4axes, 0) result = test_parser._sanitize(w.wcs.equinox) assert result is None if hasattr(w, 'pixel_shape'): @@ -457,10 +428,8 @@ def test_get_wcs_values(): def test_wcs_parser_augment_failures(): - test_parser = FitsWcsParser(get_test_header(sample_file_4axes)[0].header, - sample_file_4axes, 0) - test_obs = SimpleObservation('collection', 'MA1_DRAO-ST', - Algorithm('exposure')) + test_parser = FitsWcsParser(get_test_header(sample_file_4axes)[0].header, sample_file_4axes, 0) + test_obs = SimpleObservation('collection', 'MA1_DRAO-ST', Algorithm('exposure')) with pytest.raises(ValueError): test_parser.augment_custom(test_obs) @@ -562,38 +531,29 @@ def get_test_wcs(test_file): def _get_from_str_xml(string_xml, get_func, element_tag): - etree.register_namespace( - 'caom2', 'http://www.opencadc.org/caom2/xml/v2.3') + etree.register_namespace('caom2', 'http://www.opencadc.org/caom2/xml/v2.3') parent_element = etree.fromstring(string_xml) ns = parent_element.nsmap['caom2'] act_obj = get_func(element_tag, parent_element, ns, False) return act_obj -@patch('sys.exit', Mock(side_effect=[MyExitError, MyExitError, MyExitError, - MyExitError, MyExitError, - MyExitError])) +@patch('sys.exit', Mock(side_effect=[MyExitError, MyExitError, MyExitError, MyExitError, MyExitError, MyExitError])) def test_help(): - """ Tests the helper displays for commands in main""" + """Tests the helper displays for commands in main""" # expected helper messages - with open(os.path.join(TESTDATA_DIR, 'bad_product_id.txt')) \ - as myfile: + with open(os.path.join(TESTDATA_DIR, 'bad_product_id.txt')) as myfile: bad_product_id = myfile.read() - with open(os.path.join(TESTDATA_DIR, 'missing_product_id.txt')) \ - as myfile: + with open(os.path.join(TESTDATA_DIR, 'missing_product_id.txt')) as myfile: missing_product_id = myfile.read() - with open(os.path.join(TESTDATA_DIR, 'too_few_arguments_help.txt')) \ - as myfile: + with open(os.path.join(TESTDATA_DIR, 'too_few_arguments_help.txt')) as myfile: too_few_arguments_usage = myfile.read() with open(os.path.join(TESTDATA_DIR, 'help.txt')) as myfile: usage = myfile.read() - with open(os.path.join(TESTDATA_DIR, 'missing_observation_help.txt'))\ - as myfile: + with open(os.path.join(TESTDATA_DIR, 'missing_observation_help.txt')) as myfile: myfile.read() - with open(os.path.join(TESTDATA_DIR, - 'missing_positional_argument_help.txt')) \ - as myfile: + with open(os.path.join(TESTDATA_DIR, 'missing_positional_argument_help.txt')) as myfile: myfile.read() # too few arguments error message when running python3 @@ -602,23 +562,21 @@ def test_help(): with pytest.raises(MyExitError): main_app() if stdout_mock.getvalue(): - assert (too_few_arguments_usage == stdout_mock.getvalue()) + assert too_few_arguments_usage == stdout_mock.getvalue() # --help with patch('sys.stdout', new_callable=StringIO) as stdout_mock: sys.argv = ["fits2caom2", "-h"] with pytest.raises(MyExitError): main_app() - expected = stdout_mock.getvalue().replace( - 'options:', 'optional arguments:').strip('\n') + expected = stdout_mock.getvalue().replace('options:', 'optional arguments:').strip('\n') assert usage.strip('\n') == expected # missing productID when plane count is wrong with patch('sys.stderr', new_callable=StringIO) as stderr_mock: with patch('sys.stdout', new_callable=StringIO) as stdout_mock: bad_product_file = os.path.join(TESTDATA_DIR, 
'bad_product_id.xml') - sys.argv = ["fits2caom2", "--in", bad_product_file, - "ad:CGPS/CGPS_MA1_HI_line_image.fits"] + sys.argv = ["fits2caom2", "--in", bad_product_file, "ad:CGPS/CGPS_MA1_HI_line_image.fits"] with pytest.raises(MyExitError): main_app() # inconsistencies between Python 3.7 and later versions. @@ -628,12 +586,18 @@ def test_help(): # missing productID when blueprint doesn't have one either with patch('sys.stdout', new_callable=StringIO) as stdout_mock: - with patch('sys.stderr', new_callable=StringIO) as stderr_mock, \ - patch('caom2utils.data_util.StorageClientWrapper'): - sys.argv = ["fits2caom2", "--observation", "test_collection_id", - "test_observation_id", - "ad:CGPS/CGPS_MA1_HI_line_image.fits", - "--resource-id", "ivo://cadc.nrc.ca/uvic/minoc"] + with patch('sys.stderr', new_callable=StringIO) as stderr_mock, patch( + 'caom2utils.data_util.StorageClientWrapper' + ): + sys.argv = [ + "fits2caom2", + "--observation", + "test_collection_id", + "test_observation_id", + "ad:CGPS/CGPS_MA1_HI_line_image.fits", + "--resource-id", + "ivo://cadc.nrc.ca/uvic/minoc", + ] with pytest.raises(MyExitError): main_app() # inconsistencies between Python 3.7 and later versions. @@ -660,11 +624,12 @@ def test_help(): """ -EXPECTED_OBS_XML = """ - + collection MA1_DRAO-ST @@ -717,6 +682,7 @@ def test_help(): """ +) def test_augment_observation(): @@ -725,12 +691,9 @@ def test_augment_observation(): test_obs_blueprint.set('Observation.target.standard', False) test_obs_blueprint.set('Observation.telescope.name', 'DRAO-ST') test_obs_blueprint.set('Observation.instrument.name', 'DRAO-ST') - test_obs_blueprint.set('Observation.telescope.geoLocationX', - '-2100330.87517') - test_obs_blueprint.set('Observation.telescope.geoLocationY', - '-3694247.82445') - test_obs_blueprint.set('Observation.telescope.geoLocationZ', - '4741018.33097') + test_obs_blueprint.set('Observation.telescope.geoLocationX', '-2100330.87517') + test_obs_blueprint.set('Observation.telescope.geoLocationY', '-3694247.82445') + test_obs_blueprint.set('Observation.telescope.geoLocationZ', '4741018.33097') test_obs_blueprint.set('Plane.dataProductType', 'cube') test_obs_blueprint.set('Artifact.productType', 'info') @@ -738,10 +701,8 @@ def test_augment_observation(): test_obs_blueprint.set('Plane.calibrationLevel', '2') test_fitsparser = FitsParser(sample_file_4axes_obs, test_obs_blueprint) test_fitsparser.blueprint = test_obs_blueprint - test_obs = SimpleObservation('collection', 'MA1_DRAO-ST', - Algorithm('exposure')) - test_fitsparser.augment_observation(test_obs, sample_file_4axes_uri, - product_id='HI-line') + test_obs = SimpleObservation('collection', 'MA1_DRAO-ST', Algorithm('exposure')) + test_fitsparser.augment_observation(test_obs, sample_file_4axes_uri, product_id='HI-line') assert test_obs is not None assert test_obs.planes is not None assert len(test_obs.planes) == 1 @@ -755,8 +716,7 @@ def test_augment_observation(): # results in xml output test_part.chunks.pop() output = BytesIO() - ow = ObservationWriter(False, False, "caom2", - obs_reader_writer.CAOM23_NAMESPACE) + ow = ObservationWriter(False, False, "caom2", obs_reader_writer.CAOM23_NAMESPACE) ow.write(test_obs, output) result = output.getvalue().decode('UTF-8') output.close() @@ -770,11 +730,9 @@ def test_augment_value_errors(): ob = ObsBlueprint(position_axes=(1, 2)) ob.set('Plane.productID', None) test_parser = BlueprintParser(obs_blueprint=ob) - test_obs = SimpleObservation('collection', 'MA1_DRAO-ST', - Algorithm('exposure')) + test_obs = 
SimpleObservation('collection', 'MA1_DRAO-ST', Algorithm('exposure')) with pytest.raises(ValueError): - test_parser.augment_observation( - test_obs, 'cadc:TEST/abc.fits.gz', product_id=None) + test_parser.augment_observation(test_obs, 'cadc:TEST/abc.fits.gz', product_id=None) with pytest.raises(ValueError): test_parser.augment_plane(test_obs, 'cadc:TEST/abc.fits.gz') @@ -786,8 +744,7 @@ def test_augment_value_errors(): def test_get_from_list(): test_fitsparser = FitsParser(sample_file_4axes) test_fitsparser.blueprint = ObsBlueprint() - result = test_fitsparser._get_from_list('Observation.intent', 0, - ObservationIntentType.SCIENCE) + result = test_fitsparser._get_from_list('Observation.intent', 0, ObservationIntentType.SCIENCE) assert result == ObservationIntentType.SCIENCE @@ -813,151 +770,144 @@ def test_update_fits_headers(): test_parser = FitsParser(src=[hdr1, hdr2, hdr3, hdr4, hdr5, hdr6, hdr7]) test_uri = 'ad:CFHT/1709071g.fits.gz' - update_blueprint(test_blueprint, test_uri, config={}, defaults={}, - overrides={}) - assert test_parser.blueprint._get('Observation.type') == \ - (['OBSTYPE'], None), 'unmodified blueprint' - - test_defaults = {'CTYPE1': 'RA---TAN', - 'CTYPE2': 'DEC--TAN', - 'CTYPE3': 'TIME', - 'CTYPE4': 'WAVE', - 'CDELT4': '1.2', - 'CRVAL4': '32'} - update_blueprint(test_blueprint, test_uri, config={}, - defaults=test_defaults, overrides={}) - assert test_blueprint._get('Chunk.position.axis.axis1.ctype') == \ - (['CTYPE1'], 'RA---TAN'), 'default value assigned' - assert test_blueprint._get('Chunk.position.axis.axis2.ctype') == \ - (['CTYPE2'], 'DEC--TAN'), 'default value assigned' - assert test_blueprint._get('Chunk.time.axis.axis.ctype') == \ - (['CTYPE3'], 'TIME'), 'default value assigned, value all upper case' + update_blueprint(test_blueprint, test_uri, config={}, defaults={}, overrides={}) + assert test_parser.blueprint._get('Observation.type') == (['OBSTYPE'], None), 'unmodified blueprint' + + test_defaults = { + 'CTYPE1': 'RA---TAN', + 'CTYPE2': 'DEC--TAN', + 'CTYPE3': 'TIME', + 'CTYPE4': 'WAVE', + 'CDELT4': '1.2', + 'CRVAL4': '32', + } + update_blueprint(test_blueprint, test_uri, config={}, defaults=test_defaults, overrides={}) + assert test_blueprint._get('Chunk.position.axis.axis1.ctype') == ( + ['CTYPE1'], + 'RA---TAN', + ), 'default value assigned' + assert test_blueprint._get('Chunk.position.axis.axis2.ctype') == ( + ['CTYPE2'], + 'DEC--TAN', + ), 'default value assigned' + assert test_blueprint._get('Chunk.time.axis.axis.ctype') == ( + ['CTYPE3'], + 'TIME', + ), 'default value assigned, value all upper case' # print(test_parser.blueprint) - test_defaults = {'CTYPE1': 'RA--TAN', - 'CTYPE2': 'DEC--TAN', - 'CTYPE3': 'TIME', - 'provenance.producer': 'CFHT', - 'provenance.project': 'STANDARD PIPELINE'} + test_defaults = { + 'CTYPE1': 'RA--TAN', + 'CTYPE2': 'DEC--TAN', + 'CTYPE3': 'TIME', + 'provenance.producer': 'CFHT', + 'provenance.project': 'STANDARD PIPELINE', + } test_config = load_config(java_config_file) test_overrides = load_config(override_file) - update_blueprint(test_blueprint, test_uri, test_config, - test_defaults, test_overrides) - assert test_blueprint._get('Plane.dataProductType') == \ - ([], DataProductType.IMAGE), 'default value assigned to configuration' - assert test_blueprint._get('Plane.provenance.producer') == \ - (['ORIGIN'], 'CFHT'), \ - 'default value assigned to configuration, all upper-case' - assert test_blueprint._get('Plane.provenance.project') == \ - (['ADC_ARCH'], 'STANDARD PIPELINE'), \ - 'default value assigned to 
configuration, with white-space' - assert test_blueprint._get('Observation.type') == 'OBJECT', \ - 'default value over-ridden, value all upper case' - assert test_blueprint._get( - 'Chunk.position.axis.function.refCoord.coord1.val', - 0) == '210.551666667', 'override HDU 0' - assert test_blueprint._get( - 'Chunk.position.axis.function.refCoord.coord1.val', 1) == \ - '210.551666667', 'override HDU 1' - assert test_blueprint._get( - 'Chunk.position.axis.function.refCoord.coord1.val', - 2) == '210.508333333', 'override HDU 2' - assert test_blueprint._get( - 'Chunk.position.axis.function.refCoord.coord1.val', - 3) == '210.898333333', 'override HDU 3' - assert test_blueprint._get( - 'Chunk.position.axis.function.refCoord.coord1.val', - 4) == '210.942083333', 'override HDU 4' - assert test_blueprint._get( - 'Chunk.position.axis.function.refCoord.coord1.val', - 5) == '0.000000000', 'override HDU 5' + update_blueprint(test_blueprint, test_uri, test_config, test_defaults, test_overrides) + assert test_blueprint._get('Plane.dataProductType') == ( + [], + DataProductType.IMAGE, + ), 'default value assigned to configuration' + assert test_blueprint._get('Plane.provenance.producer') == ( + ['ORIGIN'], + 'CFHT', + ), 'default value assigned to configuration, all upper-case' + assert test_blueprint._get('Plane.provenance.project') == ( + ['ADC_ARCH'], + 'STANDARD PIPELINE', + ), 'default value assigned to configuration, with white-space' + assert test_blueprint._get('Observation.type') == 'OBJECT', 'default value over-ridden, value all upper case' + assert ( + test_blueprint._get('Chunk.position.axis.function.refCoord.coord1.val', 0) == '210.551666667' + ), 'override HDU 0' + assert ( + test_blueprint._get('Chunk.position.axis.function.refCoord.coord1.val', 1) == '210.551666667' + ), 'override HDU 1' + assert ( + test_blueprint._get('Chunk.position.axis.function.refCoord.coord1.val', 2) == '210.508333333' + ), 'override HDU 2' + assert ( + test_blueprint._get('Chunk.position.axis.function.refCoord.coord1.val', 3) == '210.898333333' + ), 'override HDU 3' + assert ( + test_blueprint._get('Chunk.position.axis.function.refCoord.coord1.val', 4) == '210.942083333' + ), 'override HDU 4' + assert ( + test_blueprint._get('Chunk.position.axis.function.refCoord.coord1.val', 5) == '0.000000000' + ), 'override HDU 5' test_parser.blueprint = test_blueprint - assert test_parser._headers[0][ - 'CRVAL1'] == 210.551666667, 'override HDU 0' - assert test_parser._headers[1][ - 'CRVAL1'] == 210.551666667, 'override HDU 1' - assert test_parser._headers[2][ - 'CRVAL1'] == 210.508333333, 'override HDU 2' - assert test_parser._headers[3][ - 'CRVAL1'] == 210.898333333, 'override HDU 3' - assert test_parser._headers[4][ - 'CRVAL1'] == 210.942083333, 'override HDU 4' + assert test_parser._headers[0]['CRVAL1'] == 210.551666667, 'override HDU 0' + assert test_parser._headers[1]['CRVAL1'] == 210.551666667, 'override HDU 1' + assert test_parser._headers[2]['CRVAL1'] == 210.508333333, 'override HDU 2' + assert test_parser._headers[3]['CRVAL1'] == 210.898333333, 'override HDU 3' + assert test_parser._headers[4]['CRVAL1'] == 210.942083333, 'override HDU 4' assert test_parser._headers[5]['CRVAL1'] == 0.000000000, 'override HDU 5' - assert test_parser._headers[0][ - 'CRVAL3'] == 56789.429806900000, 'override HDU 0' + assert test_parser._headers[0]['CRVAL3'] == 56789.429806900000, 'override HDU 0' # this will fail because of DerivedObservation.members errors assert len(test_parser._errors) == 0, test_parser._errors -TEST_OVERRIDES = \ - 
{'obs.sequenceNumber': '1709071', - 'obs.intent': 'science', - 'obs.type': 'OBJECT', - 'target.standard': 'false', - 'proposal.project': '', - 'proposal.pi': 'Jean-Gabriel Cuby', - 'proposal.title': '', - 'plane.calibrationLevel': '1', - 'plane.dataRelease': '2015-02-01T00:00:00', - 'obs.metaRelease': '2014-05-12T10:18:55', - 'plane.metaRelease': '2014-05-12T10:18:55', - 'filtername': 'H.WC8201', - 'CRVAL4': '16310.000', - 'CDELT4': '2890.000', - 'resolvingPower': '5.64', - 'artifacts': { - 'ad:CFHT/1709071o.fits.fz': { - 0: {'artifact.productType': 'science', - 'artifact.contentChecksum': - 'md5:88bfd03471053a916067a4e6f80d332d', - 'CRPIX3': '0.500000000000', - 'CRVAL3': '56789.429806900000', - 'CDELT3': '0.000173611111', - 'time.resolution': '15.000000000000', - 'time.exposure': '15.000000000000', - 'NAXIS3': '3'}}, - 'ad:CFHT/1709071g.fits.gz': { - 0: {'artifact.productType': 'auxiliary', - 'artifact.contentChecksum': - 'md5:47cdd15371f82893ed384dec96240ae2', - 'CD1_1': '-0.000083333333', - 'CD1_2': '0.000000000000', - 'CD2_1': '0.000000000000', - 'CD2_2': '0.000083333333', - 'CRPIX3': '0.500000000000', - 'CRVAL3': '56789.429806900000', - 'CDELT3': '0.000000231481', - 'time.resolution': '0.020000000000', - 'time.exposure': '0.020000000000', - 'NAXIS3': '1964', - 'CRPIX1': '7.00000000', - 'CRPIX2': '7.00000000', - 'CRVAL1': '210.551666667', - 'CRVAL2': '54.526222222'}, - 1: {'CRPIX1': '7.00000000', - 'CRPIX2': '7.00000000', - 'CRVAL1': '210.551666667', - 'CRVAL2': '54.526222222'}, - 2: {'CRPIX1': '7.00000000', - 'CRPIX2': '7.00000000', - 'CRVAL1': '210.508333333', - 'CRVAL2': '54.345555556'}, - 3: {'CRPIX1': '7.00000000', - 'CRPIX2': '7.00000000', - 'CRVAL1': '210.898333333', - 'CRVAL2': '54.341916667'}, - 4: {'CRPIX1': '7.00000000', - 'CRPIX2': '7.00000000', - 'CRVAL1': '210.942083333', - 'CRVAL2': '54.446805556'}, - 5: {'CRPIX1': '7.00000000', - 'CRPIX2': '7.00000000', - 'CRVAL1': '0.000000000', - 'CRVAL2': '0.000000000'}, - 6: {'BITPIX': '0'}} - }} +TEST_OVERRIDES = { + 'obs.sequenceNumber': '1709071', + 'obs.intent': 'science', + 'obs.type': 'OBJECT', + 'target.standard': 'false', + 'proposal.project': '', + 'proposal.pi': 'Jean-Gabriel Cuby', + 'proposal.title': '', + 'plane.calibrationLevel': '1', + 'plane.dataRelease': '2015-02-01T00:00:00', + 'obs.metaRelease': '2014-05-12T10:18:55', + 'plane.metaRelease': '2014-05-12T10:18:55', + 'filtername': 'H.WC8201', + 'CRVAL4': '16310.000', + 'CDELT4': '2890.000', + 'resolvingPower': '5.64', + 'artifacts': { + 'ad:CFHT/1709071o.fits.fz': { + 0: { + 'artifact.productType': 'science', + 'artifact.contentChecksum': 'md5:88bfd03471053a916067a4e6f80d332d', + 'CRPIX3': '0.500000000000', + 'CRVAL3': '56789.429806900000', + 'CDELT3': '0.000173611111', + 'time.resolution': '15.000000000000', + 'time.exposure': '15.000000000000', + 'NAXIS3': '3', + } + }, + 'ad:CFHT/1709071g.fits.gz': { + 0: { + 'artifact.productType': 'auxiliary', + 'artifact.contentChecksum': 'md5:47cdd15371f82893ed384dec96240ae2', + 'CD1_1': '-0.000083333333', + 'CD1_2': '0.000000000000', + 'CD2_1': '0.000000000000', + 'CD2_2': '0.000083333333', + 'CRPIX3': '0.500000000000', + 'CRVAL3': '56789.429806900000', + 'CDELT3': '0.000000231481', + 'time.resolution': '0.020000000000', + 'time.exposure': '0.020000000000', + 'NAXIS3': '1964', + 'CRPIX1': '7.00000000', + 'CRPIX2': '7.00000000', + 'CRVAL1': '210.551666667', + 'CRVAL2': '54.526222222', + }, + 1: {'CRPIX1': '7.00000000', 'CRPIX2': '7.00000000', 'CRVAL1': '210.551666667', 'CRVAL2': '54.526222222'}, + 2: {'CRPIX1': '7.00000000', 
'CRPIX2': '7.00000000', 'CRVAL1': '210.508333333', 'CRVAL2': '54.345555556'}, + 3: {'CRPIX1': '7.00000000', 'CRPIX2': '7.00000000', 'CRVAL1': '210.898333333', 'CRVAL2': '54.341916667'}, + 4: {'CRPIX1': '7.00000000', 'CRPIX2': '7.00000000', 'CRVAL1': '210.942083333', 'CRVAL2': '54.446805556'}, + 5: {'CRPIX1': '7.00000000', 'CRPIX2': '7.00000000', 'CRVAL1': '0.000000000', 'CRVAL2': '0.000000000'}, + 6: {'BITPIX': '0'}, + }, + }, +} def test_load_config_overrides(): @@ -974,20 +924,20 @@ def test_chunk_naxis(): test_defaults = {'CTYPE3': 'TIME'} test_config = {'Chunk.naxis': 'chunk.naxis'} test_overrides = {'chunk.naxis': '1'} - update_blueprint(test_blueprint, test_uri, config=test_config, - defaults=test_defaults, overrides=test_overrides) + update_blueprint(test_blueprint, test_uri, config=test_config, defaults=test_defaults, overrides=test_overrides) assert test_blueprint._get('Chunk.naxis') == '1', 'default value assigned' FitsParser([hdr1], test_blueprint) assert hdr1['NAXIS'] == 1 assert hdr1['ZNAXIS'] == 1 -EXPECTED_FILE_SCHEME_XML = """ - +EXPECTED_FILE_SCHEME_XML = ( + """ + test_collection_id test_observation_id 1999-01-01T00:00:00.000 @@ -1008,7 +958,9 @@ def test_chunk_naxis(): 2 - file://""" + sample_file_4axes + """ + file://""" + + sample_file_4axes + + """ science data application/fits @@ -1064,23 +1016,31 @@ def test_chunk_naxis(): """ +) def test_file_scheme_uris(): - """ Tests that local files as URIs will be accepted and processed.""" + """Tests that local files as URIs will be accepted and processed.""" fname = f'file://{sample_file_4axes}' - with patch('sys.stdout', new_callable=BytesIO) as stdout_mock, \ - patch('caom2utils.data_util.StorageInventoryClient', - autospec=True), \ - patch('cadcutils.net.ws.WsCapabilities.get_access_url', - autospec=True) as cap_mock: + with patch('sys.stdout', new_callable=BytesIO) as stdout_mock, patch( + 'caom2utils.data_util.StorageInventoryClient', autospec=True + ), patch('cadcutils.net.ws.WsCapabilities.get_access_url', autospec=True) as cap_mock: cap_mock.return_value = 'https://localhost' - sys.argv = ['fits2caom2', '--observation', 'test_collection_id', - 'test_observation_id', '--productID', 'test_product_id', - '--no_validate', - '--config', java_config_file, '--override', test_override, - fname] + sys.argv = [ + 'fits2caom2', + '--observation', + 'test_collection_id', + 'test_observation_id', + '--productID', + 'test_product_id', + '--no_validate', + '--config', + java_config_file, + '--override', + test_override, + fname, + ] main_app() if stdout_mock.getvalue(): expected = _get_obs(EXPECTED_FILE_SCHEME_XML) @@ -1090,19 +1050,19 @@ def test_file_scheme_uris(): def _get_obs(from_xml_string): - etree.parse = Mock(return_value=etree.ElementTree( - etree.fromstring(from_xml_string.encode('ascii')))) + etree.parse = Mock(return_value=etree.ElementTree(etree.fromstring(from_xml_string.encode('ascii')))) obsr = obs_reader_writer.ObservationReader() obs = obsr.read(None) return obs -EXPECTED_GENERIC_PARSER_FILE_SCHEME_XML = """ - +EXPECTED_GENERIC_PARSER_FILE_SCHEME_XML = ( + """ + test_collection_id test_observation_id @@ -1120,30 +1080,41 @@ def _get_obs(from_xml_string): text/plain 2486 md5:e6c08f3b8309f05a5a3330e27e3b44eb - file://""" + text_file + """ + file://""" + + text_file + + """ """ +) def test_generic_parser(): - """ Tests that BlueprintParser will be created.""" + """Tests that BlueprintParser will be created.""" fname = f'file://{text_file}' - with patch('sys.stdout', new_callable=BytesIO) as stdout_mock, \ - 
patch('caom2utils.data_util.StorageInventoryClient', - autospec=True), \ - patch('cadcutils.net.ws.WsCapabilities.get_access_url', - autospec=True) as cap_mock: + with patch('sys.stdout', new_callable=BytesIO) as stdout_mock, patch( + 'caom2utils.data_util.StorageInventoryClient', autospec=True + ), patch('cadcutils.net.ws.WsCapabilities.get_access_url', autospec=True) as cap_mock: cap_mock.return_value = 'https://localhost' - sys.argv = ['fits2caom2', '--local', fname, - '--observation', 'test_collection_id', - 'test_observation_id', '--productID', 'test_product_id', - '--config', java_config_file, '--override', text_override, - fname] + sys.argv = [ + 'fits2caom2', + '--local', + fname, + '--observation', + 'test_collection_id', + 'test_observation_id', + '--productID', + 'test_product_id', + '--config', + java_config_file, + '--override', + text_override, + fname, + ] main_app() if stdout_mock.getvalue(): expected = _get_obs(EXPECTED_GENERIC_PARSER_FILE_SCHEME_XML) @@ -1161,13 +1132,10 @@ def test_visit(): with pytest.raises(ImportError): _load_plugin(non_conformant_plugin_module) - test_fitsparser = FitsParser(sample_file_4axes, - ObsBlueprint(polarization_axis=1)) + test_fitsparser = FitsParser(sample_file_4axes, ObsBlueprint(polarization_axis=1)) kwargs = {} - _visit(test_plugin_module, test_fitsparser, test_obs, visit_local=None, - **kwargs) - _visit(test_class_plugin_module, test_fitsparser, test_obs, - visit_local=None, **kwargs) + _visit(test_plugin_module, test_fitsparser, test_obs, visit_local=None, **kwargs) + _visit(test_class_plugin_module, test_fitsparser, test_obs, visit_local=None, **kwargs) EXPECTED_CUSTOM_RANGE_BOUNDS_XML = ''' @@ -1300,7 +1268,8 @@ def test_visit(): def test_augment_artifact_bounds_range_from_blueprint(): test_blueprint = ObsBlueprint( - energy_axis=1, time_axis=2, polarization_axis=3, position_axes=(4, 5), custom_axis=6) + energy_axis=1, time_axis=2, polarization_axis=3, position_axes=(4, 5), custom_axis=6 + ) test_blueprint.set('Chunk.custom.axis.range.start.pix', '145.0') test_blueprint.set('Chunk.custom.axis.range.start.val', '-60000.0') test_blueprint.set('Chunk.custom.axis.range.end.pix', '-824.46002') @@ -1318,26 +1287,20 @@ def test_augment_artifact_bounds_range_from_blueprint(): test_blueprint.set('Chunk.polarization.axis.range.end.pix', '-824.46002') test_blueprint.set('Chunk.polarization.axis.range.end.val', '1') test_blueprint.set('Chunk.position.axis.range.start.coord1.pix', '145.0') - test_blueprint.set( - 'Chunk.position.axis.range.start.coord1.val', '-60000.0') - test_blueprint.set( - 'Chunk.position.axis.range.end.coord1.pix', '-824.46002') + test_blueprint.set('Chunk.position.axis.range.start.coord1.val', '-60000.0') + test_blueprint.set('Chunk.position.axis.range.end.coord1.pix', '-824.46002') test_blueprint.set('Chunk.position.axis.range.end.coord1.val', '1') test_blueprint.set('Chunk.position.axis.range.start.coord2.pix', '145.0') - test_blueprint.set( - 'Chunk.position.axis.range.start.coord2.val', '-60000.0') - test_blueprint.set( - 'Chunk.position.axis.range.end.coord2.pix', '-824.46002') + test_blueprint.set('Chunk.position.axis.range.start.coord2.val', '-60000.0') + test_blueprint.set('Chunk.position.axis.range.end.coord2.pix', '-824.46002') test_blueprint.set('Chunk.position.axis.range.end.coord2.val', '1') - test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, - uri='ad:TEST/test_blueprint') + test_fitsparser = FitsParser(sample_file_4axes, test_blueprint, uri='ad:TEST/test_blueprint') test_chunk = Chunk() 
test_chunk.custom = CustomWCS(CoordAxis1D(Axis('RM', 'm / s ** 2'))) test_chunk.energy = SpectralWCS(CoordAxis1D(Axis('WAVE', 'm')), 'TOPOCENT') test_chunk.time = TemporalWCS(CoordAxis1D(Axis('TIME', 'd'))) test_chunk.polarization = PolarizationWCS(CoordAxis1D(Axis('STOKES'))) - test_chunk.position = SpatialWCS(CoordAxis2D(Axis('RA', 'deg'), - Axis('DEC', 'deg'))) + test_chunk.position = SpatialWCS(CoordAxis2D(Axis('RA', 'deg'), Axis('DEC', 'deg'))) test_fitsparser._try_position_with_blueprint(test_chunk, 0) test_fitsparser._try_energy_with_blueprint(test_chunk, 0) test_fitsparser._try_time_with_blueprint(test_chunk, 0) @@ -1345,50 +1308,32 @@ def test_augment_artifact_bounds_range_from_blueprint(): test_fitsparser._try_observable_with_blueprint(test_chunk, 0) test_fitsparser._try_custom_with_blueprint(test_chunk, 0) - assert test_chunk.energy.axis.range is not None, \ - 'chunk.energy.axis.range should be declared' - assert test_chunk.time.axis.range is not None, \ - 'chunk.time.axis.range should be declared' - assert test_chunk.polarization.axis.range is not None, \ - 'chunk.polarization.axis.range should be declared' - assert test_chunk.position.axis.range is not None, \ - 'chunk.position.axis.range should be declared' + assert test_chunk.energy.axis.range is not None, 'chunk.energy.axis.range should be declared' + assert test_chunk.time.axis.range is not None, 'chunk.time.axis.range should be declared' + assert test_chunk.polarization.axis.range is not None, 'chunk.polarization.axis.range should be declared' + assert test_chunk.position.axis.range is not None, 'chunk.position.axis.range should be declared' assert test_chunk.custom.axis.range is not None, 'chunk.custom.axis.range should be declared' - ex = _get_from_str_xml(EXPECTED_ENERGY_RANGE_BOUNDS_XML, - ObservationReader()._get_spectral_wcs, - 'energy') - assert ex is not None, \ - 'energy string from expected output should be declared' + ex = _get_from_str_xml(EXPECTED_ENERGY_RANGE_BOUNDS_XML, ObservationReader()._get_spectral_wcs, 'energy') + assert ex is not None, 'energy string from expected output should be declared' result = get_differences(ex, test_chunk.energy) assert result is None, f'energy\n{result}' - ex = _get_from_str_xml(EXPECTED_TIME_RANGE_BOUNDS_XML, - ObservationReader()._get_temporal_wcs, - 'time') - assert ex is not None, \ - 'time string from expected output should be declared' + ex = _get_from_str_xml(EXPECTED_TIME_RANGE_BOUNDS_XML, ObservationReader()._get_temporal_wcs, 'time') + assert ex is not None, 'time string from expected output should be declared' result = get_differences(ex, test_chunk.time) assert result is None, f'time\n{result}' - ex = _get_from_str_xml(EXPECTED_POL_RANGE_BOUNDS_XML, - ObservationReader()._get_polarization_wcs, - 'polarization') - assert ex is not None, \ - 'polarization string from expected output should be declared' + ex = _get_from_str_xml(EXPECTED_POL_RANGE_BOUNDS_XML, ObservationReader()._get_polarization_wcs, 'polarization') + assert ex is not None, 'polarization string from expected output should be declared' result = get_differences(ex, test_chunk.polarization) assert result is None, f'polarization\n{result}' - ex = _get_from_str_xml(EXPECTED_POS_RANGE_BOUNDS_XML, - ObservationReader()._get_spatial_wcs, - 'position') - assert ex is not None, \ - 'position string from expected output should be declared' + ex = _get_from_str_xml(EXPECTED_POS_RANGE_BOUNDS_XML, ObservationReader()._get_spatial_wcs, 'position') + assert ex is not None, 'position string from expected output 
should be declared' result = get_differences(ex, test_chunk.position) assert result is None, f'position\n{result}' - ex = _get_from_str_xml(EXPECTED_CUSTOM_RANGE_BOUNDS_XML, - ObservationReader()._get_custom_wcs, - 'custom') + ex = _get_from_str_xml(EXPECTED_CUSTOM_RANGE_BOUNDS_XML, ObservationReader()._get_custom_wcs, 'custom') assert ex is not None, 'custom string from expected output should be declared' result = get_differences(ex, test_chunk.custom) assert result is None, f'custom\n{result}' @@ -1396,14 +1341,13 @@ def test_augment_artifact_bounds_range_from_blueprint(): def test_visit_generic_parser(): try: - sys.argv = ['fits2caom2', '--local', 'fname', '--observation', - 'test_collection_id', 'test_observation_id'] + sys.argv = ['fits2caom2', '--local', 'fname', '--observation', 'test_collection_id', 'test_observation_id'] test_parser = BlueprintParser() test_plugin = __name__ kwargs = {} - test_obs = SimpleObservation(collection='test_collection', - observation_id='test_obs_id', - algorithm=Algorithm('exposure')) + test_obs = SimpleObservation( + collection='test_collection', observation_id='test_obs_id', algorithm=Algorithm('exposure') + ) _visit(test_plugin, test_parser, test_obs, visit_local=None, **kwargs) except ImportError: pass # expect this exception @@ -1413,13 +1357,11 @@ def test_visit_generic_parser(): @patch('caom2utils.caom2blueprint.Client') def test_get_vos_headers(vos_mock): - test_uri = 'vos://cadc.nrc.ca!vospace/CAOMworkshop/Examples/DAO/' \ - 'dao_c122_2016_012725.fits' + test_uri = 'vos://cadc.nrc.ca!vospace/CAOMworkshop/Examples/DAO/' 'dao_c122_2016_012725.fits' get_orig = caom2utils.data_util.get_local_file_headers try: - caom2utils.data_util.get_local_file_headers = Mock( - side_effect=_get_local_headers) + caom2utils.data_util.get_local_file_headers = Mock(side_effect=_get_local_headers) test_headers = caom2utils.get_vos_headers(test_uri, subject=None) assert test_headers is not None, 'expect result' assert len(test_headers) == 1, 'wrong size of result' @@ -1434,21 +1376,20 @@ def test_get_vos_meta(vos_mock): get_orig = caom2utils.get_vos_headers try: caom2utils.get_vos_headers = Mock( - return_value={'md5sum': '5b00b00d4b06aba986c3663d09aa581f', - 'size': 682560, - 'type': 'application/octet-stream'}) + return_value={ + 'md5sum': '5b00b00d4b06aba986c3663d09aa581f', + 'size': 682560, + 'type': 'application/octet-stream', + } + ) vos_mock.return_value.get_node.side_effect = _get_node - test_uri = 'vos://cadc.nrc.ca!vospace/CAOMworkshop/Examples/DAO/' \ - 'dao_c122_2016_012725.fits' - test_artifact = Artifact(test_uri, ProductType.SCIENCE, - ReleaseType.DATA) + test_uri = 'vos://cadc.nrc.ca!vospace/CAOMworkshop/Examples/DAO/' 'dao_c122_2016_012725.fits' + test_artifact = Artifact(test_uri, ProductType.SCIENCE, ReleaseType.DATA) _get_and_update_artifact_meta(test_uri, test_artifact, subject=None) assert test_artifact is not None - assert test_artifact.content_checksum.uri == \ - 'md5:5b00b00d4b06aba986c3663d09aa581f', 'checksum wrong' + assert test_artifact.content_checksum.uri == 'md5:5b00b00d4b06aba986c3663d09aa581f', 'checksum wrong' assert test_artifact.content_length == 682560, 'length wrong' - assert test_artifact.content_type == 'application/fits', \ - 'content_type wrong' + assert test_artifact.content_type == 'application/fits', 'content_type wrong' assert vos_mock.called, 'mock not called' finally: caom2utils.get_vos_headers = get_orig @@ -1461,11 +1402,9 @@ def test_generic_parser1(): test_blueprint.set(test_key, '2013-10-10') 
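    # Editor's note, illustration only: a blueprint plan entry is a
    # (fits_keyword_list, default_value) pair, so the untouched entry for
    # test_key below reads (['RELEASE', 'REL_DATE'], None), and set()
    # replaces the whole entry with the configured value; the two assertions
    # that follow exercise exactly that.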
logging.error(test_blueprint) test_parser = BlueprintParser() - assert test_parser._blueprint._plan[test_key] == \ - (['RELEASE', 'REL_DATE'], None), 'default value changed' + assert test_parser._blueprint._plan[test_key] == (['RELEASE', 'REL_DATE'], None), 'default value changed' test_parser.blueprint = test_blueprint - assert test_parser._blueprint._plan[test_key] == test_value, \ - 'original value over-ridden' + assert test_parser._blueprint._plan[test_key] == test_value, 'original value over-ridden' def test_get_external_headers(): @@ -1489,9 +1428,9 @@ def test_get_external_headers_fails(get_external_mock): test_uri = f'gemini:{test_collection}/abc.fits' test_product_id = 'TEST_PRODUCT_ID' test_blueprint = caom2utils.caom2blueprint.ObsBlueprint() - test_observation = SimpleObservation(collection=test_collection, - observation_id=test_obs_id, - algorithm=Algorithm(name='exposure')) + test_observation = SimpleObservation( + collection=test_collection, observation_id=test_obs_id, algorithm=Algorithm(name='exposure') + ) test_result = caom2utils.caom2blueprint._augment( obs=test_observation, product_id=test_product_id, @@ -1517,26 +1456,18 @@ def test_apply_blueprint(): hdr1 = fits.Header() hdr1['ORIGIN'] = '123' test_blueprint = ObsBlueprint() - assert test_blueprint._get('Plane.provenance.producer') == (['ORIGIN'], - None) + assert test_blueprint._get('Plane.provenance.producer') == (['ORIGIN'], None) test_blueprint.set_default('Plane.provenance.producer', 'abc') - assert test_blueprint._get('Plane.provenance.producer') == (['ORIGIN'], - 'abc') + assert test_blueprint._get('Plane.provenance.producer') == (['ORIGIN'], 'abc') test_blueprint.add_attribute('Plane.provenance.producer', 'IMAGESWV') - assert test_blueprint._get('Plane.provenance.producer') == (['IMAGESWV', - 'ORIGIN'], - 'abc') + assert test_blueprint._get('Plane.provenance.producer') == (['IMAGESWV', 'ORIGIN'], 'abc') test_parser = FitsParser(src=[hdr1], obs_blueprint=test_blueprint) - assert test_blueprint._get('Plane.provenance.producer') == (['IMAGESWV', - 'ORIGIN'], - 'abc') + assert test_blueprint._get('Plane.provenance.producer') == (['IMAGESWV', 'ORIGIN'], 'abc') assert test_parser._headers[0]['ORIGIN'] == '123', 'existing over-ridden' with pytest.raises(KeyError): test_parser._headers[0]['IMAGESWV'], 'should not be set' - assert test_blueprint._get('Plane.provenance.producer') == (['IMAGESWV', - 'ORIGIN'], - 'abc') + assert test_blueprint._get('Plane.provenance.producer') == (['IMAGESWV', 'ORIGIN'], 'abc') hdr1 = fits.Header() test_parser = FitsParser(src=[hdr1], obs_blueprint=test_blueprint) assert test_parser._headers[0]['ORIGIN'] == 'abc', 'should be set' @@ -1594,13 +1525,11 @@ def get_time_exposure(self, ext): test_blueprint.set('Artifact.releaseType', 'data') test_blueprint.set('Chunk.time.exposure', 'get_time_exposure()', 1) test_parser = FitsParser(src=[hdr1, hdr2], obs_blueprint=test_blueprint) - test_obs = SimpleObservation('collection', 'MA1_DRAO-ST', - Algorithm('exposure')) + test_obs = SimpleObservation('collection', 'MA1_DRAO-ST', Algorithm('exposure')) test_parser.augment_observation(test_obs, 'cadc:TEST/test_file_name.fits') assert 'PRODUCT_ID' in test_obs.planes.keys(), 'expect plane' test_plane = test_obs.planes['PRODUCT_ID'] - assert 'cadc:TEST/test_file_name.fits' in test_plane.artifacts.keys(), \ - 'expect artifact' + assert 'cadc:TEST/test_file_name.fits' in test_plane.artifacts.keys(), 'expect artifact' test_artifact = test_plane.artifacts.pop('cadc:TEST/test_file_name.fits') test_part = 
test_artifact.parts.pop('1') assert len(test_part.chunks) == 1, 'expect chunks' @@ -1613,8 +1542,7 @@ def get_time_exposure(self, ext): test_blueprint2.set('Plane.calibrationLevel', 'getCalibrationLevel()') test_blueprint2.set('Plane.dataProductType', 'broken_function()') test_parser2 = BlueprintParser(obs_blueprint=test_blueprint2) - test_obs2 = SimpleObservation('collection', 'MA1_DRAO-ST', - Algorithm('exposure')) + test_obs2 = SimpleObservation('collection', 'MA1_DRAO-ST', Algorithm('exposure')) with pytest.raises(ValueError): test_parser2.augment_observation(test_obs2, 'cadc:TEST/abc.fits.gz') @@ -1622,37 +1550,29 @@ def get_time_exposure(self, ext): def test_apply_blueprint_execute_external(): test_module = importlib.import_module(__name__) test_generic_blueprint = ObsBlueprint(module=test_module) - test_generic_blueprint.set( - 'Observation.type', '_get_test_obs_type(parameters)') + test_generic_blueprint.set('Observation.type', '_get_test_obs_type(parameters)') # generic parser - function execution should have occurred, the return # value is dependent on the parameters to the call test_gp = BlueprintParser(test_generic_blueprint) assert test_gp is not None, 'expect generic construction to complete' - assert test_gp._get_from_list('Observation.type', index=0) \ - == 'generic_parser_value', 'wrong generic plan value' + assert test_gp._get_from_list('Observation.type', index=0) == 'generic_parser_value', 'wrong generic plan value' # fits parser test_fits_blueprint = ObsBlueprint(module=test_module) - test_fits_blueprint.set( - 'Observation.type', '_get_test_obs_type(parameters)') - test_fits_parser = FitsParser(src=sample_file_4axes, - obs_blueprint=test_fits_blueprint) - assert test_fits_parser is not None, \ - 'expect fits construction to complete' - assert test_fits_parser._get_from_list('Observation.type', index=0) \ - == 'fits_parser_value', 'wrong fits plan value' + test_fits_blueprint.set('Observation.type', '_get_test_obs_type(parameters)') + test_fits_parser = FitsParser(src=sample_file_4axes, obs_blueprint=test_fits_blueprint) + assert test_fits_parser is not None, 'expect fits construction to complete' + assert ( + test_fits_parser._get_from_list('Observation.type', index=0) == 'fits_parser_value' + ), 'wrong fits plan value' def test_update_artifact_meta_errors(): test_uri = 'gemini:GEMINI/abc.jpg' - test_artifact = Artifact(uri=test_uri, - product_type=ProductType.SCIENCE, - release_type=ReleaseType.DATA) + test_artifact = Artifact(uri=test_uri, product_type=ProductType.SCIENCE, release_type=ReleaseType.DATA) client_mock = Mock(autospec=True) - client_mock.info.return_value = \ - FileInfo(id=test_uri, file_type='application/octet', size=42, - md5sum='md5:42') + client_mock.info.return_value = FileInfo(id=test_uri, file_type='application/octet', size=42, md5sum='md5:42') test_uri = 'gemini://test.fits' _get_and_update_artifact_meta(test_uri, test_artifact, client=client_mock) assert test_artifact.content_checksum is None, 'checksum' @@ -1661,19 +1581,15 @@ def test_update_artifact_meta_errors(): test_uri = 'gemini:GEMINI/abc.jpg' _get_and_update_artifact_meta(test_uri, test_artifact, client=client_mock) - assert test_artifact.content_checksum == ChecksumURI(uri='md5:42'), \ - 'checksum' + assert test_artifact.content_checksum == ChecksumURI(uri='md5:42'), 'checksum' assert test_artifact.content_length == 42, 'length' assert test_artifact.content_type == 'application/octet', 'type' # TODO - does this increase coverage? 
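    # Editor's note, summary of the mocked behaviour above (illustration only):
    # _get_and_update_artifact_meta copies the client's FileInfo onto the
    # Artifact roughly as
    #     FileInfo.md5sum    -> Artifact.content_checksum (a ChecksumURI)
    #     FileInfo.size      -> Artifact.content_length
    #     FileInfo.file_type -> Artifact.content_type
    # and the file:/// case below exercises the branch where client.info()
    # returns None, so all three attributes are expected to stay None.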
test_uri = 'file:///test.fits.header' - test_artifact = Artifact(uri=test_uri, - product_type=ProductType.SCIENCE, - release_type=ReleaseType.DATA) + test_artifact = Artifact(uri=test_uri, product_type=ProductType.SCIENCE, release_type=ReleaseType.DATA) client_mock.info.return_value = None - _get_and_update_artifact_meta(test_uri, test_artifact, net.Subject(), - client=client_mock) + _get_and_update_artifact_meta(test_uri, test_artifact, net.Subject(), client=client_mock) assert test_artifact.content_type is None, 'type' assert test_artifact.content_length is None, 'length' assert test_artifact.content_checksum is None, 'checksum' @@ -1684,15 +1600,22 @@ def test_update_artifact_meta_errors(): @patch('sys.stdout', new_callable=StringIO) @patch('caom2utils.caom2blueprint._augment') def test_gen_proc_failure(augment_mock, stdout_mock, cap_mock, client_mock): - """ Tests that gen_proc can return -1.""" + """Tests that gen_proc can return -1.""" augment_mock.return_value = None # return a broken Observation instance fname = f'file://{text_file}' - sys.argv = ['fits2caom2', '--local', fname, - '--observation', 'test_collection_id', - 'test_observation_id', '--lineage', - f'test_product_id/ad:TEST/{fname}', '--resource-id', - 'ivo://cadc.nrc.ca/test'] + sys.argv = [ + 'fits2caom2', + '--local', + fname, + '--observation', + 'test_collection_id', + 'test_observation_id', + '--lineage', + f'test_product_id/ad:TEST/{fname}', + '--resource-id', + 'ivo://cadc.nrc.ca/test', + ] test_args = get_gen_proc_arg_parser().parse_args() test_blueprints = {'test_collection_id': ObsBlueprint()} test_result = gen_proc(test_args, test_blueprints) @@ -1711,22 +1634,48 @@ def test_parser_construction(vos_mock, stdout_mock): test_blueprint.configure_custom_axis(1) test_blueprints = {test_uri: test_blueprint} kwargs = {} - augment(test_blueprints, no_validate=False, dump_config=False, - plugin=None, obs_obs_xml=None, in_obs_xml=None, collection='TEST', - observation='ABC', product_id='test_product_id', uri=test_uri, - netrc=False, file_name=None, verbose=False, debug=False, - quiet=False, **kwargs) - assert 'vos:goliaths/abc.fits.gz' in \ - stdout_mock.getvalue(), 'Artifact URI missing from Observation' + augment( + test_blueprints, + no_validate=False, + dump_config=False, + plugin=None, + obs_obs_xml=None, + in_obs_xml=None, + collection='TEST', + observation='ABC', + product_id='test_product_id', + uri=test_uri, + netrc=False, + file_name=None, + verbose=False, + debug=False, + quiet=False, + **kwargs, + ) + assert ( + 'vos:goliaths/abc.fits.gz' in stdout_mock.getvalue() + ), 'Artifact URI missing from Observation' test_out_fqn = os.path.join(TESTDATA_DIR, 'augment.xml') assert not os.path.exists(test_out_fqn) try: - augment(test_blueprints, no_validate=False, dump_config=False, - plugin=None, out_obs_xml=test_out_fqn, in_obs_xml=None, - collection='TEST', observation='ABC', - product_id='test_product_id', uri=test_uri, netrc=False, - file_name=None, verbose=False, debug=False, quiet=False, - **kwargs) + augment( + test_blueprints, + no_validate=False, + dump_config=False, + plugin=None, + out_obs_xml=test_out_fqn, + in_obs_xml=None, + collection='TEST', + observation='ABC', + product_id='test_product_id', + uri=test_uri, + netrc=False, + file_name=None, + verbose=False, + debug=False, + quiet=False, + **kwargs, + ) assert os.path.exists(test_out_fqn) finally: if os.path.exists(test_out_fqn): @@ -1747,8 +1696,7 @@ def _get_headers(file_name, subject): END """ delim = '\nEND' - extensions = \ - [e + delim for e in 
x.split(delim) if e.strip()] + extensions = [e + delim for e in x.split(delim) if e.strip()] headers = [fits.Header.fromstring(e, sep='\n') for e in extensions] return headers @@ -1786,8 +1734,7 @@ def _get_headers(file_name, subject): def _get_node(uri, limit, force): node = vos.Node('abc') - node.props = {'MD5': '5b00b00d4b06aba986c3663d09aa581f', - 'length': 682560} + node.props = {'MD5': '5b00b00d4b06aba986c3663d09aa581f', 'length': 682560} return node diff --git a/caom2utils/caom2utils/tests/test_obs_blueprint.py b/caom2utils/caom2utils/tests/test_obs_blueprint.py index bfe05d6c..48497f67 100644 --- a/caom2utils/caom2utils/tests/test_obs_blueprint.py +++ b/caom2utils/caom2utils/tests/test_obs_blueprint.py @@ -87,9 +87,14 @@ def test_obs_blueprint(): print(ObsBlueprint()) # default config with WCS info - assert str(ObsBlueprint(position_axes=(1, 2), energy_axis=3, - polarization_axis=4, time_axis=5, - obs_axis=6, custom_axis=7)).count('\n') == 90 + assert ( + str( + ObsBlueprint( + position_axes=(1, 2), energy_axis=3, polarization_axis=4, time_axis=5, obs_axis=6, custom_axis=7 + ) + ).count('\n') + == 90 + ) ob = ObsBlueprint() ob.configure_position_axes(axes=(1, 2)) @@ -116,13 +121,11 @@ def test_obs_blueprint(): # set default ob.clear('Observation.instrument.keywords') ob.add_attribute('Observation.instrument.keywords', 'INSTMODE') - assert "Observation.instrument.keywords = ['INSTMODE'], default = None" \ - in str(ob) + assert "Observation.instrument.keywords = ['INSTMODE'], default = None" in str(ob) ob.set_default('Observation.instrument.keywords', 'TEST') assert ob._plan['Observation.instrument.keywords'][1] == 'TEST' assert ob._plan['Observation.instrument.keywords'][0] == ['INSTMODE'] - assert "Observation.instrument.keywords = ['INSTMODE'], default = TEST" \ - in str(ob) + assert "Observation.instrument.keywords = ['INSTMODE'], default = TEST" in str(ob) # set fits attribute ob.add_attribute('Observation.proposal.id', 'PROP') @@ -130,27 +133,23 @@ def test_obs_blueprint(): ob.set_default('Observation.proposal.id', 'NOPROP') assert ob._plan['Observation.proposal.id'][0] == ['PROP2', 'PROP', 'RUNID'] assert ob._plan['Observation.proposal.id'][1] == 'NOPROP' - assert ("Observation.proposal.id = ['PROP2', 'PROP', 'RUNID'], " - "default = NOPROP") in str(ob) + assert ("Observation.proposal.id = ['PROP2', 'PROP', 'RUNID'], " "default = NOPROP") in str(ob) # set in extension ob.set('Chunk.energy.velang', 33, extension=1) - extension1_str = str(ob)[str(ob).index('extension 1'):] + extension1_str = str(ob)[str(ob).index('extension 1') :] assert 'Chunk.energy.velang = 33' in extension1_str # set fits attribute in extension - ob.add_attribute('Chunk.energy.axis.axis.ctype', 'MYCTYPE', - extension=1) - ob.add_attribute('Chunk.energy.axis.axis.ctype', 'MYCTYPE2', - extension=1) + ob.add_attribute('Chunk.energy.axis.axis.ctype', 'MYCTYPE', extension=1) + ob.add_attribute('Chunk.energy.axis.axis.ctype', 'MYCTYPE2', extension=1) ob.set_default('Chunk.energy.axis.axis.ctype', 'NOCTYPE', extension=1) - extension1_str = str(ob)[str(ob).index('extension 1'):] - assert ("Chunk.energy.axis.axis.ctype = ['MYCTYPE2', 'MYCTYPE'], " - "default = NOCTYPE") in extension1_str + extension1_str = str(ob)[str(ob).index('extension 1') :] + assert ("Chunk.energy.axis.axis.ctype = ['MYCTYPE2', 'MYCTYPE'], " "default = NOCTYPE") in extension1_str # set in a different extension ob.set('Chunk.energy.velang', 44, extension=2) - extension2_str = str(ob)[str(ob).index('extension 2'):] + extension2_str = 
str(ob)[str(ob).index('extension 2') :] assert 'Chunk.energy.velang = 44' in extension2_str # test get @@ -159,8 +158,7 @@ def test_obs_blueprint(): assert ob._get('Observation.instrument.keywords')[1] == 'TEST' assert ob._get('Chunk.energy.velang', extension=2) == 44 assert ob._get('Chunk.energy.velang', extension=1) == 33 - assert ob._get('Chunk.energy.axis.axis.ctype', extension=1)[0] ==\ - ['MYCTYPE2', 'MYCTYPE'] + assert ob._get('Chunk.energy.axis.axis.ctype', extension=1)[0] == ['MYCTYPE2', 'MYCTYPE'] assert ob._get('Chunk.energy.axis.axis.ctype', extension=1)[1] == 'NOCTYPE' # test get when keyword not present in extension and the default is used assert ob._get('Chunk.energy.specsys', extension=33)[0] == ['SPECSYS'] @@ -194,7 +192,7 @@ def test_obs_blueprint(): # set defaults in extension ob.set_default('Chunk.energy.axis.axis.ctype', 'NOCTYPE', extension=3) - extension3_str = str(ob)[str(ob).index('extension 3'):] + extension3_str = str(ob)[str(ob).index('extension 3') :] assert "Chunk.energy.axis.axis.ctype = NOCTYPE" in extension3_str assert len(ob._extensions) == 1 @@ -265,26 +263,24 @@ def test_obs_blueprint(): # adding the same thing twice does nothing - the test values are defaults result = ob._get('Observation.metaRelease') - initial_result_length = (len(result[0])) + initial_result_length = len(result[0]) ob.add_attribute('Observation.metaRelease', 'DATE-OBS') result = ob._get('Observation.metaRelease') - add_result_length = (len(result[0])) + add_result_length = len(result[0]) assert initial_result_length == add_result_length # in an extension result = ob._get('Chunk.energy.specsys', extension=1) - initial_result_length = (len(result[0])) + initial_result_length = len(result[0]) ob.add_attribute('Chunk.energy.specsys', 'SPECSYS') result = ob._get('Chunk.energy.specsys', extension=1) - add_result_length = (len(result[0])) + add_result_length = len(result[0]) assert initial_result_length == add_result_length, result def test_load_from_file_configure(): ob = ObsBlueprint() - assert not ob._pos_axes_configed, \ - 'Failure to initialize configure_position_axes' - assert not ob._energy_axis_configed, \ - 'Failure to initialize configure_energy_axis' + assert not ob._pos_axes_configed, 'Failure to initialize configure_position_axes' + assert not ob._energy_axis_configed, 'Failure to initialize configure_energy_axis' assert not ob._custom_axis_configed, 'custom config' assert not ob._obs_axis_configed, 'obs config' assert not ob._polarization_axis_configed, 'pol config' @@ -295,8 +291,7 @@ def test_load_from_file_configure(): ob._guess_axis_info() assert ob._pos_axes_configed, 'Failure to call configure_position_axes' assert ob._energy_axis_configed, 'Failure to call configure_energy_axis' - assert ob._wcs_std['Chunk.energy.axis.axis.ctype'] == 'CTYPE3', \ - ob._wcs_std['Chunk.energy.axis.axis.ctype'] + assert ob._wcs_std['Chunk.energy.axis.axis.ctype'] == 'CTYPE3', ob._wcs_std['Chunk.energy.axis.axis.ctype'] ob = ObsBlueprint() ob.add_attribute('Chunk.position.axis.axis1.ctype', 'CTYPE3') @@ -305,41 +300,39 @@ def test_load_from_file_configure(): ob._guess_axis_info() assert ob._pos_axes_configed, 'Failure to call configure_position_axes' assert ob._energy_axis_configed, 'Failure to call configure_energy_axis' - assert ob._wcs_std['Chunk.energy.axis.axis.ctype'] == 'CTYPE1', \ - ob._wcs_std['Chunk.energy.axis.axis.ctype'] + assert ob._wcs_std['Chunk.energy.axis.axis.ctype'] == 'CTYPE1', ob._wcs_std['Chunk.energy.axis.axis.ctype'] ob = ObsBlueprint() 
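    # Editor's note, illustration only: the cases below set only a ctype
    # value (WAVE, STOKES, COUNT, FARDEP, TIME) rather than pointing at a
    # CTYPEn keyword, so _guess_axis_info() is expected to fall back to the
    # standard slot order the assertions spell out, roughly
    #     position -> CTYPE1/CTYPE2, energy -> CTYPE3, time -> CTYPE4,
    #     polarization -> CTYPE5, observable -> CTYPE6, custom -> CTYPE7
    # while the cases above adopt whichever CTYPEn keywords the blueprint
    # attributes name.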
ob.set('Chunk.energy.axis.axis.ctype', 'WAVE') ob._guess_axis_info() - assert ob._wcs_std['Chunk.energy.axis.axis.ctype'] == 'CTYPE3', \ - ob._wcs_std['Chunk.energy.axis.axis.ctype'] + assert ob._wcs_std['Chunk.energy.axis.axis.ctype'] == 'CTYPE3', ob._wcs_std['Chunk.energy.axis.axis.ctype'] ob = ObsBlueprint() ob.set('Chunk.polarization.axis.axis.ctype', 'STOKES') ob._guess_axis_info() - assert ob._wcs_std['Chunk.polarization.axis.axis.ctype'] == 'CTYPE5', \ - ob._wcs_std['Chunk.polarization.axis.axis.ctype'] + assert ob._wcs_std['Chunk.polarization.axis.axis.ctype'] == 'CTYPE5', ob._wcs_std[ + 'Chunk.polarization.axis.axis.ctype' + ] assert ob._polarization_axis_configed, 'pol config' ob = ObsBlueprint() ob.set('Chunk.observable.axis.axis.ctype', 'COUNT') ob._guess_axis_info() - assert ob._wcs_std['Chunk.observable.axis.axis.ctype'] == 'CTYPE6', \ - ob._wcs_std['Chunk.observable.axis.axis.ctype'] + assert ob._wcs_std['Chunk.observable.axis.axis.ctype'] == 'CTYPE6', ob._wcs_std[ + 'Chunk.observable.axis.axis.ctype' + ] assert ob._obs_axis_configed, 'obs config' ob = ObsBlueprint() ob.set('Chunk.custom.axis.axis.ctype', 'FARDEP') ob._guess_axis_info() - assert ob._wcs_std['Chunk.custom.axis.axis.ctype'] == 'CTYPE7', \ - ob._wcs_std['Chunk.custom.axis.axis.ctype'] + assert ob._wcs_std['Chunk.custom.axis.axis.ctype'] == 'CTYPE7', ob._wcs_std['Chunk.custom.axis.axis.ctype'] assert ob._custom_axis_configed, 'custom config' ob = ObsBlueprint() ob.set('Chunk.time.axis.axis.ctype', 'TIME') ob._guess_axis_info() - assert ob._wcs_std['Chunk.time.axis.axis.ctype'] == 'CTYPE4', \ - ob._wcs_std['Chunk.time.axis.axis.ctype'] + assert ob._wcs_std['Chunk.time.axis.axis.ctype'] == 'CTYPE4', ob._wcs_std['Chunk.time.axis.axis.ctype'] assert ob._time_axis_configed, 'time config' # should get the position axes by default @@ -362,20 +355,17 @@ def test_load_from_file_configure(): ob.add_attribute('Chunk.energy.axis.axis.ctype', 'CTYPE6') ob.add_attribute('Chunk.observable.axis.axis.ctype', 'CTYPE7') ob._guess_axis_info() - assert ob._wcs_std['Chunk.polarization.axis.axis.ctype'] == 'CTYPE1', \ - ob._wcs_std['Chunk.polarization.axis.axis.ctype'] - assert ob._wcs_std['Chunk.custom.axis.axis.ctype'] == 'CTYPE2', \ - ob._wcs_std['Chunk.custom.axis.axis.ctype'] - assert ob._wcs_std['Chunk.position.axis.axis1.ctype'] == 'CTYPE3', \ - ob._wcs_std['Chunk.position.axis.axis1.ctype'] - assert ob._wcs_std['Chunk.position.axis.axis2.ctype'] == 'CTYPE4', \ - ob._wcs_std['Chunk.position.axis.axis2.ctype'] - assert ob._wcs_std['Chunk.time.axis.axis.ctype'] == 'CTYPE5', \ - ob._wcs_std['Chunk.time.axis.axis.ctype'] - assert ob._wcs_std['Chunk.energy.axis.axis.ctype'] == 'CTYPE6', \ - ob._wcs_std['Chunk.energy.axis.axis.ctype'] - assert ob._wcs_std['Chunk.observable.axis.axis.ctype'] == 'CTYPE7', \ - ob._wcs_std['Chunk.observable.axis.axis.ctype'] + assert ob._wcs_std['Chunk.polarization.axis.axis.ctype'] == 'CTYPE1', ob._wcs_std[ + 'Chunk.polarization.axis.axis.ctype' + ] + assert ob._wcs_std['Chunk.custom.axis.axis.ctype'] == 'CTYPE2', ob._wcs_std['Chunk.custom.axis.axis.ctype'] + assert ob._wcs_std['Chunk.position.axis.axis1.ctype'] == 'CTYPE3', ob._wcs_std['Chunk.position.axis.axis1.ctype'] + assert ob._wcs_std['Chunk.position.axis.axis2.ctype'] == 'CTYPE4', ob._wcs_std['Chunk.position.axis.axis2.ctype'] + assert ob._wcs_std['Chunk.time.axis.axis.ctype'] == 'CTYPE5', ob._wcs_std['Chunk.time.axis.axis.ctype'] + assert ob._wcs_std['Chunk.energy.axis.axis.ctype'] == 'CTYPE6', ob._wcs_std['Chunk.energy.axis.axis.ctype'] + 
assert ob._wcs_std['Chunk.observable.axis.axis.ctype'] == 'CTYPE7', ob._wcs_std[ + 'Chunk.observable.axis.axis.ctype' + ] with pytest.raises(ValueError): ob = ObsBlueprint() diff --git a/caom2utils/caom2utils/tests/test_polygonvalidator.py b/caom2utils/caom2utils/tests/test_polygonvalidator.py index e1ba430b..f51ed289 100644 --- a/caom2utils/caom2utils/tests/test_polygonvalidator.py +++ b/caom2utils/caom2utils/tests/test_polygonvalidator.py @@ -88,11 +88,11 @@ def test_open_polygon(): # should detect that the polygons is not clockwise with pytest.raises(AssertionError) as ex: validate_polygon(shape.Polygon(counter_clockwise_points)) - assert('clockwise winding direction' in str(ex.value)) + assert 'clockwise winding direction' in str(ex.value) # should detect that polygon is requires a minimum of 4 points with pytest.raises(AssertionError) as ex: validate_polygon(shape.Polygon(too_few_points)) - assert('invalid polygon: 2 points' in str(ex.value)) + assert 'invalid polygon: 2 points' in str(ex.value) # polygon default constructor validate_polygon(shape.Polygon()) @@ -115,8 +115,7 @@ def test_open_polygon(): v10 = shape.Vertex(46.757813, 56.145550, shape.SegmentType.LINE) v11 = shape.Vertex(26.015625, 55.354135, shape.SegmentType.LINE) v12 = shape.Vertex(0.0, 0.0, shape.SegmentType.CLOSE) - closed_vertices = [ - v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12] + closed_vertices = [v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12] # should detect that multipolygon is closed validate_multipolygon(shape.MultiPolygon(closed_vertices)) @@ -134,7 +133,7 @@ def test_polygon_self_intersection(): with pytest.raises(AssertionError) as ex: poly = shape.Polygon(points_with_self_intersecting_segments) validate_polygon(poly) - assert('self intersecting' in str(ex.value)) + assert 'self intersecting' in str(ex.value) # should detect self segment intersection of the polygon near the # South Pole, with the Pole outside the polygon @@ -147,7 +146,7 @@ def test_polygon_self_intersection(): with pytest.raises(AssertionError) as ex: poly = shape.Polygon(points_with_self_intersecting_segments) validate_polygon(poly) - assert('self intersecting' in str(ex.value)) + assert 'self intersecting' in str(ex.value) # should detect self segment intersection of the polygon near the # South Pole, with the Pole inside the polygon @@ -160,7 +159,7 @@ def test_polygon_self_intersection(): with pytest.raises(AssertionError) as ex: poly = shape.Polygon(points_with_self_intersecting_segments) validate_polygon(poly) - assert('self intersecting' in str(ex.value)) + assert 'self intersecting' in str(ex.value) # should detect self segment intersection of the polygon which # intersects with meridian = 0 @@ -173,7 +172,7 @@ def test_polygon_self_intersection(): with pytest.raises(AssertionError) as ex: poly = shape.Polygon(points_with_self_intersecting_segments) validate_polygon(poly) - assert('self intersecting' in str(ex.value)) + assert 'self intersecting' in str(ex.value) def test_open_multipolygon(): @@ -195,12 +194,10 @@ def test_open_multipolygon(): too_few_vertices = [v0, v1, v6] two_moves_vertices = [v0, v1, v7, v2, v3, v4, v5, v6] no_move_vertices = [v1, v2, v3, v4, v5, v6] - two_closes_vertices = [ - v0, v1, v2, v3, v4, v5, v7, v8, v9, v10, v11, v12] + two_closes_vertices = [v0, v1, v2, v3, v4, v5, v7, v8, v9, v10, v11, v12] no_close_vertices = [v0, v1, v2, v3, v4, v5] min_closed_vertices = [v0, v1, v2, v6] - closed_vertices = [ - v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12] + closed_vertices = [v0, v1, 
v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12] rv0 = shape.Vertex(26.015625, 55.354135, shape.SegmentType.MOVE) rv1 = shape.Vertex(46.757813, 56.145550, shape.SegmentType.LINE) rv2 = shape.Vertex(55.898438, 62.734601, shape.SegmentType.LINE) @@ -214,40 +211,35 @@ def test_open_multipolygon(): rv10 = shape.Vertex(-108.984375, 70.480896, shape.SegmentType.LINE) rv11 = shape.Vertex(-126.210938, 67.991108, shape.SegmentType.LINE) rv12 = shape.Vertex(0.0, 0.0, shape.SegmentType.CLOSE) - counter_clockwise_vertices = [ - rv0, rv1, rv2, rv3, rv4, rv5, rv6, rv7, rv8, rv9, rv10, rv11, rv12] + counter_clockwise_vertices = [rv0, rv1, rv2, rv3, rv4, rv5, rv6, rv7, rv8, rv9, rv10, rv11, rv12] # should detect that the polygons is not clockwise with pytest.raises(AssertionError) as ex: validate_multipolygon(shape.MultiPolygon(counter_clockwise_vertices)) - assert('clockwise winding direction' in str(ex.value)) + assert 'clockwise winding direction' in str(ex.value) # should detect that there are not enough number of vertices to # produce a multipolygon with pytest.raises(AssertionError) as ex: validate_multipolygon(shape.MultiPolygon(no_vertices)) - assert('invalid polygon: 0 vertices' in str(ex.value)) + assert 'invalid polygon: 0 vertices' in str(ex.value) with pytest.raises(AssertionError) as ex: validate_multipolygon(shape.MultiPolygon(too_few_vertices)) - assert('invalid polygon: 3 vertices' in str(ex.value)) + assert 'invalid polygon: 3 vertices' in str(ex.value) # no close between two 'MOVE' with pytest.raises(AssertionError) as ex: validate_multipolygon(shape.MultiPolygon(two_moves_vertices)) - assert( - 'invalid polygon: MOVE vertex when loop open' in str(ex.value)) + assert 'invalid polygon: MOVE vertex when loop open' in str(ex.value) # no 'MOVE' before a 'CLOSE' with pytest.raises(AssertionError) as ex: validate_multipolygon(shape.MultiPolygon(no_move_vertices)) - assert( - 'invalid polygon: first vertex is not a MOVE' in str(ex.value)) + assert 'invalid polygon: first vertex is not a MOVE' in str(ex.value) # no 'MOVE' between two 'CLOSE' with pytest.raises(AssertionError) as ex: validate_multipolygon(shape.MultiPolygon(two_closes_vertices)) - assert( - 'invalid polygon: MOVE vertex when loop open' in str(ex.value)) + assert 'invalid polygon: MOVE vertex when loop open' in str(ex.value) # no 'CLOSE' after a 'MOVE' with pytest.raises(AssertionError) as ex: validate_multipolygon(shape.MultiPolygon(no_close_vertices)) - assert( - 'invalid polygon: last vertex is not a CLOSE' in str(ex.value)) + assert 'invalid polygon: last vertex is not a CLOSE' in str(ex.value) # multipolygon default constructor -> too few vertices with pytest.raises(AssertionError): validate_multipolygon(shape.MultiPolygon(None)) @@ -259,45 +251,45 @@ def test_open_multipolygon(): p = shape.MultiPolygon(vertices=closed_vertices) validate_multipolygon(p) actual_vertices = p.vertices - assert(actual_vertices[0].cval1 == closed_vertices[0].cval1) - assert(actual_vertices[0].cval2 == closed_vertices[0].cval2) - assert(actual_vertices[0].type == shape.SegmentType.MOVE) - assert(actual_vertices[1].cval1 == closed_vertices[1].cval1) - assert(actual_vertices[1].cval2 == closed_vertices[1].cval2) - assert(actual_vertices[1].type == shape.SegmentType.LINE) - assert(actual_vertices[2].cval1 == closed_vertices[2].cval1) - assert(actual_vertices[2].cval2 == closed_vertices[2].cval2) - assert(actual_vertices[2].type == shape.SegmentType.LINE) - assert(actual_vertices[3].cval1 == closed_vertices[3].cval1) - assert(actual_vertices[3].cval2 == 
closed_vertices[3].cval2) - assert(actual_vertices[3].type == shape.SegmentType.LINE) - assert(actual_vertices[4].cval1 == closed_vertices[4].cval1) - assert(actual_vertices[4].cval2 == closed_vertices[4].cval2) - assert(actual_vertices[4].type == shape.SegmentType.LINE) - assert(actual_vertices[5].cval1 == closed_vertices[5].cval1) - assert(actual_vertices[5].cval2 == closed_vertices[5].cval2) - assert(actual_vertices[5].type == shape.SegmentType.LINE) - assert(actual_vertices[6].cval1 == closed_vertices[6].cval1) - assert(actual_vertices[6].cval2 == closed_vertices[6].cval2) - assert(actual_vertices[6].type == shape.SegmentType.CLOSE) - assert(actual_vertices[7].cval1 == closed_vertices[7].cval1) - assert(actual_vertices[7].cval2 == closed_vertices[7].cval2) - assert(actual_vertices[7].type == shape.SegmentType.MOVE) - assert(actual_vertices[8].cval1 == closed_vertices[8].cval1) - assert(actual_vertices[8].cval2 == closed_vertices[8].cval2) - assert(actual_vertices[8].type == shape.SegmentType.LINE) - assert(actual_vertices[9].cval1 == closed_vertices[9].cval1) - assert(actual_vertices[9].cval2 == closed_vertices[9].cval2) - assert(actual_vertices[9].type == shape.SegmentType.LINE) - assert(actual_vertices[10].cval1 == closed_vertices[10].cval1) - assert(actual_vertices[10].cval2 == closed_vertices[10].cval2) - assert(actual_vertices[10].type == shape.SegmentType.LINE) - assert(actual_vertices[11].cval1 == closed_vertices[11].cval1) - assert(actual_vertices[11].cval2 == closed_vertices[11].cval2) - assert(actual_vertices[11].type == shape.SegmentType.LINE) - assert(actual_vertices[12].cval1 == closed_vertices[12].cval1) - assert(actual_vertices[12].cval2 == closed_vertices[12].cval2) - assert(actual_vertices[12].type == shape.SegmentType.CLOSE) + assert actual_vertices[0].cval1 == closed_vertices[0].cval1 + assert actual_vertices[0].cval2 == closed_vertices[0].cval2 + assert actual_vertices[0].type == shape.SegmentType.MOVE + assert actual_vertices[1].cval1 == closed_vertices[1].cval1 + assert actual_vertices[1].cval2 == closed_vertices[1].cval2 + assert actual_vertices[1].type == shape.SegmentType.LINE + assert actual_vertices[2].cval1 == closed_vertices[2].cval1 + assert actual_vertices[2].cval2 == closed_vertices[2].cval2 + assert actual_vertices[2].type == shape.SegmentType.LINE + assert actual_vertices[3].cval1 == closed_vertices[3].cval1 + assert actual_vertices[3].cval2 == closed_vertices[3].cval2 + assert actual_vertices[3].type == shape.SegmentType.LINE + assert actual_vertices[4].cval1 == closed_vertices[4].cval1 + assert actual_vertices[4].cval2 == closed_vertices[4].cval2 + assert actual_vertices[4].type == shape.SegmentType.LINE + assert actual_vertices[5].cval1 == closed_vertices[5].cval1 + assert actual_vertices[5].cval2 == closed_vertices[5].cval2 + assert actual_vertices[5].type == shape.SegmentType.LINE + assert actual_vertices[6].cval1 == closed_vertices[6].cval1 + assert actual_vertices[6].cval2 == closed_vertices[6].cval2 + assert actual_vertices[6].type == shape.SegmentType.CLOSE + assert actual_vertices[7].cval1 == closed_vertices[7].cval1 + assert actual_vertices[7].cval2 == closed_vertices[7].cval2 + assert actual_vertices[7].type == shape.SegmentType.MOVE + assert actual_vertices[8].cval1 == closed_vertices[8].cval1 + assert actual_vertices[8].cval2 == closed_vertices[8].cval2 + assert actual_vertices[8].type == shape.SegmentType.LINE + assert actual_vertices[9].cval1 == closed_vertices[9].cval1 + assert actual_vertices[9].cval2 == closed_vertices[9].cval2 + 
assert actual_vertices[9].type == shape.SegmentType.LINE + assert actual_vertices[10].cval1 == closed_vertices[10].cval1 + assert actual_vertices[10].cval2 == closed_vertices[10].cval2 + assert actual_vertices[10].type == shape.SegmentType.LINE + assert actual_vertices[11].cval1 == closed_vertices[11].cval1 + assert actual_vertices[11].cval2 == closed_vertices[11].cval2 + assert actual_vertices[11].type == shape.SegmentType.LINE + assert actual_vertices[12].cval1 == closed_vertices[12].cval1 + assert actual_vertices[12].cval2 == closed_vertices[12].cval2 + assert actual_vertices[12].type == shape.SegmentType.CLOSE def test_multipoly_self_intersect(): @@ -310,37 +302,30 @@ def test_multipoly_self_intersect(): v5 = shape.Vertex(0.0, 0.0, shape.SegmentType.CLOSE) points_with_self_intersecting_segments = [v1, v2, v3, v4, v5] with pytest.raises(AssertionError) as ex: - validate_multipolygon( - shape.MultiPolygon(points_with_self_intersecting_segments)) - assert('self intersecting' in str(ex.value)) + validate_multipolygon(shape.MultiPolygon(points_with_self_intersecting_segments)) + assert 'self intersecting' in str(ex.value) # should detect self segment intersection of the multipolygon near # the South Pole, with the Pole outside the multipolygon v1 = shape.Vertex(0.6128286003, -89.8967940441, shape.SegmentType.MOVE) - v2 = shape.Vertex( - 210.6391743183, -89.9073892376, shape.SegmentType.LINE) - v3 = shape.Vertex( - 90.6405151921, -89.8972874698, shape.SegmentType.LINE) + v2 = shape.Vertex(210.6391743183, -89.9073892376, shape.SegmentType.LINE) + v3 = shape.Vertex(90.6405151921, -89.8972874698, shape.SegmentType.LINE) v4 = shape.Vertex(270.6114701911, -89.90689353, shape.SegmentType.LINE) v5 = shape.Vertex(0.0, 0.0, shape.SegmentType.CLOSE) points_with_self_intersecting_segments = [v1, v2, v3, v4, v5] with pytest.raises(AssertionError) as ex: - validate_multipolygon( - shape.MultiPolygon(points_with_self_intersecting_segments)) - assert('self intersecting' in str(ex.value)) + validate_multipolygon(shape.MultiPolygon(points_with_self_intersecting_segments)) + assert 'self intersecting' in str(ex.value) # should detect self segment intersection of the multipolygon near the # South Pole, with the Pole inside the multipolygon v1 = shape.Vertex(0.6128286003, -89.8967940441, shape.SegmentType.MOVE) - v2 = shape.Vertex( - 130.6391743183, -89.9073892376, shape.SegmentType.LINE) - v3 = shape.Vertex( - 90.6405151921, -89.8972874698, shape.SegmentType.LINE) + v2 = shape.Vertex(130.6391743183, -89.9073892376, shape.SegmentType.LINE) + v3 = shape.Vertex(90.6405151921, -89.8972874698, shape.SegmentType.LINE) v4 = shape.Vertex(270.6114701911, -89.90689353, shape.SegmentType.LINE) v5 = shape.Vertex(0.0, 0.0, shape.SegmentType.CLOSE) points_with_self_intersecting_segments = [v1, v2, v3, v4, v5] with pytest.raises(AssertionError) as ex: - validate_multipolygon( - shape.MultiPolygon(points_with_self_intersecting_segments)) - assert('self intersecting' in str(ex.value)) + validate_multipolygon(shape.MultiPolygon(points_with_self_intersecting_segments)) + assert 'self intersecting' in str(ex.value) # should detect self segment intersection of the multipolygon which # intersects with meridian = 0 v1 = shape.Vertex(-7.910156, 13.293411, shape.SegmentType.MOVE) @@ -350,9 +335,8 @@ def test_multipoly_self_intersect(): v5 = shape.Vertex(0.0, 0.0, shape.SegmentType.CLOSE) points_with_self_intersecting_segments = [v1, v2, v3, v4, v5] with pytest.raises(AssertionError) as ex: - validate_multipolygon( - 
shape.MultiPolygon(points_with_self_intersecting_segments)) - assert('self intersecting' in str(ex.value)) + validate_multipolygon(shape.MultiPolygon(points_with_self_intersecting_segments)) + assert 'self intersecting' in str(ex.value) def test_failures(): diff --git a/caom2utils/caom2utils/tests/test_si_uris.py b/caom2utils/caom2utils/tests/test_si_uris.py index 44e4ed72..61b90e76 100644 --- a/caom2utils/caom2utils/tests/test_si_uris.py +++ b/caom2utils/caom2utils/tests/test_si_uris.py @@ -90,8 +90,7 @@ def _get_mock(id_ignore, dest, fhead): ) def _info_mock(uri): - return FileInfo( - id=uri, size=12, file_type='application/fits', md5sum='abc') + return FileInfo(id=uri, size=12, file_type='application/fits', md5sum='abc') si_mock.return_value.cadcget.side_effect = _get_mock si_mock.return_value.cadcinfo.side_effect = _info_mock @@ -103,11 +102,13 @@ def _info_mock(uri): if os.path.exists(out_fqn): os.unlink(out_fqn) - sys.argv = ('caom2gen --debug -o {} --no_validate ' - '--resource-id ivo://cadc.nrc.ca/test ' - '--observation TEST_COLLECTION TEST_OBS_ID ' - '--lineage test_product_id/cadc:TEST/test_file.fits ' - '--blueprint {}'.format(out_fqn, bp_fqn)).split() + sys.argv = ( + 'caom2gen --debug -o {} --no_validate ' + '--resource-id ivo://cadc.nrc.ca/test ' + '--observation TEST_COLLECTION TEST_OBS_ID ' + '--lineage test_product_id/cadc:TEST/test_file.fits ' + '--blueprint {}'.format(out_fqn, bp_fqn) + ).split() caom2blueprint.caom2gen() assert os.path.exists(out_fqn), 'expect output file' diff --git a/caom2utils/caom2utils/tests/test_wcsvalidator.py b/caom2utils/caom2utils/tests/test_wcsvalidator.py index 016c23f4..e0992447 100644 --- a/caom2utils/caom2utils/tests/test_wcsvalidator.py +++ b/caom2utils/caom2utils/tests/test_wcsvalidator.py @@ -67,9 +67,22 @@ # from caom2utils import wcsvalidator, validate_wcs, InvalidWCSError -from caom2 import artifact, observation, part, plane, caom_util, Axis, \ - chunk, CoordAxis1D, CoordBounds1D, CoordFunction1D, CoordRange1D, \ - PolarizationWCS, RefCoord, wcs +from caom2 import ( + artifact, + observation, + part, + plane, + caom_util, + Axis, + chunk, + CoordAxis1D, + CoordBounds1D, + CoordFunction1D, + CoordRange1D, + PolarizationWCS, + RefCoord, + wcs, +) from caom2.caom_util import TypedList, TypedOrderedDict from ..wcsvalidator import WcsPolarizationState import pytest @@ -83,15 +96,14 @@ class TemporalWCSValidatorTests(unittest.TestCase): def test_temporalwcs_validator(self): good_temporal_wcs = TimeTestUtil.good_wcs() - assert(good_temporal_wcs.axis.function is not None) + assert good_temporal_wcs.axis.function is not None wcsvalidator._validate_temporal_wcs(good_temporal_wcs) wcsvalidator._validate_temporal_wcs(None) def test_bad_temporalwcs(self): bad_temporal_wcs = TimeTestUtil.bad_ctype_wcs() - with self.assertRaisesRegex( - InvalidWCSError, 'unexpected TIMESYS, CTYPE'): + with self.assertRaisesRegex(InvalidWCSError, 'unexpected TIMESYS, CTYPE'): wcsvalidator._validate_temporal_wcs(bad_temporal_wcs) bad_temporal_wcs = TimeTestUtil.bad_cunit_wcs() @@ -99,8 +111,7 @@ def test_bad_temporalwcs(self): wcsvalidator._validate_temporal_wcs(bad_temporal_wcs) bad_temporal_wcs = TimeTestUtil.bad_range_wcs() - with self.assertRaisesRegex( - InvalidWCSError, 'range.end not >= range.start'): + with self.assertRaisesRegex(InvalidWCSError, 'range.end not >= range.start'): wcsvalidator._validate_temporal_wcs(bad_temporal_wcs) @@ -109,45 +120,39 @@ def test_bad_temporalwcs(self): class CustomWCSValidatorTests(unittest.TestCase): def 
test_customwcs_validator(self): good_custom_wcs = CustomTestUtil.good_wcs() - assert(good_custom_wcs.axis is not None) + assert good_custom_wcs.axis is not None wcsvalidator._validate_custom_wcs(good_custom_wcs) wcsvalidator._validate_custom_wcs(None) def test_bad_customwcs(self): bad_custom_wcs = CustomTestUtil.bad_ctype_wcs() - with self.assertRaisesRegex( - InvalidWCSError, 'CUSTOM_WCS_VALIDATION_ERROR:'): + with self.assertRaisesRegex(InvalidWCSError, 'CUSTOM_WCS_VALIDATION_ERROR:'): wcsvalidator._validate_custom_wcs(bad_custom_wcs) bad_custom_wcs = CustomTestUtil.bad_cunit_wcs() - with self.assertRaisesRegex( - InvalidWCSError, 'CUSTOM_WCS_VALIDATION_ERROR:'): + with self.assertRaisesRegex(InvalidWCSError, 'CUSTOM_WCS_VALIDATION_ERROR:'): wcsvalidator._validate_custom_wcs(bad_custom_wcs) bad_custom_wcs = CustomTestUtil.bad_range_wcs() - with self.assertRaisesRegex( - InvalidWCSError, 'CUSTOM_WCS_VALIDATION_ERROR:'): + with self.assertRaisesRegex(InvalidWCSError, 'CUSTOM_WCS_VALIDATION_ERROR:'): wcsvalidator._validate_custom_wcs(bad_custom_wcs) bad_custom_wcs = CustomTestUtil.bad_bounds_wcs() - with self.assertRaisesRegex( - InvalidWCSError, 'CUSTOM_WCS_VALIDATION_ERROR:'): + with self.assertRaisesRegex(InvalidWCSError, 'CUSTOM_WCS_VALIDATION_ERROR:'): wcsvalidator._validate_custom_wcs(bad_custom_wcs) bad_custom_wcs = CustomTestUtil.bad_function_wcs() - with self.assertRaisesRegex( - InvalidWCSError, 'CUSTOM_WCS_VALIDATION_ERROR:'): + with self.assertRaisesRegex(InvalidWCSError, 'CUSTOM_WCS_VALIDATION_ERROR:'): wcsvalidator._validate_custom_wcs(bad_custom_wcs) @pytest.mark.skipif(single_test, reason='Single test mode') class SpatialWCSValidatorTests(unittest.TestCase): def test_spatialwcs_validator(self): - spatialtest = SpatialTestUtil() good_spatial_wcs = spatialtest.good_wcs() - assert(good_spatial_wcs.axis.function is not None) + assert good_spatial_wcs.axis.function is not None wcsvalidator._validate_spatial_wcs(good_spatial_wcs) # None is valid wcsvalidator._validate_spatial_wcs(None) @@ -166,7 +171,7 @@ class SpectralWCSValidatorTests(unittest.TestCase): def test_spectralwcs_validator(self): energyTest = EnergyTestUtil() good_spectral_wcs = energyTest.good_wcs() - assert(good_spectral_wcs.axis.function is not None) + assert good_spectral_wcs.axis.function is not None wcsvalidator._validate_spectral_wcs(good_spectral_wcs) wcsvalidator._validate_spectral_wcs(None) @@ -294,7 +299,7 @@ def good_wcs(): sx = float(54321.0) nx = 200 ds = float(0.01) - goodwcs = TimeTestUtil.get_test_function(True, px, sx*nx*ds, nx, ds) + goodwcs = TimeTestUtil.get_test_function(True, px, sx * nx * ds, nx, ds) return goodwcs @staticmethod @@ -304,7 +309,7 @@ def bad_ctype_wcs(): nx = 200 ds = float(0.01) - badwcs = TimeTestUtil.get_test_function(True, px, sx*nx*ds, nx, ds) + badwcs = TimeTestUtil.get_test_function(True, px, sx * nx * ds, nx, ds) # Should fail on the ctype badwcs.axis.axis.ctype = "bla" return badwcs @@ -425,8 +430,7 @@ def bad_function_wcs(): delta = 0.0 ref_coord = RefCoord(float(0.9), float(1.1)) func = CoordFunction1D(naxis, delta, ref_coord) - axis_1d = CoordAxis1D(wcs.Axis(ctype, unit), error, range, bounds, - func) + axis_1d = CoordAxis1D(wcs.Axis(ctype, unit), error, range, bounds, func) return chunk.CustomWCS(axis_1d) @staticmethod @@ -464,11 +468,9 @@ def bad_wcs(): spatial_wcs = chunk.SpatialWCS(axis) spatial_wcs.equinox = None dim = wcs.Dimension2D(1024, 1024) - ref = wcs.Coord2D(wcs.RefCoord( - 512.0, 10.0), wcs.RefCoord(512.0, 20.0)) + ref = wcs.Coord2D(wcs.RefCoord(512.0, 
10.0), wcs.RefCoord(512.0, 20.0)) # Create Invalid function - axis.function = wcs.CoordFunction2D( - dim, ref, 1.0e-3, 0.0, 0.0, 0.0) # singular CD matrix + axis.function = wcs.CoordFunction2D(dim, ref, 1.0e-3, 0.0, 0.0, 0.0) # singular CD matrix return spatial_wcs @staticmethod @@ -494,8 +496,7 @@ def get_test_function(px, py, sx, sy, gal): # Simple frame set: 1000x1000 pixels, 1 pixel = 1.0e-3 deg dim = wcs.Dimension2D(1000, 1000) ref = wcs.Coord2D(wcs.RefCoord(px, sx), wcs.RefCoord(py, sy)) - axis_2d.function = wcs.CoordFunction2D( - dim, ref, 1.e-3, 0.0, 0.0, 1.0e-3) + axis_2d.function = wcs.CoordFunction2D(dim, ref, 1.0e-3, 0.0, 0.0, 1.0e-3) return spatial_wcs @@ -542,8 +543,7 @@ def bad_wcs(): sx = float(400.0) nx = 200 ds = float(1.0) - bad_energy = EnergyTestUtil.getTestFunction( - True, px, sx * nx * ds, nx, ds) + bad_energy = EnergyTestUtil.getTestFunction(True, px, sx * nx * ds, nx, ds) # Make function invalid c1 = wcs.RefCoord(0.5, 2000.0) bad_energy.axis.function = wcs.CoordFunction1D(100, 10.0, c1) @@ -580,14 +580,13 @@ def getTestFunction(complete, px, sx, nx, ds): return spectral_wcs -class ObservationTestUtil(): +class ObservationTestUtil: def __init__(self): pass @staticmethod def get_test_observation(): - obs = observation.Observation( - "collection", "obsID", observation.Algorithm("algo")) + obs = observation.Observation("collection", "obsID", observation.Algorithm("algo")) p1 = PlaneTestUtil.get_test_plane("planeID1") p2 = PlaneTestUtil.get_test_plane("planeID2") p3 = PlaneTestUtil.get_test_plane("planeID3") @@ -597,7 +596,7 @@ def get_test_observation(): return obs -class PlaneTestUtil(): +class PlaneTestUtil: def __init__(self): pass @@ -617,7 +616,7 @@ def get_test_plane(planeID): return test_plane -class ArtifactTestUtil(): +class ArtifactTestUtil: def __init__(self): pass @@ -638,8 +637,7 @@ def get_test_artifact(uri, ptype): # chunk.ProductType.SCIENCE is a common type if ptype is None: ptype = chunk.ProductType.SCIENCE - test_artifact = artifact.Artifact( - uri, ptype, artifact.ReleaseType.DATA) + test_artifact = artifact.Artifact(uri, ptype, artifact.ReleaseType.DATA) chunks = TypedList(chunk.Chunk) chunks.append(ArtifactTestUtil.get_good_test_chunk(ptype)) @@ -650,7 +648,7 @@ def get_test_artifact(uri, ptype): return test_artifact -class PartTestUtil(): +class PartTestUtil: def __init__(self): pass @@ -665,7 +663,7 @@ def get_test_part(pname, ptype): @pytest.mark.skipif(single_test, reason='Single test mode') -class TestValidatePolarizationWcs(): +class TestValidatePolarizationWcs: def test_none_polarization_wcs(self): # Polarization is None, should not produce an error wcsvalidator._validate_polarization_wcs(None) @@ -701,8 +699,8 @@ def test_range(self): polarization = PolarizationWCS(axis_1d) with pytest.raises(InvalidWCSError) as ex: wcsvalidator._validate_polarization_wcs(polarization) - assert('Invalid Polarization WCS' in str(ex.value)) - assert('11' in str(ex.value)) + assert 'Invalid Polarization WCS' in str(ex.value) + assert '11' in str(ex.value) # Polarization axis range contains invalid negative values start = RefCoord(float(-9.1), float(-8.9)) @@ -712,8 +710,8 @@ def test_range(self): polarization = PolarizationWCS(axis_1d) with pytest.raises(InvalidWCSError) as ex: wcsvalidator._validate_polarization_wcs(polarization) - assert('Invalid Polarization WCS' in str(ex.value)) - assert('-9' in str(ex.value)) + assert 'Invalid Polarization WCS' in str(ex.value) + assert '-9' in str(ex.value) # Polarization axis range contains an invalid value (0) 
within a range start = RefCoord(float(-8.1), float(-7.9)) @@ -723,8 +721,8 @@ def test_range(self): polarization = PolarizationWCS(axis_1d) with pytest.raises(InvalidWCSError) as ex: wcsvalidator._validate_polarization_wcs(polarization) - assert('Invalid Polarization WCS' in str(ex.value)) - assert('0' in str(ex.value)) + assert 'Invalid Polarization WCS' in str(ex.value) + assert '0' in str(ex.value) def test_bounds(self): # Polarization bounds is None, should not produce an error @@ -763,8 +761,8 @@ def test_bounds(self): polarization = PolarizationWCS(axis_1d) with pytest.raises(InvalidWCSError) as ex: wcsvalidator._validate_polarization_wcs(polarization) - assert('Invalid Polarization WCS' in str(ex.value)) - assert('11' in str(ex.value)) + assert 'Invalid Polarization WCS' in str(ex.value) + assert '11' in str(ex.value) # Polarization axis bounds contains more than one invalid range start = RefCoord(float(0.9), float(1.1)) @@ -778,8 +776,8 @@ def test_bounds(self): polarization = PolarizationWCS(axis_1d) with pytest.raises(InvalidWCSError) as ex: wcsvalidator._validate_polarization_wcs(polarization) - assert('Invalid Polarization WCS' in str(ex.value)) - assert('-9' in str(ex.value)) + assert 'Invalid Polarization WCS' in str(ex.value) + assert '-9' in str(ex.value) def test_function(self): # Polarization function is None, should not produce an error @@ -812,12 +810,12 @@ def test_function(self): polarization = PolarizationWCS(axis_1d) with pytest.raises(InvalidWCSError) as ex: wcsvalidator._validate_polarization_wcs(polarization) - assert('Invalid Polarization WCS' in str(ex.value)) - assert('Invalid naxis value' in str(ex.value)) + assert 'Invalid Polarization WCS' in str(ex.value) + assert 'Invalid naxis value' in str(ex.value) @pytest.mark.skipif(single_test, reason='Single test mode') -class TestWcsPolarizationState(): +class TestWcsPolarizationState: def test_all(self): # valid keys for i in range(1, 11): diff --git a/caom2utils/caom2utils/wcs_parsers.py b/caom2utils/caom2utils/wcs_parsers.py index 11bc1d20..f095e6a5 100644 --- a/caom2utils/caom2utils/wcs_parsers.py +++ b/caom2utils/caom2utils/wcs_parsers.py @@ -72,67 +72,39 @@ from astropy.wcs import SingularMatrixError, utils, Wcsprm, WCS from caom2 import ( - Axis, Chunk, Coord2D, CoordAxis1D, CoordAxis2D, CoordError, CoordFunction1D, CoordFunction2D, CustomWCS, + Axis, + Chunk, + Coord2D, + CoordAxis1D, + CoordAxis2D, + CoordError, + CoordFunction1D, + CoordFunction2D, + CustomWCS, Dimension2D, - ObservableAxis, PolarizationWCS, RefCoord, Slice, - SpatialWCS, SpectralWCS, - TemporalWCS + ObservableAxis, + PolarizationWCS, + RefCoord, + Slice, + SpatialWCS, + SpectralWCS, + TemporalWCS, ) from caom2utils.blueprints import ObsBlueprint, _to_float, _to_int, _to_str -CUSTOM_CTYPES = [ - 'RM', - 'FDEP' -] - -POSITION_CTYPES = [ - ['RA', - 'GLON', - 'ELON', - 'HLON', - 'SLON'], - ['DEC', - 'GLAT', - 'ELAT', - 'HLAT', - 'SLAT'] -] - -ENERGY_CTYPES = [ - 'FREQ', - 'ENER', - 'WAVN', - 'VRAD', - 'WAVE', - 'VOPT', - 'ZOPT', - 'AWAV', - 'VELO', - 'BETA'] +CUSTOM_CTYPES = ['RM', 'FDEP'] + +POSITION_CTYPES = [['RA', 'GLON', 'ELON', 'HLON', 'SLON'], ['DEC', 'GLAT', 'ELAT', 'HLAT', 'SLAT']] + +ENERGY_CTYPES = ['FREQ', 'ENER', 'WAVN', 'VRAD', 'WAVE', 'VOPT', 'ZOPT', 'AWAV', 'VELO', 'BETA'] # From http://hea-www.cfa.harvard.edu/~arots/TimeWCS/ -TIME_KEYWORDS = [ - 'TIME', - 'TAI', - 'TT', - 'TDT', - 'ET', - 'IAT', - 'UT1', - 'UTC', - 'GMT', - 'GPS', - 'TCG', - 'TCB', - 'TDB', - 'LOCAL'] +TIME_KEYWORDS = ['TIME', 'TAI', 'TT', 'TDT', 'ET', 
'IAT', 'UT1', 'UTC', 'GMT', 'GPS', 'TCG', 'TCB', 'TDB', 'LOCAL'] POLARIZATION_CTYPES = ['STOKES'] -OBSERVABLE_CTYPES = [ - 'observable', - 'FLUX'] +OBSERVABLE_CTYPES = ['observable', 'FLUX'] class HDULoggingFilter(logging.Filter): @@ -227,20 +199,16 @@ def _set_wcs(self): self.assign_sanitize(crpix, count + 1, 'Chunk.position.axis.function.refCoord.coord2.pix') self.assign_sanitize(crval, count, 'Chunk.position.axis.function.refCoord.coord1.val') self.assign_sanitize(crval, count + 1, 'Chunk.position.axis.function.refCoord.coord2.val') - x = self._blueprint._get('Chunk.position.axis.function.cd11', - self._extension) + x = self._blueprint._get('Chunk.position.axis.function.cd11', self._extension) if x is not None and not ObsBlueprint.needs_lookup(x): cd[count][0] = x - x = self._blueprint._get('Chunk.position.axis.function.cd12', - self._extension) + x = self._blueprint._get('Chunk.position.axis.function.cd12', self._extension) if x is not None and not ObsBlueprint.needs_lookup(x): cd[count][1] = x - x = self._blueprint._get('Chunk.position.axis.function.cd21', - self._extension) + x = self._blueprint._get('Chunk.position.axis.function.cd21', self._extension) if x is not None and not ObsBlueprint.needs_lookup(x): cd[count + 1][0] = x - x = self._blueprint._get('Chunk.position.axis.function.cd22', - self._extension) + x = self._blueprint._get('Chunk.position.axis.function.cd22', self._extension) if x is not None and not ObsBlueprint.needs_lookup(x): cd[count + 1][1] = x self.assign_sanitize(crder, count, 'Chunk.position.axis.error1.rnder') @@ -428,8 +396,7 @@ def augment_position(self, chunk): chunk.position_axis_1 = position_axes_indices[0] chunk.position_axis_2 = position_axes_indices[1] - axis = self._get_spatial_axis(chunk.position_axis_1 - 1, - chunk.position_axis_2 - 1) + axis = self._get_spatial_axis(chunk.position_axis_1 - 1, chunk.position_axis_2 - 1) if axis is None: self.logger.debug('No WCS Position axis.function') @@ -555,12 +522,11 @@ def augment_observable(self, chunk): def _finish_chunk_observable(self, chunk): self.logger.debug('Begin _finish_chunk_observable') - ctype = self._wcs.wcs.ctype[chunk.observable_axis-1] - cunit = self._wcs.wcs.ctype[chunk.observable_axis-1] - pix_bin = _to_int(self._wcs.wcs.crpix[chunk.observable_axis-1]) + ctype = self._wcs.wcs.ctype[chunk.observable_axis - 1] + cunit = self._wcs.wcs.ctype[chunk.observable_axis - 1] + pix_bin = _to_int(self._wcs.wcs.crpix[chunk.observable_axis - 1]) if ctype is not None and cunit is not None and pix_bin is not None: - chunk.observable = ObservableAxis( - Slice(self._get_axis(0, ctype, cunit), pix_bin)) + chunk.observable = ObservableAxis(Slice(self._get_axis(0, ctype, cunit), pix_bin)) self.logger.debug('End _finish_chunk_observable') def _finish_chunk_position(self, chunk): @@ -648,11 +614,9 @@ def _finish_time(self): self.logger.debug('End _finish_time') def _get_axis(self, index, over_ctype=None, over_cunit=None): - """ Assemble a generic axis """ - aug_ctype = str(self.wcs.ctype[index]) if over_ctype is None \ - else over_ctype - aug_cunit = str(self.wcs.cunit[index]) if over_cunit is None \ - else over_cunit + """Assemble a generic axis""" + aug_ctype = str(self.wcs.ctype[index]) if over_ctype is None else over_ctype + aug_cunit = str(self.wcs.cunit[index]) if over_cunit is None else over_cunit if aug_cunit is not None and len(aug_cunit) == 0: aug_cunit = None aug_axis = Axis(aug_ctype, aug_cunit) @@ -684,14 +648,14 @@ def _get_axis_length(self, for_axis): if len(self._wcs.array_shape) == 1: result = 
self._wcs.array_shape[0] else: - result = self._wcs.array_shape[for_axis-1] + result = self._wcs.array_shape[for_axis - 1] if isinstance(result, tuple): # the blueprint is incompletely configured raise ValueError(f'Could not find axis length for axis {for_axis}') return _to_int(result) def _get_cd(self, x_index, y_index): - """ returns cd info""" + """returns cd info""" try: if self.wcs.has_cd(): @@ -705,8 +669,7 @@ def _get_cd(self, x_index, y_index): cd21 = self.wcs.crota[y_index] cd22 = self.wcs.cdelt[y_index] except AttributeError: - self.logger.debug( - f'Error searching for CD* values {sys.exc_info()[1]}') + self.logger.debug(f'Error searching for CD* values {sys.exc_info()[1]}') cd11 = None cd12 = None cd21 = None @@ -750,9 +713,7 @@ def _get_position_axis(self): elif (xindex is None) and (yindex is None): return None else: - raise ValueError('Found only one position axis ra/dec: {}/{} in ' - '{}'. - format(xindex, yindex, self.file)) + raise ValueError('Found only one position axis ra/dec: {}/{} in ' '{}'.format(xindex, yindex, self.file)) def _get_ref_coord(self, index): aug_crpix = _to_float(self._sanitize(self.wcs.crpix[index])) @@ -775,27 +736,30 @@ def _get_spatial_axis(self, xindex, yindex): if x_ref_coord and y_ref_coord: aug_ref_coord = Coord2D(x_ref_coord, y_ref_coord) - aug_cd11, aug_cd12, aug_cd21, aug_cd22 = \ - self._get_cd(xindex, yindex) - - if aug_dimension is not None and \ - aug_ref_coord is not None and \ - aug_cd11 is not None and \ - aug_cd12 is not None and \ - aug_cd21 is not None and \ - aug_cd22 is not None: - aug_function = CoordFunction2D(aug_dimension, aug_ref_coord, - aug_cd11, aug_cd12, - aug_cd21, aug_cd22) + aug_cd11, aug_cd12, aug_cd21, aug_cd22 = self._get_cd(xindex, yindex) + + if ( + aug_dimension is not None + and aug_ref_coord is not None + and aug_cd11 is not None + and aug_cd12 is not None + and aug_cd21 is not None + and aug_cd22 is not None + ): + aug_function = CoordFunction2D(aug_dimension, aug_ref_coord, aug_cd11, aug_cd12, aug_cd21, aug_cd22) self.logger.debug('End CoordFunction2D augmentation.') else: aug_function = None - aug_axis = CoordAxis2D(self._get_axis(xindex), - self._get_axis(yindex), - self._get_coord_error(xindex), - self._get_coord_error(yindex), - None, None, aug_function) + aug_axis = CoordAxis2D( + self._get_axis(xindex), + self._get_axis(yindex), + self._get_coord_error(xindex), + self._get_coord_error(yindex), + None, + None, + aug_function, + ) self.logger.debug('End CoordAxis2D augmentation.') return aug_axis @@ -854,8 +818,7 @@ def _finish_chunk_observable(self, chunk): cunit = self.header.get(f'CUNIT{chunk.observable_axis}') pix_bin = self.header.get(f'CRPIX{chunk.observable_axis}') if ctype is not None and cunit is not None and pix_bin is not None: - chunk.observable = ObservableAxis( - Slice(self._get_axis(0, ctype, cunit), pix_bin)) + chunk.observable = ObservableAxis(Slice(self._get_axis(0, ctype, cunit), pix_bin)) self.logger.debug('End _finish_chunk_observable') def _finish_chunk_position(self, chunk): @@ -876,18 +839,15 @@ def _finish_chunk_time(self, chunk): chunk.time.resolution = _to_float(self.header.get('TIMEDEL')) chunk.time.timesys = str(self.header.get('TIMESYS', 'UTC')) chunk.time.trefpos = self.header.get('TREFPOS', None) - chunk.time.mjdref = self.header.get('MJDREF', - self.header.get('MJDDATE')) + chunk.time.mjdref = self.header.get('MJDREF', self.header.get('MJDDATE')) self.logger.debug('End _finish_chunk_time') def _get_axis_length(self, for_axis): # try ZNAXIS first in order to get the size 
of the original # image in case it was FITS compressed - result = _to_int(self._sanitize( - self.header.get(f'ZNAXIS{for_axis}'))) + result = _to_int(self._sanitize(self.header.get(f'ZNAXIS{for_axis}'))) if result is None: - result = _to_int(self._sanitize( - self.header.get(f'NAXIS{for_axis}'))) + result = _to_int(self._sanitize(self.header.get(f'NAXIS{for_axis}'))) if result is None: msg = f'Could not find axis length for axis {for_axis}' raise ValueError(msg) diff --git a/caom2utils/caom2utils/wcs_util.py b/caom2utils/caom2utils/wcs_util.py index 42eff1a2..2cecf92e 100644 --- a/caom2utils/caom2utils/wcs_util.py +++ b/caom2utils/caom2utils/wcs_util.py @@ -146,22 +146,18 @@ def function1d_to_interval(temporal_wcs, function_1d): return shape.SubInterval(min(a, b), max(a, b)) except Exception as ex: - raise ValueError( - f"Invalid function in Temporal WCS: {repr(ex)}") + raise ValueError(f"Invalid function in Temporal WCS: {repr(ex)}") @staticmethod def validate_wcs(temporal_wcs): ctype = temporal_wcs.axis.axis.ctype sb = "" - if ctype == TARGET_CTYPE \ - and (temporal_wcs.timesys is None - or temporal_wcs.timesys == TARGET_TIMESYS): + if ctype == TARGET_CTYPE and (temporal_wcs.timesys is None or temporal_wcs.timesys == TARGET_TIMESYS): pass elif ctype == TARGET_TIMESYS and temporal_wcs.timesys is None: pass else: - sb = "unexpected TIMESYS, CTYPE: {},{}".format( - temporal_wcs.timesys, ctype) + sb = "unexpected TIMESYS, CTYPE: {},{}".format(temporal_wcs.timesys, ctype) cunit = temporal_wcs.axis.axis.cunit if TARGET_CUNIT != cunit: @@ -280,27 +276,23 @@ def _use_chunk(atype, ptype, ctype, matches): return False if ctype is not None and ctype != matches: - logger.debug( - f"use_chunk=False: Chunk.product_type={ctype}") + logger.debug(f"use_chunk=False: Chunk.product_type={ctype}") return False if ptype is not None and ptype != matches: - logger.debug( - f"use_chunk=False: Part.product_type={ptype}") + logger.debug(f"use_chunk=False: Part.product_type={ptype}") return False if atype == matches: - logger.debug( - f"use_chunk=True: Artifact.product_type={atype}") + logger.debug(f"use_chunk=True: Artifact.product_type={atype}") return True - logger.debug("use_chunk=False: product_type={},{},{}". - format(atype, ptype, ctype)) + logger.debug("use_chunk=False: product_type={},{},{}".format(atype, ptype, ctype)) return False @staticmethod def val2pix(wcs, func, val): - """ val2pix calculates pixel from value + """val2pix calculates pixel from value :param wcs A CustomWCS :param func A CoordFunction1D @@ -313,7 +305,7 @@ def val2pix(wcs, func, val): @staticmethod def function1d_to_interval(wcs, func): - """ function1d_to_interval calculates interval for CoordFunction1D + """function1d_to_interval calculates interval for CoordFunction1D :param wcs A CustomWCS :param r A CoordFunction1D @@ -321,9 +313,7 @@ def function1d_to_interval(wcs, func): """ CustomAxisUtil.validate_wcs(wcs) if func.delta == 0.0 and func.naxis > 1: - raise ValueError( - "Invalid CoordFunction1D: found {} pixels and delta = 0.0". 
- format(func.naxis)) + raise ValueError("Invalid CoordFunction1D: found {} pixels and delta = 0.0".format(func.naxis)) p1 = 0.5 p2 = float(func.naxis) + 0.5 @@ -334,7 +324,7 @@ def function1d_to_interval(wcs, func): @staticmethod def range1d_to_interval(wcs, r): - """ range1d_to_interval calculates interval for CoordRange1D + """range1d_to_interval calculates interval for CoordRange1D :param wcs A CustomWCS :param r A CoordRange1D @@ -348,7 +338,9 @@ def range1d_to_interval(wcs, r): if delta == 0.0 and np > 1.0: raise ValueError( "Invalid CoordRange1D: found {} + pixels and delta = 0.0 in \ - [{},{}]".format(np, a, b) + [{},{}]".format( + np, a, b + ) ) return shape.Interval(min(a, b), max(a, b)) @@ -387,23 +379,23 @@ def _get_ctype(artifacts, product_type): for p_key in a.parts: p = a.parts[p_key] for c in p.chunks: - if c.custom is not None and \ - CustomAxisUtil._use_chunk( - a.product_type, p.product_type, - c.product_type, product_type): + if c.custom is not None and CustomAxisUtil._use_chunk( + a.product_type, p.product_type, c.product_type, product_type + ): current_ctype = c.custom.axis.axis.ctype if first_ctype is None: if current_ctype in CustomAxisUtil.ctype_cunit_map: first_ctype = current_ctype else: - raise ValueError("Unsupported CTYPE: {}". - format(current_ctype)) + raise ValueError("Unsupported CTYPE: {}".format(current_ctype)) if current_ctype != first_ctype: raise ValueError( "CTYPE must be the same across all Artifacts. \ - Found: {} and {}".format(current_ctype, - first_ctype)) + Found: {} and {}".format( + current_ctype, first_ctype + ) + ) return first_ctype @@ -414,11 +406,11 @@ def compute(artifacts): if axis_ctype is not None: c = plane.CustomAxis(axis_ctype) if product_type is not None: - c.bounds = CustomAxisUtil.compute_bounds( - artifacts, product_type, axis_ctype) + c.bounds = CustomAxisUtil.compute_bounds(artifacts, product_type, axis_ctype) if c.dimension is None: c.dimension = CustomAxisUtil.compute_dimension_from_wcs( - c.bounds, artifacts, product_type, axis_ctype) + c.bounds, artifacts, product_type, axis_ctype + ) return c else: # No ctype found for chosen product type @@ -426,7 +418,7 @@ def compute(artifacts): @staticmethod def compute_bounds(artifacts, product_type, expected_ctype): - """ Compute bounds. + """Compute bounds. :param artifacts List of Artifacts :param product_type A Product_type @@ -439,46 +431,36 @@ def compute_bounds(artifacts, product_type, expected_ctype): for p_key in a.parts: p = a.parts[p_key] for c in p.chunks: - if c is not None and c.custom is not None and \ - CustomAxisUtil._use_chunk( - a.product_type, p.product_type, - c.product_type, product_type): + if ( + c is not None + and c.custom is not None + and CustomAxisUtil._use_chunk(a.product_type, p.product_type, c.product_type, product_type) + ): current_ctype = c.custom.axis.axis.ctype - if current_ctype is None or \ - current_ctype != expected_ctype: + if current_ctype is None or current_ctype != expected_ctype: raise ValueError( "CTYPE must be the same across all Artifacts. \ Found: {}. Expected: {}".format( - current_ctype, expected_ctype)) + current_ctype, expected_ctype + ) + ) else: range = c.custom.axis.range bounds = c.custom.axis.bounds function = c.custom.axis.function if range is not None: - s = CustomAxisUtil.range1d_to_interval( - c.custom, range) - logger.debug( - "[compute_bounds] range -> sub: {}". 
- format(s)) - CustomAxisUtil._merge_into_list( - s, subs, union_scale) + s = CustomAxisUtil.range1d_to_interval(c.custom, range) + logger.debug("[compute_bounds] range -> sub: {}".format(s)) + CustomAxisUtil._merge_into_list(s, subs, union_scale) elif bounds is not None: for cr in bounds.samples: - s = CustomAxisUtil.range1d_to_interval( - c.custom, cr) - logger.debug( - "[compute_bounds] bounds -> sub: {}". - format(s)) - CustomAxisUtil._merge_into_list( - s, subs, union_scale) + s = CustomAxisUtil.range1d_to_interval(c.custom, cr) + logger.debug("[compute_bounds] bounds -> sub: {}".format(s)) + CustomAxisUtil._merge_into_list(s, subs, union_scale) elif function is not None: - s = CustomAxisUtil.function1d_to_interval( - c.custom, function) - logger.debug( - "[compute_bounds] function -> sub: {}". - format(s)) - CustomAxisUtil._merge_into_list( - s, subs, union_scale) + s = CustomAxisUtil.function1d_to_interval(c.custom, function) + logger.debug("[compute_bounds] function -> sub: {}".format(s)) + CustomAxisUtil._merge_into_list(s, subs, union_scale) if len(subs) == 0: return None @@ -493,9 +475,8 @@ def compute_bounds(artifacts, product_type, expected_ctype): return shape.Interval(lb, ub, subs) @staticmethod - def compute_dimension_from_wcs(bounds, artifacts, - product_type, expected_ctype): - """ Compute dimensionality (number of pixels). + def compute_dimension_from_wcs(bounds, artifacts, product_type, expected_ctype): + """Compute dimensionality (number of pixels). :param bounds A sampled interval :param artifacts List of Artifacts @@ -515,17 +496,19 @@ def compute_dimension_from_wcs(bounds, artifacts, for p_key in a.parts: p = a.parts[p_key] for c in p.chunks: - if c is not None and c.custom is not None and \ - CustomAxisUtil._use_chunk( - a.product_type, p.product_type, - c.product_type, product_type): + if ( + c is not None + and c.custom is not None + and CustomAxisUtil._use_chunk(a.product_type, p.product_type, c.product_type, product_type) + ): current_ctype = c.custom.axis.axis.ctype - if current_ctype is None or \ - current_ctype != expected_ctype: + if current_ctype is None or current_ctype != expected_ctype: raise ValueError( "CTYPE must be the same across all Artifacts. \ Found: {}. Expected: {}".format( - current_ctype, expected_ctype)) + current_ctype, expected_ctype + ) + ) else: num += 1 ss = abs(c.custom.axis.function.delta) @@ -549,9 +532,8 @@ def compute_dimension_from_wcs(bounds, artifacts, return int(round(abs(x2 - x1))) @staticmethod - def compute_dimension_from_range_bounds( - artifacts, product_type, expected_ctype): - """ Compute dimensionality (number of pixels). + def compute_dimension_from_range_bounds(artifacts, product_type, expected_ctype): + """Compute dimensionality (number of pixels). :param artifacts List of Artifacts :param product_type A Product_type @@ -565,24 +547,21 @@ def compute_dimension_from_range_bounds( for p_key in a.parts: p = a.parts[p_key] for c in p.chunks: - if CustomAxisUtil._use_chunk( - a.product_type, p.product_type, - c.product_type, product_type): + if CustomAxisUtil._use_chunk(a.product_type, p.product_type, c.product_type, product_type): current_ctype = c.custom.axis.axis.ctype - if current_ctype is None or \ - current_ctype != expected_ctype: + if current_ctype is None or current_ctype != expected_ctype: raise ValueError( "CTYPE must be the same across all Artifacts. \ Found: {}. 
Expected: {}".format( - current_ctype, expected_ctype)) + current_ctype, expected_ctype + ) + ) else: - n = CustomAxisUtil._get_num_pixels( - c.custom.axis, False) + n = CustomAxisUtil._get_num_pixels(c.custom.axis, False) num_pixels += n if num_pixels > 0.0: - logger.debug("compute_dimension_from_range_bounds: {}".format( - num_pixels)) + logger.debug("compute_dimension_from_range_bounds: {}".format(num_pixels)) return int(num_pixels) logger.debug("compute_dimension_from_range_bounds: None") @@ -598,22 +577,22 @@ def validate_wcs(custom_wcs): map_cunit = CustomAxisUtil.ctype_cunit_map[ctype] if map_cunit is None: - raise ValueError( - f"Invalid CTYPE: {ctype}") + raise ValueError(f"Invalid CTYPE: {ctype}") if map_cunit != cunit: raise ValueError( "Invalid CUNIT for CTYPE: {}. Expected: {}. Found {} \ (normalized, raw: {})".format( - ctype, map_cunit, cunit, raw_cunit)) + ctype, map_cunit, cunit, raw_cunit + ) + ) @staticmethod def _normalize_unit(raw_cunit): normalized_unit = raw_cunit if "^" in raw_cunit: normalized_unit = raw_cunit.replace("^", "**") - logger.debug("normalized unit: {} to {}".format( - raw_cunit, normalized_unit)) + logger.debug("normalized unit: {} to {}".format(raw_cunit, normalized_unit)) return normalized_unit @@ -649,7 +628,7 @@ def _get_range(self, from_range): if from_range is not None: lb = int(round(from_range.start.val)) ub = int(round(from_range.end.val)) - return range(lb, ub+1) + return range(lb, ub + 1) return None @staticmethod @@ -691,6 +670,5 @@ def get_range_from_function(function): if function.naxis >= 1: return range(1, function.naxis + 1) else: - raise ValueError( - f'Invalid naxis value: {function.naxis}') + raise ValueError(f'Invalid naxis value: {function.naxis}') return None diff --git a/caom2utils/caom2utils/wcsvalidator.py b/caom2utils/caom2utils/wcsvalidator.py index ab645f2c..9bc23d9f 100644 --- a/caom2utils/caom2utils/wcsvalidator.py +++ b/caom2utils/caom2utils/wcsvalidator.py @@ -70,8 +70,7 @@ from caom2utils.wcs_util import TimeUtil, EnergyUtil, ORIGIN from . import wcs_util from .wcs_util import PolarizationWcsUtil, CustomAxisUtil -from caom2 import Artifact, Chunk, Observation, Part, Plane, \ - PolarizationState +from caom2 import Artifact, Chunk, Observation, Part, Plane, PolarizationState import numpy as np import logging @@ -158,7 +157,9 @@ def _validate_chunk(chunk): error_string = "CustomWCS or axis definition null." 
raise InvalidWCSError( "Invalid CustomWCS: {} Axis: {}, WCS: {}".format( - error_string, str(chunk.custom_axis), str(chunk.custom))) + error_string, str(chunk.custom_axis), str(chunk.custom) + ) + ) def _validate_spatial_wcs(position): @@ -169,16 +170,13 @@ def _validate_spatial_wcs(position): # There's not much that can be validated about range & bounds if position.axis.function is not None: fn2D = position.axis.function - _check_transform(float(fn2D.dimension.naxis1 / 2), - float(fn2D.dimension.naxis2 / 2)) + _check_transform(float(fn2D.dimension.naxis1 / 2), float(fn2D.dimension.naxis2 / 2)) logger.debug('position_axis.function succeeded.') except Exception as e: error_string = repr(e) if len(error_string) > 0: - raise InvalidWCSError( - "Invalid SpatialWCS: {}: {}".format( - error_string, str(position))) + raise InvalidWCSError("Invalid SpatialWCS: {}: {}".format(error_string, str(position))) def _check_transform(lower, upper): @@ -222,9 +220,7 @@ def _validate_spectral_wcs(energy): error_msg = repr(ex) if len(error_msg) > 0: - raise InvalidWCSError( - "Invalid Spectral WCS: {}: {}".format( - error_msg, str(energy))) + raise InvalidWCSError("Invalid Spectral WCS: {}: {}".format(error_msg, str(energy))) def _validate_temporal_wcs(time): @@ -253,9 +249,7 @@ def _validate_temporal_wcs(time): error_msg = repr(e) if len(error_msg) > 0: - raise InvalidWCSError( - "Invalid Temporal WCS: {}: {}".format( - error_msg, str(time))) + raise InvalidWCSError("Invalid Temporal WCS: {}: {}".format(error_msg, str(time))) def _validate_range(a_range): @@ -274,12 +268,10 @@ def _validate_bounds(bounds): def _validate_function(a_function): - naxis_range = \ - PolarizationWcsUtil.get_range_from_function(a_function) + naxis_range = PolarizationWcsUtil.get_range_from_function(a_function) if naxis_range is not None: for pix in naxis_range: - WcsPolarizationState.to_value( - int(round(wcs_util.pix2val(a_function, pix)))) + WcsPolarizationState.to_value(int(round(wcs_util.pix2val(a_function, pix)))) def _validate_polarization_wcs(polarization_wcs): @@ -300,8 +292,7 @@ def _validate_polarization_wcs(polarization_wcs): _validate_function(axis.function) logger.debug('polarization_axis.function succeeded.') except Exception as e: - raise InvalidWCSError( - f"Invalid Polarization WCS: {str(e)}") + raise InvalidWCSError(f"Invalid Polarization WCS: {str(e)}") def _validate_axes(chunk): @@ -318,19 +309,16 @@ def _validate_axes(chunk): value = attr_dict.get(key) if value is not None and value <= chunk.naxis: # Ignore axes greater than naxis: situation is allowed - if axis_list[value] is not None and \ - len(axis_list[value].strip()) > 0: + if axis_list[value] is not None and len(axis_list[value].strip()) > 0: # Flag duplicate axis definitions - error_msg += "Duplicate axis number: {}: {}, {}"\ - .format(value, key, axis_list[value]) + error_msg += "Duplicate axis number: {}: {}, {}".format(value, key, axis_list[value]) else: axis_list[value] = key # Validate the number and quality of the axis definitions # Count from 1, as 0 will never be filled if axis_list[0] != "": - error_msg += "\tInvalid axis definition (0): {}.".\ - format(axis_list[0]) + error_msg += "\tInvalid axis definition (0): {}.".format(axis_list[0]) x = 0 for i in range(1, chunk.naxis + 1): @@ -342,8 +330,7 @@ def _validate_axes(chunk): if error_msg.strip(): # Report all errors found during validation, throw an error and go - raise InvalidWCSError( - f"Invalid Axes: {error_msg}") + raise InvalidWCSError(f"Invalid Axes: {error_msg}") def 
_validate_custom_wcs(custom): @@ -369,32 +356,41 @@ def _validate_custom_wcs(custom): # CoordFunction1D if custom_axis.function is not None: logger.debug('custom_axis.function to interval validation.') - CustomAxisUtil.function1d_to_interval( - custom, custom_axis.function) + CustomAxisUtil.function1d_to_interval(custom, custom_axis.function) logger.debug('custom_axis.function to interval succeeded.') except Exception as e: error_msg = repr(e) if len(error_msg) > 0: - raise InvalidWCSError( - f"CUSTOM_WCS_VALIDATION_ERROR: {error_msg}") + raise InvalidWCSError(f"CUSTOM_WCS_VALIDATION_ERROR: {error_msg}") -class WcsPolarizationState(): +class WcsPolarizationState: """ A dictionary which maps an integer to a PolarizationState value. """ + MAP = { - 1: PolarizationState.I, 2: PolarizationState.Q, - 3: PolarizationState.U, 4: PolarizationState.V, - 5: PolarizationState.POLI, 6: PolarizationState.FPOLI, - 7: PolarizationState.POLA, 8: PolarizationState.EPOLI, - 9: PolarizationState.CPOLI, 10: PolarizationState.NPOLI, - -1: PolarizationState.RR, -2: PolarizationState.LL, - -3: PolarizationState.RL, -4: PolarizationState.LR, - -5: PolarizationState.XX, -6: PolarizationState.YY, - -7: PolarizationState.XY, -8: PolarizationState.YX} + 1: PolarizationState.I, + 2: PolarizationState.Q, + 3: PolarizationState.U, + 4: PolarizationState.V, + 5: PolarizationState.POLI, + 6: PolarizationState.FPOLI, + 7: PolarizationState.POLA, + 8: PolarizationState.EPOLI, + 9: PolarizationState.CPOLI, + 10: PolarizationState.NPOLI, + -1: PolarizationState.RR, + -2: PolarizationState.LL, + -3: PolarizationState.RL, + -4: PolarizationState.LR, + -5: PolarizationState.XX, + -6: PolarizationState.YY, + -7: PolarizationState.XY, + -8: PolarizationState.YX, + } @staticmethod def to_value(key): From baa71e877ee76740c68abb29c0fa10fd1d6044fb Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Wed, 17 Jan 2024 17:11:51 -0800 Subject: [PATCH 21/36] CADC-12017 - set the cardinality of parser:wcs_parser as 1:n. --- caom2utils/caom2utils/caom2blueprint.py | 18 +-- caom2utils/caom2utils/parsers.py | 123 ++++++++++-------- .../caom2utils/tests/test_fits2caom2.py | 6 +- 3 files changed, 84 insertions(+), 63 deletions(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index feb6d62b..19508153 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -67,20 +67,20 @@ # """ -There is a dual inheritance hierarchy in this module: +There is a dual inheritance hierarchy in this package: BlueprintParser ^ | - ContentParser <>--------------------------- WcsParser - ^ ^ - | | - --------------------- ------------------------ - | | | | - | Hdf5Parser <>----- Hdf5WcsParser | - | | - FitsParser <>--------------------------------------------- FitsWcsParser + ContentParser <1:n>--------------------------- WcsParser + ^ ^ + | | + --------------------- ------------------------ + | | | | + | Hdf5Parser <1:n>----- Hdf5WcsParser | + | | + FitsParser <1:n>--------------------------------------------- FitsWcsParser The *WcsParser hierarchy uses astropy.wcs for WCS construction and correctness when building CAOM records. 
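The <1:n> cardinality above means each ContentParser now owns one WcsParser per extension (one per Part/Chunk) instead of a single shared instance. A minimal sketch of that relationship, using only the constructor and method names visible in this patch; the import location and the helper name are assumptions for illustration, not the package's API:

    # Sketch only: WcsParser's module depends on where the refactor in this
    # series leaves it (caom2blueprint vs. wcs_parsers), so adjust the import.
    from caom2utils import ObsBlueprint, WcsParser

    def build_wcs_parsers(obs_blueprint: ObsBlueprint, num_parts: int) -> dict:
        # One WcsParser per extension, keyed by extension index (1:n).
        return {index: WcsParser(obs_blueprint, extension=index) for index in range(num_parts)}

    # Each Part/Chunk is then augmented by the parser for its own extension,
    # mirroring ContentParser.augment_artifact below, e.g.
    #   self._wcs_parsers[index].augment_position(chunk)
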
diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index dde9b9bb..16759cc6 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -206,10 +206,10 @@ def augment_plane(self, plane, artifact_uri): self._to_release_type(self._get_from_list('Artifact.releaseType', index=0)), ) plane.artifacts[artifact_uri] = artifact - self.augment_artifact(artifact, 0) + self.augment_artifact(artifact) self.logger.debug(f'End CAOM2 plane augmentation for {artifact_uri}.') - def augment_artifact(self, artifact, index): + def augment_artifact(self, artifact): """ Augments a given CAOM2 artifact with available information :param artifact: existing CAOM2 artifact to be augmented @@ -459,65 +459,71 @@ def _get_datetime(self, from_value): class ContentParser(BlueprintParser): def __init__(self, obs_blueprint=None, uri=None): super().__init__(obs_blueprint, uri) - self._wcs_parser = WcsParser(obs_blueprint, extension=0) + self._wcs_parsers = {} + self._wcs_parsers[0] = WcsParser(obs_blueprint, extension=0) def _get_chunk_naxis(self, chunk, index): chunk.naxis = self._get_from_list('Chunk.naxis', index, self.blueprint.get_configed_axes_count()) - def augment_artifact(self, artifact, index): + def _get_num_parts(self): + """return the number of Parts to create for a CAOM record + """ + return len(self._blueprint._extensions) + 1 + + def augment_artifact(self, artifact): """ Augments a given CAOM2 artifact with available content information :param artifact: existing CAOM2 artifact to be augmented :param index: int Part name """ - super().augment_artifact(artifact, index) + super().augment_artifact(artifact) - self.logger.debug(f'Begin content artifact augmentation for {artifact.uri}') + self.logger.error(f'Begin content artifact augmentation for {artifact.uri}') if self.blueprint.get_configed_axes_count() == 0: raise TypeError(f'No WCS Data. End content artifact augmentation for ' f'{artifact.uri}.') - if self.add_parts(artifact, index): - part = artifact.parts[str(index)] - part.product_type = self._get_from_list('Part.productType', index) - part.meta_producer = self._get_from_list('Part.metaProducer', index=0, current=part.meta_producer) + for index in range(0, self._get_num_parts()): + if self.add_parts(artifact, index): + part = artifact.parts[str(index)] + part.product_type = self._get_from_list('Part.productType', index) + part.meta_producer = self._get_from_list('Part.metaProducer', index=0, current=part.meta_producer) - # each Part has one Chunk, if it's not an empty part as determined - # just previously - if not part.chunks: - part.chunks.append(caom2.Chunk()) - chunk = part.chunks[0] - chunk.meta_producer = self._get_from_list('Chunk.metaProducer', index=0, current=chunk.meta_producer) + # each Part has one Chunk, if it's not an empty part as determined just previously + if not part.chunks: + part.chunks.append(caom2.Chunk()) + chunk = part.chunks[0] + chunk.meta_producer = self._get_from_list('Chunk.metaProducer', index=0, current=chunk.meta_producer) - self._get_chunk_naxis(chunk, index) + self._get_chunk_naxis(chunk, index) - # order by which the blueprint is used to set WCS information: - # 1 - try to construct the information for an axis from WCS information - # 2 - if the WCS information is insufficient, try to construct the information from the blueprint - # 3 - Always try to fill the range metadata from the blueprint. 
- if self.blueprint._pos_axes_configed: - self._wcs_parser.augment_position(chunk) - self._try_position_with_blueprint(chunk, index) + # order by which the blueprint is used to set WCS information: + # 1 - try to construct the information for an axis from WCS information + # 2 - if the WCS information is insufficient, try to construct the information from the blueprint + # 3 - Always try to fill the range metadata from the blueprint. + if self.blueprint._pos_axes_configed: + self._wcs_parsers[index].augment_position(chunk) + self._try_position_with_blueprint(chunk, index) - if self.blueprint._energy_axis_configed: - self._wcs_parser.augment_energy(chunk) - self._try_energy_with_blueprint(chunk, index) + if self.blueprint._energy_axis_configed: + self._wcs_parsers[index].augment_energy(chunk) + self._try_energy_with_blueprint(chunk, index) - if self.blueprint._time_axis_configed: - self._wcs_parser.augment_temporal(chunk) - self._try_time_with_blueprint(chunk, index) + if self.blueprint._time_axis_configed: + self._wcs_parsers[index].augment_temporal(chunk) + self._try_time_with_blueprint(chunk, index) - if self.blueprint._polarization_axis_configed: - self._wcs_parser.augment_polarization(chunk) - self._try_polarization_with_blueprint(chunk, index) + if self.blueprint._polarization_axis_configed: + self._wcs_parsers[index].augment_polarization(chunk) + self._try_polarization_with_blueprint(chunk, index) - if self.blueprint._obs_axis_configed: - self._wcs_parser.augment_observable(chunk) - self._try_observable_with_blueprint(chunk, index) + if self.blueprint._obs_axis_configed: + self._wcs_parsers[index].augment_observable(chunk) + self._try_observable_with_blueprint(chunk, index) - if self.blueprint._custom_axis_configed: - self._wcs_parser.augment_custom(chunk) - self._try_custom_with_blueprint(chunk, index) + if self.blueprint._custom_axis_configed: + self._wcs_parsers[index].augment_custom(chunk) + self._try_custom_with_blueprint(chunk, index) self.logger.debug(f'End content artifact augmentation for {artifact.uri}.') @@ -1530,6 +1536,7 @@ def __init__(self, src, obs_blueprint=None, uri=None): :param uri: which artifact augmentation is based on """ self.logger = logging.getLogger(__name__) + self._wcs_parsers = {} self._headers = [] self.parts = 0 self.file = '' @@ -1549,6 +1556,11 @@ def __init__(self, src, obs_blueprint=None, uri=None): self.uri = uri self.apply_blueprint() + def _get_num_parts(self): + """return the number of Parts to create for a CAOM record + """ + return len(self._headers) + @property def headers(self): """ @@ -1560,6 +1572,7 @@ def headers(self): def add_parts(self, artifact, index): # there is one Part per extension, the name is the extension number + # logging.error(f'index {index} has data aray {FitsParser._has_data_array(self._headers[index])} has chunk {self.blueprint.has_chunk(index)}') if FitsParser._has_data_array(self._headers[index]) and self.blueprint.has_chunk(index): if str(index) not in artifact.parts.keys(): # TODO use extension name? 
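As the hunks above and below show, each parser now reports how many Parts to build: ContentParser uses the blueprint extension count plus one, FitsParser uses len(self._headers), and Hdf5Parser uses the blueprint extension count with a minimum of one. A toy illustration of the FITS case, with plain strings standing in for astropy Header objects (hypothetical data, not the package's API):

    # Hypothetical: a PHU plus two image extensions -> three headers.
    headers = ['PHU', 'EXT1', 'EXT2']
    num_parts = len(headers)                          # FitsParser-style Part count
    part_names = [str(index) for index in range(num_parts)]
    assert part_names == ['0', '1', '2']              # Parts are keyed by extension number
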
@@ -1690,12 +1703,12 @@ def apply_blueprint(self): return - def augment_artifact(self, artifact, index=0): + def augment_artifact(self, artifact): """ Augments a given CAOM2 artifact with available FITS information :param artifact: existing CAOM2 artifact to be augmented """ - self.logger.debug('Begin artifact augmentation for {} with {} HDUs.'.format(artifact.uri, len(self.headers))) + self.logger.error(f'Begin artifact augmentation for {artifact.uri} with {len(self.headers)} HDUs.') if self.blueprint.get_configed_axes_count() == 0: raise TypeError('No WCS Data. End artifact augmentation for {}.'.format(artifact.uri)) @@ -1703,10 +1716,10 @@ def augment_artifact(self, artifact, index=0): for i, header in enumerate(self.headers): if not self.add_parts(artifact, i): # artifact-level attributes still require updating - BlueprintParser.augment_artifact(self, artifact, 0) + BlueprintParser.augment_artifact(self, artifact) continue - self._wcs_parser = FitsWcsParser(header, self.file, str(i)) - super().augment_artifact(artifact, i) + self._wcs_parsers[i] = FitsWcsParser(header, self.file, str(i)) + super().augment_artifact(artifact) self.logger.debug(f'End artifact augmentation for {artifact.uri}.') @@ -1920,7 +1933,16 @@ def __init__(self, obs_blueprint, uri, h5_file, find_roots_here='sitedata'): super().__init__(obs_blueprint, uri) # used to set the astropy wcs info, resulting in a validated wcs # that can be used to construct a valid CAOM2 record - self._wcs_parser = None + self._wcs_parsers = {} + + def _get_num_parts(self): + """return the number of Parts to create for a CAOM record + """ + result = len(self._blueprint._extensions) + if result == 0: + # for HDF5 files, cutouts should be supported in the future, so the minimum is one Part/Chunk construction + result = 1 + return result def apply_blueprint_from_file(self): """ @@ -2108,12 +2130,11 @@ def apply_blueprint(self): self.logger.debug('Done apply_blueprint') return - def augment_artifact(self, artifact, index=0): - self._wcs_parser = Hdf5WcsParser(self._blueprint, 0) - super().augment_artifact(artifact, 0) - for ii in range(1, len(self._blueprint._extensions)): - self._wcs_parser = Hdf5WcsParser(self._blueprint, ii) - super().augment_artifact(artifact, ii) + def augment_artifact(self, artifact): + for ii in range(0, self._get_num_parts()): + # one WCS parser per Part/Chunk + self._wcs_parsers[ii] = Hdf5WcsParser(self._blueprint, ii) + super().augment_artifact(artifact) def _get_chunk_naxis(self, chunk, index): chunk.naxis = self._get_from_list('Chunk.naxis', index, chunk.naxis) diff --git a/caom2utils/caom2utils/tests/test_fits2caom2.py b/caom2utils/caom2utils/tests/test_fits2caom2.py index 42390ccd..0995fff8 100755 --- a/caom2utils/caom2utils/tests/test_fits2caom2.py +++ b/caom2utils/caom2utils/tests/test_fits2caom2.py @@ -190,9 +190,9 @@ def test_hdf5_wcs_parser_set_wcs(): assert test_subject is not None, 'expect a result' test_subject.augment_artifact(test_artifact) if bp == test_position_bp: - assert test_subject._wcs_parser._wcs.naxis == 2, 'wrong pos axis' + assert test_subject._wcs_parsers[0]._wcs.naxis == 2, 'wrong pos axis' else: - assert test_subject._wcs_parser._wcs.naxis == 1, 'wrong axis count' + assert test_subject._wcs_parsers[0]._wcs.naxis == 1, 'wrong axis count' def test_augment_failure(): @@ -738,7 +738,7 @@ def test_augment_value_errors(): test_parser.augment_plane(test_obs, 'cadc:TEST/abc.fits.gz') with pytest.raises(ValueError): - test_parser.augment_artifact(test_obs, 0) + 
test_parser.augment_artifact(test_obs) def test_get_from_list(): From 2be7de6f5a788c55a29e68f32f4689a67da26e44 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Mon, 22 Jan 2024 12:54:21 -0800 Subject: [PATCH 22/36] CADC-13017 - interim commit, add handling for extensions in apply_blueprint. --- caom2utils/caom2utils/parsers.py | 69 ++++++++++++------- .../caom2utils/tests/test_collections.py | 5 +- caom2utils/caom2utils/wcs_parsers.py | 6 +- 3 files changed, 50 insertions(+), 30 deletions(-) diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index 16759cc6..072c32bf 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -244,7 +244,7 @@ def augment_artifact(self, artifact): def _get_from_list(self, lookup, index, current=None): value = None try: - keywords = self.blueprint._get(lookup) + keywords = self.blueprint._get(lookup, index) except KeyError: self.add_error(lookup, sys.exc_info()[1]) self.logger.debug(f'Could not find {lookup} in configuration.') @@ -354,17 +354,18 @@ def _execute_external(self, value, key, extension): tb = traceback.format_exc() self.logger.debug(tb) self.logger.error(e) - try: - result = execute(parameter) - self.logger.debug(f'Key {key} calculated value of {result} using {value} type {type(result)}') - except Exception as e: - msg = 'Failed to execute {} for {} in {}'.format(execute.__name__, key, self.uri) - self.logger.error(msg) - self.logger.debug('Input parameter was {}, value was {}'.format(parameter, value)) - self._errors.append(msg) - tb = traceback.format_exc() - self.logger.debug(tb) - self.logger.error(e) + if execute: + try: + result = execute(parameter) + self.logger.debug(f'Key {key} calculated value of {result} using {value} type {type(result)}') + except Exception as e: + msg = f'Failed to execute {execute.__name__} for {key} in {self.uri}' + self.logger.error(msg) + self.logger.debug(f'Input parameter was {parameter}, value was {value}') + self._errors.append(msg) + tb = traceback.format_exc() + self.logger.debug(tb) + self.logger.error(e) return result def _execute_external_instance(self, value, key, extension): @@ -835,6 +836,7 @@ def _get_axis_wcs(self, label, wcs, index): aug_naxis_index = None if aug_axis is not None: if aug_range is None: + self.logger.debug(f'Try Function construction since Range construction failed for {label}.') if wcs is None or wcs.axis is None or wcs.axis.function is None: aug_ref_coord = self._two_param_constructor( f'Chunk.{label}.axis.function.refCoord.pix', @@ -1586,6 +1588,7 @@ def add_parts(self, artifact, index): return result def apply_blueprint(self): + self.logger.debug(f'Begin apply_blueprint {self.uri}') # pointers that are short to type exts = self.blueprint._extensions wcs_std = self.blueprint._wcs_std @@ -1641,12 +1644,25 @@ def apply_blueprint(self): continue hdr = self.headers[extension] for key, value in exts[extension].items(): - if ObsBlueprint.is_table(value): + if ObsBlueprint.needs_lookup(value): + # alternative attributes provided for standard wcs attrib. 
+ for v in value[0]: + if v in hdr and v not in wcs_std[key].split(','): + keywords = wcs_std[key].split(',') + for keyword in keywords: + _set_by_type(hdr, keyword, str(hdr[v])) + elif ObsBlueprint.is_table(value): + continue + elif ObsBlueprint.has_no_value(value): continue - keywords = wcs_std[key].split(',') - for keyword in keywords: - _set_by_type(hdr, keyword, value) - logging.debug('{}: set to {} in extension {}'.format(keyword, value, extension)) + else: + if key in wcs_std.keys(): + keywords = wcs_std[key].split(',') + for keyword in keywords: + _set_by_type(hdr, keyword, value) + logging.debug(f'{keyword}: set to {value} in extension {extension}') + else: + exts[extension][key] = value # apply defaults to all extensions for key, value in plan.items(): if ObsBlueprint.has_default_value(value): @@ -1739,7 +1755,7 @@ def _get_chunk_naxis(self, chunk, index=None): def _get_from_list(self, lookup, index, current=None): value = None try: - keys = self.blueprint._get(lookup) + keys = self.blueprint._get(lookup, index) except KeyError: self.add_error(lookup, sys.exc_info()[1]) self.logger.debug(f'Could not find {lookup!r} in caom2blueprint configuration.') @@ -2152,15 +2168,16 @@ def _set_by_type(header, keyword, value): float_value = None int_value = None - try: - float_value = float(value) - except ValueError: - pass + if value is not None: + try: + float_value = float(value) + except ValueError: + pass - try: - int_value = int(value) - except ValueError: - pass + try: + int_value = int(value) + except ValueError: + pass if float_value and not str(value).isdecimal() or re.match(r'0\.0*', str(value)): header.set(keyword, float_value) diff --git a/caom2utils/caom2utils/tests/test_collections.py b/caom2utils/caom2utils/tests/test_collections.py index 47f0f078..f0866f77 100644 --- a/caom2utils/caom2utils/tests/test_collections.py +++ b/caom2utils/caom2utils/tests/test_collections.py @@ -248,7 +248,10 @@ def _get_cardinality(directory): elif 'brite' in directory: return '--lineage HD36486_65-Ori-VIII-2021_BAb_1_5_A/cadc:BRITE-Constellation/HD36486.orig' elif 'gemini' in directory: - return '--lineage GN-2003A-Q-51-2-004/cadc:GEMINI/N20030325S0098.fits' + if 'S20230518S0121' in directory: + return '--lineage GS-2023A-SV-101-13-009/cadc:GEMINI/S20230518S0121.fits' + else: + return '--lineage GN-2003A-Q-51-2-004/cadc:GEMINI/N20030325S0098.fits' elif 'lotss' in directory: return '--lineage P124+62_mosaic/astron:LOTSS/P124+62/mosaic.fits' else: diff --git a/caom2utils/caom2utils/wcs_parsers.py b/caom2utils/caom2utils/wcs_parsers.py index f095e6a5..852e901f 100644 --- a/caom2utils/caom2utils/wcs_parsers.py +++ b/caom2utils/caom2utils/wcs_parsers.py @@ -677,10 +677,10 @@ def _get_cd(self, x_index, y_index): return cd11, cd12, cd21, cd22 - def _get_coord_error(self, index): + def _get_coord_error(self, wcs_index): aug_coord_error = None - aug_csyer = self._sanitize(self.wcs.csyer[index]) - aug_crder = self._sanitize(self.wcs.crder[index]) + aug_csyer = self._sanitize(self.wcs.csyer[wcs_index]) + aug_crder = self._sanitize(self.wcs.crder[wcs_index]) if aug_csyer is not None and aug_crder is not None: aug_coord_error = CoordError(aug_csyer, aug_crder) return aug_coord_error From 6f03d7720cc4d9c20e195ea1f1afcb8972191890 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Mon, 22 Jan 2024 13:47:54 -0800 Subject: [PATCH 23/36] CADC-13017 - update __init__.py --- caom2utils/caom2utils/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/caom2utils/caom2utils/__init__.py 
b/caom2utils/caom2utils/__init__.py index ff0143ff..ea806bab 100755 --- a/caom2utils/caom2utils/__init__.py +++ b/caom2utils/caom2utils/__init__.py @@ -4,9 +4,12 @@ TODO """ +from .blueprints import * # noqa from .data_util import * # noqa from .caom2blueprint import * # noqa from .legacy import * # noqa +from .parsers import * # noqa +from .wcs_parsers import * # noqa from .wcs_util import * # noqa from .wcsvalidator import * # noqa from .caomvalidator import * # noqa From 2f1a9449fe7d95667eb71b6383fa6050085c7a88 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Thu, 25 Jan 2024 15:12:35 -0800 Subject: [PATCH 24/36] CADC-13017 - test cases cover DerivedObservation.members, differening values in blueprints. --- caom2utils/caom2utils/blueprints.py | 31 +- caom2utils/caom2utils/parsers.py | 54 +- .../caom2utils/tests/data/edge_case.blueprint | 1 + .../S20230518S0121/S20230518S0121.blueprint | 124 ++ .../S20230518S0121.expected.xml | 342 ++++ .../S20230518S0121/S20230518S0121.fits.header | 1741 +++++++++++++++++ .../S20230518S0121/S20230518S0121.module | 43 + .../gemini/S20230518S0121/S20230518S0121.py | 65 + .../wrgnN20140428S0085_arc.blueprint} | 3 +- .../wrgnN20140428S0085_arc.expected.xml | 40 + .../wrgnN20140428S0085_arc.fits | 1 + .../wrgnN20140428S0085_arc.py} | 0 .../Cdemo_ext2_SCIRED.fits.header | 181 -- .../tests/data/omm/Cdemo_ext2_SCIRED/omm.py | 0 .../tests/data/omm/Cdemo_ext2_SCIRED/y.xml | 57 - .../caom2utils/tests/test_collections.py | 30 +- .../caom2utils/tests/test_fits2caom2.py | 8 + 17 files changed, 2415 insertions(+), 306 deletions(-) create mode 100644 caom2utils/caom2utils/tests/data/edge_case.blueprint create mode 100644 caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.blueprint create mode 100644 caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.expected.xml create mode 100644 caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.fits.header create mode 100644 caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.module create mode 100644 caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.py rename caom2utils/caom2utils/tests/data/{omm/Cdemo_ext2_SCIRED/omm.blueprint => gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.blueprint} (63%) create mode 100644 caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.expected.xml create mode 100644 caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.fits rename caom2utils/caom2utils/tests/data/{omm/Cdemo_ext2_SCIRED/omm.module => gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.py} (100%) delete mode 100644 caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/Cdemo_ext2_SCIRED.fits.header delete mode 100644 caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/omm.py delete mode 100644 caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/y.xml diff --git a/caom2utils/caom2utils/blueprints.py b/caom2utils/caom2utils/blueprints.py index 952e551c..930bdc28 100644 --- a/caom2utils/caom2utils/blueprints.py +++ b/caom2utils/caom2utils/blueprints.py @@ -841,26 +841,27 @@ def load_from_file(self, file_name): :param file_name: The fully-qualified pathname for the blueprint file on disk. 
""" + ext = 0 with open(file_name) as file: for line in file: + if '#' in line: + if line.find('#') == 0: + # ignore lines starting with a comment + continue + line = line.split('#')[0] if '=' in line: - if '#' in line: - if line.find('#') == 0: - # ignore lines starting with a comment - continue - line = line.split('#')[0] key, value = line.split('=', 1) if 'default' in value: - temp = value.replace('default', '').replace('=', '').strip('\n').strip() - default = temp.rsplit(',')[1] - temp_list = temp.rsplit(',')[0].replace('[', '').replace(']', '').replace('\'', '').split(',') + temp = value.split(', default') + default = temp[1].replace('=', '').strip() + temp_list = [ii.replace('[', '').replace(']', '').replace('\'', '').strip() for ii in temp[0].split(',')] if 'None' in default: default = None - else: - default = default.strip() cleaned_up_value = (temp_list, default) else: - if '[' in value: + if value.strip() and value.strip()[0] == '(': + cleaned_up_value = tuple(ii.strip() for ii in value.strip().replace('(', '').replace(')', '').replace('\'', '').split(',')) + elif '[' in value: temp_list = value.replace('[', '').replace(']', '').replace('\'', '').split(',') temp_list_2 = [] for ii in temp_list: @@ -870,7 +871,13 @@ def load_from_file(self, file_name): cleaned_up_value = value.strip('\n').strip() if cleaned_up_value == 'None': cleaned_up_value = None - self.set(key.strip(), cleaned_up_value) + self.set(key.strip(), cleaned_up_value, ext) + elif 'extension' in line: + # pattern is 'extension #:' + new_ext = _to_int(line.strip('extension').strip('\n').strip(':')) + if isinstance(new_ext, int): + self.logger.info(f'Add extension {new_ext} to blueprint.') + ext = new_ext self._guess_axis_info() @classproperty diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index 072c32bf..397e2d90 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -354,18 +354,18 @@ def _execute_external(self, value, key, extension): tb = traceback.format_exc() self.logger.debug(tb) self.logger.error(e) - if execute: - try: - result = execute(parameter) - self.logger.debug(f'Key {key} calculated value of {result} using {value} type {type(result)}') - except Exception as e: - msg = f'Failed to execute {execute.__name__} for {key} in {self.uri}' - self.logger.error(msg) - self.logger.debug(f'Input parameter was {parameter}, value was {value}') - self._errors.append(msg) - tb = traceback.format_exc() - self.logger.debug(tb) - self.logger.error(e) + return result + try: + result = execute(parameter) + self.logger.debug(f'Key {key} calculated value of {result} using {value} type {type(result)}') + except Exception as e: + msg = f'Failed to execute {execute.__name__} for {key} in {self.uri}' + self.logger.error(msg) + self.logger.debug(f'Input parameter was {parameter}, value was {value}') + self._errors.append(msg) + tb = traceback.format_exc() + self.logger.debug(tb) + self.logger.error(e) return result def _execute_external_instance(self, value, key, extension): @@ -397,13 +397,12 @@ def _execute_external_instance(self, value, key, extension): return result try: result = execute(extension) - self.logger.debug('Key {} calculated value of {} using {}'.format(key, result, value)) + self.logger.debug(f'Key {key} calculated value of {result} using {value}') except ValueError as e2: # DB 23-03-22 - # Anything that you can do to make the CAOM2 record creation fail - # in this case of bad WCS metadata would be useful. 
Use - # ValueError because that happens to be what astropy is throwing - # for a SkyCoord construction failure. + # Anything that you can do to make the CAOM2 record creation fail in this case of bad WCS metadata + # would be useful. Use ValueError because that happens to be what astropy is throwing for a SkyCoord + # construction failure. raise Caom2Exception(e2) except Exception as e: msg = 'Failed to execute {} for {} in {}'.format(execute, key, self.uri) @@ -568,7 +567,7 @@ def augment_observation(self, observation, artifact_uri, product_id=None): observation.target_position = self._get_target_position(observation.target_position) observation.telescope = self._get_telescope(observation.telescope) observation.environment = self._get_environment(observation.environment) - self.logger.debug(f'End content observation augmentation for {artifact_uri}.') + self.logger.debug('End content observation augmentation.') def augment_plane(self, plane, artifact_uri): """ @@ -722,8 +721,9 @@ def _get_members(self, obs): raise TypeError('Cannot apply blueprint for DerivedObservation to a ' 'simple observation') elif isinstance(obs, caom2.DerivedObservation): lookup = self.blueprint._get('DerivedObservation.members', extension=1) - if ObsBlueprint.is_table(lookup) and len(self.headers) > 1: - member_list = self._get_from_table('DerivedObservation.members', 1) + if ObsBlueprint.is_table(lookup): + *_, extension = lookup + member_list = self._get_from_table('DerivedObservation.members', int(extension)) # ensure the members are good little ObservationURIs if member_list.startswith('caom:'): members = member_list @@ -741,8 +741,9 @@ def _get_members(self, obs): members = self._get_from_list('DerivedObservation.members', index=0, current=obs.members) elif isinstance(obs, caom2.CompositeObservation): lookup = self.blueprint._get('CompositeObservation.members', extension=1) - if ObsBlueprint.is_table(lookup) and len(self.headers) > 1: - member_list = self._get_from_table('CompositeObservation.members', 1) + if ObsBlueprint.is_table(lookup): + *_, extension = lookup + member_list = self._get_from_table('CompositeObservation.members', int(extension)) # ensure the members are good little ObservationURIs if member_list.startswith('caom:'): members = member_list @@ -1716,7 +1717,6 @@ def apply_blueprint(self): for i in range(1, 6): if (f'CTYPE{i}' in header) and ('-SIP' not in header[f'CTYPE{i}']) and (f'DP{i}' not in header): header[f'DP{i}'] = 'NAXES: 1' - return def augment_artifact(self, artifact): @@ -1833,12 +1833,10 @@ def _get_from_table(self, lookup, extension): with fits.open(self.file) as fits_data: if fits_data[extension].header['XTENSION'] != 'BINTABLE': raise ValueError( - 'Got {} when looking for a BINTABLE ' - 'extension.'.format(fits_data[extension].header['XTENSION']) + f'Got {fits_data[extension].header["XTENSION"]} when looking for a BINTABLE extension.' 
) - for ii in keywords[1]: - for jj in fits_data[extension].data[keywords[2]][ii]: - value = f'{jj} {value}' + for ii in fits_data[extension].data[keywords[1]]: + value = f'{ii} {value}' self.logger.debug(f'{lookup}: value is {value}') return value diff --git a/caom2utils/caom2utils/tests/data/edge_case.blueprint b/caom2utils/caom2utils/tests/data/edge_case.blueprint new file mode 100644 index 00000000..1fa1129b --- /dev/null +++ b/caom2utils/caom2utils/tests/data/edge_case.blueprint @@ -0,0 +1 @@ +Plane.provenance.producer = ['IMAGESWV', 'ORIGIN'], default = Gemini Observatory diff --git a/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.blueprint b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.blueprint new file mode 100644 index 00000000..f0bc48f2 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.blueprint @@ -0,0 +1,124 @@ +Observation.observationID = ['OBSID'], default = None +Observation.type = OBJECT +Observation.intent = science +Observation.metaRelease = 2023-05-18 09:12:54 +Observation.metaProducer = GEMINI2caom2/0.0.0 +Observation.algorithm.name = exposure +Observation.instrument.name = GHOST +Observation.telescope.geoLocationX = 1820184.055747326 +Observation.telescope.geoLocationY = -5208320.066618682 +Observation.telescope.geoLocationZ = -3194829.197152751 +Observation.environment.ambientTemp = ['TEMPERAT'], default = None +Plane.productID = S20230518S0121 +Plane.metaRelease = 2023-05-18 09:12:54 +Plane.dataRelease = 2023-05-18 09:12:54 +Plane.dataProductType = image +Plane.calibrationLevel = 1 +Plane.metaProducer = GEMINI2caom2/0.0.0 +Plane.provenance.name = Gemini Observatory Data +Plane.provenance.project = Gemini Archive +Plane.provenance.producer = ['IMAGESWV', 'ORIGIN'], default = Gemini Observatory +Plane.provenance.reference = http://archive.gemini.edu/searchform/GS-2023A-SV-101-13-009 +Plane.provenance.lastExecuted = ['DATE-FTS'], default = None +Artifact.productType = science +Artifact.releaseType = data +Artifact.uri = gemini:GEMINI/S20230518S0121.fits +Artifact.metaProducer = GEMINI2caom2/0.0.0 +Chunk = include +Chunk.metaProducer = GEMINI2caom2/0.0.0 +Chunk.energy.specsys = TOPOCENT +Chunk.time.exposure = get_exposure(uri) +extension 1: +Chunk.time.exposure = get_exposure(uri) +Chunk.time.resolution = get_exposure(uri) +Chunk.time.axis.axis.ctype = TIME +Chunk.time.axis.axis.cunit = d +Chunk.time.axis.error.syser = 1e-07 +Chunk.time.axis.error.rnder = 1e-07 +Chunk.time.axis.function.naxis = 1 +Chunk.time.axis.function.delta = get_time_delta(header) +Chunk.time.axis.function.refCoord.pix = 0.5 +Chunk.time.axis.function.refCoord.val = get_time_function_val(header) +# extension 2: +# Chunk.energy.specsys = TOPOCENT +# Chunk.energy.axis.axis.ctype = WAVE +# Chunk.energy.axis.axis.cunit = nm +# Chunk.energy.axis.range.start.pix = 0.5 +# Chunk.energy.axis.range.start.val = _get_energy_chunk_range_start_val(header) +# Chunk.energy.axis.range.end.pix = 1.5 +# Chunk.energy.axis.range.end.val = _get_energy_chunk_range_start_val(header) +extension 3: +Chunk.energy.specsys = TOPOCENT +Chunk.energy.axis.axis.ctype = WAVE +Chunk.energy.axis.axis.cunit = nm +Chunk.energy.axis.range.start.pix = 0.5 +Chunk.energy.axis.range.start.val = _get_energy_chunk_range_start_val(header) +Chunk.energy.axis.range.end.pix = 1.5 +Chunk.energy.axis.range.end.val = _get_energy_chunk_range_start_val(header) +extension 4: +Chunk.energy.specsys = TOPOCENT +Chunk.energy.axis.axis.ctype = WAVE 
+Chunk.energy.axis.axis.cunit = nm +Chunk.energy.axis.range.start.pix = 0.5 +Chunk.energy.axis.range.start.val = _get_energy_chunk_range_start_val(header) +Chunk.energy.axis.range.end.pix = 1.5 +Chunk.energy.axis.range.end.val = _get_energy_chunk_range_start_val(header) +extension 5: +Chunk.energy.specsys = TOPOCENT +Chunk.energy.axis.axis.ctype = WAVE +Chunk.energy.axis.axis.cunit = nm +Chunk.energy.axis.range.start.pix = 0.5 +Chunk.energy.axis.range.start.val = _get_energy_chunk_range_start_val(header) +Chunk.energy.axis.range.end.pix = 1.5 +Chunk.energy.axis.range.end.val = _get_energy_chunk_range_start_val(header) +extension 6: +Chunk.energy.specsys = TOPOCENT +Chunk.energy.axis.axis.ctype = WAVE +Chunk.energy.axis.axis.cunit = nm +Chunk.energy.axis.range.start.pix = 0.5 +Chunk.energy.axis.range.start.val = _get_energy_chunk_range_start_val(header) +Chunk.energy.axis.range.end.pix = 1.5 +Chunk.energy.axis.range.end.val = _get_energy_chunk_range_start_val(header) +extension 8: +Chunk.energy.specsys = TOPOCENT +Chunk.energy.axis.axis.ctype = WAVE +Chunk.energy.axis.axis.cunit = nm +Chunk.energy.axis.range.start.pix = 0.5 +Chunk.energy.axis.range.start.val = _get_energy_chunk_range_start_val(header) +Chunk.energy.axis.range.end.pix = 1.5 +Chunk.energy.axis.range.end.val = _get_energy_chunk_range_start_val(header) +extension 9: +Chunk.energy.specsys = TOPOCENT +Chunk.energy.axis.axis.ctype = WAVE +Chunk.energy.axis.axis.cunit = nm +Chunk.energy.axis.range.start.pix = 0.5 +Chunk.energy.axis.range.start.val = _get_energy_chunk_range_start_val(header) +Chunk.energy.axis.range.end.pix = 1.5 +Chunk.energy.axis.range.end.val = _get_energy_chunk_range_start_val(header) +extension 10: +Chunk.energy.specsys = TOPOCENT +Chunk.energy.axis.axis.ctype = WAVE +Chunk.energy.axis.axis.cunit = nm +Chunk.energy.axis.range.start.pix = 0.5 +Chunk.energy.axis.range.start.val = _get_energy_chunk_range_start_val(header) +Chunk.energy.axis.range.end.pix = 1.5 +Chunk.energy.axis.range.end.val = _get_energy_chunk_range_start_val(header) +extension 11: +Chunk.energy.specsys = TOPOCENT +Chunk.energy.axis.axis.ctype = WAVE +Chunk.energy.axis.axis.cunit = nm +Chunk.energy.axis.range.start.pix = 0.5 +Chunk.energy.axis.range.start.val = _get_energy_chunk_range_start_val(header) +Chunk.energy.axis.range.end.pix = 1.5 +Chunk.energy.axis.range.end.val = _get_energy_chunk_range_start_val(header) +extension 12: +Chunk.time.exposure = get_exposure(uri) +Chunk.time.resolution = get_exposure(uri) +Chunk.time.axis.axis.ctype = TIME +Chunk.time.axis.axis.cunit = d +Chunk.time.axis.error.syser = 1e-07 +Chunk.time.axis.error.rnder = 1e-07 +Chunk.time.axis.function.naxis = 1 +Chunk.time.axis.function.delta = get_time_delta(header) +Chunk.time.axis.function.refCoord.pix = 0.5 +Chunk.time.axis.function.refCoord.val = get_time_function_val(header) diff --git a/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.expected.xml b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.expected.xml new file mode 100644 index 00000000..6c3f6e61 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.expected.xml @@ -0,0 +1,342 @@ + + + GEMINI + GS-2023A-SV-101-13-009 + 2023-05-18T09:12:54.000 + + exposure + + OBJECT + science + + CD-38 245 + + + Gemini-South + 1820184.055747326 + -5208320.066618682 + -3194829.197152751 + + + GHOST + + + + GS-2023A-SV-101-13-009 + 2023-05-18T09:12:54.000 + 2023-05-18T09:12:54.000 + image + 1 + + Gemini Observatory Data + Gemini Archive + Gemini 
Observatory + http://archive.gemini.edu/searchform/GS-2023A-SV-101-13-009 + + + + cadc:GEMINI/S20230518S0121.fits + science + data + application/fits + 18383040 + md5:916912dfa1bc11c82dea4f7171876fe2 + + + 0 + + + + 1 + + + 2 + 4 + + + + TIME + d + + + 1e-07 + 1e-07 + + + 1 + 0.00020833333333331 + + 0.5 + 60082.38395833333 + + + + UTC + 1200.0 + 1200.0 + + + + + + 2 + + + + 3 + + + 2 + + + + WAVE + nm + + + + 0.5 + 347.0 + + + 1.5 + 347.0 + + + + TOPOCENT + + + + + + 4 + + + 2 + + + + WAVE + nm + + + + 0.5 + 347.0 + + + 1.5 + 347.0 + + + + TOPOCENT + + + + + + 5 + + + 2 + + + + WAVE + nm + + + + 0.5 + 347.0 + + + 1.5 + 347.0 + + + + TOPOCENT + + + + + + 6 + + + 2 + + + + WAVE + nm + + + + 0.5 + 347.0 + + + 1.5 + 347.0 + + + + TOPOCENT + + + + + + 7 + + + + 8 + + + 2 + + + + WAVE + nm + + + + 0.5 + 520.0 + + + 1.5 + 520.0 + + + + TOPOCENT + + + + + + 9 + + + 2 + + + + WAVE + nm + + + + 0.5 + 520.0 + + + 1.5 + 520.0 + + + + TOPOCENT + + + + + + 10 + + + 2 + + + + WAVE + nm + + + + 0.5 + 520.0 + + + 1.5 + 520.0 + + + + TOPOCENT + + + + + + 11 + + + 2 + + + + WAVE + nm + + + + 0.5 + 520.0 + + + 1.5 + 520.0 + + + + TOPOCENT + + + + + + 12 + + + 2 + 4 + + + + TIME + d + + + 1e-07 + 1e-07 + + + 1 + 0.000208333333333365 + + 0.5 + 60082.3978587963 + + + + UTC + 1200.0 + 1200.0 + + + + + + + + + + diff --git a/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.fits.header b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.fits.header new file mode 100644 index 00000000..7aeea1aa --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.fits.header @@ -0,0 +1,1741 @@ +Filename: S20230518S0121.fits.bz2 + +AstroData Tags: {'BUNDLE', 'SIDEREAL', 'STD', 'GEMINI', 'RAW', 'GHOST', 'NxN', 'SOUTH', 'UNPREPARED'} + + +--- PHU --- +SIMPLE = T / file does conform to FITS standard +BITPIX = 8 / number of bits per data pixel +NAXIS = 0 / number of data axes +EXTEND = T / FITS dataset may contain extensions +OBJECT = 'CD-38 245' / Object Name +COMMENT FITS (Flexible Image Transport System) format is defined in 'Astronomy +COMMENT and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H +DATALAB = 'GS-2023A-SV-101-13-009' / Gemini Datalabel +OBSERVER= 'Lindsay Magill' / Observer +OBSERVAT= 'Gemini-South' / Observatory (Gemini-North|Gemini-South) +TELESCOP= 'Gemini-South' / Name of telescope (Gemini-North|Gemini-South) +PARALLAX= 2.291E-4 / Target Parallax +RADVEL = 46.76397557344451 / Target Heliocentric Radial Velocity +EPOCH = 2000.0 / Target Coordinate Epoch +EQUINOX = 2000.0 / Equinox of coordinate system +TRKEQUIN= 2000.0 / Tracking equinox +SSA = 'P Candia' / SSA +RA = 11.6508125 / Target Right Ascension +DEC = -37.65932222222222 / Target Declination +ELEVATIO= 33.91709027777778 / Telescope Elevation at the start of exposure +AZIMUTH = 117.221875 / Telescope Azimuth at the start of exposure +CRPA = -75.44041479883009 / Cass Rotator Position Angle at start +HA = '-04:34:33.46' / Telescope hour angle at the start of exposure +LT = '06:12:53.0' / Local time at start of exposure +TRKFRAME= 'FK5 ' / Tracking co-ordinate +RATRACK = 0.0 / Differential tracking rate RA +DECTRACK= 0.0 / Differential tracking rate Dec +TRKEPOCH= 60082.37901098344 / Differential tracking reference epoch +FRAME = 'FK5 ' / Target coordinate system +PMRA = 0.001284983346988 / Target proper motion in RA +PMDEC = -0.007580999566854 / Target proper motion in Declination +WAVELENG= 6550.0 / Effective Target Wavelength (A) +RAWIQ = 'Any ' / Raw Image Quality 
+RAWCC = '50-percentile' / Raw Cloud Cover +RAWWV = 'UNKNOWN ' / Raw Water Vapour/Transparency +RAWBG = '80-percentile' / Raw Background +RAWPIREQ= 'UNKNOWN ' / PI Requirements Met +RAWGEMQA= 'UNKNOWN ' / Gemini Quality Assessment +CGUIDMOD= 'Basic ' / Driving mode for carousel +UT = '09:12:53.5' / UTC at start of exposure +DATE = '2023-05-18' / UTC Date of observation (YYYY-MM-DD) +M2BAFFLE= 'VISIBLE ' / Position of M2 baffle +M2CENBAF= 'CLOSED ' / Position of M2 central hole baffle +ST = '20:13:08.9' / Sidereal time at the start of the exposure +XOFFSET = 0.0 / Telescope offset in x in arcsec +YOFFSET = 0.0 / Telescope offset in y in arcsec +POFFSET = -0.0 / Telescope offset in p in arcsec +QOFFSET = 0.0 / Telescope offset in q in arcsec +RAOFFSET= 0.0 / Telescope offset in RA in arcsec +DECOFFSE= 0.0 / Telescope offset in DEC in arcsec +RATRGOFF= 0.0 / Target offset in RA in arcsec +DECTRGOF= 0.0 / Target offset in DEC in arcsec +PA = 0.0 / Instrument Position Angle at start (degrees) +IAA = 359.95 / Instrument Alignment Angle +SFRT2 = -0.10339729725767496 / Science fold rotation angle (degrees) +SFTILT = -0.08999999999999997 / Science fold tilt angle (degrees) +SFLINEAR= -348.512 / Science fold linear position (mm) +AOFOLD = 'park-pos.' / AO Pick-Off Mirror Position +PWFS1_ST= 'parked ' / PWFS1 probe state (frozen,guiding,parked) +PWFS2_ST= 'guiding ' / PWFS2 probe state (frozen,guiding,parked) +AOWFS_ST= 'parked ' / AOWFS probe state (frozen,guiding,parked) +SCIBAND = 1 / Science Ranking Band +REQIQ = '85-percentile' / Requested Image Quality +REQCC = '70-percentile' / Requested Cloud Cover +REQBG = '80-percentile' / Requested Background +REQWV = 'Any ' / Requested Water Vapour +NUMREQTW= 0 / Number of Requested Timing Window REQTW entries +HUMIDITY= 39.900000000000006 / The Relative Humidity (fraction, 0..101). +TAMBIEN2= 45.284 / The ambient temp (F). +TAMBIENT= 7.38 / The ambient temp (C). +PRESSURE= 544.8826278055891 / The atmospheric pressure (mm Hg). +PRESSUR2= 72645.0528 / The atmospheric pressure (Pa). +DEWPOINT= -5.36240059250872 / The dew point (C). +DEWPOIN2= 22.347678933484303 / The dew point (F). +WINDSPEE= 10.100000000000001 / The wind speed (m/s). +WINDSPE2= 22.593056549749466 / The wind speed (mph). +WINDDIRE= 308.0 / The wind direction (degrees). 
+INPORT = 1 / Number of ISS port where instrument is located +RESOLUT = 'Standard' / GHOST Resolution mode +TARGETM = 'SRIFU1 Target, SRIFU2 Sky' / GHOST Target mode +READRED = 'Medium ' / Red camera detector readout +READBLU = 'Slow ' / Blue camera detector readout +REDCCDS = '2 x 4 ' / Red camera binning +BLUCCDS = '2 x 4 ' / Blue camera binning +AIRMASS = 1.7049880137585713 / Mean airmass for the exposure +AMSTART = 1.7885098140552325 / Airmass at start of exposure +AMEND = 1.6271710956892416 / Airmass at end of exposure +PROP_MD = F / Proprietary Metadata +RELEASE = '2023-06-18' / End of proprietary period YYY-MM-DD +INSTRUME= 'GHOST ' / Instrument name +NREDEXP = 1 / Number of red exposures +REDEXPT = 1200.0 / Red camera exposure time +NBLUEEXP= 1 / Number of blue exposures +BLUEEXPT= 1200.0 / Blue camera exposure time +NSLITEXP= 66 / Number of slit-viewing exposures +SLITEXPT= 18.0 / Slitviewer camera exposure time +FAGITAT1= 'F ' / GHOST Fibre agitator 1 enabled +FAGITAT2= 'F ' / GHOST Fibre agitator 2 enabled +SRIFU1 = 'CD-38 245' / IF SRIFU1 is enabled, name of SRIFU1, if set to +SRIFU2 = 'Sky ' / IF SRIFU2 is enabled, name of SRIFU2, if set to +HRIFU1 = ' ' / IF HRIFU1 is enabled, name of HRIFU1; if not se +HRIFU2 = ' ' / default to sky; if not set no name and blank fi +BASEPO = 'T ' / If base is linked to target; should default to +SMPNAME = 'SMP_STD_ONLY' / Slit Unit slit mask positioner wheel position +OBSTYPE = 'OBJECT ' / Observation type +OBSCLASS= 'science ' / Observe class +GEMPRGID= 'GS-2023A-SV-101' / Gemini programme ID +OBSID = 'GS-2023A-SV-101-13' / Gemini Observation ID +NEXTEND = 71 +ORIGNAME= 'S20230518S0121.fits' / Original filename prior to processing + +--- HDU 0 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 16 / number of bits per data pixel +NAXIS = 2 / number of data axes +NAXIS1 = 150 / length of data axis 1 +NAXIS2 = 130 / length of data axis 2 +PCOUNT = 0 / number of random group parameters +GCOUNT = 1 / number of random groups +INSTRSUB= 'INSTRUMENT' / Instrument sub-system (INSTRUMENT,CAL,...) +BZERO = 32768. +BSCALE = 1. +DARKTIME= 18.136 / Total dark time of the observation +ELAPSED = 18. / Total elapsed time between shutter open and rea +EXPOSED = 18. / Actual exposure time +EXPREQ = 18. / Requested exposure time +NOTES = ' ' +OBJECT = ' ' +OBSERVER= ' ' +RUN = 1 / Exposure run number +CCDTEMP = 0. +READMODE= 0 / (CHIP_READOUT_SLOW=0, CHIP_READOUT_MEDIUM=1, C +RDOUT = 0.011 / Readout time +EXPTIME = 18. +I1HBFWHM= 2.44651072989264 / IFU1 Hi Res Blue FWHM +I1HRFWHM= 1.27986780636425 / IFU1 Hi Res Red FWHM +I1SBFWHM= 1.34736791549026 / IFU1 Std Res Blue FWHM +I1SRFWHM= 99. 
/ IFU1 Std Res Red FWHM +I2SBFWHM= 1.22170304377238 / IFU2 Std Res Blue FWHM +I2SRFWHM= 4.00985112966506 / IFU2 Std Res Red FWHM +OBSTYPE = 'OBJECT ' / Observation type +FILENAME= 'sv-20230518.091254-0000.fits' / Original host filename +OBSID = 'sv-20230518.091254-0000' / Observation ID +INSTRUME= 'GHOST ' / Instrument name +CAMERA = 'SLITV ' / Camera name +WINDOW = ' ' / Readout window name +DETECTOR= 'Ghost BigEye Sony ICX674' / Detector identification +DETSIZE = '[1:1928,1:1452]' / Detector size (pixels) +NCCDS = 1 / Number of CCDs +NAMPS = 1 / Number of amplifiers +COMMENT = 'FITSKW1 ' / Not available +COMMENT = 'FITSKW2 ' / Not available +COMMENT = 'FITSKW3 ' / Not available +COMMENT = 'FITSKW4 ' / Not available +OBJECT1 = ' ' +TARGET1 = 2 / IFU_TARGET_NONE=0, IFU_TARGET_SKY=1, IFU_TARGET +OBJECT2 = ' ' +TARGET2 = 1 / IFU_TARGET_NONE=0, IFU_TARGET_SKY=1, IFU_TARGET +IFU1GDX = 0.195591860768047 / Cass Unit Positioner 1 focal plane guide X offs +IFU1GDY = 0.24419448526399 / Cass Unit Positioner 1 focal plane guide Y offs +IFU2GDX = 0. / Cass Unit Positioner 2 focal plane guide X offs +IFU2GDY = 0. / Cass Unit Positioner 2 focal plane guide Y offs +ADC1A = 22.4582190934533 / Cassegrain Unit Positioner 1 ADC aprism angle +ADC1B = 128.417718133915 / Cassegrain Unit Positioner 1 ADC bprism angle +ADC2A = 22.4583714517757 / Cassegrain Unit Positioner 2 ADC aprism angle +ADC2B = 128.417601400657 / Cassegrain Unit Positioner 2 ADC bprism angle +CURACT1 = 7.2 / Cassegrain Unit electronics rack temperature 1 +CURACT2 = 8.5 / Cassegrain Unit electronics rack temperature 2 +HRIFU1D = -37.6584773522382 / Cassegrain Unit Positioner 1 Hi Dec +HRIFU1R = 11.6516234743036 / Cassegrain Unit Positioner 1 Hi RA +SKIFU1D = -37.6593695218957 / Cassegrain Unit Positioner 1 Sky Dec +SKIFU1R = 11.6522907122998 / Cassegrain Unit Positioner 1 Sky RA +SRIFU1D = -37.6593689922103 / Cassegrain Unit Positioner 1 Std Dec +IFU1X = 0.00598936270399975 / Cassegrain Unit Positioner 1 focal plane X posi +IFU1Y = 0.00588557857599992 / Cassegrain Unit Positioner 1 focal plane Y posi +SRIFU1R = 11.6509379706258 / Cassegrain Unit Positioner 1 RA +I1HGD0D = -37.6583789435213 / Cassegrain Unit high res IFU guide fiber 0 Dec +I1HGD0R = 11.6518396845883 / Cassegrain Unit high res IFU guide fiber 0 RA +I1HGD1D = -37.658575929685 / Cassegrain Unit high res IFU guide fiber 1 Dec +I1HGD1R = 11.6518392620204 / Cassegrain Unit high res IFU guide fiber 1 RA +I1HGD2D = -37.6586743384012 / Cassegrain Unit high res IFU guide fiber 2 Dec +I1HGD2R = 11.6516230511615 / Cassegrain Unit high res IFU guide fiber 2 RA +I1HGD3D = -37.6585757605605 / Cassegrain Unit high res IFU guide fiber 3 Dec +I1HGD3R = 11.6514072634457 / Cassegrain Unit high res IFU guide fiber 3 RA +I1HGD4D = -37.6583787743983 / Cassegrain Unit high res IFU guide fiber 4 Dec +I1HGD4R = 11.6514076871599 / Cassegrain Unit high res IFU guide fiber 4 RA +I1HGD5D = -37.6582803660752 / Cassegrain Unit high res IFU guide fiber 5 Dec +I1HGD5R = 11.6516238974435 / Cassegrain Unit high res IFU guide fiber 5 RA +I1HSC00D= -37.6584773522382 / Cassegrain Unit high res IFU science fiber 0 De +I1HSC00R= 11.6516234743036 / Cassegrain Unit high res IFU science fiber 0 RA +I1HSC10D= -37.6585430705798 / Cassegrain Unit high res IFU science fiber 10 D +I1HSC10R= 11.6517673327178 / Cassegrain Unit high res IFU science fiber 10 R +I1HSC11D= -37.6585758734852 / Cassegrain Unit high res IFU science fiber 11 D +I1HSC11R= 11.6516952624953 / Cassegrain Unit high res IFU science fiber 11 R +I1HSC12D= 
-37.6586086763469 / Cassegrain Unit high res IFU science fiber 12 D +I1HSC12R= 11.6516231922091 / Cassegrain Unit high res IFU science fiber 12 R +I1HSC13D= -37.6585758171104 / Cassegrain Unit high res IFU science fiber 13 D +I1HSC13R= 11.6515512629704 / Cassegrain Unit high res IFU science fiber 13 R +I1HSC14D= -37.6585429578302 / Cassegrain Unit high res IFU science fiber 14 D +I1HSC14R= 11.6514793337954 / Cassegrain Unit high res IFU science fiber 14 R +I1HSC15D= -37.6584772957761 / Cassegrain Unit high res IFU science fiber 15 D +I1HSC15R= 11.6514794749699 / Cassegrain Unit high res IFU science fiber 15 R +I1HSC16D= -37.6584116337219 / Cassegrain Unit high res IFU science fiber 16 D +I1HSC16R= 11.6514796161441 / Cassegrain Unit high res IFU science fiber 16 R +I1HSC17D= -37.6583788309477 / Cassegrain Unit high res IFU science fiber 17 D +I1HSC17R= 11.6515516863025 / Cassegrain Unit high res IFU science fiber 17 R +I1HSC18D= -37.6583460281296 / Cassegrain Unit high res IFU science fiber 18 D +I1HSC18R= 11.6516237563971 / Cassegrain Unit high res IFU science fiber 18 R +I1HSC01D= -37.6584445493764 / Cassegrain Unit high res IFU science fiber 1 De +I1HSC01R= 11.6516955444622 / Cassegrain Unit high res IFU science fiber 1 RA +I1HSC02D= -37.6585102114308 / Cassegrain Unit high res IFU science fiber 2 De +I1HSC02R= 11.6516954034789 / Cassegrain Unit high res IFU science fiber 2 RA +I1HSC03D= -37.6585430142925 / Cassegrain Unit high res IFU science fiber 3 De +I1HSC03R= 11.6516233332565 / Cassegrain Unit high res IFU science fiber 3 RA +I1HSC04D= -37.6585101550561 / Cassegrain Unit high res IFU science fiber 4 De +I1HSC04R= 11.6515514040814 / Cassegrain Unit high res IFU science fiber 4 RA +I1HSC05D= -37.6584444930019 / Cassegrain Unit high res IFU science fiber 5 De +I1HSC05R= 11.651551545192 / Cassegrain Unit high res IFU science fiber 5 RA +I1HSC06D= -37.6584116901839 / Cassegrain Unit high res IFU science fiber 6 De +I1HSC06R= 11.6516236153505 / Cassegrain Unit high res IFU science fiber 6 RA +I1HSC07D= -37.658378887322 / Cassegrain Unit high res IFU science fiber 7 De +I1HSC07R= 11.6516956854453 / Cassegrain Unit high res IFU science fiber 7 RA +I1HSC08D= -37.6584117464708 / Cassegrain Unit high res IFU science fiber 8 De +I1HSC08R= 11.6517676145571 / Cassegrain Unit high res IFU science fiber 8 RA +I1HSC09D= -37.6584774085253 / Cassegrain Unit high res IFU science fiber 9 De +I1HSC09R= 11.6517674736376 / Cassegrain Unit high res IFU science fiber 9 RA +I1SGD0D = -37.6592048846885 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD0R = 11.6510583253829 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD1D = -37.6593690873136 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD1R = 11.6511779723974 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD2D = -37.6595331949569 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD2R = 11.6510576176386 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD3D = -37.6595330996097 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD3R = 11.6508176153367 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD4D = -37.6593688966206 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD4R = 11.6506979688548 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD5D = -37.6592047893428 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD5R = 11.6508183241423 / Cassegrain Unit Positioner 1 std res IFU guide +I1SSC0D = -37.6593689922103 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC0R = 11.6509379706258 / Cassegrain Unit 
Positioner 1 std res IFU scienc +I1SSC1D = -37.6593143214446 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC1R = 11.6510580894688 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC2D = -37.6594237582008 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC2R = 11.6510578535541 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC3D = -37.6594784289661 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC3R = 11.6509377345338 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC4D = -37.659423662854 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC4R = 11.6508178516059 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC5D = -37.6593142260984 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC5R = 11.6508180878744 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC6D = -37.6592595554544 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC6R = 11.6509382067171 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSK0D = -37.6593695218957 / Cassegrain Unit std res IFU sky fiber 0 Dec +I1SSK0R = 11.6522907122998 / Cassegrain Unit std res IFU sky fiber 0 RA +I1SSK1D = -37.6594242865172 / Cassegrain Unit std res IFU sky fiber 1 Dec +I1SSK1R = 11.6524105962268 / Cassegrain Unit std res IFU sky fiber 1 RA +I1SSK2D = -37.6594789586542 / Cassegrain Unit std res IFU sky fiber 2 Dec +I1SSK2R = 11.6522904782019 / Cassegrain Unit std res IFU sky fiber 2 RA +SRIFU2D = -37.6829846584165 / Cassegrain Unit Positioner 2 Dec +IFU2X = -86.923269710296 / Cassegrain Unit Positioner 2 focal plane X posi +IFU2Y = -52.106378584976 / Cassegrain Unit Positioner 2 focal plane Y posi +SRIFU2R = 11.6009858740267 / Cassegrain Unit Positioner 2 RA +I2HSK0D = -37.6838969539684 / Cassegrain Unit high res IFU sky fiber 0 Dec +I2HSK0R = 11.6007041678923 / Cassegrain Unit high res IFU sky fiber 0 RA +I2HSK1D = -37.6838643068243 / Cassegrain Unit high res IFU sky fiber 1 Dec +I2HSK1R = 11.6007759642589 / Cassegrain Unit high res IFU sky fiber 1 RA +I2HSK2D = -37.6839299797529 / Cassegrain Unit high res IFU sky fiber 2 Dec +I2HSK2R = 11.6007759007549 / Cassegrain Unit high res IFU sky fiber 2 RA +I2HSK3D = -37.6839626268969 / Cassegrain Unit high res IFU sky fiber 3 Dec +I2HSK3R = 11.6007041043247 / Cassegrain Unit high res IFU sky fiber 3 RA +I2HSK4D = -37.6839296010689 / Cassegrain Unit high res IFU sky fiber 4 Dec +I2HSK4R = 11.6006323714626 / Cassegrain Unit high res IFU sky fiber 4 RA +I2HSK5D = -37.6838639281404 / Cassegrain Unit high res IFU sky fiber 5 Dec +I2HSK5R = 11.6006324350936 / Cassegrain Unit high res IFU sky fiber 5 RA +I2HSK6D = -37.6838312810397 / Cassegrain Unit high res IFU sky fiber 6 Dec +I2HSK6R = 11.6007042314597 / Cassegrain Unit high res IFU sky fiber 6 RA +I2SGD0D = -37.6828207913155 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD0R = 11.6011056382783 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD1D = -37.6829852887438 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD1R = 11.6012250864737 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD2D = -37.6831491559652 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD2R = 11.6011053222202 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD3D = -37.6831485253956 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD3R = 11.6008661092466 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD4D = -37.6829840276061 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD4R = 11.6007466615849 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD5D = -37.6828201607474 / 
Cassegrain Unit Positioner 2 std res IFU guide +I2SGD5R = 11.600866426363 / Cassegrain Unit Positioner 2 std res IFU guide +I2SSC0D = -37.6829846584165 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC0R = 11.6009858740267 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC1D = -37.6829302461989 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC1R = 11.6011055329259 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC2D = -37.6830397010822 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC2R = 11.6011054275732 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC3D = -37.6830941132994 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC3R = 11.6009857684975 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC4D = -37.6830390705131 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC4R = 11.6008662149524 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC5D = -37.6829296156303 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC5R = 11.6008663206578 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC6D = -37.6828752035334 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC6R = 11.6009859795556 / Cassegrain Unit Positioner 2 std res IFU scienc +CUAIRT = 8.1319620349768 / Cassegrain Unit air temperature +CUHUM = 31.7941831720968 / Cassegrain Unit humidity +CUT1 = 6.8 / Cassegrain Unit temperature 1 +CUT2 = 6.1 / Cassegrain Unit temperature 2 +CFWNAME = 'CFW_CLOSED1' / Slit Unit calibration filter wheel position +CFWPOS = 11 / Slit Unit calibration filter wheel position (CF +FA1ACTIV= 0 / Is the Slit Unit fiber agitator 1 active? +FA2ACTIV= 0 / Is the Slit Unit fiber agitator 2 active? +SLURACT1= 13. / Slit Unit electronics rack temperature 1 +SLURACT2= 9. / Slit Unit electronics rack temperature 2 +AGCTEMP = 16. / Slit Unit acquisition & guiding camera temperat +THXELAMP= 0 / Slit Unit HCL power flag +PNEUMISO= 0 / Slit Unit optical bench pneumatic isolator erro +SVCTEMP = 21.9 / Slit Unit slit viewing camera temperature +SMPNAME = 'SMP_STD_ONLY' / Slit Unit slit mask positioner wheel position +SMPPOS = 2 / Slit Unit slit mask position (SMP_UNKNOWN=0, SM +BFOCUS = -11.29693797412 / Spectrograph blue focus stage position +BTEMP1 = -120. / Spectrograph blue Lakeshore 336 temperature 1 +BTEMP2 = -115.633 / Spectrograph blue Lakeshore 336 temperature 2 +BTEMP3 = -167.402 / Spectrograph blue Lakeshore 336 temperature 3 +BVAC = 0.000805 / Spectrograph blue combined vacuum value +SPERACT1= 16.8 / Spectrograph electronics rack temperature 1 +SPERACT2= 10.3 / Spectrograph electronics rack temperature 2 +DRYAIRT = 19.78 / Spectrograph Lakeshore 224 dry injection air te +GRATMNTT= 20.425 / Spectrograph Lakeshore 224 grating mount temper +INENCT1 = 20.321 / Spectrograph Lakeshore 224 inner enclosure temp +INENCT2 = 20.453 / Spectrograph Lakeshore 224 inner enclosure temp +OPBENCHT= 20.447 / Spectrograph Lakeshore 224 optical bench temper +OUTENCT1= 20.358 / Spectrograph Lakeshore 224 outer enclosure temp +OUTENCT2= 20.004 / Spectrograph Lakeshore 224 outer enclosure temp +PLABAIRT= 13.846 / Spectrograph Lakeshore 224 pier lab air tempera +BCRYOHT = 23. 
/ Spectrograph onewire blue cryostat housing temp +OUTENCH1= 16.0481015748993 / Spectrograph onewire outer enclosure humidity 1 +OUTENCH2= 16.9149066048102 / Spectrograph onewire outer enclosure humidity 2 +OUTENCP1= 737.0263671875 / Spectrograph onewire outer enclosure pressure 1 +OUTENCP2= 737.28125 / Spectrograph onewire outer enclosure pressure 2 +PLABH = 23.6845318031986 / Spectrograph onewire pier lab humidity +PLABP = 736.0751953125 / Spectrograph onewire pier lab pressure +PLABT1 = 14.0625 / Spectrograph onewire pier lab temperature 1 +PLABT2 = 14. / Spectrograph onewire pier lab temperature 2 +RAC1T = 15.6875 / Spectrograph onewire rack 1 back top temperatur +RAC2CHAT= 8.3125 / Spectrograph onewire rack2 Chiller Air temperat +RAC2T = 10.5 / Spectrograph onewire rack 2 back top temperatur +RAC3CHAT= 20. / Spectrograph onewire rack 3 chiller air tempera +RAC3T = 17.6875 / Spectrograph onewire rack 3 front top temperatu +RCRYOHT = 23.25 / Spectrograph onewire red cryostat housing tempe +SVCASET = 20.5 / Spectrograph onewire slit viewing camera CCD ca +RFOCUS = -12.796948238768 / Spectrograph red focus stage position +RTEMP1 = -117.706 / Spectrograph red Lakeshore 336 temperature 1 +RTEMP2 = -106.824 / Spectrograph red Lakeshore 336 temperature 2 +RTEMP3 = -164.755 / Spectrograph red Lakeshore 336 temperature 3 +RPIAX = 0. / Spectrograph rPi accelerometer x value mg +RPIAY = 0. / Spectrograph rPi accelerometer y value mg +RPIAZ = 0. / Spectrograph rPi accelerometer z value mg +RPIH = 0. / Spectrograph rPi humidity % +RPIP = 0. / Spectrograph rPi pressure hPA +RPIT = 0. / Spectrograph rPi temperature C +RVAC = 0.002515 / Spectrograph red vacuum combined vacuum value +MEERT1 = 19.9988708496094 / Spectrograph thermal enclosure Meerstetter 1 ob +MEERT11 = 0. / Spectrograph thermal enclosure Meerstetter 11 o +MEERT12 = 19.9917907714844 / Spectrograph thermal enclosure Meerstetter 12 o +MEERT13 = 20.0020141601562 / Spectrograph thermal enclosure Meerstetter 13 o +MEERT14 = 19.9994506835938 / Spectrograph thermal enclosure Meerstetter 14 o +MEERT15 = 19.9989929199219 / Spectrograph thermal enclosure Meerstetter 15 o +MEERT16 = 19.9973754882812 / Spectrograph thermal enclosure Meerstetter 16 o +MEERT17 = 20.0008544921875 / Spectrograph thermal enclosure Meerstetter 17 o +MEERT18 = 19.9890747070312 / Spectrograph thermal enclosure Meerstetter 18 o +MEERT19 = 20.0069885253906 / Spectrograph thermal enclosure Meerstetter 19 o +MEERT20 = 19.9876708984375 / Spectrograph thermal enclosure Meerstetter 20 o +MEERT2 = 19.9945068359375 / Spectrograph thermal enclosure Meerstetter 2 ob +MEERT21 = 19.9993591308594 / Spectrograph thermal enclosure Meerstetter 21 o +MEERT22 = 19.9959716796875 / Spectrograph thermal enclosure Meerstetter 22 o +MEERT23 = 20.0005187988281 / Spectrograph thermal enclosure Meerstetter 23 o +MEERT24 = 20.1638793945312 / Spectrograph thermal enclosure Meerstetter 24 o +MEERT25 = 0. 
/ Spectrograph thermal enclosure Meerstetter 25 o +MEERT3 = 20.0113830566406 / Spectrograph thermal enclosure Meerstetter 3 ob +MEERT4 = 19.9964294433594 / Spectrograph thermal enclosure Meerstetter 4 ob +MEERT5 = 20.0000915527344 / Spectrograph thermal enclosure Meerstetter 5 ob +MEERT6 = 19.9842834472656 / Spectrograph thermal enclosure Meerstetter 6 ob +MEERT7 = 20.0056762695312 / Spectrograph thermal enclosure Meerstetter 7 ob +MEERT8 = 19.9993591308594 / Spectrograph thermal enclosure Meerstetter 8 ob +MEERT9 = 20.0060729980469 / Spectrograph thermal enclosure Meerstetter 9 ob +MEERT10 = 20.0045776367188 / Spectrograph thermal enclosure Meerstetter 10 o +CONTROLR= 'ghostSlitViewer' / Controller Name +CONHWV = 'BigEye G-283' / Controller hardware version +CCDNAME = 'Ghost BigEye Sony ICX674' / CCD identification +CCDSIZE = '[1:1928,1:1452]' / CCD size +CCDNAMPS= 1 / Number of amplifiers used to readout CCD +CCDSEC = '[801:1100,681:940]' / Region of CCD read +LTV1 = -4.000000E+02 / Image transformation vector +LTV2 = -3.400000E+02 / Image transformation vector +LTM1_1 = 5.000000E-01 / Image transformation matrix +LTM1_2 = 0.000000E+00 / Image transformation matrix +LTM2_1 = 0.000000E+00 / Image transformation matrix +LTM2_2 = 5.000000E-01 / Image transformation matrix +CCDSUM = '2 2 ' / CCD pixel summing +AMPNAME = 'A ' / Amplifier identification +GAIN = 1.000000E+00 / Amplifier Gain (e-/ADU) +RDNOISE = 1.000000E+00 / Read noise for amp (e-) +SATURATE= 16383 / Maximum good data value - ADU +LINCOEF = 0.000000E+00 / Linearity coefficient No=Nt(1+Nt**lincoef) +SUMSAT = 0 / Saturation level of summing wells +AMPSIZE = '[1:1928,1:1452]' / Amplifier size +COMMENT = 'BIASSEC = 0' / No BIASSEC +AMPSEC = '[801:1100,681:940]' / Amplifier section +ATV1 = 800 / Amplifier transformation vector +ATV2 = 680 / Amplifier transformation vector +ATM1_1 = 1 / Amplifier transformation matrix +ATM1_2 = 0 / Amplifier transformation matrix +ATM2_1 = 0 / Amplifier transformation matrix +ATM2_2 = 1 / Amplifier transformation matrix +TRIMSEC = '[1:150,1:130]' / Trim section +DATASEC = '[1:150,1:130]' / Data section +DETSEC = '[801:1100,681:940]' / Detector section +DTV1 = 0 / Detector transformation vector +DTV2 = 0 / Detector transformation vector +DTM1_1 = 1 / Detector transformation matrix +DTM1_2 = 0 / Detector transformation matrix +DTM2_1 = 0 / Detector transformation matrix +DTM2_2 = 1 / Detector transformation matrix +UTSTART = '09:12:54' / UTC of observation start +EXPUTST = '09:12:54' / UTC of observation start +UTEND = '09:13:12' / UTC of observation end +EXPUTEND= '09:13:12' / UTC of observation end +DATE-OBS= '2023-05-18' / Date of observation start +EQUINOX = 2000 / Standard FK5 (years) +RADESYS = 'FK5 ' / Coordinate reference frame +IMAGESWV= 'CICADA Release X.Y.Z' / Image creation software version +KWDICT = 'CICADA FITS V 1.8' / Keyword dictionary version +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 1 / Added by AstroData + +--- HDU 60 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 8 / number of bits per data pixel +NAXIS = 0 / number of data axes +PCOUNT = 0 / number of random group parameters +GCOUNT = 1 / number of random groups +DARKTIME= 1207.914 / Total dark time of the observation +ELAPSED = 1200. / Total elapsed time between shutter open and rea +EXPOSED = 1200. / Actual exposure time +EXPREQ = 1200. / Requested exposure time +NOTES = ' ' +ORIGNAME= 'S20230518S0121' / Original filename +OBJECT = ' ' +OBSERVER= ' ' +RUN = 1 / Exposure run number +CCDTEMP = 0. 
+READMODE= 0 / (CHIP_READOUT_SLOW=0, CHIP_READOUT_MEDIUM=1, C +RDOUT = 7.684 / Readout time +EXPTIME = 1200. +OBSTYPE = 'OBJECT ' / Observation type +FILENAME= 'bl-20230518.091254-0000.fits' / Original host filename +OBSID = 'bl-20230518.091254-0000' / Observation ID +INSTRUME= 'GHOST ' / Instrument name +CAMERA = 'BLUE ' / Camera name +WINDOW = ' ' / Readout window name +DETECTOR= 'EEV231-84' / Detector identification +DETSIZE = '[1:4096,1:4112]' / Detector size (pixels) +NCCDS = 1 / Number of CCDs +NAMPS = 4 / Number of amplifiers +COMMENT = 'FITSKW1 ' / Not available +COMMENT = 'FITSKW2 ' / Not available +COMMENT = 'FITSKW3 ' / Not available +COMMENT = 'FITSKW4 ' / Not available +OBJECT1 = ' ' +TARGET1 = 2 / IFU_TARGET_NONE=0, IFU_TARGET_SKY=1, IFU_TARGET +OBJECT2 = ' ' +TARGET2 = 1 / IFU_TARGET_NONE=0, IFU_TARGET_SKY=1, IFU_TARGET +IFU1GDX = 0.195591860768047 / Cass Unit Positioner 1 focal plane guide X offs +IFU1GDY = 0.24419448526399 / Cass Unit Positioner 1 focal plane guide Y offs +IFU2GDX = 0. / Cass Unit Positioner 2 focal plane guide X offs +IFU2GDY = 0. / Cass Unit Positioner 2 focal plane guide Y offs +ADC1A = 22.4582190934533 / Cassegrain Unit Positioner 1 ADC aprism angle +ADC1B = 128.417718133915 / Cassegrain Unit Positioner 1 ADC bprism angle +ADC2A = 22.4583714517757 / Cassegrain Unit Positioner 2 ADC aprism angle +ADC2B = 128.417601400657 / Cassegrain Unit Positioner 2 ADC bprism angle +CURACT1 = 7.2 / Cassegrain Unit electronics rack temperature 1 +CURACT2 = 8.5 / Cassegrain Unit electronics rack temperature 2 +HRIFU1D = -37.6584773522382 / Cassegrain Unit Positioner 1 Hi Dec +HRIFU1R = 11.6516234743036 / Cassegrain Unit Positioner 1 Hi RA +SKIFU1D = -37.6593695218957 / Cassegrain Unit Positioner 1 Sky Dec +SKIFU1R = 11.6522907122998 / Cassegrain Unit Positioner 1 Sky RA +SRIFU1D = -37.6593689922103 / Cassegrain Unit Positioner 1 Std Dec +IFU1X = 0.00598936270399975 / Cassegrain Unit Positioner 1 focal plane X posi +IFU1Y = 0.00588557857599992 / Cassegrain Unit Positioner 1 focal plane Y posi +SRIFU1R = 11.6509379706258 / Cassegrain Unit Positioner 1 RA +I1HGD0D = -37.6583789435213 / Cassegrain Unit high res IFU guide fiber 0 Dec +I1HGD0R = 11.6518396845883 / Cassegrain Unit high res IFU guide fiber 0 RA +I1HGD1D = -37.658575929685 / Cassegrain Unit high res IFU guide fiber 1 Dec +I1HGD1R = 11.6518392620204 / Cassegrain Unit high res IFU guide fiber 1 RA +I1HGD2D = -37.6586743384012 / Cassegrain Unit high res IFU guide fiber 2 Dec +I1HGD2R = 11.6516230511615 / Cassegrain Unit high res IFU guide fiber 2 RA +I1HGD3D = -37.6585757605605 / Cassegrain Unit high res IFU guide fiber 3 Dec +I1HGD3R = 11.6514072634457 / Cassegrain Unit high res IFU guide fiber 3 RA +I1HGD4D = -37.6583787743983 / Cassegrain Unit high res IFU guide fiber 4 Dec +I1HGD4R = 11.6514076871599 / Cassegrain Unit high res IFU guide fiber 4 RA +I1HGD5D = -37.6582803660752 / Cassegrain Unit high res IFU guide fiber 5 Dec +I1HGD5R = 11.6516238974435 / Cassegrain Unit high res IFU guide fiber 5 RA +I1HSC00D= -37.6584773522382 / Cassegrain Unit high res IFU science fiber 0 De +I1HSC00R= 11.6516234743036 / Cassegrain Unit high res IFU science fiber 0 RA +I1HSC10D= -37.6585430705798 / Cassegrain Unit high res IFU science fiber 10 D +I1HSC10R= 11.6517673327178 / Cassegrain Unit high res IFU science fiber 10 R +I1HSC11D= -37.6585758734852 / Cassegrain Unit high res IFU science fiber 11 D +I1HSC11R= 11.6516952624953 / Cassegrain Unit high res IFU science fiber 11 R +I1HSC12D= -37.6586086763469 / Cassegrain Unit 
high res IFU science fiber 12 D +I1HSC12R= 11.6516231922091 / Cassegrain Unit high res IFU science fiber 12 R +I1HSC13D= -37.6585758171104 / Cassegrain Unit high res IFU science fiber 13 D +I1HSC13R= 11.6515512629704 / Cassegrain Unit high res IFU science fiber 13 R +I1HSC14D= -37.6585429578302 / Cassegrain Unit high res IFU science fiber 14 D +I1HSC14R= 11.6514793337954 / Cassegrain Unit high res IFU science fiber 14 R +I1HSC15D= -37.6584772957761 / Cassegrain Unit high res IFU science fiber 15 D +I1HSC15R= 11.6514794749699 / Cassegrain Unit high res IFU science fiber 15 R +I1HSC16D= -37.6584116337219 / Cassegrain Unit high res IFU science fiber 16 D +I1HSC16R= 11.6514796161441 / Cassegrain Unit high res IFU science fiber 16 R +I1HSC17D= -37.6583788309477 / Cassegrain Unit high res IFU science fiber 17 D +I1HSC17R= 11.6515516863025 / Cassegrain Unit high res IFU science fiber 17 R +I1HSC18D= -37.6583460281296 / Cassegrain Unit high res IFU science fiber 18 D +I1HSC18R= 11.6516237563971 / Cassegrain Unit high res IFU science fiber 18 R +I1HSC01D= -37.6584445493764 / Cassegrain Unit high res IFU science fiber 1 De +I1HSC01R= 11.6516955444622 / Cassegrain Unit high res IFU science fiber 1 RA +I1HSC02D= -37.6585102114308 / Cassegrain Unit high res IFU science fiber 2 De +I1HSC02R= 11.6516954034789 / Cassegrain Unit high res IFU science fiber 2 RA +I1HSC03D= -37.6585430142925 / Cassegrain Unit high res IFU science fiber 3 De +I1HSC03R= 11.6516233332565 / Cassegrain Unit high res IFU science fiber 3 RA +I1HSC04D= -37.6585101550561 / Cassegrain Unit high res IFU science fiber 4 De +I1HSC04R= 11.6515514040814 / Cassegrain Unit high res IFU science fiber 4 RA +I1HSC05D= -37.6584444930019 / Cassegrain Unit high res IFU science fiber 5 De +I1HSC05R= 11.651551545192 / Cassegrain Unit high res IFU science fiber 5 RA +I1HSC06D= -37.6584116901839 / Cassegrain Unit high res IFU science fiber 6 De +I1HSC06R= 11.6516236153505 / Cassegrain Unit high res IFU science fiber 6 RA +I1HSC07D= -37.658378887322 / Cassegrain Unit high res IFU science fiber 7 De +I1HSC07R= 11.6516956854453 / Cassegrain Unit high res IFU science fiber 7 RA +I1HSC08D= -37.6584117464708 / Cassegrain Unit high res IFU science fiber 8 De +I1HSC08R= 11.6517676145571 / Cassegrain Unit high res IFU science fiber 8 RA +I1HSC09D= -37.6584774085253 / Cassegrain Unit high res IFU science fiber 9 De +I1HSC09R= 11.6517674736376 / Cassegrain Unit high res IFU science fiber 9 RA +I1SGD0D = -37.6592048846885 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD0R = 11.6510583253829 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD1D = -37.6593690873136 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD1R = 11.6511779723974 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD2D = -37.6595331949569 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD2R = 11.6510576176386 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD3D = -37.6595330996097 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD3R = 11.6508176153367 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD4D = -37.6593688966206 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD4R = 11.6506979688548 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD5D = -37.6592047893428 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD5R = 11.6508183241423 / Cassegrain Unit Positioner 1 std res IFU guide +I1SSC0D = -37.6593689922103 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC0R = 11.6509379706258 / Cassegrain Unit Positioner 1 std res IFU scienc 
+I1SSC1D = -37.6593143214446 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC1R = 11.6510580894688 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC2D = -37.6594237582008 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC2R = 11.6510578535541 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC3D = -37.6594784289661 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC3R = 11.6509377345338 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC4D = -37.659423662854 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC4R = 11.6508178516059 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC5D = -37.6593142260984 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC5R = 11.6508180878744 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC6D = -37.6592595554544 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC6R = 11.6509382067171 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSK0D = -37.6593695218957 / Cassegrain Unit std res IFU sky fiber 0 Dec +I1SSK0R = 11.6522907122998 / Cassegrain Unit std res IFU sky fiber 0 RA +I1SSK1D = -37.6594242865172 / Cassegrain Unit std res IFU sky fiber 1 Dec +I1SSK1R = 11.6524105962268 / Cassegrain Unit std res IFU sky fiber 1 RA +I1SSK2D = -37.6594789586542 / Cassegrain Unit std res IFU sky fiber 2 Dec +I1SSK2R = 11.6522904782019 / Cassegrain Unit std res IFU sky fiber 2 RA +SRIFU2D = -37.6829846584165 / Cassegrain Unit Positioner 2 Dec +IFU2X = -86.923269710296 / Cassegrain Unit Positioner 2 focal plane X posi +IFU2Y = -52.106378584976 / Cassegrain Unit Positioner 2 focal plane Y posi +SRIFU2R = 11.6009858740267 / Cassegrain Unit Positioner 2 RA +I2HSK0D = -37.6838969539684 / Cassegrain Unit high res IFU sky fiber 0 Dec +I2HSK0R = 11.6007041678923 / Cassegrain Unit high res IFU sky fiber 0 RA +I2HSK1D = -37.6838643068243 / Cassegrain Unit high res IFU sky fiber 1 Dec +I2HSK1R = 11.6007759642589 / Cassegrain Unit high res IFU sky fiber 1 RA +I2HSK2D = -37.6839299797529 / Cassegrain Unit high res IFU sky fiber 2 Dec +I2HSK2R = 11.6007759007549 / Cassegrain Unit high res IFU sky fiber 2 RA +I2HSK3D = -37.6839626268969 / Cassegrain Unit high res IFU sky fiber 3 Dec +I2HSK3R = 11.6007041043247 / Cassegrain Unit high res IFU sky fiber 3 RA +I2HSK4D = -37.6839296010689 / Cassegrain Unit high res IFU sky fiber 4 Dec +I2HSK4R = 11.6006323714626 / Cassegrain Unit high res IFU sky fiber 4 RA +I2HSK5D = -37.6838639281404 / Cassegrain Unit high res IFU sky fiber 5 Dec +I2HSK5R = 11.6006324350936 / Cassegrain Unit high res IFU sky fiber 5 RA +I2HSK6D = -37.6838312810397 / Cassegrain Unit high res IFU sky fiber 6 Dec +I2HSK6R = 11.6007042314597 / Cassegrain Unit high res IFU sky fiber 6 RA +I2SGD0D = -37.6828207913155 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD0R = 11.6011056382783 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD1D = -37.6829852887438 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD1R = 11.6012250864737 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD2D = -37.6831491559652 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD2R = 11.6011053222202 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD3D = -37.6831485253956 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD3R = 11.6008661092466 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD4D = -37.6829840276061 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD4R = 11.6007466615849 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD5D = -37.6828201607474 / Cassegrain Unit Positioner 2 std 
res IFU guide +I2SGD5R = 11.600866426363 / Cassegrain Unit Positioner 2 std res IFU guide +I2SSC0D = -37.6829846584165 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC0R = 11.6009858740267 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC1D = -37.6829302461989 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC1R = 11.6011055329259 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC2D = -37.6830397010822 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC2R = 11.6011054275732 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC3D = -37.6830941132994 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC3R = 11.6009857684975 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC4D = -37.6830390705131 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC4R = 11.6008662149524 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC5D = -37.6829296156303 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC5R = 11.6008663206578 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC6D = -37.6828752035334 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC6R = 11.6009859795556 / Cassegrain Unit Positioner 2 std res IFU scienc +CUAIRT = 8.1319620349768 / Cassegrain Unit air temperature +CUHUM = 31.7941831720968 / Cassegrain Unit humidity +CUT1 = 6.8 / Cassegrain Unit temperature 1 +CUT2 = 6.1 / Cassegrain Unit temperature 2 +CFWNAME = 'CFW_CLOSED1' / Slit Unit calibration filter wheel position +CFWPOS = 11 / Slit Unit calibration filter wheel position (CF +FA1ACTIV= 0 / Is the Slit Unit fiber agitator 1 active? +FA2ACTIV= 0 / Is the Slit Unit fiber agitator 2 active? +SLURACT1= 13. / Slit Unit electronics rack temperature 1 +SLURACT2= 9. / Slit Unit electronics rack temperature 2 +AGCTEMP = 16. / Slit Unit acquisition & guiding camera temperat +THXELAMP= 0 / Slit Unit HCL power flag +PNEUMISO= 0 / Slit Unit optical bench pneumatic isolator erro +SVCTEMP = 21.8 / Slit Unit slit viewing camera temperature +SMPNAME = 'SMP_STD_ONLY' / Slit Unit slit mask positioner wheel position +SMPPOS = 2 / Slit Unit slit mask position (SMP_UNKNOWN=0, SM +BFOCUS = -11.29693797412 / Spectrograph blue focus stage position +BTEMP1 = -120. / Spectrograph blue Lakeshore 336 temperature 1 +BTEMP2 = -115.633 / Spectrograph blue Lakeshore 336 temperature 2 +BTEMP3 = -167.402 / Spectrograph blue Lakeshore 336 temperature 3 +BVAC = 0.000805 / Spectrograph blue combined vacuum value +SPERACT1= 16.8 / Spectrograph electronics rack temperature 1 +SPERACT2= 10.3 / Spectrograph electronics rack temperature 2 +DRYAIRT = 19.78 / Spectrograph Lakeshore 224 dry injection air te +GRATMNTT= 20.425 / Spectrograph Lakeshore 224 grating mount temper +INENCT1 = 20.321 / Spectrograph Lakeshore 224 inner enclosure temp +INENCT2 = 20.453 / Spectrograph Lakeshore 224 inner enclosure temp +OPBENCHT= 20.447 / Spectrograph Lakeshore 224 optical bench temper +OUTENCT1= 20.358 / Spectrograph Lakeshore 224 outer enclosure temp +OUTENCT2= 20.004 / Spectrograph Lakeshore 224 outer enclosure temp +PLABAIRT= 13.846 / Spectrograph Lakeshore 224 pier lab air tempera +BCRYOHT = 23. 
/ Spectrograph onewire blue cryostat housing temp +OUTENCH1= 16.0481015748993 / Spectrograph onewire outer enclosure humidity 1 +OUTENCH2= 16.9149066048102 / Spectrograph onewire outer enclosure humidity 2 +OUTENCP1= 737.0263671875 / Spectrograph onewire outer enclosure pressure 1 +OUTENCP2= 737.28125 / Spectrograph onewire outer enclosure pressure 2 +PLABH = 23.6845318031986 / Spectrograph onewire pier lab humidity +PLABP = 736.0751953125 / Spectrograph onewire pier lab pressure +PLABT1 = 14.0625 / Spectrograph onewire pier lab temperature 1 +PLABT2 = 14. / Spectrograph onewire pier lab temperature 2 +RAC1T = 15.6875 / Spectrograph onewire rack 1 back top temperatur +RAC2CHAT= 8.3125 / Spectrograph onewire rack2 Chiller Air temperat +RAC2T = 10.5 / Spectrograph onewire rack 2 back top temperatur +RAC3CHAT= 20. / Spectrograph onewire rack 3 chiller air tempera +RAC3T = 17.6875 / Spectrograph onewire rack 3 front top temperatu +RCRYOHT = 23.25 / Spectrograph onewire red cryostat housing tempe +SVCASET = 20.5 / Spectrograph onewire slit viewing camera CCD ca +RFOCUS = -12.796948238768 / Spectrograph red focus stage position +RTEMP1 = -117.706 / Spectrograph red Lakeshore 336 temperature 1 +RTEMP2 = -106.824 / Spectrograph red Lakeshore 336 temperature 2 +RTEMP3 = -164.755 / Spectrograph red Lakeshore 336 temperature 3 +RPIAX = 0. / Spectrograph rPi accelerometer x value mg +RPIAY = 0. / Spectrograph rPi accelerometer y value mg +RPIAZ = 0. / Spectrograph rPi accelerometer z value mg +RPIH = 0. / Spectrograph rPi humidity % +RPIP = 0. / Spectrograph rPi pressure hPA +RPIT = 0. / Spectrograph rPi temperature C +RVAC = 0.002515 / Spectrograph red vacuum combined vacuum value +MEERT1 = 19.9988708496094 / Spectrograph thermal enclosure Meerstetter 1 ob +MEERT11 = 0. / Spectrograph thermal enclosure Meerstetter 11 o +MEERT12 = 19.9917907714844 / Spectrograph thermal enclosure Meerstetter 12 o +MEERT13 = 20.0020141601562 / Spectrograph thermal enclosure Meerstetter 13 o +MEERT14 = 19.9994506835938 / Spectrograph thermal enclosure Meerstetter 14 o +MEERT15 = 19.9989929199219 / Spectrograph thermal enclosure Meerstetter 15 o +MEERT16 = 19.9973754882812 / Spectrograph thermal enclosure Meerstetter 16 o +MEERT17 = 20.0008544921875 / Spectrograph thermal enclosure Meerstetter 17 o +MEERT18 = 19.9890747070312 / Spectrograph thermal enclosure Meerstetter 18 o +MEERT19 = 20.0069885253906 / Spectrograph thermal enclosure Meerstetter 19 o +MEERT20 = 19.9876708984375 / Spectrograph thermal enclosure Meerstetter 20 o +MEERT2 = 19.9945068359375 / Spectrograph thermal enclosure Meerstetter 2 ob +MEERT21 = 19.9993591308594 / Spectrograph thermal enclosure Meerstetter 21 o +MEERT22 = 19.9959716796875 / Spectrograph thermal enclosure Meerstetter 22 o +MEERT23 = 20.0005187988281 / Spectrograph thermal enclosure Meerstetter 23 o +MEERT24 = 20.1638793945312 / Spectrograph thermal enclosure Meerstetter 24 o +MEERT25 = 0. 
/ Spectrograph thermal enclosure Meerstetter 25 o +MEERT3 = 20.0113830566406 / Spectrograph thermal enclosure Meerstetter 3 ob +MEERT4 = 19.9964294433594 / Spectrograph thermal enclosure Meerstetter 4 ob +MEERT5 = 20.0000915527344 / Spectrograph thermal enclosure Meerstetter 5 ob +MEERT6 = 19.9842834472656 / Spectrograph thermal enclosure Meerstetter 6 ob +MEERT7 = 20.0056762695312 / Spectrograph thermal enclosure Meerstetter 7 ob +MEERT8 = 19.9993591308594 / Spectrograph thermal enclosure Meerstetter 8 ob +MEERT9 = 20.0060729980469 / Spectrograph thermal enclosure Meerstetter 9 ob +MEERT10 = 20.0045776367188 / Spectrograph thermal enclosure Meerstetter 10 o +UTSTART = '09:12:54' / UTC of observation start +EXPUTST = '09:12:54' / UTC of observation start +UTEND = '09:32:54' / UTC of observation end +EXPUTEND= '09:32:54' / UTC of observation end +DATE-OBS= '2023-05-18' / Date of observation start +EQUINOX = 2000 / Standard FK5 (years) +RADESYS = 'FK5 ' / Coordinate reference frame +IMAGESWV= 'CICADA Release X.Y.Z' / Image creation software version +KWDICT = 'CICADA FITS V 1.8' / Keyword dictionary version +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 61 / Added by AstroData + +--- HDU 61 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 16 / number of bits per data pixel +NAXIS = 2 / number of data axes +NAXIS1 = 1040 / length of data axis 1 +NAXIS2 = 514 / length of data axis 2 +PCOUNT = 0 / required keyword; must = 0 +GCOUNT = 1 / required keyword; must = 1 +BZERO = 32768. +BSCALE = 1. +CAMERA = 'BLUE ' / Camera name +CONTROLR= 'ghostBlue' / Controller Name +CONHWV = 'ARC v3 ' / Controller hardware version +CCDNAME = 'EEV231-84' / CCD identification +CCDSIZE = '[1:4096,1:4112]' / CCD size +CCDNAMPS= 4 / Number of amplifiers used to readout CCD +CCDSEC = '[1:2048,1:2056]' / Region of CCD read +LTV1 = 0.000000E+00 / Image transformation vector +LTV2 = 0.000000E+00 / Image transformation vector +LTM1_1 = 5.000000E-01 / Image transformation matrix +LTM1_2 = 0.000000E+00 / Image transformation matrix +LTM2_1 = 0.000000E+00 / Image transformation matrix +LTM2_2 = 2.500000E-01 / Image transformation matrix +CCDSUM = '2 4 ' / CCD pixel summing +AMPNAME = 'E ' / Amplifier identification +GAIN = 5.900000E-01 / Amplifier Gain (e-/ADU) +RDNOISE = 2.100000E+00 / Read noise for amp (e-) +SATURATE= 0 / Maximum good data value - ADU +LINCOEF = 0.000000E+00 / Linearity coefficient No=Nt(1+Nt**lincoef) +SUMSAT = 0 / Saturation level of summing wells +AMPSIZE = '[1:2048,1:2056]' / Amplifier size +BIAS0001= '[1025:1040,1:514]' / Bias section - postscan cols +BIASSEC = '[1025:1040,1:514]' / Bias section - user specified +AMPSEC = '[1:2048,1:2056]' / Amplifier section +ATV1 = 0 / Amplifier transformation vector +ATV2 = 0 / Amplifier transformation vector +ATM1_1 = 1 / Amplifier transformation matrix +ATM1_2 = 0 / Amplifier transformation matrix +ATM2_1 = 0 / Amplifier transformation matrix +ATM2_2 = 1 / Amplifier transformation matrix +TRIMSEC = '[1:1024,1:514]' / Trim section +DATASEC = '[1:1024,1:514]' / Data section +DETSEC = '[1:2048,1:2056]' / Detector section +DTV1 = 0 / Detector transformation vector +DTV2 = 0 / Detector transformation vector +DTM1_1 = 1 / Detector transformation matrix +DTM1_2 = 0 / Detector transformation matrix +DTM2_1 = 0 / Detector transformation matrix +DTM2_2 = 1 / Detector transformation matrix +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 62 / Added by AstroData + +--- HDU 62 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 16 / number of bits 
per data pixel +NAXIS = 2 / number of data axes +NAXIS1 = 1040 / length of data axis 1 +NAXIS2 = 514 / length of data axis 2 +PCOUNT = 0 / required keyword; must = 0 +GCOUNT = 1 / required keyword; must = 1 +BZERO = 32768. +BSCALE = 1. +CAMERA = 'BLUE ' / Camera name +CONTROLR= 'ghostBlue' / Controller Name +CONHWV = 'ARC v3 ' / Controller hardware version +CCDNAME = 'EEV231-84' / CCD identification +CCDSIZE = '[1:4096,1:4112]' / CCD size +CCDNAMPS= 4 / Number of amplifiers used to readout CCD +CCDSEC = '[2049:4096,1:2056]' / Region of CCD read +LTV1 = -1.008000E+03 / Image transformation vector +LTV2 = 0.000000E+00 / Image transformation vector +LTM1_1 = 5.000000E-01 / Image transformation matrix +LTM1_2 = 0.000000E+00 / Image transformation matrix +LTM2_1 = 0.000000E+00 / Image transformation matrix +LTM2_2 = 2.500000E-01 / Image transformation matrix +CCDSUM = '2 4 ' / CCD pixel summing +AMPNAME = 'F ' / Amplifier identification +GAIN = 5.200000E-01 / Amplifier Gain (e-/ADU) +RDNOISE = 2.000000E+00 / Read noise for amp (e-) +SATURATE= 0 / Maximum good data value - ADU +LINCOEF = 0.000000E+00 / Linearity coefficient No=Nt(1+Nt**lincoef) +SUMSAT = 0 / Saturation level of summing wells +AMPSIZE = '[2049:4096,1:2056]' / Amplifier size +BIAS0001= '[1:16,1:514]' / Bias section - postscan cols +BIASSEC = '[1:16,1:514]' / Bias section - user specified +AMPSEC = '[2048:1,1:2056]' / Amplifier section +ATV1 = 4097 / Amplifier transformation vector +ATV2 = 0 / Amplifier transformation vector +ATM1_1 = -1 / Amplifier transformation matrix +ATM1_2 = 0 / Amplifier transformation matrix +ATM2_1 = 0 / Amplifier transformation matrix +ATM2_2 = 1 / Amplifier transformation matrix +TRIMSEC = '[17:1040,1:514]' / Trim section +DATASEC = '[17:1040,1:514]' / Data section +DETSEC = '[2049:4096,1:2056]' / Detector section +DTV1 = 0 / Detector transformation vector +DTV2 = 0 / Detector transformation vector +DTM1_1 = 1 / Detector transformation matrix +DTM1_2 = 0 / Detector transformation matrix +DTM2_1 = 0 / Detector transformation matrix +DTM2_2 = 1 / Detector transformation matrix +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 63 / Added by AstroData + +--- HDU 63 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 16 / number of bits per data pixel +NAXIS = 2 / number of data axes +NAXIS1 = 1040 / length of data axis 1 +NAXIS2 = 514 / length of data axis 2 +PCOUNT = 0 / required keyword; must = 0 +GCOUNT = 1 / required keyword; must = 1 +BZERO = 32768. +BSCALE = 1. 
+CAMERA = 'BLUE ' / Camera name +CONTROLR= 'ghostBlue' / Controller Name +CONHWV = 'ARC v3 ' / Controller hardware version +CCDNAME = 'EEV231-84' / CCD identification +CCDSIZE = '[1:4096,1:4112]' / CCD size +CCDNAMPS= 4 / Number of amplifiers used to readout CCD +CCDSEC = '[2049:4096,2057:4112]' / Region of CCD read +LTV1 = -1.008000E+03 / Image transformation vector +LTV2 = -5.140000E+02 / Image transformation vector +LTM1_1 = 5.000000E-01 / Image transformation matrix +LTM1_2 = 0.000000E+00 / Image transformation matrix +LTM2_1 = 0.000000E+00 / Image transformation matrix +LTM2_2 = 2.500000E-01 / Image transformation matrix +CCDSUM = '2 4 ' / CCD pixel summing +AMPNAME = 'G ' / Amplifier identification +GAIN = 5.400000E-01 / Amplifier Gain (e-/ADU) +RDNOISE = 2.100000E+00 / Read noise for amp (e-) +SATURATE= 0 / Maximum good data value - ADU +LINCOEF = 0.000000E+00 / Linearity coefficient No=Nt(1+Nt**lincoef) +SUMSAT = 0 / Saturation level of summing wells +AMPSIZE = '[2049:4096,2057:4112]' / Amplifier size +BIAS0001= '[1:16,1:514]' / Bias section - postscan cols +BIASSEC = '[1:16,1:514]' / Bias section - user specified +AMPSEC = '[2048:1,2056:1]' / Amplifier section +ATV1 = 4097 / Amplifier transformation vector +ATV2 = 4113 / Amplifier transformation vector +ATM1_1 = -1 / Amplifier transformation matrix +ATM1_2 = 0 / Amplifier transformation matrix +ATM2_1 = 0 / Amplifier transformation matrix +ATM2_2 = -1 / Amplifier transformation matrix +TRIMSEC = '[17:1040,1:514]' / Trim section +DATASEC = '[17:1040,1:514]' / Data section +DETSEC = '[2049:4096,2057:4112]' / Detector section +DTV1 = 0 / Detector transformation vector +DTV2 = 0 / Detector transformation vector +DTM1_1 = 1 / Detector transformation matrix +DTM1_2 = 0 / Detector transformation matrix +DTM2_1 = 0 / Detector transformation matrix +DTM2_2 = 1 / Detector transformation matrix +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 64 / Added by AstroData + +--- HDU 64 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 16 / number of bits per data pixel +NAXIS = 2 / number of data axes +NAXIS1 = 1040 / length of data axis 1 +NAXIS2 = 514 / length of data axis 2 +PCOUNT = 0 / required keyword; must = 0 +GCOUNT = 1 / required keyword; must = 1 +BZERO = 32768. +BSCALE = 1. 
+CAMERA = 'BLUE ' / Camera name +CONTROLR= 'ghostBlue' / Controller Name +CONHWV = 'ARC v3 ' / Controller hardware version +CCDNAME = 'EEV231-84' / CCD identification +CCDSIZE = '[1:4096,1:4112]' / CCD size +CCDNAMPS= 4 / Number of amplifiers used to readout CCD +CCDSEC = '[1:2048,2057:4112]' / Region of CCD read +LTV1 = 0.000000E+00 / Image transformation vector +LTV2 = -5.140000E+02 / Image transformation vector +LTM1_1 = 5.000000E-01 / Image transformation matrix +LTM1_2 = 0.000000E+00 / Image transformation matrix +LTM2_1 = 0.000000E+00 / Image transformation matrix +LTM2_2 = 2.500000E-01 / Image transformation matrix +CCDSUM = '2 4 ' / CCD pixel summing +AMPNAME = 'H ' / Amplifier identification +GAIN = 5.800000E-01 / Amplifier Gain (e-/ADU) +RDNOISE = 2.100000E+00 / Read noise for amp (e-) +SATURATE= 0 / Maximum good data value - ADU +LINCOEF = 0.000000E+00 / Linearity coefficient No=Nt(1+Nt**lincoef) +SUMSAT = 0 / Saturation level of summing wells +AMPSIZE = '[1:2048,2057:4112]' / Amplifier size +BIAS0001= '[1025:1040,1:514]' / Bias section - postscan cols +BIASSEC = '[1025:1040,1:514]' / Bias section - user specified +AMPSEC = '[1:2048,2056:1]' / Amplifier section +ATV1 = 0 / Amplifier transformation vector +ATV2 = 4113 / Amplifier transformation vector +ATM1_1 = 1 / Amplifier transformation matrix +ATM1_2 = 0 / Amplifier transformation matrix +ATM2_1 = 0 / Amplifier transformation matrix +ATM2_2 = -1 / Amplifier transformation matrix +TRIMSEC = '[1:1024,1:514]' / Trim section +DATASEC = '[1:1024,1:514]' / Data section +DETSEC = '[1:2048,2057:4112]' / Detector section +DTV1 = 0 / Detector transformation vector +DTV2 = 0 / Detector transformation vector +DTM1_1 = 1 / Detector transformation matrix +DTM1_2 = 0 / Detector transformation matrix +DTM2_1 = 0 / Detector transformation matrix +DTM2_2 = 1 / Detector transformation matrix +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 65 / Added by AstroData + +--- HDU 65 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 8 / number of bits per data pixel +NAXIS = 0 / number of data axes +PCOUNT = 0 / number of random group parameters +GCOUNT = 1 / number of random groups +DARKTIME= 1209.866 / Total dark time of the observation +ELAPSED = 1200. / Total elapsed time between shutter open and rea +EXPOSED = 1200. / Actual exposure time +EXPREQ = 1200. / Requested exposure time +NOTES = ' ' +ORIGNAME= 'S20230518S0121' / Original filename +OBJECT = ' ' +OBSERVER= ' ' +RUN = 1 / Exposure run number +CCDTEMP = 0. +READMODE= 1 / (CHIP_READOUT_SLOW=0, CHIP_READOUT_MEDIUM=1, C +RDOUT = 9.64 / Readout time +EXPTIME = 1200. +OBSTYPE = 'OBJECT ' / Observation type +FILENAME= 're-20230518.091254-0000.fits' / Original host filename +OBSID = 're-20230518.091254-0000' / Observation ID +INSTRUME= 'GHOST ' / Instrument name +CAMERA = 'RED ' / Camera name +WINDOW = ' ' / Readout window name +DETECTOR= 'EEV231-C6' / Detector identification +DETSIZE = '[1:6144,1:6160]' / Detector size (pixels) +NCCDS = 1 / Number of CCDs +NAMPS = 4 / Number of amplifiers +COMMENT = 'FITSKW1 ' / Not available +COMMENT = 'FITSKW2 ' / Not available +COMMENT = 'FITSKW3 ' / Not available +COMMENT = 'FITSKW4 ' / Not available +OBJECT1 = ' ' +TARGET1 = 2 / IFU_TARGET_NONE=0, IFU_TARGET_SKY=1, IFU_TARGET +OBJECT2 = ' ' +TARGET2 = 1 / IFU_TARGET_NONE=0, IFU_TARGET_SKY=1, IFU_TARGET +IFU1GDX = 0.195591860768047 / Cass Unit Positioner 1 focal plane guide X offs +IFU1GDY = 0.24419448526399 / Cass Unit Positioner 1 focal plane guide Y offs +IFU2GDX = 0. 
/ Cass Unit Positioner 2 focal plane guide X offs +IFU2GDY = 0. / Cass Unit Positioner 2 focal plane guide Y offs +ADC1A = 22.4582190934533 / Cassegrain Unit Positioner 1 ADC aprism angle +ADC1B = 128.417718133915 / Cassegrain Unit Positioner 1 ADC bprism angle +ADC2A = 22.4583714517757 / Cassegrain Unit Positioner 2 ADC aprism angle +ADC2B = 128.417601400657 / Cassegrain Unit Positioner 2 ADC bprism angle +CURACT1 = 7.2 / Cassegrain Unit electronics rack temperature 1 +CURACT2 = 8.5 / Cassegrain Unit electronics rack temperature 2 +HRIFU1D = -37.6584773522382 / Cassegrain Unit Positioner 1 Hi Dec +HRIFU1R = 11.6516234743036 / Cassegrain Unit Positioner 1 Hi RA +SKIFU1D = -37.6593695218957 / Cassegrain Unit Positioner 1 Sky Dec +SKIFU1R = 11.6522907122998 / Cassegrain Unit Positioner 1 Sky RA +SRIFU1D = -37.6593689922103 / Cassegrain Unit Positioner 1 Std Dec +IFU1X = 0.00598936270399975 / Cassegrain Unit Positioner 1 focal plane X posi +IFU1Y = 0.00588557857599992 / Cassegrain Unit Positioner 1 focal plane Y posi +SRIFU1R = 11.6509379706258 / Cassegrain Unit Positioner 1 RA +I1HGD0D = -37.6583789435213 / Cassegrain Unit high res IFU guide fiber 0 Dec +I1HGD0R = 11.6518396845883 / Cassegrain Unit high res IFU guide fiber 0 RA +I1HGD1D = -37.658575929685 / Cassegrain Unit high res IFU guide fiber 1 Dec +I1HGD1R = 11.6518392620204 / Cassegrain Unit high res IFU guide fiber 1 RA +I1HGD2D = -37.6586743384012 / Cassegrain Unit high res IFU guide fiber 2 Dec +I1HGD2R = 11.6516230511615 / Cassegrain Unit high res IFU guide fiber 2 RA +I1HGD3D = -37.6585757605605 / Cassegrain Unit high res IFU guide fiber 3 Dec +I1HGD3R = 11.6514072634457 / Cassegrain Unit high res IFU guide fiber 3 RA +I1HGD4D = -37.6583787743983 / Cassegrain Unit high res IFU guide fiber 4 Dec +I1HGD4R = 11.6514076871599 / Cassegrain Unit high res IFU guide fiber 4 RA +I1HGD5D = -37.6582803660752 / Cassegrain Unit high res IFU guide fiber 5 Dec +I1HGD5R = 11.6516238974435 / Cassegrain Unit high res IFU guide fiber 5 RA +I1HSC00D= -37.6584773522382 / Cassegrain Unit high res IFU science fiber 0 De +I1HSC00R= 11.6516234743036 / Cassegrain Unit high res IFU science fiber 0 RA +I1HSC10D= -37.6585430705798 / Cassegrain Unit high res IFU science fiber 10 D +I1HSC10R= 11.6517673327178 / Cassegrain Unit high res IFU science fiber 10 R +I1HSC11D= -37.6585758734852 / Cassegrain Unit high res IFU science fiber 11 D +I1HSC11R= 11.6516952624953 / Cassegrain Unit high res IFU science fiber 11 R +I1HSC12D= -37.6586086763469 / Cassegrain Unit high res IFU science fiber 12 D +I1HSC12R= 11.6516231922091 / Cassegrain Unit high res IFU science fiber 12 R +I1HSC13D= -37.6585758171104 / Cassegrain Unit high res IFU science fiber 13 D +I1HSC13R= 11.6515512629704 / Cassegrain Unit high res IFU science fiber 13 R +I1HSC14D= -37.6585429578302 / Cassegrain Unit high res IFU science fiber 14 D +I1HSC14R= 11.6514793337954 / Cassegrain Unit high res IFU science fiber 14 R +I1HSC15D= -37.6584772957761 / Cassegrain Unit high res IFU science fiber 15 D +I1HSC15R= 11.6514794749699 / Cassegrain Unit high res IFU science fiber 15 R +I1HSC16D= -37.6584116337219 / Cassegrain Unit high res IFU science fiber 16 D +I1HSC16R= 11.6514796161441 / Cassegrain Unit high res IFU science fiber 16 R +I1HSC17D= -37.6583788309477 / Cassegrain Unit high res IFU science fiber 17 D +I1HSC17R= 11.6515516863025 / Cassegrain Unit high res IFU science fiber 17 R +I1HSC18D= -37.6583460281296 / Cassegrain Unit high res IFU science fiber 18 D +I1HSC18R= 11.6516237563971 / Cassegrain Unit 
high res IFU science fiber 18 R +I1HSC01D= -37.6584445493764 / Cassegrain Unit high res IFU science fiber 1 De +I1HSC01R= 11.6516955444622 / Cassegrain Unit high res IFU science fiber 1 RA +I1HSC02D= -37.6585102114308 / Cassegrain Unit high res IFU science fiber 2 De +I1HSC02R= 11.6516954034789 / Cassegrain Unit high res IFU science fiber 2 RA +I1HSC03D= -37.6585430142925 / Cassegrain Unit high res IFU science fiber 3 De +I1HSC03R= 11.6516233332565 / Cassegrain Unit high res IFU science fiber 3 RA +I1HSC04D= -37.6585101550561 / Cassegrain Unit high res IFU science fiber 4 De +I1HSC04R= 11.6515514040814 / Cassegrain Unit high res IFU science fiber 4 RA +I1HSC05D= -37.6584444930019 / Cassegrain Unit high res IFU science fiber 5 De +I1HSC05R= 11.651551545192 / Cassegrain Unit high res IFU science fiber 5 RA +I1HSC06D= -37.6584116901839 / Cassegrain Unit high res IFU science fiber 6 De +I1HSC06R= 11.6516236153505 / Cassegrain Unit high res IFU science fiber 6 RA +I1HSC07D= -37.658378887322 / Cassegrain Unit high res IFU science fiber 7 De +I1HSC07R= 11.6516956854453 / Cassegrain Unit high res IFU science fiber 7 RA +I1HSC08D= -37.6584117464708 / Cassegrain Unit high res IFU science fiber 8 De +I1HSC08R= 11.6517676145571 / Cassegrain Unit high res IFU science fiber 8 RA +I1HSC09D= -37.6584774085253 / Cassegrain Unit high res IFU science fiber 9 De +I1HSC09R= 11.6517674736376 / Cassegrain Unit high res IFU science fiber 9 RA +I1SGD0D = -37.6592048846885 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD0R = 11.6510583253829 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD1D = -37.6593690873136 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD1R = 11.6511779723974 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD2D = -37.6595331949569 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD2R = 11.6510576176386 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD3D = -37.6595330996097 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD3R = 11.6508176153367 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD4D = -37.6593688966206 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD4R = 11.6506979688548 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD5D = -37.6592047893428 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD5R = 11.6508183241423 / Cassegrain Unit Positioner 1 std res IFU guide +I1SSC0D = -37.6593689922103 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC0R = 11.6509379706258 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC1D = -37.6593143214446 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC1R = 11.6510580894688 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC2D = -37.6594237582008 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC2R = 11.6510578535541 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC3D = -37.6594784289661 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC3R = 11.6509377345338 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC4D = -37.659423662854 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC4R = 11.6508178516059 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC5D = -37.6593142260984 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC5R = 11.6508180878744 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC6D = -37.6592595554544 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC6R = 11.6509382067171 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSK0D = -37.6593695218957 / Cassegrain Unit std res IFU sky fiber 0 Dec +I1SSK0R 
= 11.6522907122998 / Cassegrain Unit std res IFU sky fiber 0 RA +I1SSK1D = -37.6594242865172 / Cassegrain Unit std res IFU sky fiber 1 Dec +I1SSK1R = 11.6524105962268 / Cassegrain Unit std res IFU sky fiber 1 RA +I1SSK2D = -37.6594789586542 / Cassegrain Unit std res IFU sky fiber 2 Dec +I1SSK2R = 11.6522904782019 / Cassegrain Unit std res IFU sky fiber 2 RA +SRIFU2D = -37.6829846584165 / Cassegrain Unit Positioner 2 Dec +IFU2X = -86.923269710296 / Cassegrain Unit Positioner 2 focal plane X posi +IFU2Y = -52.106378584976 / Cassegrain Unit Positioner 2 focal plane Y posi +SRIFU2R = 11.6009858740267 / Cassegrain Unit Positioner 2 RA +I2HSK0D = -37.6838969539684 / Cassegrain Unit high res IFU sky fiber 0 Dec +I2HSK0R = 11.6007041678923 / Cassegrain Unit high res IFU sky fiber 0 RA +I2HSK1D = -37.6838643068243 / Cassegrain Unit high res IFU sky fiber 1 Dec +I2HSK1R = 11.6007759642589 / Cassegrain Unit high res IFU sky fiber 1 RA +I2HSK2D = -37.6839299797529 / Cassegrain Unit high res IFU sky fiber 2 Dec +I2HSK2R = 11.6007759007549 / Cassegrain Unit high res IFU sky fiber 2 RA +I2HSK3D = -37.6839626268969 / Cassegrain Unit high res IFU sky fiber 3 Dec +I2HSK3R = 11.6007041043247 / Cassegrain Unit high res IFU sky fiber 3 RA +I2HSK4D = -37.6839296010689 / Cassegrain Unit high res IFU sky fiber 4 Dec +I2HSK4R = 11.6006323714626 / Cassegrain Unit high res IFU sky fiber 4 RA +I2HSK5D = -37.6838639281404 / Cassegrain Unit high res IFU sky fiber 5 Dec +I2HSK5R = 11.6006324350936 / Cassegrain Unit high res IFU sky fiber 5 RA +I2HSK6D = -37.6838312810397 / Cassegrain Unit high res IFU sky fiber 6 Dec +I2HSK6R = 11.6007042314597 / Cassegrain Unit high res IFU sky fiber 6 RA +I2SGD0D = -37.6828207913155 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD0R = 11.6011056382783 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD1D = -37.6829852887438 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD1R = 11.6012250864737 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD2D = -37.6831491559652 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD2R = 11.6011053222202 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD3D = -37.6831485253956 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD3R = 11.6008661092466 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD4D = -37.6829840276061 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD4R = 11.6007466615849 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD5D = -37.6828201607474 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD5R = 11.600866426363 / Cassegrain Unit Positioner 2 std res IFU guide +I2SSC0D = -37.6829846584165 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC0R = 11.6009858740267 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC1D = -37.6829302461989 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC1R = 11.6011055329259 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC2D = -37.6830397010822 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC2R = 11.6011054275732 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC3D = -37.6830941132994 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC3R = 11.6009857684975 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC4D = -37.6830390705131 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC4R = 11.6008662149524 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC5D = -37.6829296156303 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC5R = 11.6008663206578 / Cassegrain Unit Positioner 2 std res IFU 
scienc +I2SSC6D = -37.6828752035334 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC6R = 11.6009859795556 / Cassegrain Unit Positioner 2 std res IFU scienc +CUAIRT = 8.1319620349768 / Cassegrain Unit air temperature +CUHUM = 31.7941831720968 / Cassegrain Unit humidity +CUT1 = 6.8 / Cassegrain Unit temperature 1 +CUT2 = 6.1 / Cassegrain Unit temperature 2 +CFWNAME = 'CFW_CLOSED1' / Slit Unit calibration filter wheel position +CFWPOS = 11 / Slit Unit calibration filter wheel position (CF +FA1ACTIV= 0 / Is the Slit Unit fiber agitator 1 active? +FA2ACTIV= 0 / Is the Slit Unit fiber agitator 2 active? +SLURACT1= 13. / Slit Unit electronics rack temperature 1 +SLURACT2= 9. / Slit Unit electronics rack temperature 2 +AGCTEMP = 16. / Slit Unit acquisition & guiding camera temperat +THXELAMP= 0 / Slit Unit HCL power flag +PNEUMISO= 0 / Slit Unit optical bench pneumatic isolator erro +SVCTEMP = 21.8 / Slit Unit slit viewing camera temperature +SMPNAME = 'SMP_STD_ONLY' / Slit Unit slit mask positioner wheel position +SMPPOS = 2 / Slit Unit slit mask position (SMP_UNKNOWN=0, SM +BFOCUS = -11.29693797412 / Spectrograph blue focus stage position +BTEMP1 = -120. / Spectrograph blue Lakeshore 336 temperature 1 +BTEMP2 = -115.633 / Spectrograph blue Lakeshore 336 temperature 2 +BTEMP3 = -167.402 / Spectrograph blue Lakeshore 336 temperature 3 +BVAC = 0.000805 / Spectrograph blue combined vacuum value +SPERACT1= 16.8 / Spectrograph electronics rack temperature 1 +SPERACT2= 10.3 / Spectrograph electronics rack temperature 2 +DRYAIRT = 19.78 / Spectrograph Lakeshore 224 dry injection air te +GRATMNTT= 20.425 / Spectrograph Lakeshore 224 grating mount temper +INENCT1 = 20.321 / Spectrograph Lakeshore 224 inner enclosure temp +INENCT2 = 20.453 / Spectrograph Lakeshore 224 inner enclosure temp +OPBENCHT= 20.447 / Spectrograph Lakeshore 224 optical bench temper +OUTENCT1= 20.358 / Spectrograph Lakeshore 224 outer enclosure temp +OUTENCT2= 20.004 / Spectrograph Lakeshore 224 outer enclosure temp +PLABAIRT= 13.846 / Spectrograph Lakeshore 224 pier lab air tempera +BCRYOHT = 23. / Spectrograph onewire blue cryostat housing temp +OUTENCH1= 16.0481015748993 / Spectrograph onewire outer enclosure humidity 1 +OUTENCH2= 16.9149066048102 / Spectrograph onewire outer enclosure humidity 2 +OUTENCP1= 737.0263671875 / Spectrograph onewire outer enclosure pressure 1 +OUTENCP2= 737.28125 / Spectrograph onewire outer enclosure pressure 2 +PLABH = 23.6845318031986 / Spectrograph onewire pier lab humidity +PLABP = 736.0751953125 / Spectrograph onewire pier lab pressure +PLABT1 = 14.0625 / Spectrograph onewire pier lab temperature 1 +PLABT2 = 14. / Spectrograph onewire pier lab temperature 2 +RAC1T = 15.6875 / Spectrograph onewire rack 1 back top temperatur +RAC2CHAT= 8.3125 / Spectrograph onewire rack2 Chiller Air temperat +RAC2T = 10.5 / Spectrograph onewire rack 2 back top temperatur +RAC3CHAT= 20. / Spectrograph onewire rack 3 chiller air tempera +RAC3T = 17.6875 / Spectrograph onewire rack 3 front top temperatu +RCRYOHT = 23.25 / Spectrograph onewire red cryostat housing tempe +SVCASET = 20.5 / Spectrograph onewire slit viewing camera CCD ca +RFOCUS = -12.796948238768 / Spectrograph red focus stage position +RTEMP1 = -117.706 / Spectrograph red Lakeshore 336 temperature 1 +RTEMP2 = -106.824 / Spectrograph red Lakeshore 336 temperature 2 +RTEMP3 = -164.755 / Spectrograph red Lakeshore 336 temperature 3 +RPIAX = 0. / Spectrograph rPi accelerometer x value mg +RPIAY = 0. 
/ Spectrograph rPi accelerometer y value mg +RPIAZ = 0. / Spectrograph rPi accelerometer z value mg +RPIH = 0. / Spectrograph rPi humidity % +RPIP = 0. / Spectrograph rPi pressure hPA +RPIT = 0. / Spectrograph rPi temperature C +RVAC = 0.002515 / Spectrograph red vacuum combined vacuum value +MEERT1 = 19.9988708496094 / Spectrograph thermal enclosure Meerstetter 1 ob +MEERT11 = 0. / Spectrograph thermal enclosure Meerstetter 11 o +MEERT12 = 19.9917907714844 / Spectrograph thermal enclosure Meerstetter 12 o +MEERT13 = 20.0020141601562 / Spectrograph thermal enclosure Meerstetter 13 o +MEERT14 = 19.9994506835938 / Spectrograph thermal enclosure Meerstetter 14 o +MEERT15 = 19.9989929199219 / Spectrograph thermal enclosure Meerstetter 15 o +MEERT16 = 19.9973754882812 / Spectrograph thermal enclosure Meerstetter 16 o +MEERT17 = 20.0008544921875 / Spectrograph thermal enclosure Meerstetter 17 o +MEERT18 = 19.9890747070312 / Spectrograph thermal enclosure Meerstetter 18 o +MEERT19 = 20.0069885253906 / Spectrograph thermal enclosure Meerstetter 19 o +MEERT20 = 19.9876708984375 / Spectrograph thermal enclosure Meerstetter 20 o +MEERT2 = 19.9945068359375 / Spectrograph thermal enclosure Meerstetter 2 ob +MEERT21 = 19.9993591308594 / Spectrograph thermal enclosure Meerstetter 21 o +MEERT22 = 19.9959716796875 / Spectrograph thermal enclosure Meerstetter 22 o +MEERT23 = 20.0005187988281 / Spectrograph thermal enclosure Meerstetter 23 o +MEERT24 = 20.1638793945312 / Spectrograph thermal enclosure Meerstetter 24 o +MEERT25 = 0. / Spectrograph thermal enclosure Meerstetter 25 o +MEERT3 = 20.0113830566406 / Spectrograph thermal enclosure Meerstetter 3 ob +MEERT4 = 19.9964294433594 / Spectrograph thermal enclosure Meerstetter 4 ob +MEERT5 = 20.0000915527344 / Spectrograph thermal enclosure Meerstetter 5 ob +MEERT6 = 19.9842834472656 / Spectrograph thermal enclosure Meerstetter 6 ob +MEERT7 = 20.0056762695312 / Spectrograph thermal enclosure Meerstetter 7 ob +MEERT8 = 19.9993591308594 / Spectrograph thermal enclosure Meerstetter 8 ob +MEERT9 = 20.0060729980469 / Spectrograph thermal enclosure Meerstetter 9 ob +MEERT10 = 20.0045776367188 / Spectrograph thermal enclosure Meerstetter 10 o +UTSTART = '09:12:54' / UTC of observation start +EXPUTST = '09:12:54' / UTC of observation start +UTEND = '09:32:54' / UTC of observation end +EXPUTEND= '09:32:54' / UTC of observation end +DATE-OBS= '2023-05-18' / Date of observation start +EQUINOX = 2000 / Standard FK5 (years) +RADESYS = 'FK5 ' / Coordinate reference frame +IMAGESWV= 'CICADA Release X.Y.Z' / Image creation software version +KWDICT = 'CICADA FITS V 1.8' / Keyword dictionary version +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 66 / Added by AstroData + +--- HDU 66 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 16 / number of bits per data pixel +NAXIS = 2 / number of data axes +NAXIS1 = 1552 / length of data axis 1 +NAXIS2 = 770 / length of data axis 2 +PCOUNT = 0 / required keyword; must = 0 +GCOUNT = 1 / required keyword; must = 1 +BZERO = 32768. +BSCALE = 1. 
+CAMERA = 'RED ' / Camera name +CONTROLR= 'ghostRed' / Controller Name +CONHWV = 'ARC v3 ' / Controller hardware version +CCDNAME = 'EEV231-C6' / CCD identification +CCDSIZE = '[1:6144,1:6160]' / CCD size +CCDNAMPS= 4 / Number of amplifiers used to readout CCD +CCDSEC = '[1:3072,1:3080]' / Region of CCD read +LTV1 = 0.000000E+00 / Image transformation vector +LTV2 = 0.000000E+00 / Image transformation vector +LTM1_1 = 5.000000E-01 / Image transformation matrix +LTM1_2 = 0.000000E+00 / Image transformation matrix +LTM2_1 = 0.000000E+00 / Image transformation matrix +LTM2_2 = 2.500000E-01 / Image transformation matrix +CCDSUM = '2 4 ' / CCD pixel summing +AMPNAME = 'E ' / Amplifier identification +GAIN = 5.100000E-01 / Amplifier Gain (e-/ADU) +RDNOISE = 2.300000E+00 / Read noise for amp (e-) +SATURATE= 65535 / Maximum good data value - ADU +LINCOEF = 0.000000E+00 / Linearity coefficient No=Nt(1+Nt**lincoef) +SUMSAT = 0 / Saturation level of summing wells +AMPSIZE = '[1:3072,1:3080]' / Amplifier size +BIAS0001= '[1537:1552,1:770]' / Bias section - postscan cols +BIASSEC = '[1537:1552,1:770]' / Bias section - user specified +AMPSEC = '[1:3072,1:3080]' / Amplifier section +ATV1 = 0 / Amplifier transformation vector +ATV2 = 0 / Amplifier transformation vector +ATM1_1 = 1 / Amplifier transformation matrix +ATM1_2 = 0 / Amplifier transformation matrix +ATM2_1 = 0 / Amplifier transformation matrix +ATM2_2 = 1 / Amplifier transformation matrix +TRIMSEC = '[1:1536,1:770]' / Trim section +DATASEC = '[1:1536,1:770]' / Data section +DETSEC = '[1:3072,1:3080]' / Detector section +DTV1 = 0 / Detector transformation vector +DTV2 = 0 / Detector transformation vector +DTM1_1 = 1 / Detector transformation matrix +DTM1_2 = 0 / Detector transformation matrix +DTM2_1 = 0 / Detector transformation matrix +DTM2_2 = 1 / Detector transformation matrix +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 67 / Added by AstroData + +--- HDU 67 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 16 / number of bits per data pixel +NAXIS = 2 / number of data axes +NAXIS1 = 1552 / length of data axis 1 +NAXIS2 = 770 / length of data axis 2 +PCOUNT = 0 / required keyword; must = 0 +GCOUNT = 1 / required keyword; must = 1 +BZERO = 32768. +BSCALE = 1. 
+CAMERA = 'RED ' / Camera name +CONTROLR= 'ghostRed' / Controller Name +CONHWV = 'ARC v3 ' / Controller hardware version +CCDNAME = 'EEV231-C6' / CCD identification +CCDSIZE = '[1:6144,1:6160]' / CCD size +CCDNAMPS= 4 / Number of amplifiers used to readout CCD +CCDSEC = '[3073:6144,1:3080]' / Region of CCD read +LTV1 = -1.520000E+03 / Image transformation vector +LTV2 = 0.000000E+00 / Image transformation vector +LTM1_1 = 5.000000E-01 / Image transformation matrix +LTM1_2 = 0.000000E+00 / Image transformation matrix +LTM2_1 = 0.000000E+00 / Image transformation matrix +LTM2_2 = 2.500000E-01 / Image transformation matrix +CCDSUM = '2 4 ' / CCD pixel summing +AMPNAME = 'F ' / Amplifier identification +GAIN = 5.000000E-01 / Amplifier Gain (e-/ADU) +RDNOISE = 2.300000E+00 / Read noise for amp (e-) +SATURATE= 65535 / Maximum good data value - ADU +LINCOEF = 0.000000E+00 / Linearity coefficient No=Nt(1+Nt**lincoef) +SUMSAT = 0 / Saturation level of summing wells +AMPSIZE = '[3073:6144,1:3080]' / Amplifier size +BIAS0001= '[1:16,1:770]' / Bias section - postscan cols +BIASSEC = '[1:16,1:770]' / Bias section - user specified +AMPSEC = '[3072:1,1:3080]' / Amplifier section +ATV1 = 6145 / Amplifier transformation vector +ATV2 = 0 / Amplifier transformation vector +ATM1_1 = -1 / Amplifier transformation matrix +ATM1_2 = 0 / Amplifier transformation matrix +ATM2_1 = 0 / Amplifier transformation matrix +ATM2_2 = 1 / Amplifier transformation matrix +TRIMSEC = '[17:1552,1:770]' / Trim section +DATASEC = '[17:1552,1:770]' / Data section +DETSEC = '[3073:6144,1:3080]' / Detector section +DTV1 = 0 / Detector transformation vector +DTV2 = 0 / Detector transformation vector +DTM1_1 = 1 / Detector transformation matrix +DTM1_2 = 0 / Detector transformation matrix +DTM2_1 = 0 / Detector transformation matrix +DTM2_2 = 1 / Detector transformation matrix +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 68 / Added by AstroData + +--- HDU 68 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 16 / number of bits per data pixel +NAXIS = 2 / number of data axes +NAXIS1 = 1552 / length of data axis 1 +NAXIS2 = 770 / length of data axis 2 +PCOUNT = 0 / required keyword; must = 0 +GCOUNT = 1 / required keyword; must = 1 +BZERO = 32768. +BSCALE = 1. 
+CAMERA = 'RED ' / Camera name +CONTROLR= 'ghostRed' / Controller Name +CONHWV = 'ARC v3 ' / Controller hardware version +CCDNAME = 'EEV231-C6' / CCD identification +CCDSIZE = '[1:6144,1:6160]' / CCD size +CCDNAMPS= 4 / Number of amplifiers used to readout CCD +CCDSEC = '[3073:6144,3081:6160]' / Region of CCD read +LTV1 = -1.520000E+03 / Image transformation vector +LTV2 = -7.700000E+02 / Image transformation vector +LTM1_1 = 5.000000E-01 / Image transformation matrix +LTM1_2 = 0.000000E+00 / Image transformation matrix +LTM2_1 = 0.000000E+00 / Image transformation matrix +LTM2_2 = 2.500000E-01 / Image transformation matrix +CCDSUM = '2 4 ' / CCD pixel summing +AMPNAME = 'G ' / Amplifier identification +GAIN = 5.200000E-01 / Amplifier Gain (e-/ADU) +RDNOISE = 2.400000E+00 / Read noise for amp (e-) +SATURATE= 65535 / Maximum good data value - ADU +LINCOEF = 0.000000E+00 / Linearity coefficient No=Nt(1+Nt**lincoef) +SUMSAT = 0 / Saturation level of summing wells +AMPSIZE = '[3073:6144,3081:6160]' / Amplifier size +BIAS0001= '[1:16,1:770]' / Bias section - postscan cols +BIASSEC = '[1:16,1:770]' / Bias section - user specified +AMPSEC = '[3072:1,3080:1]' / Amplifier section +ATV1 = 6145 / Amplifier transformation vector +ATV2 = 6161 / Amplifier transformation vector +ATM1_1 = -1 / Amplifier transformation matrix +ATM1_2 = 0 / Amplifier transformation matrix +ATM2_1 = 0 / Amplifier transformation matrix +ATM2_2 = -1 / Amplifier transformation matrix +TRIMSEC = '[17:1552,1:770]' / Trim section +DATASEC = '[17:1552,1:770]' / Data section +DETSEC = '[3073:6144,3081:6160]' / Detector section +DTV1 = 0 / Detector transformation vector +DTV2 = 0 / Detector transformation vector +DTM1_1 = 1 / Detector transformation matrix +DTM1_2 = 0 / Detector transformation matrix +DTM2_1 = 0 / Detector transformation matrix +DTM2_2 = 1 / Detector transformation matrix +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 69 / Added by AstroData + +--- HDU 69 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 16 / number of bits per data pixel +NAXIS = 2 / number of data axes +NAXIS1 = 1552 / length of data axis 1 +NAXIS2 = 770 / length of data axis 2 +PCOUNT = 0 / required keyword; must = 0 +GCOUNT = 1 / required keyword; must = 1 +BZERO = 32768. +BSCALE = 1. 
+CAMERA = 'RED ' / Camera name +CONTROLR= 'ghostRed' / Controller Name +CONHWV = 'ARC v3 ' / Controller hardware version +CCDNAME = 'EEV231-C6' / CCD identification +CCDSIZE = '[1:6144,1:6160]' / CCD size +CCDNAMPS= 4 / Number of amplifiers used to readout CCD +CCDSEC = '[1:3072,3081:6160]' / Region of CCD read +LTV1 = 0.000000E+00 / Image transformation vector +LTV2 = -7.700000E+02 / Image transformation vector +LTM1_1 = 5.000000E-01 / Image transformation matrix +LTM1_2 = 0.000000E+00 / Image transformation matrix +LTM2_1 = 0.000000E+00 / Image transformation matrix +LTM2_2 = 2.500000E-01 / Image transformation matrix +CCDSUM = '2 4 ' / CCD pixel summing +AMPNAME = 'H ' / Amplifier identification +GAIN = 5.500000E-01 / Amplifier Gain (e-/ADU) +RDNOISE = 2.600000E+00 / Read noise for amp (e-) +SATURATE= 65535 / Maximum good data value - ADU +LINCOEF = 0.000000E+00 / Linearity coefficient No=Nt(1+Nt**lincoef) +SUMSAT = 0 / Saturation level of summing wells +AMPSIZE = '[1:3072,3081:6160]' / Amplifier size +BIAS0001= '[1537:1552,1:770]' / Bias section - postscan cols +BIASSEC = '[1537:1552,1:770]' / Bias section - user specified +AMPSEC = '[1:3072,3080:1]' / Amplifier section +ATV1 = 0 / Amplifier transformation vector +ATV2 = 6161 / Amplifier transformation vector +ATM1_1 = 1 / Amplifier transformation matrix +ATM1_2 = 0 / Amplifier transformation matrix +ATM2_1 = 0 / Amplifier transformation matrix +ATM2_2 = -1 / Amplifier transformation matrix +TRIMSEC = '[1:1536,1:770]' / Trim section +DATASEC = '[1:1536,1:770]' / Data section +DETSEC = '[1:3072,3081:6160]' / Detector section +DTV1 = 0 / Detector transformation vector +DTV2 = 0 / Detector transformation vector +DTM1_1 = 1 / Detector transformation matrix +DTM1_2 = 0 / Detector transformation matrix +DTM2_1 = 0 / Detector transformation matrix +DTM2_2 = 1 / Detector transformation matrix +EXPID = 1 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 70 / Added by AstroData + +--- HDU 70 --- +XTENSION= 'IMAGE ' / IMAGE extension +BITPIX = 16 / number of bits per data pixel +NAXIS = 2 / number of data axes +NAXIS1 = 150 / length of data axis 1 +NAXIS2 = 130 / length of data axis 2 +PCOUNT = 0 / number of random group parameters +GCOUNT = 1 / number of random groups +BZERO = 32768. +BSCALE = 1. +DARKTIME= 18.13 / Total dark time of the observation +ELAPSED = 18. / Total elapsed time between shutter open and rea +EXPOSED = 18. / Actual exposure time +EXPREQ = 18. / Requested exposure time +NOTES = ' ' +OBJECT = ' ' +OBSERVER= ' ' +RUN = 61 / Exposure run number +CCDTEMP = 0. +READMODE= 0 / (CHIP_READOUT_SLOW=0, CHIP_READOUT_MEDIUM=1, C +RDOUT = 0.002 / Readout time +EXPTIME = 18. +I1HBFWHM= 99. / IFU1 Hi Res Blue FWHM +I1HRFWHM= 1.12240554389291 / IFU1 Hi Res Red FWHM +I1SBFWHM= 5.84969211314756 / IFU1 Std Res Blue FWHM +I1SRFWHM= 2.05406777542405 / IFU1 Std Res Red FWHM +I2SBFWHM= 99. / IFU2 Std Res Blue FWHM +I2SRFWHM= 99. 
/ IFU2 Std Res Red FWHM +OBSTYPE = 'OBJECT ' / Observation type +FILENAME= 'sv-20230518.093255-0060.fits' / Original host filename +OBSID = 'sv-20230518.093255-0060' / Observation ID +INSTRUME= 'GHOST ' / Instrument name +CAMERA = 'SLITV ' / Camera name +WINDOW = ' ' / Readout window name +DETECTOR= 'Ghost BigEye Sony ICX674' / Detector identification +DETSIZE = '[1:1928,1:1452]' / Detector size (pixels) +NCCDS = 1 / Number of CCDs +NAMPS = 1 / Number of amplifiers +COMMENT = 'FITSKW1 ' / Not available +COMMENT = 'FITSKW2 ' / Not available +COMMENT = 'FITSKW3 ' / Not available +COMMENT = 'FITSKW4 ' / Not available +OBJECT1 = ' ' +TARGET1 = 2 / IFU_TARGET_NONE=0, IFU_TARGET_SKY=1, IFU_TARGET +OBJECT2 = ' ' +TARGET2 = 1 / IFU_TARGET_NONE=0, IFU_TARGET_SKY=1, IFU_TARGET +IFU1GDX = 0.159573425460753 / Cass Unit Positioner 1 focal plane guide X offs +IFU1GDY = 0.306946497361751 / Cass Unit Positioner 1 focal plane guide Y offs +IFU2GDX = 0. / Cass Unit Positioner 2 focal plane guide X offs +IFU2GDY = 0. / Cass Unit Positioner 2 focal plane guide Y offs +ADC1A = 34.2162857454351 / Cassegrain Unit Positioner 1 ADC aprism angle +ADC1B = 121.459421920146 / Cassegrain Unit Positioner 1 ADC bprism angle +ADC2A = 34.2164381037575 / Cassegrain Unit Positioner 2 ADC aprism angle +ADC2B = 121.459498274443 / Cassegrain Unit Positioner 2 ADC bprism angle +CURACT1 = 7.3 / Cassegrain Unit electronics rack temperature 1 +CURACT2 = 8.5 / Cassegrain Unit electronics rack temperature 2 +HRIFU1D = -37.6584893336417 / Cassegrain Unit Positioner 1 Hi Dec +HRIFU1R = 11.6515962921762 / Cassegrain Unit Positioner 1 Hi RA +SKIFU1D = -37.6593815345363 / Cassegrain Unit Positioner 1 Sky Dec +SKIFU1R = 11.6522634617364 / Cassegrain Unit Positioner 1 Sky RA +SRIFU1D = -37.6593809169448 / Cassegrain Unit Positioner 1 Std Dec +IFU1X = -0.0373052149839879 / Cassegrain Unit Positioner 1 focal plane X posi +IFU1Y = 0.072627726736 / Cassegrain Unit Positioner 1 focal plane Y posi +SRIFU1R = 11.650910853477 / Cassegrain Unit Positioner 1 RA +I1HGD0D = -37.6583909403138 / Cassegrain Unit high res IFU guide fiber 0 Dec +I1HGD0R = 11.6518124814276 / Cassegrain Unit high res IFU guide fiber 0 RA +I1HGD1D = -37.6585879237727 / Cassegrain Unit high res IFU guide fiber 1 Dec +I1HGD1R = 11.6518120583205 / Cassegrain Unit high res IFU guide fiber 1 RA +I1HGD2D = -37.6586863170997 / Cassegrain Unit high res IFU guide fiber 2 Dec +I1HGD2R = 11.651595868495 / Cassegrain Unit high res IFU guide fiber 2 RA +I1HGD3D = -37.658587726575 / Cassegrain Unit high res IFU guide fiber 3 Dec +I1HGD3R = 11.6513801023518 / Cassegrain Unit high res IFU guide fiber 3 RA +I1HGD4D = -37.6583907431177 / Cassegrain Unit high res IFU guide fiber 4 Dec +I1HGD4R = 11.651380526605 / Cassegrain Unit high res IFU guide fiber 4 RA +I1HGD5D = -37.6582923501836 / Cassegrain Unit high res IFU guide fiber 5 Dec +I1HGD5R = 11.6515967158552 / Cassegrain Unit high res IFU guide fiber 5 RA +I1HSC00D= -37.6584893336417 / Cassegrain Unit high res IFU science fiber 0 De +I1HSC00R= 11.6515962921762 / Cassegrain Unit high res IFU science fiber 0 RA +I1HSC10D= -37.6585550604393 / Cassegrain Unit high res IFU science fiber 10 D +I1HSC10R= 11.6517401362088 / Cassegrain Unit high res IFU science fiber 10 R +I1HSC11D= -37.6585878582151 / Cassegrain Unit high res IFU science fiber 11 D +I1HSC11R= 11.6516680729974 / Cassegrain Unit high res IFU science fiber 11 R +I1HSC12D= -37.6586206559471 / Cassegrain Unit high res IFU science fiber 12 D +I1HSC12R= 11.6515960097223 / Cassegrain Unit 
high res IFU science fiber 12 R +I1HSC13D= -37.6585877924825 / Cassegrain Unit high res IFU science fiber 13 D +I1HSC13R= 11.6515240876745 / Cassegrain Unit high res IFU science fiber 13 R +I1HSC14D= -37.6585549289744 / Cassegrain Unit high res IFU science fiber 14 D +I1HSC14R= 11.6514521656903 / Cassegrain Unit high res IFU science fiber 14 R +I1HSC15D= -37.6584892678219 / Cassegrain Unit high res IFU science fiber 15 D +I1HSC15R= 11.6514523070445 / Cassegrain Unit high res IFU science fiber 15 R +I1HSC16D= -37.6584236066693 / Cassegrain Unit high res IFU science fiber 16 D +I1HSC16R= 11.6514524483984 / Cassegrain Unit high res IFU science fiber 16 R +I1HSC17D= -37.6583908090247 / Cassegrain Unit high res IFU science fiber 17 D +I1HSC17R= 11.6515245115456 / Cassegrain Unit high res IFU science fiber 17 R +I1HSC18D= -37.6583580113363 / Cassegrain Unit high res IFU science fiber 18 D +I1HSC18R= 11.6515965746291 / Cassegrain Unit high res IFU science fiber 18 R +I1HSC01D= -37.6584565359096 / Cassegrain Unit high res IFU science fiber 1 De +I1HSC01R= 11.6516683553237 / Cassegrain Unit high res IFU science fiber 1 RA +I1HSC02D= -37.6585221970623 / Cassegrain Unit high res IFU science fiber 2 De +I1HSC02R= 11.6516682141607 / Cassegrain Unit high res IFU science fiber 2 RA +I1HSC03D= -37.6585549947944 / Cassegrain Unit high res IFU science fiber 3 De +I1HSC03R= 11.6515961509494 / Cassegrain Unit high res IFU science fiber 3 RA +I1HSC04D= -37.6585221313299 / Cassegrain Unit high res IFU science fiber 4 De +I1HSC04R= 11.6515242289651 / Cassegrain Unit high res IFU science fiber 4 RA +I1HSC05D= -37.6584564701773 / Cassegrain Unit high res IFU science fiber 5 De +I1HSC05R= 11.6515243702555 / Cassegrain Unit high res IFU science fiber 5 RA +I1HSC06D= -37.658423672489 / Cassegrain Unit high res IFU science fiber 6 De +I1HSC06R= 11.6515964334028 / Cassegrain Unit high res IFU science fiber 6 RA +I1HSC07D= -37.6583908747568 / Cassegrain Unit high res IFU science fiber 7 De +I1HSC07R= 11.6516684964865 / Cassegrain Unit high res IFU science fiber 7 RA +I1HSC08D= -37.6584237381336 / Cassegrain Unit high res IFU science fiber 8 De +I1HSC08R= 11.6517404184075 / Cassegrain Unit high res IFU science fiber 8 RA +I1HSC09D= -37.6584893992865 / Cassegrain Unit high res IFU science fiber 9 De +I1HSC09R= 11.6517402773082 / Cassegrain Unit high res IFU science fiber 9 RA +I1SGD0D = -37.6592168194752 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD0R = 11.6510311968479 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD1D = -37.6593810276441 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD1R = 11.6511508315781 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD2D = -37.6595451252353 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD2R = 11.6510304882054 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD3D = -37.6595450142922 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD3R = 11.6507905095741 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD4D = -37.6593808057593 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD4R = 11.6506708753766 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD5D = -37.6592167085335 / Cassegrain Unit Positioner 1 std res IFU guide +I1SGD5R = 11.6507912192778 / Cassegrain Unit Positioner 1 std res IFU guide +I1SSC0D = -37.6593809169448 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC0R = 11.650910853477 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC1D = -37.6593262547285 / Cassegrain Unit Positioner 1 std res IFU scienc 
+I1SSC1R = 11.6510309606345 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC2D = -37.6594356899819 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC2R = 11.6510307244203 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC3D = -37.6594903521979 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC3R = 11.6509106170856 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC4D = -37.6594355790393 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC4R = 11.6507907461427 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC5D = -37.6593261437864 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC5R = 11.6507909827106 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC6D = -37.6592714816917 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSC6R = 11.6509110898677 / Cassegrain Unit Positioner 1 std res IFU scienc +I1SSK0D = -37.6593815345363 / Cassegrain Unit std res IFU sky fiber 0 Dec +I1SSK0R = 11.6522634617364 / Cassegrain Unit std res IFU sky fiber 0 RA +I1SSK1D = -37.6594363062047 / Cassegrain Unit std res IFU sky fiber 1 Dec +I1SSK1R = 11.6523833336786 / Cassegrain Unit std res IFU sky fiber 1 RA +I1SSK2D = -37.6594909697921 / Cassegrain Unit std res IFU sky fiber 2 Dec +I1SSK2R = 11.6522632273389 / Cassegrain Unit std res IFU sky fiber 2 RA +SRIFU2D = -37.682992779424 / Cassegrain Unit Positioner 2 Dec +IFU2X = -86.923269710296 / Cassegrain Unit Positioner 2 focal plane X posi +IFU2Y = -52.106378584976 / Cassegrain Unit Positioner 2 focal plane Y posi +SRIFU2R = 11.6009647656703 / Cassegrain Unit Positioner 2 RA +I2HSK0D = -37.6839050443383 / Cassegrain Unit high res IFU sky fiber 0 Dec +I2HSK0R = 11.6006830846762 / Cassegrain Unit high res IFU sky fiber 0 RA +I2HSK1D = -37.6838724022947 / Cassegrain Unit high res IFU sky fiber 1 Dec +I2HSK1R = 11.6007548740507 / Cassegrain Unit high res IFU sky fiber 1 RA +I2HSK2D = -37.6839380743294 / Cassegrain Unit high res IFU sky fiber 2 Dec +I2HSK2R = 11.6007548103608 / Cassegrain Unit high res IFU sky fiber 2 RA +I2HSK3D = -37.683970716373 / Cassegrain Unit high res IFU sky fiber 3 Dec +I2HSK3R = 11.6006830209228 / Cassegrain Unit high res IFU sky fiber 3 RA +I2HSK4D = -37.6839376863384 / Cassegrain Unit high res IFU sky fiber 4 Dec +I2HSK4R = 11.6006112952388 / Cassegrain Unit high res IFU sky fiber 4 RA +I2HSK5D = -37.6838720143038 / Cassegrain Unit high res IFU sky fiber 5 Dec +I2HSK5R = 11.6006113590556 / Cassegrain Unit high res IFU sky fiber 5 RA +I2HSK6D = -37.6838393723036 / Cassegrain Unit high res IFU sky fiber 6 Dec +I2HSK6R = 11.6006831484296 / Cassegrain Unit high res IFU sky fiber 6 RA +I2SGD0D = -37.6828289223136 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD0R = 11.6010845185783 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD1D = -37.682993425263 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD1R = 11.6012039545007 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD2D = -37.6831572824938 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD2R = 11.6010842015906 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD3D = -37.6831566364125 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD3R = 11.6008450122337 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD4D = -37.6829921331019 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD4R = 11.6007255768451 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD5D = -37.6828282762337 / Cassegrain Unit Positioner 2 std res IFU guide +I2SGD5R = 11.6008453302796 / Cassegrain Unit Positioner 2 std res 
IFU guide +I2SSC0D = -37.682992779424 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC0R = 11.6009647656703 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC1D = -37.6829383757072 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC1R = 11.6010844129161 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC2D = -37.6830478291006 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC2R = 11.6010843072535 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC3D = -37.6831022328171 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC3R = 11.6009646598312 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC4D = -37.6830471830198 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC4R = 11.6008451182493 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC5D = -37.6829377296268 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC5R = 11.6008452242646 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC6D = -37.6828833260307 / Cassegrain Unit Positioner 2 std res IFU scienc +I2SSC6R = 11.600964871509 / Cassegrain Unit Positioner 2 std res IFU scienc +CUAIRT = 8.1319620349768 / Cassegrain Unit air temperature +CUHUM = 31.3547166356424 / Cassegrain Unit humidity +CUT1 = 6.7 / Cassegrain Unit temperature 1 +CUT2 = 6. / Cassegrain Unit temperature 2 +CFWNAME = 'CFW_CLOSED1' / Slit Unit calibration filter wheel position +CFWPOS = 11 / Slit Unit calibration filter wheel position (CF +FA1ACTIV= 0 / Is the Slit Unit fiber agitator 1 active? +FA2ACTIV= 0 / Is the Slit Unit fiber agitator 2 active? +SLURACT1= 12.9 / Slit Unit electronics rack temperature 1 +SLURACT2= 8.9 / Slit Unit electronics rack temperature 2 +AGCTEMP = 16. / Slit Unit acquisition & guiding camera temperat +THXELAMP= 0 / Slit Unit HCL power flag +PNEUMISO= 0 / Slit Unit optical bench pneumatic isolator erro +SVCTEMP = 22.3 / Slit Unit slit viewing camera temperature +SMPNAME = 'SMP_STD_ONLY' / Slit Unit slit mask positioner wheel position +SMPPOS = 2 / Slit Unit slit mask position (SMP_UNKNOWN=0, SM +BFOCUS = -11.29693797412 / Spectrograph blue focus stage position +BTEMP1 = -119.997 / Spectrograph blue Lakeshore 336 temperature 1 +BTEMP2 = -115.623 / Spectrograph blue Lakeshore 336 temperature 2 +BTEMP3 = -167.318 / Spectrograph blue Lakeshore 336 temperature 3 +BVAC = 0.000672 / Spectrograph blue combined vacuum value +SPERACT1= 16.9 / Spectrograph electronics rack temperature 1 +SPERACT2= 10.4 / Spectrograph electronics rack temperature 2 +DRYAIRT = 19.779 / Spectrograph Lakeshore 224 dry injection air te +GRATMNTT= 20.425 / Spectrograph Lakeshore 224 grating mount temper +INENCT1 = 20.321 / Spectrograph Lakeshore 224 inner enclosure temp +INENCT2 = 20.453 / Spectrograph Lakeshore 224 inner enclosure temp +OPBENCHT= 20.447 / Spectrograph Lakeshore 224 optical bench temper +OUTENCT1= 20.357 / Spectrograph Lakeshore 224 outer enclosure temp +OUTENCT2= 20.001 / Spectrograph Lakeshore 224 outer enclosure temp +PLABAIRT= 13.832 / Spectrograph Lakeshore 224 pier lab air tempera +BCRYOHT = 23. / Spectrograph onewire blue cryostat housing temp +OUTENCH1= 16.0481015748993 / Spectrograph onewire outer enclosure humidity 1 +OUTENCH2= 16.9149066048102 / Spectrograph onewire outer enclosure humidity 2 +OUTENCP1= 736.84814453125 / Spectrograph onewire outer enclosure pressure 1 +OUTENCP2= 737.254638671875 / Spectrograph onewire outer enclosure pressure 2 +PLABH = 23.5441338053962 / Spectrograph onewire pier lab humidity +PLABP = 735.872802734375 / Spectrograph onewire pier lab pressure +PLABT1 = 14. 
/ Spectrograph onewire pier lab temperature 1 +PLABT2 = 14. / Spectrograph onewire pier lab temperature 2 +RAC1T = 15.75 / Spectrograph onewire rack 1 back top temperatur +RAC2CHAT= 8.375 / Spectrograph onewire rack2 Chiller Air temperat +RAC2T = 10.625 / Spectrograph onewire rack 2 back top temperatur +RAC3CHAT= 22.1875 / Spectrograph onewire rack 3 chiller air tempera +RAC3T = 21.5 / Spectrograph onewire rack 3 front top temperatu +RCRYOHT = 23.25 / Spectrograph onewire red cryostat housing tempe +SVCASET = 20.5 / Spectrograph onewire slit viewing camera CCD ca +RFOCUS = -12.796948238768 / Spectrograph red focus stage position +RTEMP1 = -117.714 / Spectrograph red Lakeshore 336 temperature 1 +RTEMP2 = -106.828 / Spectrograph red Lakeshore 336 temperature 2 +RTEMP3 = -164.72 / Spectrograph red Lakeshore 336 temperature 3 +RPIAX = 0. / Spectrograph rPi accelerometer x value mg +RPIAY = 0. / Spectrograph rPi accelerometer y value mg +RPIAZ = 0. / Spectrograph rPi accelerometer z value mg +RPIH = 0. / Spectrograph rPi humidity % +RPIP = 0. / Spectrograph rPi pressure hPA +RPIT = 0. / Spectrograph rPi temperature C +RVAC = 0.002003 / Spectrograph red vacuum combined vacuum value +MEERT1 = 19.9990844726562 / Spectrograph thermal enclosure Meerstetter 1 ob +MEERT11 = 0. / Spectrograph thermal enclosure Meerstetter 11 o +MEERT12 = 19.9884643554688 / Spectrograph thermal enclosure Meerstetter 12 o +MEERT13 = 20.0034790039062 / Spectrograph thermal enclosure Meerstetter 13 o +MEERT14 = 20.0001525878906 / Spectrograph thermal enclosure Meerstetter 14 o +MEERT15 = 20.0048522949219 / Spectrograph thermal enclosure Meerstetter 15 o +MEERT16 = 20.0150451660156 / Spectrograph thermal enclosure Meerstetter 16 o +MEERT17 = 19.9998168945312 / Spectrograph thermal enclosure Meerstetter 17 o +MEERT18 = 19.9955749511719 / Spectrograph thermal enclosure Meerstetter 18 o +MEERT19 = 19.9977416992188 / Spectrograph thermal enclosure Meerstetter 19 o +MEERT20 = 19.9990539550781 / Spectrograph thermal enclosure Meerstetter 20 o +MEERT2 = 20.0096740722656 / Spectrograph thermal enclosure Meerstetter 2 ob +MEERT21 = 19.998291015625 / Spectrograph thermal enclosure Meerstetter 21 o +MEERT22 = 20.0002136230469 / Spectrograph thermal enclosure Meerstetter 22 o +MEERT23 = 20.0036315917969 / Spectrograph thermal enclosure Meerstetter 23 o +MEERT24 = 20.146240234375 / Spectrograph thermal enclosure Meerstetter 24 o +MEERT25 = 0. 
/ Spectrograph thermal enclosure Meerstetter 25 o +MEERT3 = 19.9985961914062 / Spectrograph thermal enclosure Meerstetter 3 ob +MEERT4 = 19.9980773925781 / Spectrograph thermal enclosure Meerstetter 4 ob +MEERT5 = 20.000244140625 / Spectrograph thermal enclosure Meerstetter 5 ob +MEERT6 = 19.9959106445312 / Spectrograph thermal enclosure Meerstetter 6 ob +MEERT7 = 19.9959106445312 / Spectrograph thermal enclosure Meerstetter 7 ob +MEERT8 = 19.9944152832031 / Spectrograph thermal enclosure Meerstetter 8 ob +MEERT9 = 20.0023803710938 / Spectrograph thermal enclosure Meerstetter 9 ob +MEERT10 = 19.9992980957031 / Spectrograph thermal enclosure Meerstetter 10 o +CONTROLR= 'ghostSlitViewer' / Controller Name +CONHWV = 'BigEye G-283' / Controller hardware version +CCDNAME = 'Ghost BigEye Sony ICX674' / CCD identification +CCDSIZE = '[1:1928,1:1452]' / CCD size +CCDNAMPS= 1 / Number of amplifiers used to readout CCD +CCDSEC = '[801:1100,681:940]' / Region of CCD read +LTV1 = -4.000000E+02 / Image transformation vector +LTV2 = -3.400000E+02 / Image transformation vector +LTM1_1 = 5.000000E-01 / Image transformation matrix +LTM1_2 = 0.000000E+00 / Image transformation matrix +LTM2_1 = 0.000000E+00 / Image transformation matrix +LTM2_2 = 5.000000E-01 / Image transformation matrix +CCDSUM = '2 2 ' / CCD pixel summing +AMPNAME = 'A ' / Amplifier identification +GAIN = 1.000000E+00 / Amplifier Gain (e-/ADU) +RDNOISE = 1.000000E+00 / Read noise for amp (e-) +SATURATE= 16383 / Maximum good data value - ADU +LINCOEF = 0.000000E+00 / Linearity coefficient No=Nt(1+Nt**lincoef) +SUMSAT = 0 / Saturation level of summing wells +AMPSIZE = '[1:1928,1:1452]' / Amplifier size +COMMENT = 'BIASSEC = 0' / No BIASSEC +AMPSEC = '[801:1100,681:940]' / Amplifier section +ATV1 = 800 / Amplifier transformation vector +ATV2 = 680 / Amplifier transformation vector +ATM1_1 = 1 / Amplifier transformation matrix +ATM1_2 = 0 / Amplifier transformation matrix +ATM2_1 = 0 / Amplifier transformation matrix +ATM2_2 = 1 / Amplifier transformation matrix +TRIMSEC = '[1:150,1:130]' / Trim section +DATASEC = '[1:150,1:130]' / Data section +DETSEC = '[801:1100,681:940]' / Detector section +DTV1 = 0 / Detector transformation vector +DTV2 = 0 / Detector transformation vector +DTM1_1 = 1 / Detector transformation matrix +DTM1_2 = 0 / Detector transformation matrix +DTM2_1 = 0 / Detector transformation matrix +DTM2_2 = 1 / Detector transformation matrix +UTSTART = '09:32:55' / UTC of observation start +EXPUTST = '09:32:55' / UTC of observation start +UTEND = '09:33:13' / UTC of observation end +EXPUTEND= '09:33:13' / UTC of observation end +DATE-OBS= '2023-05-18' / Date of observation start +EQUINOX = 2000 / Standard FK5 (years) +RADESYS = 'FK5 ' / Coordinate reference frame +IMAGESWV= 'CICADA Release X.Y.Z' / Image creation software version +KWDICT = 'CICADA FITS V 1.8' / Keyword dictionary version +EXPID = 61 +EXTNAME = 'SCI ' / Added by AstroData +EXTVER = 71 / Added by AstroData diff --git a/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.module b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.module new file mode 100644 index 00000000..8b25319f --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.module @@ -0,0 +1,43 @@ +from caom2utils.blueprints import _to_float + + +def get_exposure(uri): + return 1200.0 + + +# def _get_energy_chunk_resolving_power(header): +# start = 76000.0 +# resolut = self._headers[0].get('RESOLUT') +# if resolut == 'Standard': +# 
start = 56000.0
+
+# red_ccds = self._headers[0].get('REDCCDS')
+# blue_ccds = self._headers[0].get('BLUCCDS')
+
+# factor_1 = _to_float(red_ccds.split('x')[0])
+# factor_2 = _to_float(blue_ccds.split('x')[0])
+# if factor_1 == factor_2:
+# factor = factor_1
+# else:
+# raise Exception(f'Factors do not match. Blue {factor_2}, Red {factor_1}')
+# return start / factor
+
+
+def _get_energy_chunk_range_end_val(header):
+    result = None
+    camera = header.get('CAMERA')
+    if camera == 'RED':
+        result = 1060.0  # nm
+    elif camera == 'BLUE':
+        result = 530.0  # nm
+    return result
+
+
+def _get_energy_chunk_range_start_val(header):
+    result = None
+    camera = header.get('CAMERA')
+    if camera == 'RED':
+        result = 520.0  # nm
+    elif camera == 'BLUE':
+        result = 347.0  # nm
+    return result
diff --git a/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.py b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.py
new file mode 100644
index 00000000..6d5708c8
--- /dev/null
+++ b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.py
@@ -0,0 +1,65 @@
+import logging
+from astropy.time import Time
+from caom2utils.blueprints import _to_float
+
+
+def get_exposure(uri):
+    return 1200.0
+
+
+def _get_energy_chunk_range_end_val(header):
+    result = None
+    camera = header.get('CAMERA')
+    if camera == 'RED':
+        result = 1060.0  # nm
+    elif camera == 'BLUE':
+        result = 530.0  # nm
+    return result
+
+
+def _get_energy_chunk_range_start_val(header):
+    result = None
+    camera = header.get('CAMERA')
+    if camera == 'RED':
+        result = 520.0  # nm
+    elif camera == 'BLUE':
+        result = 347.0  # nm
+    return result
+
+
+def get_time_delta(header):
+    result = None
+    date_obs = header.get('DATE-OBS')
+    ut_end = header.get('UTEND')
+    ut_start = header.get('UTSTART')
+    if date_obs and ut_end and ut_start:
+        temp_start = f'{date_obs} {ut_start}'
+        temp_end = f'{date_obs} {ut_end}'
+        start = Time(temp_start)
+        end = Time(temp_end)
+        if start and end:
+            start.format = 'mjd'
+            end.format = 'mjd'
+            result = (end - start).value
+        else:
+            logging.debug(f'Cannot convert {temp_start} or {temp_end} to MJD for {header.get("EXPID")}')
+    else:
+        logging.error(f'Missing one of DATE-OBS {date_obs}, UTSTART {ut_start}, or UTEND {ut_end} in {header.get("EXPID")}')
+    return result
+
+
+def get_time_function_val(header):
+    result = None
+    date_obs = header.get('DATE-OBS')
+    ut_start = header.get('UTSTART')
+    if date_obs and ut_start:
+        temp_start = f'{date_obs} {ut_start}'
+        start = Time(temp_start)
+        if start:
+            start.format = 'mjd'
+            result = start.value
+        else:
+            logging.debug(f'Cannot convert {temp_start} to MJD')
+    else:
+        logging.error(f'Missing one of DATE-OBS {date_obs} or UTSTART {ut_start} in {header.get("EXPID")}')
+    return result
diff --git a/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/omm.blueprint b/caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.blueprint
similarity index 63%
rename from caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/omm.blueprint
rename to caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.blueprint
index 68b2dd6b..20adc3f2 100644
--- a/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/omm.blueprint
+++ b/caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.blueprint
@@ -1,6 +1,7 @@
 Observation.type = OBJECT
 Observation.algorithm.name = composite
-DerivedObservation.members = ('BINTABLE', ['FICS'], 1)
+# DerivedObservation.members = ('BINTABLE',
['Filename'], 1) +DerivedObservation.members = ('BINTABLE', 'Filename', 1) Plane.dataProductType = image Plane.calibrationLevel = 2 Chunk.position.axis.axis1.ctype = CTYPE1 diff --git a/caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.expected.xml b/caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.expected.xml new file mode 100644 index 00000000..189f8b88 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.expected.xml @@ -0,0 +1,40 @@ + + + GEMINI + wrgnN20140428S0085_arc + + composite + + OBJECT + science + + + wrgnN20140428S0085_arc + image + 2 + + + cadc:GEMINICADC/wrgnN20140428S0085_arc.fits + science + data + application/fits + 23040 + md5:7bd0b06e8c7686099d013f055b979587 + + + 0 + + + + + + + + + caom:GEMINI/N20140428S0085.fits + caom:GEMINI/N20140428S0179.fits + caom:GEMINI/N20140428S0180.fits + caom:GEMINI/rgnN20140428S0169_flat.fits + caom:GEMINI/N20140503S0161.fits + + diff --git a/caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.fits b/caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.fits new file mode 100644 index 00000000..726a9d01 --- /dev/null +++ b/caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.fits @@ -0,0 +1 @@ +SIMPLE = T / Fits standard BITPIX = 16 / Bits per pixel NAXIS = 0 / Number of axes EXTEND = T / File may contain extensions ORIGIN = 'NOAO-IRAF FITS Image Kernel July 2003' / FITS file originator DATE = '2020-07-30T15:23:06' / Date FITS file was generated IRAF-TLM= '2020-07-30T15:23:55' / Time of last modification COMMENT FITS (Flexible Image Transport System) format is defined in 'AstronomyCOMMENT and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H INSTRUME= 'NIFS ' / Instrument used to acquire data. OBJECT = 'Xe,Ar ' / Object Name OBSTYPE = 'ARC ' / Observation type OBSCLASS= 'partnerCal' / Observe class GEMPRGID= 'GN-2014A-Q-85' / Gemini Science Program ID OBSID = 'GN-2014A-Q-85-12' / Observation ID / Data label DATALAB = 'GN-2014A-Q-85-12-001-ARC' / Datalabel OBSERVER= 'Roth ' / Observer OBSERVAT= 'Gemini-North' / Observatory TELESCOP= 'Gemini-North' / Telescope PARALLAX= 0. / Parallax of Target RADVEL = 0. / Heliocentric Radial Velocity EPOCH = 2014.32156286926 / Epoch for Target coordinates EQUINOX = 0. / Equinox for Target coordinates TRKEQUIN= 2000. / Tracking equinox SSA = 'Pakzad ' / SSA RA = 229.12653518 / RA of Target DEC = -15.58164286 / Declination of Target ELEVATIO= 54.3740513888889 / Current Elevation AZIMUTH = 187.036002777778 / Current Azimuth CRPA = -95.8894017373411 / Current Cass Rotator Position Angle HA = '+00:16:51.05' / Telescope hour angle LT = '01:29:47.1' / Local time at start of observation TRKFRAME= 'FK5 ' / Tracking co-ordinate DECTRACK= 0.0010925314052 / Differential tracking rate Dec TRKEPOCH= 56775.479765 / Differential tracking reference epoch RATRACK = -0.00018777242254 / Differential tracking rate RA FRAME = 'APPT ' / Target coordinate system PMDEC = 0. / Proper Motion in Declination PMRA = 0. / Proper Motion in RA WAVELENG= 22000. 
/ Effective Target Wavelength RAWIQ = '70-percentile' / Raw Image Quality RAWCC = '80-percentile' / Raw Cloud Cover RAWWV = 'UNKNOWN ' / Raw Water Vapour/Transparency RAWBG = '20-percentile' / Raw Background RAWPIREQ= 'NO ' / PI Requirements Met RAWGEMQA= 'USABLE ' / Gemini Quality Assessment CGUIDMOD= 'Basic ' / Driving mode for carousel UT = '11:29:47.6' / Beginning of Observation (UT) M2BAFFLE= 'NEAR IR ' / Position of M2 baffle M2CENBAF= 'OPEN ' / Position of M2 central hole baffle ST = '15:33:21.9' / Sidereal time at the start of the exposure XOFFSET = -0.215043755381406 / Telescope offset in x in arcsec YOFFSET = 0.231947194757898 / Telescope offset in y in arcsec POFFSET = -0.23194719475821 / Telescope offset in p in arcsec QOFFSET = -0.215043755381069 / Telescope offset in q in arcsec RAOFFSET= -0.235664899097696 / Telescope offset in RA in arcsec DECOFFSE= -0.21096296646063 / Telescope offset in DEC in arcsec RATRGOFF= 0. / Target offset in RA in arcsec DECTRGOF= 0. / Target offset in DEC in arcsec PA = 1. / Sky Position Angle at start of exposure IAA = 270. / Instrument Alignment Angle SFRT2 = 88.578 / Science fold rotation angle (degrees) SFTILT = -44.89 / Science fold tilt angle (degrees) SFLINEAR= 8. / Science fold linear position (mm) AOFOLD = 'IN ' / AO Pick-Off Mirror Position PWFS1_ST= 'parked ' / PWFS1 probe state (frozen,guiding,parked) PWFS2_ST= 'parked ' / PWFS2 probe state (frozen,guiding,parked) OIWFS_ST= 'parked ' / OIWFS probe state (frozen,guiding,parked) AOWFS_ST= 'guiding ' / AOWFS probe state (frozen,guiding,parked) SCIBAND = 3 / Science Ranking Band REQIQ = '70-percentile' / Requested Image Quality REQCC = '80-percentile' / Requested Cloud Cover REQBG = 'Any ' / Requested Background REQWV = 'Any ' / Requested Water Vapour REQMINAM= 1. / Requested Minimum Airmass REQMAXAM= 2.1 / Requested Maximum Airmass GCALLAMP= 'Ar ' / GCAL Lamp name GCALFILT= 'CLEAR ' / GCAL Filter name GCALDIFF= 'IR ' / GCAL Diffuser name GCALSHUT= 'CLOSED ' / GCAL Shutter (OPEN or CLOSED) AOARA = 229.12653518 / RA of AOWFS guide star AOARV = 0. / AOWFS Heliocentric Radial Velocity AOAWAVEL= 7000. / AOWFS Effective Target Wavelength AOADEC = -15.58164286 / Declination of AOWFS guide star AOAEPOCH= 2014.32156286926 / Epoch for AOWFS guide star coordinates AOAEQUIN= 0. / Equinox for AOWFS guide star coordinates AOAFRAME= 'APPT ' / AOWFS Target coordinate system AOAOBJEC= 'Titan ' / Object Name for AOWFS guide star AOAPMDEC= 0. / AOWFS Proper Motion in Declination AOAPMRA = 0. / AOWFS Proper Motion in RA AOAPARAL= 0. / AOWFS Parallax of Target AOFOCUS = -0.048 / AOWFS Focus Offset (mm) HUMIDITY= 8. / The relative humidity (fraction, 0..101). TAMBIENT= 2. / The ambient temp (C). TAMBIEN2= 35.6 / The ambient temp (F). PRESSURE= 462.17248 / The atmospheric pressure (mm Hg). PRESSUR2= 61600. / The atmospheric pressure (Pa). DEWPOINT= -29.8 / The dew point (C). DEWPOIN2= -21.64 / The dew point (F). WINDSPEE= 1.2 / The wind speed (m/s). WINDSPE2= 2.6844 / The wind speed (mph). WINDDIRE= 191. / The wind direction (degrees). WINDCOVR= 'Opened ' / Window cover position name (open|closed) FILTER = 'HK_G0603' / Filter name APOFFSET= 0. / Fopl offseti (arcseconds) GRATING = 'K_G5605 ' / Grating name GRATWAVE= 2.2 / Grating cetntral wavelength APERTURE= '3.0_Mask_G5610' / Focal plane mask FLIP = 'Out ' / Imaging (flip) mirror EXPRQ = 30. / Requested Exposure time DCNAME = 'NIFS DC ' / Nifs dc name PERIOD = 30. / Period NPERIODS= 1 / Number of periods EXPMODE = 'NONDESTRUCTIVE' / Exposure mode RDTIME = 5. 
/ Read time COADDS = 1 / Number of coadds summed BIASPWR = 3. / Source voltage of bias P-FET OBSMODE = 'IFU ' / Observing mode configuration LNRS = 1 / Number of non-destructive read pairs DATE-OBS= '2014-04-28' / Observation date (UT) INPORT = 1 / Number of ISS port where NIFS was located CTYPE1 = 'RA---TAN' / R.A. in tangent plane projection CRPIX1 = 14.999838 / Ref pix of axis 1 CRVAL1 = 228.916879 / RA at Ref pix in decimal degrees CTYPE2 = 'DEC--TAN' / DEC. in tangent plane projection CRPIX2 = 34.000126 / Ref pix of axis 2 CRVAL2 = -15.527632 / DEC at Ref pix in decimal degrees CD1_1 = -1.0E-06 / WCS matrix element 1 1 CD1_2 = 1.9E-05 / WCS matrix element 1 2 CD2_1 = -4.7E-05 / WCS matrix element 2 1 CD2_2 = -0. / WCS matrix element 2 2 RADECSYS= 'FK5 ' / R.A./DEC. coordinate system reference MJD_OBS = 56775.479771 / Mean Julian day of observation DETSIZE = '[1:2048,1:2048]' / Dimensions of detector DETECTOR= 'NIFS ' / Name of detector NCCDS = 1 / Number of detector CCDS NAMPS = 4 / Number of CCD amps CCDNAME = 'NIFS ' / CCD/ARRAY name CCDSIZE = '[1:2048,1:2048]' / Dimensions of CCD CCDSUM = '1 1 ' / CCD binning CCDSEC = '[1:512,1:2048]' / CCD section AMPNAME = '1 ' / Amplifier name AMPSEC = '[1:512,1:2048]' / Amplifier section DETSEC = '[1:2048,1:2048]' / Full detector size AMPNAME2= '2 ' / Amplifier 2 name AMPSEC2 = '[1024:513,1:2048]' / Amplifier 2 section AMPNAME3= '3 ' / Amplifier 3 name AMPSEC3 = '[1025:1536,1:2048]' / Amplifier 3 section AMPNAME4= '4 ' / Amplifier 4 name AMPSEC4 = '[2048:1537,1:2048]' / Amplifier 4 section EXPTIME = 30. / Exposure time (s) AOFREQ = 1000. / Altair sampling frequency during exposure (Hz) AOCOUNTS= 40.0886 / Altair GS counts at end of exposure (ADU/subapeAOSEEING= 0.596913 / Altair measured seeing at end of exposure (arcsAOWFSX = -0.482908 / Altair WFS X position [mm] (0.621mm/arcsec) AOWFSY = 0.902284 / Altair WFS Y position [mm] (0.621mm/arcsec) AOWFSZ = -3.83602 / Altair WFS Z position [mm] AOGAIN = 0.5087 / Altair centroid gain estimate AONCPAF = '/aoBase/rel/input/aoNcpaNIFSp1Ngs.dat' / Altair Non-Common Path AberrCRFOLLOW= 'yes ' / Cass Rotator follow mode (yes|no) AONDFILT= 'INDEF ' / Altair ND filter (in|out) AOFLENS = 'OUT ' / Altair Fields Lens (in|out) AOFLEXF = '/aoBase/rel/data/aoFlexNIFSp1Ngs.dat' / Altair flexure file LGUSTAGE= 'OUT ' / LGS U stage position AIRMASS = 1.23 / Mean airmass for the observation AMSTART = 1.23 / Airmass at start of exposure AMEND = 1.23 / Airmass at end of exposure RELEASE = '2014-04-28' / End of proprietary period YYYY-MM-DD MDF_YSHF= 4.1 / MDF Y shift (subtracted from y_ccd) MDF_FILE= 'nifs$data/nifs-mdf[1]' / MDF source NEXTEND = 1 / number of extensions in file MODE = 'IFU ' / Reduction mode BIASVOLT= 3.000 / Array bias voltage (V) PIXSCALE= 0.04300 / Pixel scale in arcsec/pixel NSCIEXT = 29 / Number of science extensions DISPAXIS= 1 / Dispersion axis (along columns) PREPARE = '2020-07-30T16:21:49' / UT Time stamp for NFPREPARE GEM-TLM = '2020-07-30T16:23:17' / UT Last modification with GEMINI BPMFILE = 'rgnN20140428S0169_sflat_bpm.pl' / Input bad pixel mask file WMEF = '2020-07-30T16:22:18' / UT Time stamp for WMEF GAINORIG= 2.4 / Input gain RONORIG = 6.3 / Input read-noise GEMCOMB = '2020-07-30T16:22:18' / UT Time stamp for GEMCOMBINE ORIGXSIZ= 2040 / Original size in X ORIGYSIZ= 2080 / Original size in Y NSCUT = '2020-07-30T16:22:22' / UT Time stamp for NSCUT NSAPPWAV= '2020-07-30T16:22:28' / UT Time stamp for NSAPPWAVE NSREDUCE= '2020-07-30T16:23:11' / UT Time stamp for NSREDUCE DARKIMAG= 
'tmpdark43230_1963' / Dark current image subtracted from raw data FLATIMAG= 'rgnN20140428S0169_flat' / Flat field image used NSWAVELE= '2020-07-30T16:23:17' / UT Time stamp for NSWAVELENGTH END XTENSION= 'BINTABLE' / binary table extension BITPIX = 8 / array data type NAXIS = 2 / number of array dimensions NAXIS1 = 192 / length of dimension 1 NAXIS2 = 5 / length of dimension 2 PCOUNT = 0 / number of group parameters GCOUNT = 1 / number of groups TFIELDS = 3 / number of table fields EXTNAME = 'PROVENANCE' TTYPE1 = 'Filename' TFORM1 = '64A ' TTYPE2 = 'Description' TFORM2 = '64A ' TTYPE3 = 'Type (member or input)' TFORM3 = '64A ' END N20140428S0085.fitsRaw arc frame.memberN20140503S0161.fitsRaw arc frame.memberN20140428S0179.fitsRaw dark frame.inputN20140428S0180.fitsRaw dark frame.inputrgnN20140428S0169_flat.fitsProcessed flat field frame.input \ No newline at end of file diff --git a/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/omm.module b/caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.py similarity index 100% rename from caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/omm.module rename to caom2utils/caom2utils/tests/data/gemini/wrgnN20140428S0085_arc/wrgnN20140428S0085_arc.py diff --git a/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/Cdemo_ext2_SCIRED.fits.header b/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/Cdemo_ext2_SCIRED.fits.header deleted file mode 100644 index 8bfc4c85..00000000 --- a/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/Cdemo_ext2_SCIRED.fits.header +++ /dev/null @@ -1,181 +0,0 @@ -SIMPLE = T / Written by IDL: Wed Jun 20 10:02:48 2018 -BITPIX = -32 / Bits per pixel -NAXIS = 2 / Number of dimensions -NAXIS1 = 3515 / Dimension for the X axis -NAXIS2 = 3437 / Dimension for the Y axis -EXTEND = T -COMMENT *********************************************************** -COMMENT * All paper using CPAPIR data must include the following * -COMMENT * note in the "Acknowledgement" section : * -COMMENT * Based on observations obtained with CPAPIR at the * -COMMENT * Observatoire du mont Megantic, funded by the Universite * -COMMENT * de Montreal, Universite Laval, the Natural Sciences and * -COMMENT * Engineering Research Council of Canada (NSERC), the * -COMMENT * Fond quebecois de la recherche sur la Nature et les * -COMMENT * technologies (FQRNT) and the Canada Economic * -COMMENT * Development program. * -COMMENT * * -COMMENT * Please consider contacting the PI before publication * -COMMENT * of this dataset. * -COMMENT * * -COMMENT * Please refer to the following paper in any publication * -COMMENT * making use of CPAPIR dataset : * -COMMENT * Artigau, E., Doyon, R., Vallee, P., Riopel, M., * -COMMENT * & Nadeau, D. 2004, Proc. SPIE, 5492, 1479 * -COMMENT * * -COMMENT * Visit : www.astro.umontreal.ca/cpapir for more details. 
* -COMMENT *********************************************************** -EQUINOX = 2000.00000000 / Reference -ORIGIN = 'LAE ' / Laboratoire d'Astrophysique Experimentale -TELESCOP= 'OMM-1.6m' /Observatoire du Mont-Megantic -OBS_LAT = 45.4555555556 /Latitude of the telescope -OBS_LON = -71.1527777778 /Longitude of the telescope (Negative=West of Gre -INSTRUME= 'CPAPIR ' / Instrument name -OBSERVER= ' ' /Observer name -OBJECT = 'NGC4214 ' / Nom de l'objet -DATATYPE= 'REDUC ' /Data type, SCIENCE/CALIB/REJECT/FOCUS/TEST -FILE_INI= '180520_0472' /Initial file name -NIGHTID = '180520 ' /YYMMDD of the local start of night date -DATE-OBS= '2018-05-21T06:07:22' / Universal date and time -DATE = '2018-05-21T02:07:22' /Local date and time -FICHIER = '/home/cpapir/180520/180520_0472' /0520_0472 -TEXP = 10.0000 / Exposure time asked for -NLEC = 1 / Number of reads per coaddition -NCOA = 2 / Number of coadditions per file -NFIC = 3 / Number of files to be written -NATOD = 32 / Number of A/D converters -T1 = 90.5085 / Detecteur -T2 = 540.420000000 / Lentille #8 -T3 = 76.6732 / Detecteur -T4 = 76.8097 / non applicable -F1 = 'H ' / Wheel 1 (X), position 3 -F2 = 'open ' / Wheel 2 (Y), position 0 -F1NOM = 'Wheel R =' / Identification F1 -F2NOM = 'Wheel B =' / Identification F2 -HA = 5.04095600000 /Hour angle, always expressed in HOURS -RA = 183.939270971 /RA always expressed in DECIMAL DEGREE (0-360) -RATXT = '12:15:45.43' /RA expressed as text -DEC = 36.3853458776 / Declinaison -DECTXT = '+36:23:07.2' /DEC expressed as text -AIRMASS = 1.34507 /mean airmass -ST = 17.3158370000 / Temps sideral -UT = 6.13676900000 / Temps universel -YEAR = 2018.38280305 /YEAR, same as the EPOCH keyword -FOCUS = 4494.00000000 / Foyer du telescope -DOME = 294.495474000 / Position du dome -ROTATOR = 90.8853300000 / Angle du rotateur -NLECMAX = 4 / Maximum number of reads as a function of TEXP -TINT = 9.47100 /integration time per COADD -TEFF = 5549.99 /total exposure time in seconds -N_IND = 293.000 /number of individual exposures -TEFF_IND= 18.9419 /mean individual exposure time -T_TOT = 18.9420 /total integration time for all COADDS -T1LEC = 1.35300 / Read time for the detector -TCOA = 4.05900 / Time for one coaddition -TLEC = 2.70600 / Time for two reads -TSEQ = 0 / Time for the last sequence -TUS = 32424743 / Acquisition time in us -TEC = 9.47100 / Time elapsed -FEC = 3 / File in progress -LEC = '1f ' /Read in progress -CEC = 2 / Coaddition in progress -PEC = 100 / Percentage in progress -DS9FIC = '/home/cpapir/180520/180520_0472.fits' /0520_0472.fits / Filename to d -DECALE1 = 0 / Flip quadrant 1 -DECALE2 = 0 / Ne decale pas le quadrant 2 -DECALE3 = 1 / Flip quadrant 3 -DECALE4 = 0 / Flip quadrant 4 -MIRPAT = 'tim10.lod' /Read pattern filename -MIRMSG1 = 'seq 10 1 2 3 ' / -MIRMSG2 = '/home/cpapir/180520/180520_0472.fits (3 of 3) in 32.425 s, efficienc' -MIRMSG5 = ' ' / -AIRM_MIN= 1.08528 /minimum airmass -AIRM_MAX= 1.77463 /maximum airmass -FILTER = 'H ' / -WLEN = 1.65000 /central wavelength in micron -BANDPASS= 0.300000000000 /bandpass in micron -SCAN = '- ' / -SHIFTCOR= 1 /corrected with quadrant_shifteur.pro -MEDIMAGE= 24440.0 /Image median -MEDDEVIM= 2239.00 /Median absolute deviation -TEMP_WMO= -999 /Temperature estimee, stations WMO -DEWP_WMO= -999 /Point de rosee estime, stations WMO -HUMI_WMO= -999 /Humidite relative estimee, stations WMO -PARSED = 1 /if 1, then image has been parsed -CDELT1 = 1 / -CDELT2 = 1 / -CD1_1 = -0.000247563642915 / -CD1_2 = 5.00967780681E-06 / -CD2_1 = 5.07946242578E-06 / -CD2_2 = 
0.000247790361755 / -MJDATE = 58259.6893191 /mean MJDATE -MJD_END = 58259.7551157 /MJDATE of last exposure -MJD_STAR= 58259.6227431 /MJDATE of first exposure -RELEASE = '2019-11-19' / End of proprietary period YYYY-MM-DD -CTYPE1 = 'RA---TAN' / Coordinate Type -CTYPE2 = 'DEC--TAN' / Coordinate Type -CRPIX1 = 1755.00000000 / Reference Pixel in X -CRPIX2 = 1718.00000000 / Reference Pixel in Y -CRVAL1 = 183.958190918 / R.A. (degrees) of reference pixel -CRVAL2 = 36.3876953125 / Declination of reference pixel -RADESYS = 'ICRS ' / Reference frame -MJD-OBS = 58259.2551157 / Modified Julian day of observations -ASTRGAIA= 1 / -RMSASTR = 0.321558871390 /astrometry from solve-field. Assumed to be about -THETA = 88.8553228111 /Rotation of WCS relative to NS (deg) -TEM71323= 0 / End of proprietary period YYYY-MM-DD -DEW71323= 0 / End of proprietary period YYYY-MM-DD -HUM71323= 0 / End of proprietary period YYYY-MM-DD -TEM71610= 0 / End of proprietary period YYYY-MM-DD -DEW71610= 0 / End of proprietary period YYYY-MM-DD -HUM71610= 0 / End of proprietary period YYYY-MM-DD -TEM71611= 0 / End of proprietary period YYYY-MM-DD -DEW71611= 0 / End of proprietary period YYYY-MM-DD -HUM71611= 0 / End of proprietary period YYYY-MM-DD -TEM72616= 0 / End of proprietary period YYYY-MM-DD -DEW72616= 0 / End of proprietary period YYYY-MM-DD -HUM72616= 0 / End of proprietary period YYYY-MM-DD -RED_DATE= 'Mon May 21 18:10:21 2018' /DATE AND TIME FILE WAS REDUCED -HISTORY HEXTRACT: Mon May 21 14:10:21 2018 -HISTORY Original image size was 3616 by 3534 -HISTORY Extracted Image: [53:3567,49:3485] -PIXSCALE= 0.891822028842 /Pixel scale in arcsec -PIXFWHM = 2.83188 /FWHM in pixels -FWHM = 2.52553390014 /FWHM in arcsec -PZERO = 24.8265 /zero point, raper=1*FWHM, rsky=[2-3]FHWM,mag=H2m -PZ3FW = 25.0762 /zero point, raper=3*FWHM, rsky=[3-5]FHWM,mag=H2m -SKYMAG = 17.0509029933 /sky background in mag/arcsec -MAG5SIG = 17.9667 /mag 5 sigma, raper=1*FWHM, rsky=[2-3]FHWM,mag=H2 -RMSPIXEL= 21.7692 /rms pixel-to-pixel image -END -XTENSION= 'BINTABLE' /Written by IDL: Wed Jun 20 10:02:47 2018 -BITPIX = 8 / -NAXIS = 2 /Binary table -NAXIS1 = 341 /Number of bytes per row -NAXIS2 = 1 /Number of rows -PCOUNT = 0 /Random parameter count -GCOUNT = 1 /Group count -TFIELDS = 3 /Number of columns -COMMENT -COMMENT *** End of mandatory fields *** -COMMENT -EXTNAME = 'PROVENANCE' / -COMMENT -COMMENT *** Column formats *** -COMMENT -TFORM1 = '209A ' / -TFORM2 = '11D ' / -TFORM3 = '11E ' / -COMMENT -COMMENT *** Column dimensions (2 D or greater) *** -COMMENT -TDIM1 = '(19, 11)' / -COMMENT Column names -COMMENT -COMMENT *** Column names *** -COMMENT -TTYPE1 = 'FICS ' / -TTYPE2 = 'STARTTIME' / -TTYPE3 = 'DURATION' / -END - diff --git a/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/omm.py b/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/omm.py deleted file mode 100644 index e69de29b..00000000 diff --git a/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/y.xml b/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/y.xml deleted file mode 100644 index 1d6cf98e..00000000 --- a/caom2utils/caom2utils/tests/data/omm/Cdemo_ext2_SCIRED/y.xml +++ /dev/null @@ -1,57 +0,0 @@ - - - OMM - Cdemo_ext2_SCIRED - 2018-05-21T02:07:22.0 - - composite - - OBJECT - science - - NGC4214 - - - OMM-1.6m - - - CPAPIR - - - - Cdemo_ext2_SCIRED - 2019-11-19T00:00:00.000 - 2019-11-19T00:00:00.000 - image - 2 - - - cadc:OMM/Cdemo_ext2_SCIRED.fits.gz - science - data - application/fits - 48346560 - md5:6372df8f4312d3452895246aba777852 - - - 0 - - - 2 - - - - 
- 1 - - - 2 - - - - - - - - - diff --git a/caom2utils/caom2utils/tests/test_collections.py b/caom2utils/caom2utils/tests/test_collections.py index f0866f77..187577d0 100644 --- a/caom2utils/caom2utils/tests/test_collections.py +++ b/caom2utils/caom2utils/tests/test_collections.py @@ -233,11 +233,6 @@ def _get_cardinality(directory): 'MegaPipe.080.156.Z.MP9801/cadc:CFHTSG/' 'MegaPipe.080.156.Z.MP9801.fits.gif' ) - elif '/omm/' in directory: - if 'SCIRED' in directory: - return '--lineage Cdemo_ext2_SCIRED/cadc:OMM/' 'Cdemo_ext2_SCIRED.fits.gz' - else: - return '--lineage C190531_0432_SCI/cadc:OMM/' 'C190531_0432_SCI.fits.gz' elif 'apass/catalog' in directory: return '--lineage catalog/vos://cadc.nrc.ca!vospace/CAOMworkshop/' 'Examples/DAO/dao_c122_2016_012725.fits' elif 'taos_' in directory: @@ -250,35 +245,16 @@ def _get_cardinality(directory): elif 'gemini' in directory: if 'S20230518S0121' in directory: return '--lineage GS-2023A-SV-101-13-009/cadc:GEMINI/S20230518S0121.fits' - else: + elif 'N20030325S0098' in directory: return '--lineage GN-2003A-Q-51-2-004/cadc:GEMINI/N20030325S0098.fits' + else: + return '--lineage wrgnN20140428S0085_arc/cadc:GEMINICADC/wrgnN20140428S0085_arc.fits' elif 'lotss' in directory: return '--lineage P124+62_mosaic/astron:LOTSS/P124+62/mosaic.fits' else: return '' -def _get_common(fnames): - common = os.path.basename(fnames[0]) - for jj in fnames: - rhs = os.path.basename(jj) - for kk in fnames: - lhs = os.path.basename(kk) - ii = 0 - while ii < len(rhs): - if rhs[ii] == '.' or rhs[ii] != lhs[ii]: - if len(rhs[0:ii]) != 0 and len(rhs[0:ii]) < len(common): - common = rhs[0:ii] - break - else: - ii += 1 - return common - - -def _get_subdirs(dir_name): - return [name for name in os.listdir(dir_name) if os.path.isdir(os.path.join(dir_name, name))] - - def _get_parameter(extension, dir_name): fnames = _get_file(extension, dir_name) if fnames: diff --git a/caom2utils/caom2utils/tests/test_fits2caom2.py b/caom2utils/caom2utils/tests/test_fits2caom2.py index 0995fff8..5456562e 100755 --- a/caom2utils/caom2utils/tests/test_fits2caom2.py +++ b/caom2utils/caom2utils/tests/test_fits2caom2.py @@ -1682,6 +1682,14 @@ def test_parser_construction(vos_mock, stdout_mock): os.unlink(test_out_fqn) +def test_edge_case(): + ob = ObsBlueprint() + ob.load_from_file(f'{TESTDATA_DIR}/edge_case.blueprint') + assert ( + ob._plan['Plane.provenance.producer'] == (['IMAGESWV', 'ORIGIN'], 'Gemini Observatory') + ), f'wrong blueprint default {ob._plan["Plane.provenance.producer"]}' + + def _get_local_headers(file_name): return _get_headers(file_name, None) From 54942888c2da3ea72bc2f18244ce997a22aff151 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Thu, 1 Feb 2024 11:21:35 -0800 Subject: [PATCH 25/36] Refactor comment line length. 
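The reflowed comments, docstrings and argparse help strings below target the project's wider line limit rather
than the old narrow wrapping. A minimal sketch of the kind of linter setting such a limit maps to (the
120-character value and the [flake8] section are illustrative assumptions, not taken from this repository's
configuration):

    # hypothetical setup.cfg excerpt
    [flake8]
    max-line-length = 120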
--- caom2utils/caom2utils/caom2blueprint.py | 177 +++++++++--------------- 1 file changed, 63 insertions(+), 114 deletions(-) diff --git a/caom2utils/caom2utils/caom2blueprint.py b/caom2utils/caom2utils/caom2blueprint.py index feb6d62b..e28b9530 100755 --- a/caom2utils/caom2utils/caom2blueprint.py +++ b/caom2utils/caom2utils/caom2blueprint.py @@ -146,8 +146,7 @@ class DispatchingFormatter: - """Dispatch formatter for logger and it's sub-logger, so there can - be multiple formatters.""" + """Dispatch formatter for logger and it's sub-logger, so there can be multiple formatters.""" def __init__(self, formatters, default_formatter): self._formatters = formatters @@ -194,8 +193,8 @@ def get_vos_headers(uri, subject=None): Creates the FITS headers object from a vospace file. :param uri: vos URI :param subject: user credentials. Anonymous if subject is None - :return: List of headers corresponding to each extension. Each header is - of astropy.wcs.Header type - essentially a dictionary of FITS keywords. + :return: List of headers corresponding to each extension. Each header is of astropy.wcs.Header type - essentially + a dictionary of FITS keywords. """ if uri.startswith('vos'): if subject is not None and subject.certificate is not None: @@ -223,9 +222,8 @@ def _get_and_update_artifact_meta(uri, artifact, subject=None, connected=True, c logging.debug(f'Begin _get_and_update_artifact_meta for {uri}') file_url = urlparse(uri) if file_url.scheme == 'gemini' and '.jpg' not in file_url.path: - # will get file metadata from Gemini JSON summary for fits, - # because the metadata is available long before the data - # will be stored at CADC + # will get file metadata from Gemini JSON summary for fits, because the metadata is available long before + # the data will be stored at CADC return elif file_url.scheme == 'vos': metadata = _get_vos_meta(subject, uri) @@ -298,8 +296,7 @@ def _get_vos_meta(subject, uri): def _lookup_blueprint(blueprints, uri): """ - Blueprint handling may be one-per-observation, or one-per-URI. Find - the correct one here. + Blueprint handling may be one-per-observation, or one-per-URI. Find the correct one here. :param blueprints: The collection of blueprints provided by the user. :param uri: Which blueprint to look for :return: the blueprint to apply to Observation creation. @@ -338,22 +335,19 @@ def _augment( **kwargs, ): """ - Find or construct a plane and an artifact to go with the observation - under augmentation. + Find or construct a plane and an artifact to go with the observation under augmentation. 
:param obs: Observation - target of CAOM2 model augmentation :param product_id: Unique identifier for a plane in an Observation :param uri: Unique identifier for an artifact in a plane - :param blueprint: Which blueprint to use when mapping from a telescope - data model to CAOM2 + :param blueprint: Which blueprint to use when mapping from a telescope data model to CAOM2 :param subject: authorization for any metdata access :param dumpconfig: print the blueprint to stdout - :param validate_wcs: if true, call the validate method on the constructed - observation, which checks that the WCS in the CAOM model is valid, + :param validate_wcs: if true, call the validate method on the constructed observation, which checks that the WCS + in the CAOM model is valid, :param plugin: what code to use for modifying a CAOM instance :param local: the input is the name of a file on disk - :param external_url: if header information should be retrieved - externally, this is where to find it + :param external_url: if header information should be retrieved externally, this is where to find it :param client: StorageClientWrapper :return: an updated Observation """ @@ -397,8 +391,7 @@ def _augment( parser = ContentParser(blueprint, uri) elif '.h5' in local: logging.debug(f'Using an Hdf5Parser for local file {local}') - # h5py is an extra in this package since most collections do - # not require it + # h5py is an extra in this package since most collections do not require it import h5py temp = h5py.File(local) @@ -456,11 +449,10 @@ def _augment( def _load_module(module): - """If a user provides code for execution during blueprint configuration, - add that code to the execution environment of the interpreter here. + """If a user provides code for execution during blueprint configuration, add that code to the execution + environment of the interpreter here. - :param module the fully-qualified path name to the source code from a - user. + :param module the fully-qualified path name to the source code from a user. """ mname = os.path.basename(module) if '.' in mname: @@ -482,12 +474,9 @@ def caom2gen(): nargs='+', required=True, help=( - 'list of files with blueprints for CAOM2 ' - 'construction, in serialized format. If the ' - 'list is of length 1, the same blueprint will ' - 'be applied to all lineage entries. Otherwise, ' - 'there must be a blueprint file per lineage ' - 'entry.' + 'list of files with blueprints for CAOM2 construction, in serialized format. If the list is of length 1, ' + 'the same blueprint will be applied to all lineage entries. Otherwise, there must be a blueprint file ' + 'per lineage entry.' ), ) @@ -516,8 +505,7 @@ def caom2gen(): product_id, uri = _extract_ids(cardinality) blueprints[uri] = blueprint else: - # there needs to be the same number of blueprints as plane/artifact - # identifiers + # there needs to be the same number of blueprints as plane/artifact identifiers if len(args.lineage) != len(args.blueprint): logging.debug(f'Lineage: {args.lineage}') logging.debug(f'Blueprints: {args.blueprint}') @@ -551,16 +539,13 @@ def caom2gen(): def _gen_obs(obs_blueprints, in_obs_xml, collection=None, obs_id=None): """ - Determine whether to create a Simple or Derived Observation, or to - read an existing Observation from an input file. + Determine whether to create a Simple or Derived Observation, or to read an existing Observation from an input + file. :param obs_blueprints: Collection of blueprints provided to application. 
- :param in_obs_xml: Existing observation information, contains the - collection and obs_id values. - :param collection: This plus the obs_id is a unique key for an - observation. - :param obs_id: This plus the collection is a unique key for an - observation. + :param in_obs_xml: Existing observation information, contains the collection and obs_id values. + :param collection: This plus the obs_id is a unique key for an observation. + :param obs_id: This plus the collection is a unique key for an observation. :return: Initially constructed Observation. """ obs = None @@ -569,9 +554,8 @@ def _gen_obs(obs_blueprints, in_obs_xml, collection=None, obs_id=None): reader = ObservationReader(validate=True) obs = reader.read(in_obs_xml) else: - # determine the type of observation to create by looking for the - # the DerivedObservation.members in the blueprints. If present - # in any of it assume derived + # determine the type of observation to create by looking for the the DerivedObservation.members in the + # blueprints. If present in any of it assume derived for bp in obs_blueprints.values(): if bp._get('DerivedObservation.members') is not None: logging.debug('Build a DerivedObservation') @@ -631,8 +615,7 @@ def _set_logging(verbose, debug, quiet): def _get_common_arg_parser(): """ - Returns the arg parser with common arguments between - fits2caom2 and caom2gen + Returns the arg parser with common arguments between fits2caom2 and caom2gen :return: args parser """ parser = util.get_base_parser( @@ -655,9 +638,8 @@ def _get_common_arg_parser(): '--no_validate', action='store_true', help=( - 'by default, the application will validate the ' - 'WCS information for an observation. ' - 'Specifying this flag skips that step.' + 'by default, the application will validate the WCS information for an observation. Specifying this flag ' + 'skips that step.' ), ) @@ -682,8 +664,7 @@ def _get_common_arg_parser(): def get_arg_parser(): """ - Returns the arg parser with minimum arguments required to run - fits2caom2 + Returns the arg parser with minimum arguments required to run fits2caom2 :return: args parser """ parser = _get_common_arg_parser() @@ -694,21 +675,17 @@ def get_arg_parser(): def proc(args, obs_blueprints): """ - Function to process an observation according to command line arguments - and a dictionary of blueprints. + Function to process an observation according to command line arguments and a dictionary of blueprints. - This implementation mirrors the Java implementation of fits2caom2, and - the command line arguments it handles are productID and fileURI or - local. + This implementation mirrors the Java implementation of fits2caom2, and the command line arguments it + handles are productID and fileURI or local. - There is no support for plugin execution to modify the blueprint with - this access point. + There is no support for plugin execution to modify the blueprint with this access point. - :param args: argparse args object containing the user supplied arguments. - Arguments correspond to the parser returned by the get_arg_parser function - :param obs_blueprints: dictionary of blueprints reguired to process the - observation. The fileURIs represent the keys in this dictionary. Every - fileURI in args.fileURI should have a corresponding blueprint. + :param args: argparse args object containing the user supplied arguments. Arguments correspond to the parser + returned by the get_arg_parser function + :param obs_blueprints: dictionary of blueprints reguired to process the observation. 
The fileURIs represent the + keys in this dictionary. Every fileURI in args.fileURI should have a corresponding blueprint. :return: """ @@ -727,10 +704,9 @@ def proc(args, obs_blueprints): if args.in_obs_xml and len(obs.planes) != 1: if not args.productID: - msg = '{}{}{}'.format( - 'A productID parameter is required if ', - 'there are zero or more than one planes ', - 'in the input observation.', + msg = ( + 'A productID parameter is required if there are zero or more than one planes in the input ' + 'observation.', ) raise RuntimeError(msg) @@ -783,12 +759,10 @@ def _load_plugin(plugin_name): if not hasattr(plgin, 'update'): msg = ( - 'The plugin {} is not correct. It must provide one ' - 'of:\n' + f'The plugin {plugin_name} is not correct. It must provide one of:\n' '1 - a function named update, or\n' - '2 - a class ObservationUpdater with a function named ' - 'update.\n In either case, the update signature needs ' - 'to be (Observation, **kwargs).'.format(plugin_name) + '2 - a class ObservationUpdater with a function named update.\n ' + 'In either case, the update signature needs to be (Observation, **kwargs).' ) raise ImportError(msg) return plgin @@ -837,10 +811,8 @@ def _write_observation(obs, args): def gen_proc(args, blueprints, **kwargs): - """The implementation that expects a product ID to be provided as - part of the lineage parameter, and blueprints as input parameters, - and a plugin parameter, that supports external programmatic blueprint - modification.""" + """The implementation that expects a product ID to be provided as part of the lineage parameter, and blueprints + as input parameters, and a plugin parameter, that supports external programmatic blueprint modification.""" _set_logging(args.verbose, args.debug, args.quiet) result = 0 @@ -863,9 +835,8 @@ def gen_proc(args, blueprints, **kwargs): # if the resource_id is Undefined, using CadcDataClient client = data_util.StorageClientWrapper(subject, using_storage_inventory=False) else: - # if the resource_id is defined, assume that the caller intends to - # use the Storage Inventory system, as it's the CADC storage - # client that depends on a resource_id + # if the resource_id is defined, assume that the caller intends to use the Storage Inventory system, as + # it's the CADC storage client that depends on a resource_id client = data_util.StorageClientWrapper(subject, resource_id=args.resource_id) for ii, cardinality in enumerate(args.lineage): @@ -920,8 +891,7 @@ def gen_proc(args, blueprints, **kwargs): def get_gen_proc_arg_parser(): """ - Returns the arg parser with minimum arguments required to run - caom2gen + Returns the arg parser with minimum arguments required to run caom2gen :return: args parser """ parser = _get_common_arg_parser() @@ -929,60 +899,40 @@ def get_gen_proc_arg_parser(): '--external_url', nargs='+', help=( - 'service endpoint(s) that ' - 'return(s) a string that can be ' - 'made into FITS headers. Cardinality should' + 'service endpoint(s) that return(s) a string that can be made into FITS headers. Cardinality should ' 'be consistent with lineage.' ), ) parser.add_argument( '--module', help=( - 'if the blueprint contains function ' - 'calls, call ' - 'importlib.import_module ' - 'for the named module. Provide a ' - 'fully qualified name. Parameter ' - 'choices are the artifact URI (uri) ' - 'or a list of astropy Header ' - 'instances (header). This will ' - 'allow the update of a single ' - 'blueprint entry with a single ' - 'call.' 
+ 'if the blueprint contains function calls, call importlib.import_module for the named module. Provide a ' + 'fully qualified name. Parameter choices are the artifact URI (uri) or a list of astropy Header ' + 'instances (header). This will allow the update of a single blueprint entry with a single call.' ), ) parser.add_argument( '--plugin', help=( - 'if this parameter is specified, ' - 'call importlib.import_module ' - 'for the named module. Then ' - 'execute the method "update", ' - 'with the signature ' - '(Observation, **kwargs). ' - 'This will allow ' - 'for the update of multiple ' - 'observation data members with one ' - 'call.' + 'if this parameter is specified, call importlib.import_module for the named module. Then execute the ' + 'method "update", with the signature (Observation, **kwargs). This will allow for the update of ' + 'multiple observation data members with one call.' ), ) parser.add_argument( '--lineage', nargs='+', help=( - 'productID/artifactURI. List of plane/artifact ' - 'identifiers that will be' - 'created for the identified observation.' + 'productID/artifactURI. List of plane/artifact identifiers that will be created for the identified ' + 'observation.' ), ) parser.add_argument( '--use_blueprint_parser', nargs='+', help=( - 'productID/artifactURI. List of lineage entries ' - 'that will be processed with a BlueprintParser. ' - 'Good for files with no metadata in the ' - 'content.' + 'productID/artifactURI. List of lineage entries that will be processed with a BlueprintParser. ' + 'Good for files with no metadata in the content.' ), ) return parser @@ -1009,10 +959,9 @@ def augment( _set_logging(verbose, debug, quiet) logging.debug('Begin augmentation for product_id {}, uri {}'.format(product_id, uri)) - # The 'visit_args' are a dictionary within the 'params' dictionary. - # They are set by the collection-specific implementation, as they are - # dependent on that collection-specific implementation. The args to the - # visit function are not set in fits2caom2. + # The 'visit_args' are a dictionary within the 'params' dictionary. They are set by the collection-specific + # implementation, as they are dependent on that collection-specific implementation. The args to the visit + # function are not set in fits2caom2. params = kwargs.get('params') kwargs = {} From f92ef93c74689c506bfa83bdd0e83f34ba36be2b Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Mon, 5 Feb 2024 17:41:09 -0800 Subject: [PATCH 26/36] CADC-13017 - support bz2 extensions for Content-Type. 
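get_file_encoding and get_file_type in data_util.py now treat the .fits.bz2 suffix the same way as the other
compressed-FITS suffixes. A minimal usage sketch of the intended lookups (the file names are made up for
illustration):

    from caom2utils.data_util import get_file_encoding, get_file_type

    # bzip2-compressed FITS is typed like the other FITS variants
    assert get_file_type('/tmp/N20240101S0001.fits.bz2') == 'application/fits'
    # and its content encoding is reported the same way as fpack output
    assert get_file_encoding('/tmp/N20240101S0001.fits.bz2') == 'x-fits'
    # gzip handling is unchanged
    assert get_file_encoding('/tmp/N20240101S0001.fits.gz') == 'gzip'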
--- caom2utils/caom2utils/data_util.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/caom2utils/caom2utils/data_util.py b/caom2utils/caom2utils/data_util.py index 884c214c..86c81b4f 100644 --- a/caom2utils/caom2utils/data_util.py +++ b/caom2utils/caom2utils/data_util.py @@ -335,7 +335,7 @@ def get_local_file_info(fqn): def get_file_encoding(fqn): """Basic header extension to content_encoding lookup.""" - if fqn.endswith('.fits.fz'): + if fqn.endswith('.fits.fz') or fqn.endswith('.fits.bz2'): return 'x-fits' elif fqn.endswith('.fits.gz'): return 'gzip' @@ -346,7 +346,7 @@ def get_file_encoding(fqn): def get_file_type(fqn): """Basic header extension to content_type lookup.""" lower_fqn = fqn.lower() - if lower_fqn.endswith('.fits') or lower_fqn.endswith('.fits.fz'): + if lower_fqn.endswith('.fits') or lower_fqn.endswith('.fits.fz') or lower_fqn.endswith('.fits.bz2'): return 'application/fits' elif lower_fqn.endswith('.gif'): return 'image/gif' From acaceb204afbc9df7475b19e5340369d07e62de0 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Mon, 19 Feb 2024 08:39:36 -0800 Subject: [PATCH 27/36] CADC-10810 - interim commit. --- caom2utils/caom2utils/parsers.py | 64 ++++++++++++------- .../gemini/S20230518S0121/S20230518S0121.py | 6 +- 2 files changed, 44 insertions(+), 26 deletions(-) diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index 397e2d90..5a889152 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -286,7 +286,7 @@ def _get_set_from_list(self, lookup, index): # if there's something useful as a value in the keywords, # extract it - if keywords: + if keywords is not None and any(keywords): if ObsBlueprint.needs_lookup(keywords): # if there's a default value use it if keywords[1]: @@ -430,6 +430,7 @@ def _get_datetime(self, from_value): # CFHT 2003/03/29,01:34:54 # CFHT 2003/03/29 # DDO 12/02/95 + # TAOSII 2024-01-26T14:52:49Z for dt_format in [ '%Y-%m-%dT%H:%M:%S', '%Y-%m-%dT%H:%M:%S.%f', @@ -442,6 +443,7 @@ def _get_datetime(self, from_value): '%d/%m/%y', '%d/%m/%y %H:%M:%S', '%d-%m-%Y', + '%Y-%m-%dT%H:%M:%SZ', ]: try: result = datetime.strptime(from_value, dt_format) @@ -460,7 +462,7 @@ class ContentParser(BlueprintParser): def __init__(self, obs_blueprint=None, uri=None): super().__init__(obs_blueprint, uri) self._wcs_parsers = {} - self._wcs_parsers[0] = WcsParser(obs_blueprint, extension=0) + self._set_wcs_parsers(obs_blueprint) def _get_chunk_naxis(self, chunk, index): chunk.naxis = self._get_from_list('Chunk.naxis', index, self.blueprint.get_configed_axes_count()) @@ -470,6 +472,9 @@ def _get_num_parts(self): """ return len(self._blueprint._extensions) + 1 + def _set_wcs_parsers(self, obs_blueprint): + self._wcs_parsers[0] = WcsParser(obs_blueprint, extension=0) + def augment_artifact(self, artifact): """ Augments a given CAOM2 artifact with available content information @@ -477,8 +482,7 @@ def augment_artifact(self, artifact): :param index: int Part name """ super().augment_artifact(artifact) - - self.logger.error(f'Begin content artifact augmentation for {artifact.uri}') + self.logger.debug(f'Begin content artifact augmentation for {artifact.uri}') if self.blueprint.get_configed_axes_count() == 0: raise TypeError(f'No WCS Data. 
End content artifact augmentation for ' f'{artifact.uri}.') @@ -948,13 +952,17 @@ def _get_provenance(self, current): if name: prov = caom2.Provenance(name, p_version, project, producer, run_id, reference, last_executed) ContentParser._add_keywords(keywords, current, prov) - if inputs: + if inputs is not None and any(inputs): if isinstance(inputs, caom2.TypedSet): for i in inputs: prov.inputs.add(i) else: - for i in inputs.split(): - prov.inputs.add(caom2.PlaneURI(str(i))) + if isinstance(inputs, str): + for i in inputs.split(): + prov.inputs.add(caom2.PlaneURI(str(i))) + else: + for i in inputs: + prov.inputs.add(caom2.PlaneURI(str(i))) else: if current is not None and len(current.inputs) > 0: # preserve the original value @@ -1931,23 +1939,20 @@ class Hdf5Parser(ContentParser): CAOM2 record. """ - def __init__(self, obs_blueprint, uri, h5_file, find_roots_here='sitedata'): + def __init__(self, obs_blueprint, uri, h5_file, extension_names): """ :param obs_blueprint: Hdf5ObsBlueprint instance :param uri: which artifact augmentation is based on :param h5_file: h5py file handle - :param find_roots_here: str location where Chunk metadata starts + :param extension_names: list of str where Chunk metadata starts. There is one Part/Chunk per list entry """ self._file = h5_file - # where N Chunk metadata starts - self._find_roots_here = find_roots_here # the length of the array is the number of Parts in an HDF5 file, # and the values are HDF5 lookup path names. - self._extension_names = [] + self._extension_names = extension_names super().__init__(obs_blueprint, uri) - # used to set the astropy wcs info, resulting in a validated wcs - # that can be used to construct a valid CAOM2 record - self._wcs_parsers = {} + # for index, _ in enumerate(self._extension_names): + # self._blueprint._extensions[index] = {} def _get_num_parts(self): """return the number of Parts to create for a CAOM record @@ -1958,6 +1963,12 @@ def _get_num_parts(self): result = 1 return result + def _set_wcs_parsers(self, obs_blueprint): + # self._wcs_parsers[0] = WcsParser(obs_blueprint, extension=0) + # used to set the astropy wcs info, resulting in a validated wcs + # that can be used to construct a valid CAOM2 record + self._wcs_parsers = {} + def apply_blueprint_from_file(self): """ Retrieve metadata from file, cache in the blueprint. @@ -1968,6 +1979,9 @@ def apply_blueprint_from_file(self): import h5py individual, multi, attributes = self._extract_path_names_from_blueprint() + # self.logger.error(individual) + # self.logger.error(multi) + # self.logger.error(attributes) filtered_individual = [ii for ii in individual.keys() if '(' in ii] def _extract_from_item(name, object): @@ -1978,23 +1992,18 @@ def _extract_from_item(name, object): :param name: fully-qualified HDF5 path name :param object: what the HDF5 path name points to """ - if name == self._find_roots_here: - for ii, path_name in enumerate(object.keys()): - # store the names and locations of the Part/Chunk metadata - temp = f'{name}/{path_name}' - self.logger.debug(f'Adding extension {temp}') - self._extension_names.append(temp) - self._blueprint._extensions[ii] = {} - # If it's the Part/Chunk metadata, capture it to extensions. # Syntax of the keys described in Hdf5ObsBlueprint class. 
for part_index, part_name in enumerate(self._extension_names): + # self.logger.error(f'part_index {part_index} part_name {part_name} name {name} names {object.dtype.names}') + # self.logger.error(f'part_index {part_index} part_name {part_name} name {name}') if name.startswith(part_name) and isinstance(object, h5py.Dataset) and object.dtype.names is not None: for d_name in object.dtype.names: temp_path = f'{name.replace(part_name, "")}/{d_name}' for path_name in multi.keys(): if path_name == temp_path: for jj in multi.get(path_name): + # self.logger.error(f'set 1 {jj}') self._blueprint.set(jj, object[d_name], part_index) elif path_name.startswith(temp_path) and '(' in path_name: z = path_name.split('(') @@ -2003,6 +2012,7 @@ def _extract_from_item(name, object): if len(a) > 2: raise NotImplementedError for jj in multi.get(path_name): + # self.logger.error(f'set 2 {jj}') self._blueprint.set( jj, object[d_name][int(a[0])][int(a[1])], @@ -2011,6 +2021,7 @@ def _extract_from_item(name, object): else: index = int(z[1].split(')')[0]) for jj in multi.get(path_name): + # self.logger.error(f'set 3 z {z} {jj} d_name {d_name} index {index}') self._blueprint.set( jj, object[d_name][index], @@ -2025,6 +2036,7 @@ def _extract_from_item(name, object): temp = f'//{name}/{d_name}' if temp in individual.keys(): for jj in individual.get(temp): + # self.logger.error(f'set 4 {jj}') self._blueprint.set(jj, object[d_name], 0) else: for ind_path in filtered_individual: @@ -2032,6 +2044,7 @@ def _extract_from_item(name, object): z = ind_path.split('(') index = int(z[1].split(')')[0]) for jj in individual.get(ind_path): + # self.logger.error(f'set 5 {jj}') self._blueprint.set(jj, object[d_name][index], 0) if len(individual) == 0 and len(multi) == 0: @@ -2080,6 +2093,9 @@ def _extract_path_names_from_blueprint(self): def apply_blueprint(self): self.logger.debug('Begin apply_blueprint') + for index, _ in enumerate(self._extension_names): + self._blueprint._extensions[index] = {} + self.apply_blueprint_from_file() # after the apply_blueprint_from_file call, all the metadata from the @@ -2106,6 +2122,7 @@ def apply_blueprint(self): else: exts[extension][key] = self._execute_external_instance(value, key, extension) + # apply overrides # blueprint already contains all the overrides, only need to make # sure the overrides get applied to all the extensions for extension in exts: @@ -2123,6 +2140,7 @@ def apply_blueprint(self): exts[extension][key] = value self.logger.debug(f'{key}: set to {value} in extension {extension}') + # apply defaults # if no values have been set by file lookups, function execution, # or applying overrides, apply defaults, including to all extensions for key, value in plan.items(): diff --git a/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.py b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.py index 6d5708c8..bd690d80 100644 --- a/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.py +++ b/caom2utils/caom2utils/tests/data/gemini/S20230518S0121/S20230518S0121.py @@ -42,9 +42,9 @@ def get_time_delta(header): end.format = 'mjd' result = (end - start).value else: - logging.debug(f'Cannot convert {temp_start} or {temp_end} to MJD for {header.get('EXPID')}') + logging.debug(f'Cannot convert {temp_start} or {temp_end} to MJD for {header.get("EXPID")}') else: - logging.error(f'Missing one of DATE-OBS {date_obs}, UTSTART {ut_start}, or UTEND {ut_end} in {header.get('EXPID')}') + logging.error(f'Missing one of DATE-OBS {date_obs}, UTSTART 
{ut_start}, or UTEND {ut_end} in {header.get("EXPID")}') return result @@ -61,5 +61,5 @@ def get_time_function_val(header): else: logging.debug(f'Cannot convert {temp_start} to MJD') else: - logging.error(f'Missing one of DATE-OBS {date_obs} or UTSTART {ut_start} in {header.get('EXPID')}') + logging.error(f'Missing one of DATE-OBS {date_obs} or UTSTART {ut_start} in {header.get("EXPID")}') return result From 0367a8c1cbc4ef140a49d200d7037021928af0d6 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Wed, 28 Feb 2024 12:52:55 -0800 Subject: [PATCH 28/36] CADC-13204 - get the HDF5 test cases working. --- caom2utils/caom2utils/blueprints.py | 6 +- caom2utils/caom2utils/parsers.py | 62 +++++++++++++++---- .../20220201T200117/taos.blueprint | 24 +++---- 3 files changed, 66 insertions(+), 26 deletions(-) diff --git a/caom2utils/caom2utils/blueprints.py b/caom2utils/caom2utils/blueprints.py index f5bef6ab..86bf651b 100644 --- a/caom2utils/caom2utils/blueprints.py +++ b/caom2utils/caom2utils/blueprints.py @@ -1199,10 +1199,14 @@ class Hdf5ObsBlueprint(ObsBlueprint): # lookup value starting with // means rooted at base of the hdf5 file ob.add_attribute('Observation.target.name', '//header/object/obj_id') - # lookup value starting with / means rooted at the base of the "find_roots_here" parameter for Hdf5Parser + # lookup value starting with / means rooted at the base of one of the extension_names parameter for Hdf5Parser # (integer) means return only the value with the index of "integer" from a list ob.add_attribute('Chunk.position.axis.function.refCoord.coord1.pix', '/header/wcs/crpix(0)') + # lookup values starting with / and with "{}" in the path will cause the blueprint application to attempt to + # guess the extension names from the file content + ob.add_attribute('Chunk.position.axis.function.refCoord.coord1.pix', '/sitedata/site{}/header/wcs/crpix(0)') + # (integer:integer) means return only the value with the index of "integer" from a list, followed by "integer" # from the list in the list ob.add_attribute('Chunk.position.axis.function.cd11', '/header/wcs/cd(0:0)') diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index add550a4..9a021a9a 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -1863,7 +1863,7 @@ class Hdf5Parser(ContentParser): - use the astropy.wcs instance and other blueprint metadata to fill the CAOM2 record. """ - def __init__(self, obs_blueprint, uri, h5_file, extension_names): + def __init__(self, obs_blueprint, uri, h5_file, extension_names=None): """ :param obs_blueprint: Hdf5ObsBlueprint instance :param uri: which artifact augmentation is based on @@ -1900,10 +1900,13 @@ def apply_blueprint_from_file(self): # require it import h5py - individual, multi, attributes = self._extract_path_names_from_blueprint() - # self.logger.error(individual) - # self.logger.error(multi) - # self.logger.error(attributes) + individual, multi, attributes, candidate_extensions = self._extract_path_names_from_blueprint() + if self._extension_names is None and len(candidate_extensions) > 0: + self._find_extension_names(candidate_extensions) + for index, _ in enumerate(self._extension_names): + self._blueprint._extensions[index] = {} + else: + self._blueprint._extensions[0] = {} filtered_individual = [ii for ii in individual.keys() if '(' in ii] def _extract_from_item(name, object): @@ -1996,31 +1999,64 @@ def _extract_path_names_from_blueprint(self): are _CAOM2_ELEMENT strings. 
attributes - a dictionary of lists, keys reference expected content from the h5py.File().attrs data structure and its keys. + extensions - a list of prefixes for identifying extensions """ individual = defaultdict(list) multi = defaultdict(list) attributes = defaultdict(list) + extensions = [] for key, value in self._blueprint._plan.items(): if ObsBlueprint.needs_lookup(value): for ii in value[0]: if ii.startswith('//'): individual[ii].append(key) elif ii.startswith('/'): - multi[ii].append(key) + if '{}' in ii: + bits = ii.split('{}') + extensions.append(bits[0]) + multi[bits[1]].append(key) + else: + multi[ii].append(key) else: attributes[ii].append(key) - return individual, multi, attributes + + temp = list(set(extensions)) + extensions = temp + return individual, multi, attributes, extensions + + def _find_extension_names(self, candidates): + """ if the HDF5 file has a structure where-by more than one Chunk (the equivalent of a FITS HDU extension) + is defined, try to guess that structure + """ + candidate_extension_names = [] + + def _extract_extension_prefixes(name, object): + """ + Function signature dictated by h5py visititems implementation. Executed for each dataset/group in an + HDF5 file. + + :param name: fully-qualified HDF5 path name + :param object: what the HDF5 path name points to + """ + import h5py + for part_name in candidates: + y = part_name.replace('/', '', 1) + if name.startswith(y): + x = name.split(y)[1].split('/') + temp = f'{y}{x[0]}' + candidate_extension_names.append(temp) + self._extension_names = list(sorted(set(candidate_extension_names))) + + self._file.visititems(_extract_extension_prefixes) + msg = '\n'.join(ii for ii in self._extension_names) + self.logger.info(f'Found extension_names:\n{msg}') def apply_blueprint(self): self.logger.debug('Begin apply_blueprint') - for index, _ in enumerate(self._extension_names): - self._blueprint._extensions[index] = {} - self.apply_blueprint_from_file() - # after the apply_blueprint_from_file call, all the metadata from the - # file has been applied to the blueprint, so now do the bits that - # require no access to file content + # after the apply_blueprint_from_file call, all the metadata from the file has been applied to the blueprint, + # so now do the bits that require no access to file content # pointers that are short to type exts = self._blueprint._extensions diff --git a/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/taos.blueprint b/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/taos.blueprint index ee38e282..0f515f15 100644 --- a/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/taos.blueprint +++ b/caom2utils/caom2utils/tests/data/taos_h5file/20220201T200117/taos.blueprint @@ -10,18 +10,18 @@ Plane.dataProductType = timeseries Plane.calibrationLevel = 2 Chunk.position.axis.function.dimension.naxis1 = 1920 Chunk.position.axis.function.dimension.naxis2 = 4608 -Chunk.position.axis.function.refCoord.coord1.pix = [/header/wcs/crpix(0)] -Chunk.position.axis.function.refCoord.coord1.val = [/header/wcs/crval(0)] -Chunk.position.axis.function.refCoord.coord2.pix = [/header/wcs/crpix(1)] -Chunk.position.axis.function.refCoord.coord2.val = [/header/wcs/crval(1)] -Chunk.position.axis.axis1.ctype = [/header/wcs/ctype(0)] -Chunk.position.axis.axis1.cunit = [/header/wcs/cunit(0)] -Chunk.position.axis.axis2.ctype = [/header/wcs/ctype(1)] -Chunk.position.axis.axis2.cunit = [/header/wcs/cunit(1)] -Chunk.position.axis.function.cd11 = [/header/wcs/cd(0:0)] 
-Chunk.position.axis.function.cd12 = [/header/wcs/cd(0:1)] -Chunk.position.axis.function.cd21 = [/header/wcs/cd(1:0)] -Chunk.position.axis.function.cd22 = [/header/wcs/cd(1:1)] +Chunk.position.axis.function.refCoord.coord1.pix = [/sitedata/site{}/header/wcs/crpix(0)] +Chunk.position.axis.function.refCoord.coord1.val = [/sitedata/site{}/header/wcs/crval(0)] +Chunk.position.axis.function.refCoord.coord2.pix = [/sitedata/site{}/header/wcs/crpix(1)] +Chunk.position.axis.function.refCoord.coord2.val = [/sitedata/site{}/header/wcs/crval(1)] +Chunk.position.axis.axis1.ctype = [/sitedata/site{}/header/wcs/ctype(0)] +Chunk.position.axis.axis1.cunit = [/sitedata/site{}/header/wcs/cunit(0)] +Chunk.position.axis.axis2.ctype = [/sitedata/site{}/header/wcs/ctype(1)] +Chunk.position.axis.axis2.cunit = [/sitedata/site{}/header/wcs/cunit(1)] +Chunk.position.axis.function.cd11 = [/sitedata/site{}/header/wcs/cd(0:0)] +Chunk.position.axis.function.cd12 = [/sitedata/site{}/header/wcs/cd(0:1)] +Chunk.position.axis.function.cd21 = [/sitedata/site{}/header/wcs/cd(1:0)] +Chunk.position.axis.function.cd22 = [/sitedata/site{}/header/wcs/cd(1:1)] Chunk.position.equinox = [//header/object/epoch] Chunk.position.axis.error1.syser = None Chunk.position.axis.error1.rnder= None From 13368cdc90b6eb3bbc9252511b1ad3a8dc3986d1 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Tue, 5 Mar 2024 09:36:28 -0800 Subject: [PATCH 29/36] CADC-10810 - interim commit. --- caom2utils/caom2utils/tests/test_obs_blueprint.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/caom2utils/caom2utils/tests/test_obs_blueprint.py b/caom2utils/caom2utils/tests/test_obs_blueprint.py index 0864fb4f..e60a56f4 100644 --- a/caom2utils/caom2utils/tests/test_obs_blueprint.py +++ b/caom2utils/caom2utils/tests/test_obs_blueprint.py @@ -135,7 +135,7 @@ def test_obs_blueprint(): # set in extension ob.set('Chunk.energy.velang', 33, extension=1) - extension1_str = str(ob)[str(ob).index('extension 1') :] + extension1_str = str(ob)[str(ob).index('extension 1'):] assert 'Chunk.energy.velang = 33' in extension1_str # set fits attribute in extension @@ -147,7 +147,7 @@ def test_obs_blueprint(): # set in a different extension ob.set('Chunk.energy.velang', 44, extension=2) - extension2_str = str(ob)[str(ob).index('extension 2') :] + extension2_str = str(ob)[str(ob).index('extension 2'):] assert 'Chunk.energy.velang = 44' in extension2_str # test get @@ -190,7 +190,7 @@ def test_obs_blueprint(): # set defaults in extension ob.set_default('Chunk.energy.axis.axis.ctype', 'NOCTYPE', extension=3) - extension3_str = str(ob)[str(ob).index('extension 3') :] + extension3_str = str(ob)[str(ob).index('extension 3'):] assert "Chunk.energy.axis.axis.ctype = NOCTYPE" in extension3_str assert len(ob._extensions) == 1 From 48fe01f3e5fa7bcec49537ea07e83a3f0ebb4f0b Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Fri, 18 Oct 2024 16:00:37 -0700 Subject: [PATCH 30/36] CADC-10810 - handle the TAOSII case of image and lightcurve data in the same file. Set up Part creation to be instigated by subsets of extensions. 
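ContentParser, FitsParser and Hdf5Parser gain optional extension_start_index / extension_end_index arguments so
that one file can be described by more than one parser, each producing Parts for only a subset of the
extensions (for example, image extensions in one pass and lightcurve extensions in another). A minimal sketch
of the intended call pattern; the file name, URI, blueprint configuration and the split index of 2 are
illustrative assumptions, not values from the test data:

    import h5py
    from caom2utils.blueprints import Hdf5ObsBlueprint
    from caom2utils.parsers import Hdf5Parser

    h5f = h5py.File('/tmp/taosii_sample.h5')
    image_bp = Hdf5ObsBlueprint()   # configured elsewhere for the image metadata
    lc_bp = Hdf5ObsBlueprint()      # configured elsewhere for the lightcurve metadata

    # Parts 0 and 1 are built from the image extensions (the end index is exclusive) ...
    image_parser = Hdf5Parser(image_bp, 'cadc:TAOSII/taosii_sample.h5', h5f, extension_end_index=2)
    # ... and Parts from index 2 onward are built from the lightcurve extensions
    lc_parser = Hdf5Parser(lc_bp, 'cadc:TAOSII/taosii_sample.h5', h5f, extension_start_index=2)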
--- caom2utils/caom2utils/parsers.py | 27 ++++++++++++++++++--------- caom2utils/caom2utils/wcs_parsers.py | 5 +++-- 2 files changed, 21 insertions(+), 11 deletions(-) diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index 9a021a9a..38cd8033 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -579,8 +579,11 @@ def _get_quality(self, current): class ContentParser(BlueprintParser): - def __init__(self, obs_blueprint=None, uri=None): + def __init__(self, obs_blueprint=None, uri=None, extension_start_index=0, extension_end_index=None): super().__init__(obs_blueprint, uri) + # for those cases where the extensions of interest are not all the extensions in the original file + self._extension_start_index = extension_start_index + self._extension_end_index = extension_end_index if extension_end_index else self._get_num_parts() self._wcs_parsers = {} self._set_wcs_parsers(obs_blueprint) @@ -593,7 +596,7 @@ def _get_num_parts(self): return len(self._blueprint._extensions) + 1 def _set_wcs_parsers(self, obs_blueprint): - self._wcs_parsers[0] = WcsParser(obs_blueprint, extension=0) + self._wcs_parsers[0] = WcsParser(obs_blueprint, extension=self._extension_start_index) def augment_artifact(self, artifact): """ @@ -607,17 +610,21 @@ def augment_artifact(self, artifact): if self.blueprint.get_configed_axes_count() == 0: raise TypeError(f'No WCS Data. End content artifact augmentation for ' f'{artifact.uri}.') - for index in range(0, self._get_num_parts()): + for index in range(self._extension_start_index, self._extension_end_index): if self.add_parts(artifact, index): part = artifact.parts[str(index)] part.product_type = self._get_from_list('Part.productType', index) - part.meta_producer = self._get_from_list('Part.metaProducer', index=0, current=part.meta_producer) + part.meta_producer = self._get_from_list( + 'Part.metaProducer', index=self._extension_start_index, current=part.meta_producer + ) # each Part has one Chunk, if it's not an empty part as determined just previously if not part.chunks: part.chunks.append(caom2.Chunk()) chunk = part.chunks[0] - chunk.meta_producer = self._get_from_list('Chunk.metaProducer', index=0, current=chunk.meta_producer) + chunk.meta_producer = self._get_from_list( + 'Chunk.metaProducer', index=self._extension_start_index, current=chunk.meta_producer + ) self._get_chunk_naxis(chunk, index) @@ -1478,7 +1485,7 @@ class FitsParser(ContentParser): """ - def __init__(self, src, obs_blueprint=None, uri=None): + def __init__(self, src, obs_blueprint=None, uri=None, extension_start_index=0, extension_end_index=None): """ Ctor :param src: List of headers (dictionary of FITS keywords:value) with one header for each extension or a FITS @@ -1505,6 +1512,8 @@ def __init__(self, src, obs_blueprint=None, uri=None): self._errors = [] # for command-line parameter to module execution self.uri = uri + self._extension_start_index = extension_start_index + self._extension_end_index = extension_end_index if extension_end_index is not None else self._get_num_parts() self.apply_blueprint() def _get_num_parts(self): @@ -1863,7 +1872,7 @@ class Hdf5Parser(ContentParser): - use the astropy.wcs instance and other blueprint metadata to fill the CAOM2 record. 
""" - def __init__(self, obs_blueprint, uri, h5_file, extension_names=None): + def __init__(self, obs_blueprint, uri, h5_file, extension_names=None, extension_start_index=0, extension_end_index=None): """ :param obs_blueprint: Hdf5ObsBlueprint instance :param uri: which artifact augmentation is based on @@ -1874,7 +1883,7 @@ def __init__(self, obs_blueprint, uri, h5_file, extension_names=None): # the length of the array is the number of Parts in an HDF5 file, # and the values are HDF5 lookup path names. self._extension_names = extension_names - super().__init__(obs_blueprint, uri) + super().__init__(obs_blueprint, uri, extension_start_index, extension_end_index) def _get_num_parts(self): """return the number of Parts to create for a CAOM record @@ -2119,7 +2128,7 @@ def apply_blueprint(self): return def augment_artifact(self, artifact): - for ii in range(0, self._get_num_parts()): + for ii in range(self._extension_start_index, self._extension_end_index): # one WCS parser per Part/Chunk self._wcs_parsers[ii] = Hdf5WcsParser(self._blueprint, ii) super().augment_artifact(artifact) diff --git a/caom2utils/caom2utils/wcs_parsers.py b/caom2utils/caom2utils/wcs_parsers.py index ed02fa26..a8b51871 100644 --- a/caom2utils/caom2utils/wcs_parsers.py +++ b/caom2utils/caom2utils/wcs_parsers.py @@ -644,8 +644,9 @@ def _get_axis_length(self, for_axis): else: result = self._wcs.array_shape[for_axis - 1] if isinstance(result, tuple): - # the blueprint is incompletely configured - raise ValueError(f'Could not find axis length for axis {for_axis}') + raise ValueError( + f'Could not find axis length for axis {for_axis}. The blueprint is incompletely configured.' + ) return _to_int(result) def _get_cd(self, x_index, y_index): From cc81abb6ac0898f259c35148fc74df3437048f34 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Thu, 6 Feb 2025 12:54:23 -0800 Subject: [PATCH 31/36] refactor(parsers.py): remove spurious error logging --- caom2utils/caom2utils/parsers.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index 15f1b86f..fabba423 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -1921,15 +1921,12 @@ def _extract_from_item(name, object): # If it's the Part/Chunk metadata, capture it to extensions. # Syntax of the keys described in Hdf5ObsBlueprint class. 
for part_index, part_name in enumerate(self._extension_names): - # self.logger.error(f'part_index {part_index} part_name {part_name} name {name} names {object.dtype.names}') - # self.logger.error(f'part_index {part_index} part_name {part_name} name {name}') if name.startswith(part_name) and isinstance(object, h5py.Dataset) and object.dtype.names is not None: for d_name in object.dtype.names: temp_path = f'{name.replace(part_name, "")}/{d_name}' for path_name in multi.keys(): if path_name == temp_path: for jj in multi.get(path_name): - # self.logger.error(f'set 1 {jj}') self._blueprint.set(jj, object[d_name], part_index) elif path_name.startswith(temp_path) and '(' in path_name: z = path_name.split('(') @@ -1938,7 +1935,6 @@ def _extract_from_item(name, object): if len(a) > 2: raise NotImplementedError for jj in multi.get(path_name): - # self.logger.error(f'set 2 {jj}') self._blueprint.set( jj, object[d_name][int(a[0])][int(a[1])], @@ -1947,7 +1943,6 @@ def _extract_from_item(name, object): else: index = int(z[1].split(')')[0]) for jj in multi.get(path_name): - # self.logger.error(f'set 3 z {z} {jj} d_name {d_name} index {index}') self._blueprint.set( jj, object[d_name][index], @@ -1961,7 +1956,6 @@ def _extract_from_item(name, object): temp = f'//{name}/{d_name}' if temp in individual.keys(): for jj in individual.get(temp): - # self.logger.error(f'set 4 {jj}') self._blueprint.set(jj, object[d_name], 0) else: for ind_path in filtered_individual: @@ -1969,7 +1963,6 @@ def _extract_from_item(name, object): z = ind_path.split('(') index = int(z[1].split(')')[0]) for jj in individual.get(ind_path): - # self.logger.error(f'set 5 {jj}') self._blueprint.set(jj, object[d_name][index], 0) if len(individual) == 0 and len(multi) == 0: From 5aed9d5a5a250af92999115b688b0cf660ff5886 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Wed, 12 Feb 2025 10:55:02 -0800 Subject: [PATCH 32/36] refactor(code-review): address code review comments CADC-10810 --- caom2utils/caom2utils/parsers.py | 9 +++------ caom2utils/caom2utils/tests/test_fits2caom2.py | 17 +++++++++++++++++ 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index fabba423..a9f7c57b 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -1887,9 +1887,8 @@ def _get_num_parts(self): return result def _set_wcs_parsers(self, obs_blueprint): - # self._wcs_parsers[0] = WcsParser(obs_blueprint, extension=0) - # used to set the astropy wcs info, resulting in a validated wcs - # that can be used to construct a valid CAOM2 record + # used to set the astropy wcs info, resulting in a validated wcs that can be used to construct a valid CAOM2 + # record self._wcs_parsers = {} def apply_blueprint_from_file(self): @@ -1897,8 +1896,7 @@ def apply_blueprint_from_file(self): Retrieve metadata from file, cache in the blueprint. 
""" self.logger.debug('Begin apply_blueprint_from_file') - # h5py is an extra in this package since most collections do not - # require it + # h5py is an extra in this package since most collections do not require it import h5py individual, multi, attributes, candidate_extensions = self._extract_path_names_from_blueprint() @@ -2032,7 +2030,6 @@ def _extract_extension_prefixes(name, object): :param name: fully-qualified HDF5 path name :param object: what the HDF5 path name points to """ - import h5py for part_name in candidates: y = part_name.replace('/', '', 1) if name.startswith(y): diff --git a/caom2utils/caom2utils/tests/test_fits2caom2.py b/caom2utils/caom2utils/tests/test_fits2caom2.py index 880121ec..5ef674c2 100755 --- a/caom2utils/caom2utils/tests/test_fits2caom2.py +++ b/caom2utils/caom2utils/tests/test_fits2caom2.py @@ -1412,6 +1412,23 @@ def test_generic_parser1(): assert test_parser._blueprint._plan[test_key] == test_value, 'original value over-ridden' +def test_generic_parser_imported_module_error_handling(): + # this test exercises the error handling code for executing functions defined by blueprints + test_key = 'Plane.metaRelease' + test_key_2 = 'Plane.dataRelease' + test_value = '2013-10-10' + test_blueprint = ObsBlueprint() + test_blueprint.set(test_key, '2013-10-10') + # pick __sizeof__ as an attribute that will fail to execute for any module + test_blueprint.set(test_key_2, '__sizeof__()') + test_parser = BlueprintParser() + assert test_parser._blueprint._plan[test_key] == (['RELEASE', 'REL_DATE'], None), 'default value changed' + test_parser.blueprint = test_blueprint + assert test_parser._blueprint._plan[test_key] == test_value, 'original value over-ridden' + test_result = test_parser._execute_external('__sizeof__(uri)', test_key_2, 0) + assert test_result == '', 'wrong result' + + def test_get_external_headers(): test_uri = 'http://localhost/obs23/collection/obsid-1' with patch('requests.Session.get') as session_get_mock: From 2287d4bea70ebb100e4d27718984ed24a6b48c03 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Wed, 12 Feb 2025 10:58:51 -0800 Subject: [PATCH 33/36] refactor(code-review): address code review comments CADC-10810 --- caom2utils/caom2utils/parsers.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index a9f7c57b..1fb0e2bd 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -2012,9 +2012,7 @@ def _extract_path_names_from_blueprint(self): else: attributes[ii].append(key) - temp = list(set(extensions)) - extensions = temp - return individual, multi, attributes, extensions + return individual, multi, attributes, list(set(extensions)) def _find_extension_names(self, candidates): """ if the HDF5 file has a structure where-by more than one Chunk (the equivalent of a FITS HDU extension) From d0af8ec4b46674f231856cef898087de3c4b5054 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Wed, 12 Feb 2025 12:26:37 -0800 Subject: [PATCH 34/36] build(setup.cfg): increment version number --- caom2utils/setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caom2utils/setup.cfg b/caom2utils/setup.cfg index 6ba07b9a..18ebdcae 100644 --- a/caom2utils/setup.cfg +++ b/caom2utils/setup.cfg @@ -33,7 +33,7 @@ url = https://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/caom2 edit_on_github = False github_project = opencadc/caom2tools # version should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386) -version = 1.7.2 +version 
= 1.7.3 [options] install_requires = From 58fc8ab7d46afb1f5a1339a93a006807b251c5f3 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Thu, 13 Feb 2025 07:55:29 -0800 Subject: [PATCH 35/36] docs(parsers.py): add comment on the behaviour of a specialized method CADC-10810 --- caom2utils/caom2utils/parsers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index 1fb0e2bd..877978ad 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -1889,6 +1889,8 @@ def _get_num_parts(self): def _set_wcs_parsers(self, obs_blueprint): # used to set the astropy wcs info, resulting in a validated wcs that can be used to construct a valid CAOM2 # record + # This method call is over-writing the default behaviour in the ContentParser class. The default behaviour + # uses the obs_blueprint. This method is called in the ContentParser constructor. self._wcs_parsers = {} def apply_blueprint_from_file(self): From e1729f65c1195f789a03a25da722c3f1588ddb63 Mon Sep 17 00:00:00 2001 From: Sharon Goliath Date: Thu, 13 Feb 2025 11:00:56 -0800 Subject: [PATCH 36/36] style(parsers.py): flake8 --- caom2utils/caom2utils/parsers.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/caom2utils/caom2utils/parsers.py b/caom2utils/caom2utils/parsers.py index 877978ad..5b86520a 100644 --- a/caom2utils/caom2utils/parsers.py +++ b/caom2utils/caom2utils/parsers.py @@ -1864,7 +1864,8 @@ class Hdf5Parser(ContentParser): - use the astropy.wcs instance and other blueprint metadata to fill the CAOM2 record. """ - def __init__(self, obs_blueprint, uri, h5_file, extension_names=None, extension_start_index=0, extension_end_index=None): + def __init__(self, obs_blueprint, uri, h5_file, extension_names=None, extension_start_index=0, + extension_end_index=None): """ :param obs_blueprint: Hdf5ObsBlueprint instance :param uri: which artifact augmentation is based on @@ -1889,7 +1890,7 @@ def _get_num_parts(self): def _set_wcs_parsers(self, obs_blueprint): # used to set the astropy wcs info, resulting in a validated wcs that can be used to construct a valid CAOM2 # record - # This method call is over-writing the default behaviour in the ContentParser class. The default behaviour + # This method call is over-writing the default behaviour in the ContentParser class. The default behaviour # uses the obs_blueprint. This method is called in the ContentParser constructor. self._wcs_parsers = {}
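# A brief usage sketch of the '{}' extension-name discovery that the Hdf5Parser changes
# above support; the blueprint path, axis configuration, file name and URI here are
# illustrative assumptions, not values copied from the repository tests.
#
#     import h5py
#     from caom2utils.blueprints import Hdf5ObsBlueprint
#     from caom2utils.parsers import Hdf5Parser
#
#     bp = Hdf5ObsBlueprint(position_axes=(1, 2))
#     bp.add_attribute('Chunk.position.axis.function.refCoord.coord1.pix',
#                      '/sitedata/site{}/header/wcs/crpix(0)')
#     h5f = h5py.File('/tmp/20220201T200117.h5')
#     parser = Hdf5Parser(bp, 'cadc:TAOSII/20220201T200117.h5', h5f)
#     # with no extension_names supplied, Hdf5Parser scans the file for groups matching
#     # the '/sitedata/site' prefix and creates one Part/Chunk per discovered extension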