Skip to content

Commit

Permalink
Merge pull request #1585 from Caltech-IPAC/FIREFLY-1512-fitslib
Browse files Browse the repository at this point in the history
Firefly-1512: Update nom.tam.fits to 1.20.0
  • Loading branch information
robyww authored Jul 11, 2024
2 parents 8dfe7c1 + 44bca9f commit 0623824
Show file tree
Hide file tree
Showing 10 changed files with 62 additions and 74 deletions.
2 changes: 1 addition & 1 deletion buildScript/dependencies.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ dependencies {
implementation 'org.apache.logging.log4j:log4j-core:2.20.0'

// nom tam fits
implementation 'gov.nasa.gsfc.heasarc:nom-tam-fits:1.18.1'
implementation 'gov.nasa.gsfc.heasarc:nom-tam-fits:1.20.0'

// openidconnect
implementation ('net.minidev:json-smart:2.3')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
import edu.caltech.ipac.firefly.visualize.PlotState;
import edu.caltech.ipac.util.FileUtil;
import edu.caltech.ipac.visualize.plot.plotdata.FitsRead;
import nom.tam.fits.FitsException;

import java.io.File;
import java.io.IOException;
Expand Down Expand Up @@ -39,7 +38,7 @@ public void writeFile(PlotState state) {
PlotStateUtil.setWorkingFitsFile(state, targetFile, band);
try {
fr.writeSimpleFitsFile(targetFile);
} catch (FitsException|IOException e) {
} catch (IOException e) {
Logger.getLogger().warn(e,"geom write failed", "geom file: "+targetFile.getPath());
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ public static FitsDataEval readAndEvaluate(File f, boolean clearHdu, WebPlotRequ
public static FitsDataEval readAndEvaluate(Fits fits, File f, boolean clearHdu, WebPlotRequest req) throws FitsException, IOException {
FitsReadUtil.UncompressFitsInfo uFitsInfo= null;
try {
BasicHDU<?>[] HDUs= FitsReadUtil.readHDUs(fits);
BasicHDU<?>[] HDUs= fits.read();
if (FitsReadUtil.hasCompressedImageHDUS(HDUs)) uFitsInfo = FitsReadUtil.createdUncompressImageHDUFile(HDUs,f);

File fitsFile= uFitsInfo!=null ? uFitsInfo.file() : f;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ public static DataGroup convertFitsToDataGroup(String fits_filename,
// the exception error sent from nom.tam.fits.
DataGroup result;
;
BasicHDU<?>[] hdus = FitsReadUtil.readHDUs(fits);
BasicHDU<?>[] hdus = fits.read();

if (table_idx >= hdus.length) {
throw new FitsException( "table index of " +table_idx+" exceeds the number of HDUS " + hdus.length);
Expand Down
2 changes: 1 addition & 1 deletion src/firefly/java/edu/caltech/ipac/util/FitsHDUUtil.java
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ public static FitsAnalysisReport analyze(File infile, FileAnalysisReport.ReportT
Header[] headerAry;

try (Fits fits= new Fits(infile)) {
BasicHDU<?>[] parts = FitsReadUtil.readHDUs(fits);
BasicHDU<?>[] parts = fits.read();
headerAry= new Header[parts.length];
for(int i = 0; i < parts.length; i++) {
FileAnalysisReport.Type ptype;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ public class FitsExtract {
public enum CombineType {AVG, SUM, OR}

private static Number combineArray(List<Number> aryList, CombineType ct, Class<?> type) {
if (aryList.size() == 0) return Double.NaN;
if (aryList.isEmpty()) return Double.NaN;
if (aryList.size() == 1) return aryList.get(0);
double cnt = 0;
var realCt= (ct==CombineType.OR && (type==Float.TYPE || type==Double.TYPE)) ? CombineType.AVG : ct;
Expand Down Expand Up @@ -60,8 +60,8 @@ private static Number getNan(Number v) {
return Double.NaN;
}
private static boolean isNaN(Number v) {
if (v instanceof Double) return ((Double) v).isNaN();
if (v instanceof Float) return ((Float) v).isNaN();
if (v instanceof Double d) return d.isNaN();
if (v instanceof Float f) return f.isNaN();
return false;
}

Expand Down Expand Up @@ -147,8 +147,7 @@ static ImageHDU getImageHDU(BasicHDU<?>[] hdus, int idx) throws FitsException {
if ( !(hdus[idx] instanceof ImageHDU) && !(hdus[idx] instanceof CompressedImageHDU) ) {
throw new FitsException(idx + " is not a cube");
}
return (hdus[idx] instanceof CompressedImageHDU) ?
((CompressedImageHDU) hdus[idx]).asImageHDU() : (ImageHDU) hdus[idx];
return (hdus[idx] instanceof CompressedImageHDU cHDU) ? cHDU.asImageHDU() : (ImageHDU) hdus[idx];
}

public static List<Number> getPointDataAry(ImagePt[] ptAry, int plane, BasicHDU<?>[] hdus, int hduNum, int refHduNum, int ptSize, CombineType ct)
Expand Down Expand Up @@ -185,7 +184,7 @@ public static List<Number> getLineDataAry(ImagePt pt1, ImagePt pt2, int plane, B

int minX = (int)Math.min(x1, x2);
int maxX = (int)Math.max(x1, x2) ;
int n = (int)Math.rint(Math.ceil(maxX-minX))+1;
int n = maxX - minX +1;
List<Number> pts= new ArrayList<>(n);
for (x=minX; x<=maxX; x+=1) {
y = (int)(slope*x + yIntercept);
Expand All @@ -198,7 +197,7 @@ public static List<Number> getLineDataAry(ImagePt pt1, ImagePt pt2, int plane, B

int minY = (int)Math.min(y1, y2);
int maxY = (int)Math.max(y1, y2);
int n = (int)Math.rint(Math.ceil(maxY - minY))+1;
int n = maxY - minY +1;
List<Number> pts= new ArrayList<>(n);

for (y=minY; y<=maxY; y+=1) {
Expand All @@ -214,7 +213,7 @@ public static List<ExtractionResults> extractFromRelatedHDUs(File fitsFile, int
boolean allMatchingHDUs, Extractor extractor)
throws FitsException, IOException {
try (Fits fits = new Fits(fitsFile)) {
BasicHDU<?>[] hdus = FitsReadUtil.readHDUs(fits);
BasicHDU<?>[] hdus = fits.read();
BasicHDU<?> hdu = hdus[refHduNum];
validateImageAtHDU(hdus, refHduNum);
Header refHeader = hdu.getHeader();
Expand Down Expand Up @@ -243,7 +242,7 @@ public static List<ExtractionResults> extractFromRelatedHDUs(File fitsFile, int
public static List<Number> extractFromHDU(File fitsFile, int hduNum, Extractor extractor)
throws FitsException, IOException {
try (Fits fits= new Fits(fitsFile)) {
return extractor.extractAry(FitsReadUtil.readHDUs(fits), hduNum);
return extractor.extractAry(fits.read(), hduNum);
}
}

Expand Down Expand Up @@ -323,7 +322,7 @@ private static void validateImageAtHDU(BasicHDU<?>[] hdus, int hduNum) throws Fi
}
}

interface Extractor { List<Number> extractAry(BasicHDU<?>[] hdus, int hduNum) throws FitsException, IOException; }
/**
 * Strategy for pulling a list of numeric samples out of one HDU of a FITS file.
 * Implementations receive the full HDU array plus the index of the HDU to extract from.
 */
public interface Extractor { List<Number> extractAry(BasicHDU<?>[] hdus, int hduNum) throws FitsException, IOException; }

/**
 * Result of extracting data from one HDU.
 * hduNum: index of the HDU the data came from; extName: the HDU's extension name;
 * aryData: the extracted numeric samples; refHDU: true when this HDU is the reference HDU;
 * header: the FITS header of the source HDU.
 */
public record ExtractionResults(int hduNum, String extName, List<Number> aryData, boolean refHDU, Header header) { }
}
Original file line number Diff line number Diff line change
Expand Up @@ -86,9 +86,9 @@ public class FitsRead implements Serializable, HasSizeOf {

}

public static ImageHDU makeImageHDU(BasicHDU<?> hdu) throws FitsException {
if (hdu instanceof ImageHDU) return (ImageHDU)hdu;
else if (hdu instanceof CompressedImageHDU) return ((CompressedImageHDU) hdu).asImageHDU();
/**
 * Coerce the given HDU to an ImageHDU: an ImageHDU is returned as-is and a
 * CompressedImageHDU is decompressed via asImageHDU(); anything else is rejected.
 *
 * @param hdu the HDU to convert
 * @return the HDU as an (uncompressed) ImageHDU
 * @throws FitsException if the HDU is neither an ImageHDU nor a CompressedImageHDU
 */
private static ImageHDU makeImageHDU(BasicHDU<?> hdu) throws FitsException {
    if (hdu instanceof ImageHDU iHdu) return iHdu;
    if (hdu instanceof CompressedImageHDU cHdu) return cHdu.asImageHDU();
    // fixed message typo: was "much be a ImageHDU"
    throw new FitsException("imageHdu must be an ImageHDU or a CompressedImageHDU");
}

Expand Down Expand Up @@ -122,12 +122,12 @@ public float[] getRawFloatAry() {
if (float1d!=null) return float1d;
if (!deferredRead) throw new IllegalArgumentException("FitsRead not setup for deferred reading");
try (Fits fits = new Fits(this.file)) {
BasicHDU<?> hdu= FitsReadUtil.readHDUs(fits)[this.hduNumber];
BasicHDU<?> hdu= fits.read()[this.hduNumber];
if (!(hdu instanceof ImageHDU)) return null;
float1d= (float [])dataArrayFromFitsFile((ImageHDU)hdu, 0,0,getNaxis1(),getNaxis2(), planeNumber,Float.TYPE);
return float1d;
}
catch (FitsException|IOException|ArrayIndexOutOfBoundsException e) {
catch (Exception e) {
Logger.getLogger("FitsRead").error(e,"Could not ready cube FITS plane");
return null;
}
Expand Down Expand Up @@ -308,13 +308,13 @@ public long getSizeOf() {
return retSize;
}

public void writeSimpleFitsFile(File f) throws FitsException, IOException{
/**
 * Write this FitsRead out to the given file as a simple FITS file.
 * Delegates to createNewFits(), which fails with an IOException when the HDU
 * has been cleared (see clearHDU()).
 *
 * @param f the target file to write
 * @throws IOException if the HDU has been cleared or the write fails
 */
public void writeSimpleFitsFile(File f) throws IOException{
createNewFits().write(f);
}

public void clearHDU() { this.hdu= null; }

public Fits createNewFits() throws FitsException, IOException {
public Fits createNewFits() throws IOException {
if (hdu==null) {
throw new IOException("HDU has been clear, this FitsRead no longer supports re-writing the FITS file");
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ public class FitsReadFactory {
* read a fits with extensions or cube data to create a list of the FistRead object
*/
public static FitsRead[] createFitsReadArray(Fits fits) throws FitsException {
return createFitsReadArray(FitsReadUtil.readHDUs(fits),null,false);
return createFitsReadArray(fits.read(),null,false);
}

public static FitsRead[] createFitsReadArray(BasicHDU<?>[] HDUs, File f, boolean clearHdu) throws FitsException {
Expand Down Expand Up @@ -166,16 +166,14 @@ public static FitsRead createFitsReadWithGeom(FitsRead aFitsRead,
if (aRefFitsRead != null) {
ImageHeader refHeader = new ImageHeader(aRefFitsRead.getHeader());
Geom geom = new Geom();
//geom.override_naxis1=0;
geom.n_override_naxis1 = aDoscale;

ImageHeader imageHeader = geom.open_in(aFitsRead);
double primCdelt1 = Math.abs(imageHeader.cdelt1);
double refCdelt1 = Math.abs(refHeader.cdelt1);
int imageScaleFactor = 1;
boolean shouldScale = 2 * refCdelt1 < primCdelt1;
if (aDoscale && shouldScale) {
imageScaleFactor = (int) (primCdelt1 / refCdelt1);
int imageScaleFactor = (int) (primCdelt1 / refCdelt1);
geom.override_cdelt1 = refHeader.cdelt1 * imageScaleFactor;
geom.n_override_cdelt1 = true;
geom.override_cdelt2 = refHeader.cdelt2 * imageScaleFactor;
Expand All @@ -199,7 +197,7 @@ public static FitsRead createFitsReadWithGeom(FitsRead aFitsRead,
}
}

//make a copy of the reference fits
//make a copy of the reference fits
Fits modFits = geom.do_geom(aRefFitsRead);

FitsRead[] fitsReadArray = createFitsReadArray(modFits);
Expand All @@ -225,7 +223,7 @@ public static FitsRead createFitsReadPositionAngle(FitsRead fitsRead, double pos
Header refHeader = getRefHeader(geom, fitsRead, positionAngle, coordinateSys);

//create a ImageHDU with the null data
ImageHDU refHDU = FitsReadUtil.makeImageHDU(refHeader, null);
ImageHDU refHDU = FitsReadUtil.makeEmptyImageHDU(refHeader);
Fits refFits = new Fits();
refFits.addHDU(refHDU);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,11 +46,9 @@ public class FitsReadUtil {
public static final String SPOT_BP = "SPOT_BP"; // original bitpix
public static final String SPOT_PL = "SPOT_PL"; // cube plane number, only used with cubes, deprecated

public static ImageData getImageData(BasicHDU<?> refHdu, float[] float1d) throws FitsException {
Header header = refHdu.getHeader();
int naxis1 = header.getIntValue("NAXIS1");
int naxis2 = header.getIntValue("NAXIS2");
int[] dims2 = new int[]{naxis1, naxis2};
public static ImageData getImageData(BasicHDU<?> refHdu, float[] float1d) {
Header h = refHdu.getHeader();
int[] dims2 = new int[] {getNaxis1(h), getNaxis2(h)};
float[][] fdata = (float[][]) ArrayFuncs.curl(float1d, dims2);
Object data = ArrayFuncs.convertArray(fdata, getDataType(refHdu), true);
return new ImageData(data);
Expand All @@ -77,24 +75,21 @@ public static Header cloneHeaderFrom(Header header) throws HeaderCardException {


public static boolean hasCompressedImageHDUS(BasicHDU<?>[] HDUs) {
for (BasicHDU<?> hdu : HDUs) {
if (hdu instanceof CompressedImageHDU) return true;
}
return false;
return Arrays.stream(HDUs).anyMatch(h -> h instanceof CompressedImageHDU);
}

public static Header getTopFitsHeader(File f) {
try (Fits fits= new Fits(f)) {
return fits.getHDU(0).getHeader();
} catch (FitsException|IOException e) {
} catch (IOException e) {
return null;
}
}

public record UncompressFitsInfo(File file, BasicHDU<?>[] HDUs, Fits fits) {}

public static UncompressFitsInfo createdUncompressImageHDUFile(BasicHDU<?>[] HDUs, File originalFile)
throws FitsException, IOException {
throws IOException {
String fBase= FileUtil.getBase(originalFile);
String dir= originalFile.getParent();
File retFile= new File(dir+"/"+ fBase+"---hdu-uncompressed"+".fits");
Expand All @@ -105,10 +100,10 @@ public static UncompressFitsInfo createdUncompressImageHDUFile(BasicHDU<?>[] HDU
fits.write(retFile);
closeFits(fits);
Fits retReadFits= new Fits(retFile);
return new UncompressFitsInfo(retFile,FitsReadUtil.readHDUs(retReadFits), retReadFits);
return new UncompressFitsInfo(retFile,fits.read(), retReadFits);
}

public static BasicHDU<?>[] getImageHDUArray(BasicHDU<?>[] HDUs, boolean onlyFireCubeHdu) throws FitsException {
public static BasicHDU<?>[] getImageHDUArray(BasicHDU<?>[] HDUs, boolean onlyFireCubeHdu) {
ArrayList<BasicHDU<?>> HDUList = new ArrayList<>();

String delayedExceptionMsg = null; // the exception can be ignored if HDUList size is greater than 0
Expand Down Expand Up @@ -161,15 +156,15 @@ else if (naxis == 1) {

} //end j loop

if (HDUList.size() == 0 && delayedExceptionMsg != null) {
if (HDUList.isEmpty() && delayedExceptionMsg != null) {
throw new FitsException(delayedExceptionMsg);
}
return HDUList.toArray(new BasicHDU<?>[0]);
}



private static void insertPositionIntoHeader(Header header, int pos, long hduOffset) throws FitsException {
private static void insertPositionIntoHeader(Header header, int pos, long hduOffset) {
if (hduOffset < 0) hduOffset = 0;
if (pos < 0) pos = 0;
long headerSize = getHeaderSize(header);
Expand All @@ -182,7 +177,7 @@ private static void insertPositionIntoHeader(Header header, int pos, long hduOff
}


private static BasicHDU<?>[] splitFits3DCube(BasicHDU<?> inHdu, boolean onlyFirstCubeHdu) throws FitsException {
private static BasicHDU<?>[] splitFits3DCube(BasicHDU<?> inHdu, boolean onlyFirstCubeHdu) {
ImageHDU hdu = (inHdu instanceof ImageHDU) ? (ImageHDU) inHdu : ((CompressedImageHDU) inHdu).asImageHDU(); // if we have to uncompress a cube it could take a long time
BasicHDU<?>[] hduList = new BasicHDU<?>[hdu.getHeader().getIntValue("NAXIS3", 0)];

Expand All @@ -191,7 +186,7 @@ private static BasicHDU<?>[] splitFits3DCube(BasicHDU<?> inHdu, boolean onlyFirs
hduList[i] = null;
}
else {
hduList[i] = makeImageHDU(cloneHeaderFrom(hdu.getHeader()), null);
hduList[i] = makeEmptyImageHDU(cloneHeaderFrom(hdu.getHeader()));
//set the header pointer to the BITPIX location to add the new key. Without calling this line, the pointer is point
//to the end of the Header, the SPOT_PL is added after the "END" key, which leads the image loading failure.
hduList[i].getHeader().getIntValue("BITPIX", -1);
Expand All @@ -204,7 +199,7 @@ private static BasicHDU<?>[] splitFits3DCube(BasicHDU<?> inHdu, boolean onlyFirs

}

private static BasicHDU<?>[] splitFitsCube(BasicHDU<?> inHdu, boolean onlyFirstCubeHdu) throws FitsException {
private static BasicHDU<?>[] splitFitsCube(BasicHDU<?> inHdu, boolean onlyFirstCubeHdu) {
int naxis = inHdu.getHeader().getIntValue("NAXIS", -1);

switch (naxis) {
Expand Down Expand Up @@ -270,8 +265,8 @@ public static float[] getImageHDUDataInFloatArray(BasicHDU<?> inHDU) throws Fits

Header header = imageHDU.getHeader();
double cdelt2 = header.getDoubleValue("CDELT2");
int naxis1 = header.getIntValue("NAXIS1");
int naxis2 = header.getIntValue("NAXIS2");
int naxis1 = getNaxis1(header);
int naxis2 = getNaxis2(header);

try {
if (imageDataObj.getTiler() != null) {
Expand Down Expand Up @@ -342,11 +337,18 @@ public static long getHeaderSize(Header header) {
return header.getOriginalSize() > 0 ? header.getOriginalSize() : header.getSize();
}

public static ImageHDU makeImageHDU(Header newHeader, nom.tam.fits.ImageData imageData) {
return new ImageHDU(newHeader, imageData );

/**
 * Build an ImageHDU for the given image data, then overlay the cards from
 * newHeader onto the header that Fits.makeHDU generated.
 * NOTE(review): updateLines merges newHeader's cards into the generated header —
 * confirm structural keywords (BITPIX/NAXISn) stay consistent with imageData.
 *
 * @param newHeader header whose cards are copied onto the new HDU's header
 * @param imageData the image data to wrap in the HDU
 * @return the constructed ImageHDU
 */
public static ImageHDU makeImageHDU(Header newHeader, ImageData imageData) {
var hdu= (ImageHDU) Fits.makeHDU(imageData);
hdu.getHeader().updateLines(newHeader);
return hdu;
}

/**
 * Create an ImageHDU that carries only the given header and no image data
 * (the data portion is null). Used where a header-only HDU is needed,
 * e.g. as a geometry/WCS reference.
 *
 * @param newHeader the header for the data-less HDU
 * @return an ImageHDU with the given header and null data
 */
public static ImageHDU makeEmptyImageHDU(Header newHeader) {
return new ImageHDU(newHeader, null);
}

public static void writeFitsFile(File outfile, FitsRead[] fitsReadAry, Fits refFits) throws FitsException, IOException {
public static void writeFitsFile(File outfile, FitsRead[] fitsReadAry, Fits refFits) throws IOException {
Fits outputFits = new Fits();
for (FitsRead fr : fitsReadAry) {
BasicHDU<?> refHdu = refFits.getHDU(0);
Expand All @@ -356,15 +358,6 @@ public static void writeFitsFile(File outfile, FitsRead[] fitsReadAry, Fits refF
outputFits.write(outfile);
}

/**
* Read the fits objs and return an array of HDUs.
* As of the last fits update this function is not really necessary since we no longer
* have to deal with the PaddingException issue. However, since it is used in about
* 8 difference places we should keep it around.
* @param fits the fits object to read
*/
public static BasicHDU<?>[] readHDUs(Fits fits) throws FitsException { return fits.read(); }

public static int getBitPix(Header h) {return h.getIntValue("BITPIX"); }
public static int getNaxis(Header h) { return h.getIntValue("NAXIS", 0); }
public static int getNaxis1(Header h) { return h.getIntValue("NAXIS1", 0); }
Expand Down Expand Up @@ -464,7 +457,7 @@ public static Class<?> getDataType(int bitPix){

public static Class<?> getDataType(Bitpix bp){ return bp.getNumberType(); }

public static Class<?> getDataType(BasicHDU<?> hdu) throws FitsException { return hdu.getBitpix().getNumberType(); }
/** Return the Java number type (e.g. float.class) corresponding to this HDU's BITPIX value. */
public static Class<?> getDataType(BasicHDU<?> hdu) { return hdu.getBitpix().getNumberType(); }


}
Loading

0 comments on commit 0623824

Please sign in to comment.