diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 2b47ee7..c5c12a9 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -8,6 +8,12 @@ This document records all notable changes to
This project adheres to `PEP 440 -- Version Identification
and Dependency Specification `_.
+0.4.3 (22-08-03)
+----------------
+- The Spectrum and Hsi classes have moved to **data.spectra** (import sketch below). **This could break your fork if you have custom code.**
+- LazyHDF5 is now bundled within CRIkit2 (as ``crikit.io.lazy5``) rather than required as an external library
+- Added import of macrostage raster images (NIST) stored across multiple datasets
+
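+For example, a quick sketch of the updated imports (all three classes now live in one module)::
+
+ # previously: from crikit.data.spectrum import Spectrum / from crikit.data.hsi import Hsi
+ from crikit.data.spectra import Spectrum, Spectra, Hsi
+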
0.4.2 (22-03-24)
----------------
- Changes to Phase Error Correction UI and some quality of life improvements
diff --git a/README.rst b/README.rst
index 23255b3..a3db3e5 100644
--- a/README.rst
+++ b/README.rst
@@ -73,8 +73,6 @@ packages may work.
- PyQT5
- CVXOPT
-- LazyHDF5 >= 0.2.2
-
- Requires H5Py (>= 2.6)
- SciPlot-PyQt >= 0.2.2
diff --git a/appveyor.yml b/appveyor.yml
index 63aeb90..f7ae504 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -15,7 +15,7 @@ environment:
MINICONDA_VERSION: "latest"
PYTHON_ARCH: "64"
CONDA_DEPENDENCIES: "numpy scipy pyqt matplotlib h5py cvxopt"
- PIP_DEPENDENCIES: "pytest pytest-cov codecov lazyhdf5"
+ PIP_DEPENDENCIES: "pytest pytest-cov codecov"
matrix:
- PYTHON_VERSION: "3.7"
diff --git a/crikit/CRIkitUI.py b/crikit/CRIkitUI.py
index 2e5c43a..a7755da 100644
--- a/crikit/CRIkitUI.py
+++ b/crikit/CRIkitUI.py
@@ -60,14 +60,14 @@
from crikit.data.frequency import (calib_pix_wn as _calib_pix_wn,
calib_pix_wl as _calib_pix_wl)
-from crikit.data.hsi import Hsi
+from crikit.data.spectra import Hsi
from crikit.data.spectra import Spectra
-from crikit.data.spectrum import Spectrum
+from crikit.data.spectra import Spectrum
from crikit.datasets.model import Model as _Model
from crikit.io.macros import import_csv_nist_special1 as io_nist_dlm
-from crikit.io.macros import import_hdf_nist_special as io_nist
+from crikit.io.macros import (import_hdf_nist_special as io_nist, hdf_nist_special_macroraster as io_nist_macro)
from crikit.io.macros import import_hdf_nist_special_ooc as io_nist_ooc
# from crikit.io.meta_configs import special_nist_bcars2 as _snb2
@@ -113,14 +113,14 @@
from sciplot.sciplotUI import SciPlotUI as _SciPlotUI
-import lazy5
-from lazy5.ui.QtHdfLoad import HdfLoad
+from crikit.io.lazy5.ui.QtHdfLoad import HdfLoad
+import crikit.io.lazy5 as lazy5
force_not_sw = False
try:
import crikit2_sw
-except:
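+# Catching Exception (rather than a bare except) lets KeyboardInterrupt/SystemExit still propagate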
+except Exception:
__sw_installed = False
# print('SW package not installed, using standard')
from crikit.ui.dialog_SVD import DialogSVD
@@ -143,7 +143,7 @@
try:
from crikit.ui.widget_Jupyter import QJupyterWidget
jupyter_flag = 1
-except:
+except Exception:
print('No appropriate Jupyter/IPython installation found. Console will not be available')
jupyter_flag = -1
@@ -228,7 +228,7 @@ def __init__(self, **kwargs):
self.ui.sweeperVL.insertWidget(0, self.img_BW, stretch=1, alignment=_QtCore.Qt.AlignHCenter)
try:
self.img_BW.mpl.fig.tight_layout(pad=2)
- except:
+ except Exception:
print('tight_layout failed (CrikitUI: 1')
# ID used for matplotlib to connect to a figure
@@ -290,6 +290,7 @@ def __init__(self, **kwargs):
# Load Data
self.ui.actionOpenHDFNIST.triggered.connect(self.fileOpenHDFNIST)
self.ui.actionOpenHDFNISTOOC.triggered.connect(self.fileOpenHDFNISTOOC)
+ self.ui.actionOpen_HDF_Macro_Raster_NIST.triggered.connect(self.fileOpenHDFMacroRasterNIST)
self.ui.actionLoadNRB.triggered.connect(self.loadNRB)
self.ui.actionLoadDark.triggered.connect(self.loadDark)
@@ -422,7 +423,7 @@ def __init__(self, **kwargs):
try:
str_banner = 'Welcome to the embedded ipython console\n\n'
self.jupyterConsole = QJupyterWidget(customBanner=str_banner)
- except:
+ except Exception:
print('Error loading embedded IPython Notebook')
else:
self.ui.tabMain.addTab(self.jupyterConsole, 'Jupyter/IPython Console')
@@ -455,7 +456,7 @@ def __init__(self, **kwargs):
if temp is not None:
try:
self.fileOpenSuccess(True)
- except:
+ except Exception:
print('Error in input hsi')
self.hsi = Hsi()
@@ -466,7 +467,7 @@ def __init__(self, **kwargs):
self.hsi.x = temp
self.hsi._x_rep.units = kwargs.get('x_units')
self.hsi._x_rep.label = kwargs.get('x_label')
- except:
+ except Exception:
print('Error in input x-array')
self.hsi.x = None
@@ -477,7 +478,7 @@ def __init__(self, **kwargs):
self.hsi.y = temp
self.hsi._y_rep.units = kwargs.get('y_units')
self.hsi._y_rep.label = kwargs.get('y_label')
- except:
+ except Exception:
print('Error in input y-array')
self.hsi.y = None
@@ -488,7 +489,7 @@ def __init__(self, **kwargs):
self.hsi.freq._data = temp
self.hsi.freq._units = kwargs.get('f_units')
self.hsi.freq._label = kwargs.get('f_label')
- except:
+ except Exception:
print('Error in input freq-array (f)')
self.hsi.freq._data = None
@@ -498,7 +499,7 @@ def __init__(self, **kwargs):
self.hsi.data = kwargs.get('data')
self.hsi.check()
self.fileOpenSuccess(True)
- except:
+ except Exception:
print('Error in input data')
self.hsi = Hsi()
@@ -645,7 +646,7 @@ def closeEvent(self, event):
for count in self.bcpre.cut_list:
try:
_os.remove(count + '.pickle')
- except:
+ except Exception:
print('Error in deleting old pickle files')
else:
del_flag += 1
@@ -658,11 +659,54 @@ def closeEvent(self, event):
print('Closing HDF File')
try:
self.fid.close()
- except:
+ except Exception:
print('Something failed in closing the file')
else:
print('Successfully closed HDF File')
+ def fileOpenHDFMacroRasterNIST(self, *args, dialog=True):
+ """
+ Open and load multiple datasets from an HDF file that together describe a single image.
+ Used for the macrostage rastering mode at NIST.
+
+ dialog : bool
+ Present a GUI for file and dataset selection
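+
+ Notes
+ -----
+ A minimal non-dialog sketch (assumes self.path, self.filename, and
+ self.dataset_name were set by a previous load)::
+
+ self.fileOpenHDFMacroRasterNIST(dialog=False)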
+ """
+
+ # Get data and load into CRI_HSI class
+ # This will need to change to accommodate multiple-file selection
+
+ if dialog:
+ try:
+ if (self.filename is not None) & (self.path is not None):
+ to_open = HdfLoad.getFileDataSets(_os.path.join(self.path, self.filename), parent=self, title='Hyperspectral Image')
+ else:
+ to_open = HdfLoad.getFileDataSets(self.path, parent=self, title='Hyperspectral Image')
+
+ print('to_open: {}'.format(to_open))
+ if to_open is not None:
+ self.path, self.filename, self.dataset_name = to_open
+ except Exception as e:
+ _traceback.print_exc(limit=1)
+ print('Could not open file. Corrupt or not appropriate file format: {}'.format(e))
+ else:
+ if to_open is not None:
+ self.hsi = Hsi()
+ print('Path: {}'.format(self.path))
+ print('filename: {}'.format(self.filename))
+ print('dset name: {}'.format(self.dataset_name))
+ success = io_nist_macro(self.path, self.filename, self.dataset_name,
+ self.hsi)
+ print('Was successful: {}'.format(success))
+ print('HSI shape: {}'.format(self.hsi.shape))
+ print('Success: {}'.format(success))
+ self.fileOpenSuccess(success)
+ else:
+ self.hsi = Hsi()
+ success = io_nist_macro(self.path, self.filename, self.dataset_name,
+ self.hsi)
+ self.fileOpenSuccess(success)
+
def fileOpenHDFNIST(self, *args, dialog=True):
"""
Open and load HDF5 File
@@ -725,7 +769,7 @@ def fileOpenHDFNISTOOC(self, *args):
if to_open is not None:
self.path, self.filename, self.dataset_name = to_open
self.dataset_name = self.dataset_name[0]
- except:
+ except Exception:
print('Could not open file. Corrupt or not appropriate file format.')
else:
if to_open is not None:
@@ -774,9 +818,9 @@ def fileOpenSuccess(self, success):
"""
if success:
# * If HSI is integer dtype, convert to float
- if (self.hsi.data.dtype.kind == 'i') & isinstance(self.hsi.data, _np.ndarray):
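+ # NumPy dtype.kind codes: 'i' = signed integer, 'u' = unsigned integer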
+ if (self.hsi.data.dtype.kind in ['i', 'u']) & isinstance(self.hsi.data, _np.ndarray):
print('Converting HSI from int to float')
- self.hsi.data = 1.0*self.hsi.data
+ self.hsi.data = 1.0 * self.hsi.data
self.setWindowTitle('{}: {}'.format(self.windowTitle(), self.filename))
# FILE
@@ -940,7 +984,7 @@ def fileOpenSuccess(self, success):
# signal then reconnect (or could have ignored, but this is easier)
try:
rgb_img.popimage.ui.pushButtonSpectrum.pressed.disconnect()
- except:
+ except Exception:
pass
rgb_img.popimage.ui.pushButtonSpectrum.pressed.connect(self.spectrumColorImg)
@@ -975,7 +1019,7 @@ def loadDark(self):
if success:
# If Dark is integer dtype, convert to float
- if self.dark.data.dtype.kind == 'i':
+ if self.dark.data.dtype.kind in ['u', 'i']:
print('Converting Dark from int to float')
self.dark.data = 1.0*self.dark.data
@@ -1020,7 +1064,7 @@ def loadDarkDLM(self):
if success:
# If Dark is integer dtype, convert to float
- if self.dark.data.dtype.kind == 'i':
+ if self.dark.data.dtype.kind in ['u', 'i']:
print('Converting Dark from int to float')
self.dark.data = 1.0*self.dark.data
@@ -1064,7 +1108,7 @@ def loadNRB(self):
success = io_nist(pth, filename, datasets, nrb)
if success:
# If NRB is integer dtype, convert to float
- if nrb.data.dtype.kind == 'i':
+ if nrb.data.dtype.kind in ['u', 'i']:
print('Converting NRB from int to float')
nrb.data = 1.0*nrb.data
@@ -1199,7 +1243,7 @@ def showPreviewRois(self):
self.img_BW.mpl.ax.add_artist(lg)
try:
self.img_BW.mpl.fig.tight_layout(pad=1)
- except:
+ except Exception:
print('tight_layout failed (CrikitUI: 2')
self.img_BW.mpl.draw()
@@ -1611,7 +1655,7 @@ def _roiSubtract(self, locs):
if self.ui.actionUndo_Backup_Enabled.isChecked():
try:
_BCPre.backup_pickle(self.hsi, self.bcpre.id_list[-1])
- except:
+ except Exception:
print('Error in pickle backup (Undo functionality)')
else:
self.bcpre.backed_up()
@@ -1987,7 +2031,7 @@ def lineEditFreqChanged(self):
self.ui.freqSlider.setSliderPosition(pos)
self.changeSlider()
- except:
+ except Exception:
pass
def lineEditPixChanged(self):
@@ -2060,7 +2104,7 @@ def opChange(self):
try:
currentop = self.img_RGB_list[rgbnum].math.ui.comboBoxOperations.currentText()
self.img_RGB_list[rgbnum].data.operation = currentop
- except:
+ except Exception:
pass
def condOpChange(self):
@@ -2072,7 +2116,7 @@ def condOpChange(self):
try:
currentop = self.img_RGB_list[rgbnum].math.ui.comboBoxCondOps.currentText()
self.img_RGB_list[rgbnum].data.condoperation = currentop
- except:
+ except Exception:
pass
def condInEqualityChange(self):
@@ -2084,7 +2128,7 @@ def condInEqualityChange(self):
try:
currentop = self.img_RGB_list[rgbnum].math.ui.comboBoxCondInEquality.currentText()
self.img_RGB_list[rgbnum].data.inequality = currentop
- except:
+ except Exception:
pass
def spinBoxInEqualityChange(self):
@@ -2096,7 +2140,7 @@ def spinBoxInEqualityChange(self):
try:
self.img_RGB_list[rgbnum].data.inequalityval = \
self.img_RGB_list[rgbnum].math.ui.spinBoxInEquality.value()
- except:
+ except Exception:
pass
def doKK(self):
@@ -2122,7 +2166,7 @@ def doKK(self):
conj = True
else:
conj = False
- except:
+ except Exception:
conj = False
out = DialogKKOptions.dialogKKOptions(data=[self.hsi.f,
@@ -2166,7 +2210,7 @@ def doKK(self):
if self.ui.actionUndo_Backup_Enabled.isChecked():
try:
_BCPre.backup_pickle(self.hsi, self.bcpre.id_list[-1])
- except:
+ except Exception:
print('Error in pickle backup (Undo functionality)')
else:
self.bcpre.backed_up()
@@ -2267,7 +2311,7 @@ def deNoise(self):
if self.ui.actionUndo_Backup_Enabled.isChecked():
try:
_BCPre.backup_pickle(self.hsi, self.bcpre.id_list[-1])
- except:
+ except Exception:
print('Error in pickle backup (Undo functionality)')
else:
self.bcpre.backed_up()
@@ -2338,7 +2382,7 @@ def errorCorrectPhase(self):
if self.ui.actionUndo_Backup_Enabled.isChecked():
try:
_BCPre.backup_pickle(self.hsi, self.bcpre.id_list[-1])
- except:
+ except Exception:
print('Error in pickle backup (Undo functionality)')
else:
self.bcpre.backed_up()
@@ -2384,7 +2428,7 @@ def errorCorrectScale(self):
if self.ui.actionUndo_Backup_Enabled.isChecked():
try:
_BCPre.backup_pickle(self.hsi, self.bcpre.id_list[-1])
- except:
+ except Exception:
print('Error in pickle backup (Undo functionality)')
else:
self.bcpre.backed_up()
@@ -2459,7 +2503,7 @@ def errorCorrectAmp(self):
if self.ui.actionUndo_Backup_Enabled.isChecked():
try:
_BCPre.backup_pickle(self.hsi, self.bcpre.id_list[-1])
- except:
+ except Exception:
print('Error in pickle backup (Undo functionality)')
else:
self.bcpre.backed_up()
@@ -2479,7 +2523,7 @@ def doUndo(self):
for count in self.bcpre.cut_list:
try:
_os.remove(count + '.pickle')
- except:
+ except Exception:
print('Error in deleting old pickle files')
else:
del_flag += 1
@@ -2525,7 +2569,7 @@ def subDark(self):
if self.ui.actionUndo_Backup_Enabled.isChecked():
try:
_BCPre.backup_pickle(self.hsi, self.bcpre.id_list[-1])
- except:
+ except Exception:
print('Error in pickle backup (Undo functionality)')
else:
self.bcpre.backed_up()
@@ -2660,7 +2704,7 @@ def subResidual(self):
if self.ui.actionUndo_Backup_Enabled.isChecked():
try:
_BCPre.backup_pickle(self.hsi, self.bcpre.id_list[-1])
- except:
+ except Exception:
print('Error in pickle backup (Undo functionality)')
else:
self.bcpre.backed_up()
@@ -2701,7 +2745,7 @@ def anscombe(self):
if self.ui.actionUndo_Backup_Enabled.isChecked():
try:
_BCPre.backup_pickle(self.hsi, self.bcpre.id_list[-1])
- except:
+ except Exception:
print('Error in pickle backup (Undo functionality)')
else:
self.bcpre.backed_up()
@@ -2733,7 +2777,7 @@ def inverseAnscombe(self):
if self.ui.actionUndo_Backup_Enabled.isChecked():
try:
_BCPre.backup_pickle(self.hsi, self.bcpre.id_list[-1])
- except:
+ except Exception:
print('Error in pickle backup (Undo functionality)')
else:
self.bcpre.backed_up()
@@ -3016,7 +3060,7 @@ def setOpFreq1(self):
self.img_RGB_list[rgbnum].mpl.draw()
- except:
+ except Exception:
print('Error')
self.doComposite()
@@ -3031,7 +3075,7 @@ def setOpFreq2(self):
self.img_RGB_list[rgbnum].data.opfreq2 = currentfreq
self.img_RGB_list[rgbnum].math.ui.pushButtonOpFreq2.setText(str(round(currentfreq, 1)))
- except:
+ except Exception:
pass
def setOpFreq3(self):
@@ -3046,7 +3090,7 @@ def setOpFreq3(self):
self.img_RGB_list[rgbnum].data.opfreq3 = currentfreq
self.img_RGB_list[rgbnum].math.ui.pushButtonOpFreq3.setText(str(round(currentfreq, 1)))
- except:
+ except Exception:
pass
def setCondFreq1(self):
@@ -3061,7 +3105,7 @@ def setCondFreq1(self):
self.img_RGB_list[rgbnum].data.condfreq1 = currentfreq
self.img_RGB_list[rgbnum].math.ui.pushButtonCondFreq1.setText(str(round(currentfreq, 1)))
- except:
+ except Exception:
print('Error')
def setCondFreq2(self):
@@ -3076,7 +3120,7 @@ def setCondFreq2(self):
self.img_RGB_list[rgbnum].data.condfreq2 = currentfreq
self.img_RGB_list[rgbnum].math.ui.pushButtonCondFreq2.setText(str(round(currentfreq, 1)))
- except:
+ except Exception:
print('Error')
def setCondFreq3(self):
@@ -3091,7 +3135,7 @@ def setCondFreq3(self):
self.img_RGB_list[rgbnum].data.condfreq3 = currentfreq
self.img_RGB_list[rgbnum].math.ui.pushButtonCondFreq3.setText(str(round(currentfreq, 1)))
- except:
+ except Exception:
print('Error')
def spectrumColorImg(self):
@@ -3114,7 +3158,7 @@ def spectrumColorImg(self):
else:
Mask = Mask > 0
- Mask = Mask.astype(_np.integer)
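+ # np.integer is an abstract type, not a concrete dtype, so give astype a concrete one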
+ Mask = Mask.astype(_np.int32)
mask_hits = Mask.sum()
@@ -3256,7 +3300,7 @@ def changeSlider(self):
if self._mpl_v1:
self.img_BW.mpl.ax.hold(True)
- except:
+ except Exception:
print('Error in changeSlider: display img_BW')
try:
@@ -3298,11 +3342,11 @@ def changeSlider(self):
self.img_BW.mpl.ax.add_artist(lg)
try:
self.img_BW.mpl.fig.tight_layout(pad=1)
- except:
+ except Exception:
print('tight_layout failed (CrikitUI: 3')
- except:
+ except Exception:
print('Error in showing overlay legend')
- except:
+ except Exception:
print('Error in changeSlider: display overlays')
self.img_BW.mpl.draw()
@@ -3378,7 +3422,7 @@ def doComposite(self):
yunits=self.img_Composite2.data.yunits,
extent=self.img_BW.data.winextent)
self.img_Composite2.mpl.draw()
- except:
+ except Exception:
print('Error in doComposite')
def updateOverlays(self):
@@ -3663,7 +3707,7 @@ def specialDemosaicRGB(self):
img_shape = fid[dataset_name].shape
self._mosaic_mask = _np.zeros(img_shape)
fid[dataset_name].read_direct(self._mosaic_mask)
- n_imgs = self._mosaic_mask.max().astype(_np.int)
+ n_imgs = self._mosaic_mask.max().astype(int)
fid.close()
msg = _QMessageBox(self)
@@ -3710,7 +3754,7 @@ def crikit_launch(**kwargs):
Input kwargs (Optional)
------------------------
- hsi : crikit.data.Hsi
+ hsi : crikit.data.spectra.Hsi
Hsi instance
data : ndarray (3D)
diff --git a/crikit/__init__.py b/crikit/__init__.py
index 205dfca..52349c9 100644
--- a/crikit/__init__.py
+++ b/crikit/__init__.py
@@ -14,4 +14,4 @@
* Charles H. Camp Jr.
"""
-__version__ = '0.4.2'
\ No newline at end of file
+__version__ = '0.4.3'
diff --git a/crikit/cri/algorithms/kk.py b/crikit/cri/algorithms/kk.py
index e53296f..aaeeb4f 100644
--- a/crikit/cri/algorithms/kk.py
+++ b/crikit/cri/algorithms/kk.py
@@ -96,7 +96,7 @@ def kkrelation(bg, cri, conjugate=False, phase_offset=0.0, norm_to_nrb=True, pad
ratio[ratio <= 0] = bad_value
if (ratio.ndim == 3) & ((axis == -1) | (axis == 2)) & (not no_iter):
- ph = _np.zeros(ratio.shape, dtype = _np.complex)
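+ # The np.complex alias was deprecated in NumPy 1.20 (removed in 1.24); the builtin complex is equivalent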
+ ph = _np.zeros(ratio.shape, dtype=complex)
for num in range(ratio.shape[0]):
ph[num, ...] = _np.exp(1j * (hilbertfft(0.5 * _np.log(ratio[num, ...]), **hilb_kwargs) + phase_offset))
else:
diff --git a/crikit/cri/error_correction.py b/crikit/cri/error_correction.py
index 0266cf1..d881ff9 100644
--- a/crikit/cri/error_correction.py
+++ b/crikit/cri/error_correction.py
@@ -158,7 +158,7 @@ def _calc(self, data, ret_obj, **kwargs):
# else:
ret_obj[idx][..., self.rng] *= correction_factor
counter += 1
- except:
+ except Exception:
return False
else:
# print(self._inst_als.__dict__)
@@ -227,7 +227,7 @@ def _calc(self, data, ret_obj):
ret_obj *= correction_factor
else:
ret_obj[..., self.rng] *= correction_factor
- except:
+ except Exception:
return False
else:
return True
diff --git a/crikit/cri/kk.py b/crikit/cri/kk.py
index 71f8920..bcea1db 100644
--- a/crikit/cri/kk.py
+++ b/crikit/cri/kk.py
@@ -196,12 +196,12 @@ def calculate(self, cars, nrb):
Nonresonant background (NRB)
"""
- kkd = _np.zeros(cars.shape, dtype=_np.complex)
+ kkd = _np.zeros(cars.shape, dtype=complex)
self._calc(cars, nrb, ret_obj=kkd)
return kkd
def _transform(self, cars, nrb):
- if issubclass(cars.dtype.type, _np.complex):
+ if issubclass(cars.dtype.type, complex):
success = self._calc(cars, nrb, ret_obj=cars)
return success
else:
@@ -210,9 +210,9 @@ def _transform(self, cars, nrb):
if __name__ == '__main__': # pragma: no cover
- from crikit.data.spectrum import Spectrum as _Spectrum
+ from crikit.data.spectra import Spectrum as _Spectrum
from crikit.data.spectra import Spectra as _Spectra
- from crikit.data.hsi import Hsi as _Hsi
+ from crikit.data.spectra import Hsi as _Hsi
hsi = _Hsi()
nrb = _Spectra()
diff --git a/crikit/cri/merge_nrbs.py b/crikit/cri/merge_nrbs.py
index 28ef486..ad2cb92 100644
--- a/crikit/cri/merge_nrbs.py
+++ b/crikit/cri/merge_nrbs.py
@@ -74,7 +74,7 @@ def _calc(self, data, ret_obj):
else:
raise ValueError('self.scale_left must be True, False, or None')
- except:
+ except Exception:
return False
else:
return True
diff --git a/crikit/cri/tests/test_kk.py b/crikit/cri/tests/test_kk.py
index 1d26385..a200046 100644
--- a/crikit/cri/tests/test_kk.py
+++ b/crikit/cri/tests/test_kk.py
@@ -47,7 +47,7 @@ def test_kk_rng():
def test_kk_transform():
x = np.linspace(-100, 100, 1000)
y = 2/(2**2 + x**2)
- y_complex = y.astype(np.complex)
+ y_complex = y.astype(complex)
kk = KramersKronig(norm_to_nrb=False)
success = kk._transform(y_complex, 0*y_complex + 1)
@@ -58,7 +58,7 @@ def test_kk_transform():
def test_kk_transform_fail():
x = np.linspace(-100, 100, 1000)
y = 2/(2**2 + x**2)
- y_complex = y.astype(np.complex)
+ y_complex = y.astype(complex)
kk = KramersKronig(norm_to_nrb=False)
diff --git a/crikit/data/factorized.py b/crikit/data/factorized.py
index 00a99e1..4923488 100644
--- a/crikit/data/factorized.py
+++ b/crikit/data/factorized.py
@@ -9,10 +9,10 @@
import numpy as _np
from crikit.data.frequency import Frequency as _Frequency
-from crikit.data.spectrum import Spectrum as _Spectrum
+from crikit.data.spectra import Spectrum as _Spectrum
from crikit.data.spectra import Spectra as _Spectra
from crikit.data.replicate import Replicate as _Replicate
-from crikit.data.hsi import Hsi as _Hsi
+from crikit.data.spectra import Hsi as _Hsi
class _Factorized:
"""
diff --git a/crikit/data/hsi.py b/crikit/data/hsi.py
deleted file mode 100644
index 50b5863..0000000
--- a/crikit/data/hsi.py
+++ /dev/null
@@ -1,373 +0,0 @@
-"""
-Hyperspectral imagery (hsi) class
-
-Created on Tue Apr 12 13:06:30 2016
-
-@author: chc
-"""
-
-import numpy as _np
-import copy as _copy
-
-from crikit.data.frequency import Frequency as _Frequency
-from crikit.data.spectrum import Spectrum as _Spectrum
-from crikit.data.replicate import Replicate as _Replicate
-
-__all__ = ['Hsi']
-
-class Hsi(_Spectrum):
- """
- Hyperspectral imagery class
-
- Parameters
- ----------
- data : 3D ndarray [y_pix, x_pix, f_pix]
- HSI image
-
- mask : 3D ndarray (int) [y_pix, x_pix, f_pix]
- 0,1 mask with 1 is a usable pixel and 0 is not
-
- freq : crikit.data.frequency.Frequency instance
- Frequency [wavelength, wavenumber] object (i.e., the independent \
- variable)
-
- label : str
- Image label (i.e., a string describing what the image is)
-
- units : str
- Units of image (e.g., intensity)
-
- x_rep : crikit.data.replicate.Replicate instance, Not implemented yet
- x-axis spatial object
-
- y_rep : crikit.data.replicate.Replicate instance, Not implemented yet
- x-axis spatial object
-
- x : 1D ndarray
- x-axis spatial vector
-
- y : 1D ndarray
- y-axis spatial vector
-
- meta : dict
- Meta-data dictionary
-
- Attributes
- ----------
- shape : tuple, read-only
- Shape of data
-
- size : int, read-only
- Size of data (i.e., total number of entries)
-
- Methods
- -------
- mean : 1D ndarray
- Mean spectrum. If extent [a,b] is provided, calculate mean over that\
- inclusive region.
-
- std : 1D ndarray
- Standard deviation of spectrum. If extent [a,b] is provided, calculate standard\
- deviation over that inclusive region.
-
- subtract : 3D ndarray or None
- Subtract spectrum or object
-
- Notes
- -----
- * freq object contains some useful parameters such as op_range_* and \
- plot_range_*, which define spectral regions-of-interest. (It's debatable \
- as to whether those parameters should be in Frequency or Spectrum classes)
-
- """
-
- # Configurations
- config = {}
- config['nd_axis'] = -1
-
- def __init__(self, data=None, freq=None, x=None, y=None, x_rep=None,
- y_rep=None, label=None, units=None, meta=None):
-
- super().__init__(data, freq, label, units, meta)
- self._x_rep = _Replicate()
- self._y_rep = _Replicate()
- self._mask = None
-
- self._x_rep = _Replicate(data=x)
- self._y_rep = _Replicate(data=y)
-
- if x is None and x_rep is not None:
- self.x_rep = _copy.deepcopy(x_rep)
- if y is None and y_rep is not None:
- self.y_rep = _copy.deepcopy(y_rep)
-
- @staticmethod
- def _mean_axes(*args, **kwargs):
- """ Inhereted from Spectrum """
- raise NotImplementedError('Only applicable to Spectrum class.')
-
- @staticmethod
- def _reshape_axes(shape, spectral_axis):
- """
- Parameters
- ----------
- shape : tuple
- Input data shape
-
- spectral_axis : int
- Spectral axis
-
- Returns
- -------
- Reshape vector
- """
- ndim = len(shape)
-
- if ndim == 1:
- out = [1, 1, 1]
- out[spectral_axis] = shape[0]
- elif ndim == 2: # ! Super-wonky
- out = [1, shape[0], shape[1]]
- elif ndim == 3:
- out = shape
- elif ndim > 3:
- out = [-1, shape[-2], shape[-1]]
- else:
- raise ValueError('Shape error')
-
- return tuple(out)
-
- @property
- def mask(self):
- return self._mask
-
- @property
- def x_rep(self):
- return self._x_rep
-
- @x_rep.setter
- def x_rep(self, value):
- if isinstance(value, _Replicate):
- self._x_rep = value
- elif isinstance(value, _np.ndarray):
- self._x_rep.data = value
-
- @property
- def y_rep(self):
- return self._y_rep
-
- @property
- def x(self):
- return self._x_rep.data
-
- @x.setter
- def x(self, value):
- self._x_rep.data = value
-
- @property
- def y(self):
- return self._y_rep.data
-
- @y.setter
- def y(self, value):
- self._y_rep.data = value
-
-
- @y_rep.setter
- def y_rep(self, value):
- if isinstance(value, _Replicate):
- self._y_rep = value
- elif isinstance(value, _np.ndarray):
- self._y_rep.data = value
-
- @property
- def data(self):
- return self._data
-
- @data.setter
- def data(self, value):
- if not isinstance(value, _np.ndarray):
- raise TypeError('data must be of type ndarray, not {}'.format(type(value)))
-
- ax_rs = self._reshape_axes(value.shape, self.config['nd_axis'])
-
- # self._mask = _np.ones(tuple([n for n in range(3) if n != self.config['nd_axis']]),
- # dtype=_np.int)
-
- if self.freq is None or self.freq.op_list_pix is None:
- self._data = value.reshape(ax_rs)
- else:
- if value.shape[self.config['nd_axis']] == self.freq.op_range_pix.size:
- temp = _np.zeros((self._data.shape),dtype=value.dtype)
- temp[:,:,self.freq.op_range_pix] = value.reshape(ax_rs)
- self._data = 1*temp
- del temp
- elif value.shape[self.config['nd_axis']] == self._data.shape[self.config['nd_axis']]:
- temp = _np.zeros((self._data.shape),dtype=value.dtype)
- temp[..., self.freq.op_range_pix] = value.reshape(ax_rs)[..., self.freq.op_range_pix]
- self._data = 1*temp
- del temp
-
- def check(self):
- """
- Check x, y, and freq to make sure the dimensions agree with data
- """
- if self._data is None:
- print('Hsi check: data is None, not checking')
- else:
- if self._x_rep._data is None:
- self._x_rep._data = _np.arange(self.shape[1])
- self._x_rep._label = 'X'
- self._x_rep._units = 'pix'
- print('Hsi check: setting x to pixels')
- elif self._x_rep._data.size != self._data.shape[1]:
- self._x_rep = _Replicate()
- self._x_rep._data = _np.arange(self.shape[1])
- self._x_rep._label = 'X'
- self._x_rep._units = 'pix'
- print('Hsi check: setting x to pixels')
-
- if self._y_rep._data is None:
- self._y_rep._data = _np.arange(self.shape[0])
- self._y_rep._label = 'Y'
- self._y_rep._units = 'pix'
- print('Hsi check: setting y to pixels')
- elif self._y_rep._data.size != self._data.shape[0]:
- self._y_rep = _Replicate()
- self._y_rep._data = _np.arange(self.shape[0])
- self._y_rep._label = 'Y'
- self._y_rep._units = 'pix'
- print('Hsi check: setting y to pixels')
-
- if self.freq._data is None:
- self.freq._data = _np.arange(self.shape[-1])
- self.freq._label = 'Frequency'
- self.freq._units = 'pix'
- print('Hsi check: setting freq to pixels')
- elif self.freq._data.size != self._data.shape[-1]:
- self.freq = _Frequency()
- self.freq._data = _np.arange(self.shape[-1])
- print('Hsi check: setting freq to pixels')
- return None
-
- def subtract(self, spectra, overwrite=True):
- """
- Subtract spectrum from data
- """
- # Order IS important
- if isinstance(spectra, Hsi):
- if overwrite:
- self.data -= spectra.data
- return None
- else:
- return self.data - spectra.data
- elif isinstance(spectra, _Spectrum):
- if overwrite:
- self.data -= spectra.data[None,None,:]
- return None
- else:
- return self.data - spectra.data
- elif isinstance(spectra, _np.ndarray):
- if spectra.shape == self.data.shape:
- if overwrite:
- self.data -= spectra
- return None
- else:
- return self.data - spectra
- else:
- if overwrite:
- self.data -= spectra[None,None,:]
- return None
- else:
- return self.data - spectra[None,None,:]
-
- def get_rand_spectra(self, num, pt_sz=1, quads=False, full=False):
-
- mlen, nlen, freqlen = self.data.shape
-
- if quads:
- num_spectra = num + 5
- else:
- num_spectra = num
-
- if _np.iscomplexobj(self.data):
- dtype = _np.complex
- else:
- dtype = _np.float
-
- temp = _np.zeros((num_spectra, self.data.shape[-1]), dtype=dtype)
-
- quad_mid_row = int(_np.round(mlen/2))
- quad_mid_col = int(_np.round(nlen/2))
- center_row = (int(_np.round(mlen/3)), int(_np.round(2*mlen/3)))
- center_col = (int(_np.round(nlen/3)), int(_np.round(2*nlen/3)))
-
- start_count = 0
- if quads:
- # QUADS
- # Bottom-left
- temp[0, :] = _np.mean(self.data[0:quad_mid_row, 0:quad_mid_col, :], axis=(0, 1))
-
- # Upper-left
- temp[1, :] = _np.mean(self.data[0:quad_mid_row, quad_mid_col+1::, :], axis=(0, 1))
-
- # Upper-right
- temp[2, :] = _np.mean(self.data[quad_mid_row+1::, quad_mid_col+1::, :], axis=(0, 1))
-
- # Bottom-right
- temp[3, :] = _np.mean(self.data[quad_mid_row+1::, 0:quad_mid_col, :], axis=(0, 1))
-
- # Center
- temp[4, :] = _np.mean(self.data[center_row[0]:center_row[1], center_col[0]:center_col[1], :], axis=(0, 1))
-
- start_count += 5
- else:
- pass
-
- rand_rows = ((mlen-pt_sz-1)*_np.random.rand(num_spectra)).astype(int)
- rand_cols = ((nlen-pt_sz-1)*_np.random.rand(num_spectra)).astype(int)
-
- for count in _np.arange(start_count,num_spectra):
- if pt_sz == 1:
- temp[count, :] = _np.squeeze(self.data[rand_rows[count-start_count],
- rand_cols[count-start_count]])
- else:
-
- rows = [rand_rows[count-start_count]-(pt_sz-1),
- rand_rows[count-start_count]+pt_sz]
- cols = [rand_cols[count-start_count]-(pt_sz-1),
- rand_cols[count-start_count]+pt_sz]
-
- if rows[0] < 0:
- rows[0] = 0
- if rows[1] >= mlen:
- rows[1] = mlen-1
- if cols[0] < 0:
- cols[0] = 0
- if cols[1] >= nlen:
- cols[1] = nlen-1
-
- if cols[0] == cols[1] or rows[0] == rows[1]:
- pass
- else:
- temp[count,:] = _np.squeeze(_np.mean(self.data[rows[0]:rows[1], cols[0]:cols[1], :], axis=(0, 1)))
-
- if (not full) and (self.freq.data is not None):
- temp = temp[..., self.freq.op_range_pix]
-
- return temp
-
- def __sub__(self, spectrum):
- return self.subtract(spectrum, overwrite=False)
-
-if __name__ == '__main__': # pragma: no cover
-
- x = _np.linspace(0,100,10)
- y = _np.linspace(0,100,10)
- freq = _np.arange(20)
- data = _np.random.rand(10,10,20)
-
-
- hs = Hsi(data=data, freq=freq, x=x, y=y)
- print(hs.shape)
- print(isinstance(hs, _Spectrum))
diff --git a/crikit/data/mosaic.py b/crikit/data/mosaic.py
index 747b8da..ba8107c 100644
--- a/crikit/data/mosaic.py
+++ b/crikit/data/mosaic.py
@@ -121,11 +121,11 @@ def dtype(self):
dt = [q.dtype.kind for q in self._data]
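+ # dtype.kind codes: 'c' = complex, 'f' = float, 'i' = signed integer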
if dt.count('c') > 0:
- return _np.complex
+ return complex
elif dt.count('f') > 0:
- return _np.float
+ return float
elif dt.count('i') > 0:
- return _np.int
+ return int
def mosaic_shape(self, shape=None, idx=None):
@@ -248,7 +248,7 @@ def _mosaic(self, shape=None, idx=None, out=None, mask=False):
data = sub_img_counter * \
_np.ones(self._data[sub_img_counter][slice_sub_r,
slice_sub_c].shape[0:2],
- dtype=_np.int)
+ dtype=int)
elif idx is None:
data = self._data[sub_img_counter][slice_sub_r, slice_sub_c]
else:
@@ -348,7 +348,7 @@ def mosaicfull(self, shape=None, out=None):
assert mos.shape == tuple(n*[new_obj.shape])
assert mos.size == n
assert mos.issamedim
- assert mos.dtype == _np.float
+ assert mos.dtype == float
# AFFECTED BY START* END*
assert mos.unitshape == (m_obj_crop, n_obj_crop, p_obj_crop)
diff --git a/crikit/data/spectra.py b/crikit/data/spectra.py
index 3d15340..41a3ebd 100644
--- a/crikit/data/spectra.py
+++ b/crikit/data/spectra.py
@@ -8,12 +8,733 @@
import copy as _copy
from crikit.data.frequency import Frequency as _Frequency
-from crikit.data.spectrum import Spectrum as _Spectrum
from crikit.data.replicate import Replicate as _Replicate
-__all__ = ['Spectra']
+__all__ = ['Spectrum', 'Spectra', 'Hsi']
+
+
+class Spectrum:
+ """
+ Spectrum class
+
+ Attributes
+ ----------
+ data : 1D ndarray [f_pix]
+ Spectrum
+
+ freq : crikit.data.frequency.Frequency instance
+ Frequency [wavelength, wavenumber] object (i.e., the independent \
+ variable)
+
+ label : str
+ Spectrum label (i.e., a string describing what the spectrum is)
+
+ units : str
+ Units of spectrum
+
+ meta : dict
+ Meta-data dictionary
+
+ f_pix : int, read-only
+ Size of data. Note: this matches the size of data and does NOT check \
+ the size of freq.freq_vec.
+
+ ndim : int, read-only
+ Number of data dimensions
+
+ shape : tuple, read-only
+ Shape of data
+
+ size : int, read-only
+ Size of data (i.e., total number of entries)
+
+ Methods
+ -------
+ mean : int
+ Mean value. If extent [a,b] is provided, calculate mean over that\
+ inclusive region.
+
+ std : int
+ Standard deviation. If extent [a,b] is provided, calculate standard\
+ deviation over that inclusive region.
+
+ subtract : 1D ndarray or None
+ Subtract spectrum or object
+
+ Notes
+ -----
+ * freq object contains some useful parameters such as op_range* and \
+ plot_range*, which define spectral regions-of-interest. (It's debatable \
+ as to whether those parameters should be in Frequency or Spectrum classes)
+
+ """
+
+ # Configurations
+ config = {}
+ config['nd_axis'] = -1
+ config['nd_fcn'] = _np.mean
+
+ def __init__(self, data=None, freq=None, label=None, units=None, meta=None):
+
+ self._data = None
+ self._freq = _Frequency()
+ self._label = None
+ self._units = None
+ self._meta = {}
+
+ if data is not None:
+ self.data = _copy.deepcopy(data)
+ if freq is not None:
+ self.freq = _copy.deepcopy(freq)
+ else:
+ self.freq = _Frequency()
+ if label is not None:
+ self.label = _copy.deepcopy(label)
+ if units is not None:
+ self.units = _copy.deepcopy(units)
+ if meta is not None:
+ self._meta = _copy.deepcopy(meta)
+
+ @staticmethod
+ def _mean_axes(ndim, axis):
+ """
+ Parameters
+ ----------
+ ndim : int
+ Number of dimensions of input data (target is 1D spectrum)
+
+ axis : int
+ For ND data, axis is remaining axis
+
+ Returns
+ -------
+ Vector that describes what axes to operate (using a mean or similar method) with
+ axis parameter
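+
+ Example: ndim=3 with axis=-1 returns (0, 1)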
+ """
+ if axis < 0:
+ axis2 = ndim + axis
+ else:
+ axis2 = axis
+ return tuple([n for n in range(ndim) if n != axis2])
+
+ @property
+ def data(self):
+ return self._data
+
+ @data.setter
+ def data(self, value):
+ if not isinstance(value, _np.ndarray):
+ raise TypeError('data must be of type ndarray')
+
+ # If sub-range of operation is defined. Only perform action over op_range_pix
+ if self.freq is not None and self.freq.op_list_pix is not None:
+ if value.shape[self.config['nd_axis']] == self.freq.op_range_pix.size:
+ temp = _np.zeros((self.freq.size), dtype=value.dtype)
+ if value.ndim == 1:
+ temp[self.freq.op_range_pix] = value
+ else:
+ print('Input data is {}-dim. Performing {}'.format(value.ndim, self.config['nd_fcn'].__name__))
+ nd_ax = self._mean_axes(value.ndim, axis=self.config['nd_axis'])
+ temp[self.freq.op_range_pix] = self.config['nd_fcn'](value, axis=nd_ax)
+ elif value.shape[self.config['nd_axis']] == self.freq.size:
+ temp = _np.zeros((self.freq.size), dtype=value.dtype)
+ if value.ndim == 1:
+ temp[self.freq.op_range_pix] = value[self.freq.op_range_pix]
+ else:
+ print('Input data is {}-dim. Performing {}'.format(value.ndim, self.config['nd_fcn'].__name__))
+ nd_ax = self._mean_axes(value.ndim, axis=self.config['nd_axis'])
+ temp[self.freq.op_range_pix] = self.config['nd_fcn'](value, axis=nd_ax)[self.freq.op_range_pix]
+
+ else:
+ raise TypeError('data is of an unrecognized shape: {}'.format(value.shape))
+ self._data = 1 * temp
+ del temp
+ else:
+ if value.ndim == 1:
+ self._data = value
+ else:
+ print('Input data is {}-dim. Performing {}'.format(value.ndim, self.config['nd_fcn'].__name__))
+ nd_ax = self._mean_axes(value.ndim, axis=self.config['nd_axis'])
+ self._data = self.config['nd_fcn'](value, axis=nd_ax)
+
+ @property
+ def freq(self):
+ return self._freq
+
+ @freq.setter
+ def freq(self, value):
+ if isinstance(value, _Frequency):
+ self._freq = value
+ elif isinstance(value, _np.ndarray):
+ self.freq = _Frequency(data=value)
+ else:
+ raise TypeError('freq must be of type crikit.data.Frequency')
+
+ @property
+ def f(self):
+ """
+ Convenience attribute: return frequency vector within operating (op) \
+ range
+ """
+ return self.freq.op_range_freq
+
+ @property
+ def f_full(self):
+ """
+ Convenience attribute: return full frequency vector
+ """
+ return self.freq.data
+
+ @property
+ def units(self):
+ return self._units
+
+ @units.setter
+ def units(self, value):
+ if isinstance(value, str):
+ self._units = value
+ else:
+ raise TypeError('units must be of type str')
+
+ @property
+ def label(self):
+ return self._label
+
+ @label.setter
+ def label(self, value):
+ if isinstance(value, str):
+ self._label = value
+ else:
+ raise TypeError('label must be of type str')
+
+ @property
+ def meta(self):
+ temp_dict = self._meta.copy()
+
+ if self.freq.calib is not None:
+ try:
+ calib_dict = {}
+ calib_prefix = 'Calib.'
+
+ calib_dict[calib_prefix + 'a_vec'] = self.freq.calib['a_vec']
+ calib_dict[calib_prefix + 'ctr_wl'] = self.freq.calib['ctr_wl']
+ calib_dict[calib_prefix + 'ctr_wl0'] = self.freq.calib['ctr_wl0']
+ calib_dict[calib_prefix + 'n_pix'] = self.freq.calib['n_pix']
+ calib_dict[calib_prefix + 'probe'] = self.freq.calib['probe']
+
+ try: # Doesn't really matter if we have the units
+ calib_dict[calib_prefix + 'units'] = self.freq.calib['units']
+ except Exception:
+ pass
+
+ except Exception:
+ print('Could not get calibration information')
+ else:
+ temp_dict.update(calib_dict)
+
+ if self.freq.calib_orig is not None:
+ try:
+ calib_dict = {}
+ calib_prefix = 'CalibOrig.'
+
+ calib_dict[calib_prefix + 'a_vec'] = self.freq.calib_orig['a_vec']
+ calib_dict[calib_prefix + 'ctr_wl'] = self.freq.calib_orig['ctr_wl']
+ calib_dict[calib_prefix + 'ctr_wl0'] = self.freq.calib_orig['ctr_wl0']
+ calib_dict[calib_prefix + 'n_pix'] = self.freq.calib_orig['n_pix']
+ calib_dict[calib_prefix + 'probe'] = self.freq.calib_orig['probe']
+
+ try: # Doesn't really matter if we have the units
+ calib_dict[calib_prefix + 'units'] = self.freq.calib_orig['units']
+ except Exception:
+ pass
+
+ except Exception:
+ print('Could not get calibration information')
+ else:
+ temp_dict.update(calib_dict)
+
+ # return self._meta
+ return temp_dict
+
+ @meta.setter
+ def meta(self, value):
+ if isinstance(value, dict):
+ self._meta = value
+ else:
+ raise TypeError('meta must be of type dict')
+
+ @property
+ def f_pix(self):
+ if self._data is not None:
+ return self._data.shape[-1]
+
+ @property
+ def ndim(self):
+ if self._data is None:
+ return None
+ elif isinstance(self._data, _np.ndarray):
+ return self._data.ndim
+ else:
+ return len(self._data.shape)
+
+ @property
+ def shape(self):
+ if self._data is None:
+ return None
+ else:
+ return self._data.shape
+
+ @property
+ def size(self):
+ if self._data is None:
+ return None
+ else:
+ return self._data.size
+
+ def mean(self, extent=None, over_space=True):
+ """
+ Return mean spectrum (or mean over extent [list with 2 elements]). If\
+ over_space is False, returns reps-number of mean spectra
+ """
+ if self._data is None:
+ return None
+
+ ndim = len(self._data.shape)
+
+ if ndim == 1:
+ if isinstance(self._data, _np.ndarray):
+ return self._data.mean()
+ else:
+ return _np.mean(self._data)
+
+ if ndim > 1:
+ if over_space is True:
+ axes = tuple(_np.arange(ndim - 1))
+ else:
+ axes = -1
+
+ if isinstance(self._data, _np.ndarray):
+ if extent is None:
+ return self._data.mean(axis=axes)
+ else:
+ return self._data[:, extent[0]:extent[1] + 1].mean(axis=axes)
+ else:
+ if extent is None:
+ return _np.mean(self._data, axis=axes)
+ else:
+ return _np.mean(self._data[:, extent[0]:extent[1] + 1],
+ axis=axes)
+
+ def std(self, extent=None, over_space=True):
+ """
+ Return standard deviation (std) spectrum (or std over extent
+ [list with 2 elements]). If over_space is False, reps (or reps x reps)
+ number of std's.
+ """
+ if self._data is None:
+ return None
+
+ ndim = len(self._data.shape)
+
+ if ndim == 1:
+ if isinstance(self._data, _np.ndarray):
+ return self._data.std()
+ else:
+ return _np.std(self._data)
+
+ if ndim > 1:
+ if over_space is True:
+ axes = tuple(_np.arange(ndim - 1))
+ else:
+ axes = -1
+
+ if isinstance(self._data, _np.ndarray):
+ if extent is None:
+ return self._data.std(axis=axes)
+ else:
+ return self._data[:, extent[0]:extent[1] + 1].std(axis=axes)
+ else:
+ if extent is None:
+ return _np.std(self._data, axis=axes)
+ else:
+ return _np.std(self._data[:, extent[0]:extent[1] + 1],
+ axis=axes)
+
+ def subtract(self, spectrum, overwrite=True):
+ """
+ Subtract spectrum from data
+ """
+ if isinstance(spectrum, Spectrum):
+ if overwrite:
+ self.data -= spectrum.data
+ return None
+ else:
+ return self.data - spectrum.data
+ elif isinstance(spectrum, _np.ndarray):
+ if overwrite:
+ self.data -= spectrum
+ return None
+ else:
+ return self.data - spectrum
+
+ def __sub__(self, spectrum):
+ return self.subtract(spectrum, overwrite=False)
+
+
+class Hsi(Spectrum):
+ """
+ Hyperspectral imagery class
+
+ Parameters
+ ----------
+ data : 3D ndarray [y_pix, x_pix, f_pix]
+ HSI image
+
+ mask : 3D ndarray (int) [y_pix, x_pix, f_pix]
+ 0,1 mask with 1 is a usable pixel and 0 is not
+
+ freq : crikit.data.frequency.Frequency instance
+ Frequency [wavelength, wavenumber] object (i.e., the independent \
+ variable)
+
+ label : str
+ Image label (i.e., a string describing what the image is)
+
+ units : str
+ Units of image (e.g., intensity)
+
+ x_rep : crikit.data.replicate.Replicate instance, Not implemented yet
+ x-axis spatial object
+
+ y_rep : crikit.data.replicate.Replicate instance, Not implemented yet
+ y-axis spatial object
+
+ x : 1D ndarray
+ x-axis spatial vector
+
+ y : 1D ndarray
+ y-axis spatial vector
+
+ meta : dict
+ Meta-data dictionary
+
+ Attributes
+ ----------
+ shape : tuple, read-only
+ Shape of data
+
+ size : int, read-only
+ Size of data (i.e., total number of entries)
+
+ extent : list, read-only
+ Extent of image [xmin, xmax, ymin, ymax]
+
+ Methods
+ -------
+ mean : 1D ndarray
+ Mean spectrum. If extent [a,b] is provided, calculate mean over that\
+ inclusive region.
+
+ std : 1D ndarray
+ Standard deviation of spectrum. If extent [a,b] is provided, calculate standard\
+ deviation over that inclusive region.
+
+ subtract : 3D ndarray or None
+ Subtract spectrum or object
+
+ Notes
+ -----
+ * freq object contains some useful parameters such as op_range_* and \
+ plot_range_*, which define spectral regions-of-interest. (It's debatable \
+ as to whether those parameters should be in Frequency or Spectrum classes)
-class Spectra(_Spectrum):
+ """
+
+ # Configurations
+ config = {}
+ config['nd_axis'] = -1
+
+ def __init__(self, data=None, freq=None, x=None, y=None, x_rep=None,
+ y_rep=None, label=None, units=None, meta=None):
+
+ super().__init__(data, freq, label, units, meta)
+ self._x_rep = _Replicate()
+ self._y_rep = _Replicate()
+ self._mask = None
+
+ self._x_rep = _Replicate(data=x)
+ self._y_rep = _Replicate(data=y)
+
+ if x is None and x_rep is not None:
+ self.x_rep = _copy.deepcopy(x_rep)
+ if y is None and y_rep is not None:
+ self.y_rep = _copy.deepcopy(y_rep)
+
+ @staticmethod
+ def _mean_axes(*args, **kwargs):
+ """ Inhereted from Spectrum """
+ raise NotImplementedError('Only applicable to Spectrum class.')
+
+ @staticmethod
+ def _reshape_axes(shape, spectral_axis):
+ """
+ Parameters
+ ----------
+ shape : tuple
+ Input data shape
+
+ spectral_axis : int
+ Spectral axis
+
+ Returns
+ -------
+ Reshape vector
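+
+ Example: shape=(100,) with spectral_axis=-1 returns (1, 1, 100)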
+ """
+ ndim = len(shape)
+
+ if ndim == 1:
+ out = [1, 1, 1]
+ out[spectral_axis] = shape[0]
+ elif ndim == 2: # ! Super-wonky
+ out = [1, shape[0], shape[1]]
+ elif ndim == 3:
+ out = shape
+ elif ndim > 3:
+ out = [-1, shape[-2], shape[-1]]
+ else:
+ raise ValueError('Shape error')
+
+ return tuple(out)
+
+ @property
+ def extent(self):
+ if (self.x is not None) & (self.y is not None):
+ return [self.x.min(), self.x.max(), self.y.min(), self.y.max()]
+
+ @property
+ def mask(self):
+ return self._mask
+
+ @property
+ def x_rep(self):
+ return self._x_rep
+
+ @x_rep.setter
+ def x_rep(self, value):
+ if isinstance(value, _Replicate):
+ self._x_rep = value
+ elif isinstance(value, _np.ndarray):
+ self._x_rep.data = value
+
+ @property
+ def y_rep(self):
+ return self._y_rep
+
+ @property
+ def x(self):
+ return self._x_rep.data
+
+ @x.setter
+ def x(self, value):
+ self._x_rep.data = value
+
+ @property
+ def y(self):
+ return self._y_rep.data
+
+ @y.setter
+ def y(self, value):
+ self._y_rep.data = value
+
+ @y_rep.setter
+ def y_rep(self, value):
+ if isinstance(value, _Replicate):
+ self._y_rep = value
+ elif isinstance(value, _np.ndarray):
+ self._y_rep.data = value
+
+ @property
+ def data(self):
+ return self._data
+
+ @data.setter
+ def data(self, value):
+ if not isinstance(value, _np.ndarray):
+ raise TypeError('data must be of type ndarray, not {}'.format(type(value)))
+
+ ax_rs = self._reshape_axes(value.shape, self.config['nd_axis'])
+
+ # self._mask = _np.ones(tuple([n for n in range(3) if n != self.config['nd_axis']]),
+ # dtype=int)
+
+ if self.freq is None or self.freq.op_list_pix is None:
+ self._data = value.reshape(ax_rs)
+ else:
+ if value.shape[self.config['nd_axis']] == self.freq.op_range_pix.size:
+ temp = _np.zeros((self._data.shape), dtype=value.dtype)
+ temp[:, :, self.freq.op_range_pix] = value.reshape(ax_rs)
+ self._data = 1 * temp
+ del temp
+ elif value.shape[self.config['nd_axis']] == self._data.shape[self.config['nd_axis']]:
+ temp = _np.zeros((self._data.shape), dtype=value.dtype)
+ temp[..., self.freq.op_range_pix] = value.reshape(ax_rs)[..., self.freq.op_range_pix]
+ self._data = 1 * temp
+ del temp
+
+ def check(self):
+ """
+ Check x, y, and freq to make sure the dimensions agree with data
+ """
+ if self._data is None:
+ print('Hsi check: data is None, not checking')
+ else:
+ if self._x_rep._data is None:
+ self._x_rep._data = _np.arange(self.shape[1])
+ self._x_rep._label = 'X'
+ self._x_rep._units = 'pix'
+ print('Hsi check: setting x to pixels')
+ elif self._x_rep._data.size != self._data.shape[1]:
+ self._x_rep = _Replicate()
+ self._x_rep._data = _np.arange(self.shape[1])
+ self._x_rep._label = 'X'
+ self._x_rep._units = 'pix'
+ print('Hsi check: setting x to pixels')
+
+ if self._y_rep._data is None:
+ self._y_rep._data = _np.arange(self.shape[0])
+ self._y_rep._label = 'Y'
+ self._y_rep._units = 'pix'
+ print('Hsi check: setting y to pixels')
+ elif self._y_rep._data.size != self._data.shape[0]:
+ self._y_rep = _Replicate()
+ self._y_rep._data = _np.arange(self.shape[0])
+ self._y_rep._label = 'Y'
+ self._y_rep._units = 'pix'
+ print('Hsi check: setting y to pixels')
+
+ if self.freq._data is None:
+ self.freq._data = _np.arange(self.shape[-1])
+ self.freq._label = 'Frequency'
+ self.freq._units = 'pix'
+ print('Hsi check: setting freq to pixels')
+ elif self.freq._data.size != self._data.shape[-1]:
+ self.freq = _Frequency()
+ self.freq._data = _np.arange(self.shape[-1])
+ print('Hsi check: setting freq to pixels')
+ return None
+
+ def subtract(self, spectra, overwrite=True):
+ """
+ Subtract spectrum from data
+ """
+ # Order IS important
+ if isinstance(spectra, Hsi):
+ if overwrite:
+ self.data -= spectra.data
+ return None
+ else:
+ return self.data - spectra.data
+ elif isinstance(spectra, Spectrum):
+ if overwrite:
+ self.data -= spectra.data[None, None, :]
+ return None
+ else:
+ return self.data - spectra.data
+ elif isinstance(spectra, _np.ndarray):
+ if spectra.shape == self.data.shape:
+ if overwrite:
+ self.data -= spectra
+ return None
+ else:
+ return self.data - spectra
+ else:
+ if overwrite:
+ self.data -= spectra[None, None, :]
+ return None
+ else:
+ return self.data - spectra[None, None, :]
+
+ def get_rand_spectra(self, num, pt_sz=1, quads=False, full=False):
+
+ mlen, nlen, freqlen = self.data.shape
+
+ if quads:
+ num_spectra = num + 5
+ else:
+ num_spectra = num
+
+ if _np.iscomplexobj(self.data):
+ dtype = complex
+ else:
+ dtype = float
+
+ temp = _np.zeros((num_spectra, self.data.shape[-1]), dtype=dtype)
+
+ quad_mid_row = int(_np.round(mlen / 2))
+ quad_mid_col = int(_np.round(nlen / 2))
+ center_row = (int(_np.round(mlen / 3)), int(_np.round(2 * mlen / 3)))
+ center_col = (int(_np.round(nlen / 3)), int(_np.round(2 * nlen / 3)))
+
+ start_count = 0
+ if quads:
+ # QUADS
+ # Bottom-left
+ temp[0, :] = _np.mean(self.data[0:quad_mid_row, 0:quad_mid_col, :], axis=(0, 1))
+
+ # Upper-left
+ temp[1, :] = _np.mean(self.data[0:quad_mid_row, quad_mid_col + 1::, :], axis=(0, 1))
+
+ # Upper-right
+ temp[2, :] = _np.mean(self.data[quad_mid_row + 1::, quad_mid_col + 1::, :], axis=(0, 1))
+
+ # Bottom-right
+ temp[3, :] = _np.mean(self.data[quad_mid_row + 1::, 0:quad_mid_col, :], axis=(0, 1))
+
+ # Center
+ temp[4, :] = _np.mean(self.data[center_row[0]:center_row[1], center_col[0]:center_col[1], :], axis=(0, 1))
+
+ start_count += 5
+ else:
+ pass
+
+ rand_rows = ((mlen - pt_sz - 1) * _np.random.rand(num_spectra)).astype(int)
+ rand_cols = ((nlen - pt_sz - 1) * _np.random.rand(num_spectra)).astype(int)
+
+ for count in _np.arange(start_count, num_spectra):
+ if pt_sz == 1:
+ temp[count, :] = _np.squeeze(self.data[rand_rows[count - start_count],
+ rand_cols[count - start_count]])
+ else:
+
+ rows = [rand_rows[count - start_count] - (pt_sz - 1),
+ rand_rows[count - start_count] + pt_sz]
+ cols = [rand_cols[count - start_count] - (pt_sz - 1),
+ rand_cols[count - start_count] + pt_sz]
+
+ if rows[0] < 0:
+ rows[0] = 0
+ if rows[1] >= mlen:
+ rows[1] = mlen - 1
+ if cols[0] < 0:
+ cols[0] = 0
+ if cols[1] >= nlen:
+ cols[1] = nlen - 1
+
+ if cols[0] == cols[1] or rows[0] == rows[1]:
+ pass
+ else:
+ temp[count, :] = _np.squeeze(_np.mean(self.data[rows[0]:rows[1], cols[0]:cols[1], :], axis=(0, 1)))
+
+ if (not full) and (self.freq.data is not None):
+ temp = temp[..., self.freq.op_range_pix]
+
+ return temp
+
+ def __sub__(self, spectrum):
+ return self.subtract(spectrum, overwrite=False)
+
+
+class Spectra(Spectrum):
"""
Spectra class
@@ -125,7 +846,7 @@ def data(self, value):
print('Spectra: converting data input from {}D to 2D ndarray'.format(value.ndim))
if value.shape[-1] == self.freq.op_range_pix.size:
temp = _np.zeros((self._data.shape), dtype=value.dtype)
- temp[:,self.freq.op_range_pix] = value
+ temp[:, self.freq.op_range_pix] = value
self._data = temp
elif value.shape[-1] == self.freq.size:
temp = _np.zeros((self._data.shape), dtype=value.dtype)
@@ -160,12 +881,12 @@ def subtract(self, spectra, overwrite=True):
return None
else:
return self.data - spectra.data
- elif isinstance(spectra, _Spectrum):
+ elif isinstance(spectra, Spectrum):
if overwrite:
- self.data -= spectra.data[None,:]
+ self.data -= spectra.data[None, :]
return None
else:
- return self.data - spectra.data[None,:]
+ return self.data - spectra.data[None, :]
elif isinstance(spectra, _np.ndarray):
if spectra.shape == self.data.shape:
if overwrite:
@@ -175,19 +896,20 @@ def subtract(self, spectra, overwrite=True):
return self.data - spectra
else:
if overwrite:
- self.data -= spectra[None,:]
+ self.data -= spectra[None, :]
return None
else:
- return self.data - spectra[None,:]
+ return self.data - spectra[None, :]
def __sub__(self, spectrum):
return self.subtract(spectrum, overwrite=False)
-if __name__ == '__main__': # pragma: no cover
+
+if __name__ == '__main__': # pragma: no cover
sp = Spectra()
print(sp.__dict__)
- print('Subclass? : {}'.format(issubclass(Spectra,_Spectrum)))
- print('Instance of Spectra? : {}'.format(isinstance(sp,Spectra)))
- print('Instance of Spectrum? : {}'.format(isinstance(sp,_Spectrum)))
- print('Type(sp) == Spectrum? : {}'.format(type(sp)==_Spectrum))
- print('Type(sp) == Spectra? : {}'.format(type(sp)==Spectra))
\ No newline at end of file
+ print('Subclass? : {}'.format(issubclass(Spectra, Spectrum)))
+ print('Instance of Spectra? : {}'.format(isinstance(sp, Spectra)))
+ print('Instance of Spectrum? : {}'.format(isinstance(sp, Spectrum)))
+ print('Type(sp) == Spectrum? : {}'.format(type(sp) == Spectrum))
+ print('Type(sp) == Spectra? : {}'.format(type(sp) == Spectra))
diff --git a/crikit/data/spectrum.py b/crikit/data/spectrum.py
deleted file mode 100644
index 4abc8d1..0000000
--- a/crikit/data/spectrum.py
+++ /dev/null
@@ -1,392 +0,0 @@
-"""
-Spectrum class and function
-
-"""
-
-import numpy as _np
-import copy as _copy
-
-from crikit.data.frequency import Frequency as _Frequency
-
-__all__ = ['Spectrum']
-
-class Spectrum:
- """
- Spectrum class
-
- Attributes
- ----------
- data : 1D ndarray [f_pix]
- Spectrum
-
- freq : crikit.data.Frequency instance
- Frequency [wavelength, wavenumber] object (i.e., the independent \
- variable)
-
- label : str
- Spectrum label (i.e., a string describing what the spectrum is)
-
- units : str
- Units of spectrum
-
- meta : dict
- Meta-data dictionary
-
- f_pix : int, read-only
- Size of data. Note: this matches the size of data and does NOT check \
- the size of freq.freq_vec.
-
- ndim : int, read-only
- Number of data dimensions
-
- shape : tuple, read-only
- Shape of data
-
- size : int, read-only
- Size of data (i.e., total number of entries)
-
- Methods
- -------
- mean : int
- Mean value. If extent [a,b] is provided, calculate mean over that\
- inclusive region.
-
- std : int
- Standard deviation. If extent [a,b] is provided, calculate standard\
- deviation over that inclusive region.
-
- subtract : 1D ndarray or None
- Subtract spectrum or object
-
- Notes
- -----
- * freq object contains some useful parameters such as op_range* and \
- plot_range*, which define spectral regions-of-interest. (It's debatable \
- as to whether those parameters should be in Frequency or Spectrum classes)
-
- """
-
- # Configurations
- config = {}
- config['nd_axis'] = -1
- config['nd_fcn'] = _np.mean
-
- def __init__(self, data=None, freq=None, label=None, units=None, meta=None):
-
- self._data = None
- self._freq = _Frequency()
- self._label = None
- self._units = None
- self._meta = {}
-
- if data is not None:
- self.data = _copy.deepcopy(data)
- if freq is not None:
- self.freq = _copy.deepcopy(freq)
- else:
- self.freq = _Frequency()
- if label is not None:
- self.label = _copy.deepcopy(label)
- if units is not None:
- self.units = _copy.deepcopy(units)
- if meta is not None:
- self._meta = _copy.deepcopy(meta)
-
- @staticmethod
- def _mean_axes(ndim, axis):
- """
- Parameters
- ----------
- ndim : int
- Number of dimensions of input data (target is 1D spectrum)
-
- axis : int
- For ND data, axis is remaining axis
-
- Returns
- -------
- Vector that describes what axes to operate (using a mean or similar method) with
- axis parameter
- """
- if axis < 0:
- axis2 = ndim + axis
- else:
- axis2 = axis
- return tuple([n for n in range(ndim) if n != axis2])
-
- @property
- def data(self):
- return self._data
-
- @data.setter
- def data(self, value):
- if not isinstance(value, _np.ndarray):
- raise TypeError('data must be of type ndarray')
-
- # If sub-range of operation is defined. Only perform action over op_range_pix
- if self.freq is not None and self.freq.op_list_pix is not None:
- if value.shape[self.config['nd_axis']] == self.freq.op_range_pix.size:
- temp = _np.zeros((self.freq.size), dtype=value.dtype)
- if value.ndim == 1:
- temp[self.freq.op_range_pix] = value
- else:
- print('Input data is {}-dim. Performing {}'.format(value.ndim, self.config['nd_fcn'].__name__))
- nd_ax = self._mean_axes(value.ndim, axis=self.config['nd_axis'])
- temp[self.freq.op_range_pix] = self.config['nd_fcn'](value, axis=nd_ax)
- elif value.shape[self.config['nd_axis']] == self.freq.size:
- temp = _np.zeros((self.freq.size), dtype=value.dtype)
- if value.ndim == 1:
- temp[self.freq.op_range_pix] = value[self.freq.op_range_pix]
- else:
- print('Input data is {}-dim. Performing {}'.format(value.ndim, self.config['nd_fcn'].__name__))
- nd_ax = self._mean_axes(value.ndim, axis=self.config['nd_axis'])
- temp[self.freq.op_range_pix] = self.config['nd_fcn'](value, axis=nd_ax)[self.freq.op_range_pix]
-
- else:
- raise TypeError('data is of an unrecognized shape: {}'.format(value.shape))
- self._data = 1*temp
- del temp
- else:
- if value.ndim == 1:
- self._data = value
- else:
- print('Input data is {}-dim. Performing {}'.format(value.ndim, self.config['nd_fcn'].__name__))
- nd_ax = self._mean_axes(value.ndim, axis=self.config['nd_axis'])
- self._data = self.config['nd_fcn'](value, axis=nd_ax)
-
-
- @property
- def freq(self):
- return self._freq
-
- @freq.setter
- def freq(self, value):
- if isinstance(value, _Frequency):
- self._freq = value
- elif isinstance(value, _np.ndarray):
- self.freq = _Frequency(data=value)
- else:
- raise TypeError('freq must be of type crikit.data.Frequency')
-
- @property
- def f(self):
- """
- Convenience attribute: return frequency vector within operating (op) \
- range
- """
- return self.freq.op_range_freq
-
- @property
- def f_full(self):
- """
- Convenience attribute: return full frequency vector
- """
- return self.freq.data
-
- @property
- def units(self):
- return self._units
-
- @units.setter
- def units(self, value):
- if isinstance(value, str):
- self._units = value
- else:
- raise TypeError('units must be of type str')
-
- @property
- def label(self):
- return self._label
-
- @label.setter
- def label(self, value):
- if isinstance(value, str):
- self._label = value
- else:
- raise TypeError('label must be of type str')
-
- @property
- def meta(self):
- temp_dict = self._meta.copy()
-
- if self.freq.calib is not None:
- try:
- calib_dict = {}
- calib_prefix = 'Calib.'
-
- calib_dict[calib_prefix + 'a_vec'] = self.freq.calib['a_vec']
- calib_dict[calib_prefix + 'ctr_wl'] = self.freq.calib['ctr_wl']
- calib_dict[calib_prefix + 'ctr_wl0'] = self.freq.calib['ctr_wl0']
- calib_dict[calib_prefix + 'n_pix'] = self.freq.calib['n_pix']
- calib_dict[calib_prefix + 'probe'] = self.freq.calib['probe']
-
- try: # Doesn't really matter if we have the units
- calib_dict[calib_prefix + 'units'] = self.freq.calib['units']
- except:
- pass
-
- except:
- print('Could not get calibration information')
- else:
- temp_dict.update(calib_dict)
-
- if self.freq.calib_orig is not None:
- try:
- calib_dict = {}
- calib_prefix = 'CalibOrig.'
-
- calib_dict[calib_prefix + 'a_vec'] = self.freq.calib_orig['a_vec']
- calib_dict[calib_prefix + 'ctr_wl'] = self.freq.calib_orig['ctr_wl']
- calib_dict[calib_prefix + 'ctr_wl0'] = self.freq.calib_orig['ctr_wl0']
- calib_dict[calib_prefix + 'n_pix'] = self.freq.calib_orig['n_pix']
- calib_dict[calib_prefix + 'probe'] = self.freq.calib_orig['probe']
-
- try: # Doesn't really matter if we have the units
- calib_dict[calib_prefix + 'units'] = self.freq.calib_orig['units']
- except:
- pass
-
- except:
- print('Could not get calibration information')
- else:
- temp_dict.update(calib_dict)
-
- # return self._meta
- return temp_dict
-
- @meta.setter
- def meta(self, value):
- if isinstance(value, dict):
- self._meta = value
- else:
- raise TypeError('meta must be of type dict')
-
- @property
- def f_pix(self):
- if self._data is not None:
- return self._data.shape[-1]
-
- @property
- def ndim(self):
- if self._data is None:
- return None
- elif isinstance(self._data, _np.ndarray):
- return self._data.ndim
- else:
- return len(self._data.shape)
-
- @property
- def shape(self):
- if self._data is None:
- return None
- else:
- return self._data.shape
-
- @property
- def size(self):
- if self._data is None:
- return None
- else:
- return self._data.size
-
- def mean(self, extent=None, over_space=True):
- """
- Return mean spectrum (or mean over extent [list with 2 elements]). If\
- over_space is False, returns reps-number of mean spectra
- """
- if self._data is None:
- return None
-
- ndim = len(self._data.shape)
-
- if ndim == 1:
- if isinstance(self._data, _np.ndarray):
- return self._data.mean()
- else:
- return _np.mean(self._data)
-
- if ndim > 1:
- if over_space == True:
- axes = tuple(_np.arange(ndim-1))
- else:
- axes = -1
-
- if isinstance(self._data, _np.ndarray):
- if extent is None:
- return self._data.mean(axis=axes)
- else:
- return self._data[:,extent[0]:extent[1]+1].mean(axis=axes)
- else:
- if extent is None:
- return _np.mean(self._data, axis=axes)
- else:
- return _np.mean(self._data[:,extent[0]:extent[1]+1],
- axis=axes)
-
- def std(self, extent=None, over_space=True):
- """
- Return standard deviation (std) spectrum (or std over extent
- [list with 2 elements]). If over_space is False, reps (or reps x reps)
- number of std's.
- """
- if self._data is None:
- return None
-
- ndim = len(self._data.shape)
-
- if ndim == 1:
- if isinstance(self._data, _np.ndarray):
- return self._data.std()
- else:
- return _np.std(self._data)
-
- if ndim > 1:
- if over_space == True:
- axes = tuple(_np.arange(ndim-1))
- else:
- axes = -1
-
- if isinstance(self._data, _np.ndarray):
- if extent is None:
- return self._data.std(axis=axes)
- else:
- return self._data[:,extent[0]:extent[1]+1].std(axis=axes)
- else:
- if extent is None:
- return _np.std(self._data, axis=axes)
- else:
- return _np.std(self._data[:,extent[0]:extent[1]+1],
- axis=axes)
-
- def subtract(self, spectrum, overwrite=True):
- """
- Subtract spectrum from data
- """
- if isinstance(spectrum, Spectrum):
- if overwrite:
- self.data -= spectrum.data
- return None
- else:
- return self.data - spectrum.data
- elif isinstance(spectrum, _np.ndarray):
- if overwrite:
- self.data -= spectrum
- return None
- else:
- return self.data - spectrum
-
- def __sub__(self, spectrum):
- return self.subtract(spectrum, overwrite=False)
-
-
-if __name__ == '__main__': # pragma: no cover
- import timeit as _timeit
-
- N = 10001
- wn = _np.linspace(500,3000,N)
- sp = Spectrum(data=_np.random.rand(N) + 1j*_np.random.rand(N), freq=wn)
-
- tmr = _timeit.default_timer()
- sp.data[200:500]
- tmr -= _timeit.default_timer()
- print(-tmr)
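# A minimal migration sketch: with crikit/data/spectrum.py deleted above,
# Spectrum, Spectra, and Hsi all live in crikit.data.spectra. Assuming only the
# module path changed (not the class APIs), downstream forks need:

# from crikit.data.spectrum import Spectrum  # pre-0.4.3 path (removed)
# from crikit.data.hsi import Hsi            # pre-0.4.3 path (removed)
from crikit.data.spectra import Spectrum, Spectra, Hsi  # 0.4.3+ path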
diff --git a/crikit/data/tests/test_hsi.py b/crikit/data/tests/test_hsi.py
index 13e9437..446cd64 100644
--- a/crikit/data/tests/test_hsi.py
+++ b/crikit/data/tests/test_hsi.py
@@ -3,12 +3,12 @@
import numpy.testing
import pytest
-import lazy5
+import crikit.io.lazy5 as lazy5
from crikit.io.hdf5 import hdf_import_data
-from crikit.data.spectrum import Spectrum
+from crikit.data.spectra import Spectrum
from crikit.data.spectra import Spectra
-from crikit.data.hsi import Hsi
+from crikit.data.spectra import Hsi
@pytest.fixture(scope="function")
def make_datasets():
diff --git a/crikit/data/tests/test_mosaic.py b/crikit/data/tests/test_mosaic.py
index 639aa44..a02f980 100644
--- a/crikit/data/tests/test_mosaic.py
+++ b/crikit/data/tests/test_mosaic.py
@@ -42,7 +42,7 @@ def test_crop_2D():
assert mos.shape == tuple(n*[new_obj.shape])
assert mos.size == n
assert mos.issamedim
- assert mos.dtype == np.float
+ assert mos.dtype == float
# AFFECTED BY START* END*
assert mos.unitshape == (m_obj_crop, n_obj_crop)
@@ -90,7 +90,7 @@ def test_crop_3D():
assert mos.shape == tuple(n*[new_obj.shape])
assert mos.size == n
assert mos.issamedim
- assert mos.dtype == np.float
+ assert mos.dtype == float
# AFFECTED BY START* END*
assert mos.unitshape == (m_obj_crop, n_obj_crop, p_obj_crop)
@@ -143,7 +143,7 @@ def test_3D_crop_transpose_flips():
assert mos.shape == tuple(n*[new_obj.shape])
assert mos.size == n
assert mos.issamedim
- assert mos.dtype == np.float
+ assert mos.dtype == float
# AFFECTED BY START* END*
assert mos.unitshape == (m_obj_crop, n_obj_crop, p_obj_crop)
@@ -200,7 +200,7 @@ def test_3D_crop_transpose_flips_2():
assert mos.shape == tuple(n*[new_obj.shape])
assert mos.size == n
assert mos.issamedim
- assert mos.dtype == np.float
+ assert mos.dtype == float
# AFFECTED BY START* END*
assert mos.unitshape == (m_obj_crop, n_obj_crop, p_obj_crop)
@@ -242,7 +242,7 @@ def test_2D_uniform_obj():
assert mos.shape == tuple(n*[new_obj.shape])
assert mos.size == n
assert mos.issamedim
- assert mos.dtype == np.float
+ assert mos.dtype == float
assert mos.unitshape == (m_obj, n_obj)
assert mos.unitshape_orig == (m_obj, n_obj)
@@ -278,7 +278,7 @@ def test_3D_uniform_obj():
assert mos.shape == tuple(n*[new_obj.shape])
assert mos.size == n
assert mos.issamedim
- assert mos.dtype == np.float
+ assert mos.dtype == float
with pytest.raises(ValueError):
mos.mosaic2d((m_side, n_side)).shape
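# A short sketch of why the np.float -> float edits above are needed: NumPy's
# scalar aliases (np.float, np.int, np.complex) were aliases of the builtins,
# deprecated in NumPy 1.20 and removed in 1.24. The dtype comparison the tests
# rely on still holds with the builtin:

import numpy as np

arr = np.zeros(3)               # default dtype is float64
assert arr.dtype == float       # builtin float compares equal to float64
assert arr.dtype == np.float64  # explicit dtype, the preferred spelling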
diff --git a/crikit/data/tests/test_mosaic_hdf.py b/crikit/data/tests/test_mosaic_hdf.py
index 3f685f8..ee99713 100644
--- a/crikit/data/tests/test_mosaic_hdf.py
+++ b/crikit/data/tests/test_mosaic_hdf.py
@@ -5,7 +5,7 @@
import pytest
import h5py
-import lazy5
+import crikit.io.lazy5 as lazy5
from crikit.data.mosaic import Mosaic
@@ -31,7 +31,7 @@ def hdf_dataset2():
time.sleep(1)
try:
os.remove(filename)
- except:
+ except Exception:
print('Could not delete {}'.format(filename))
def test_hdf2(hdf_dataset2):
@@ -101,13 +101,13 @@ def test_big_to_small_3d_output_given():
time.sleep(1)
try:
os.remove(filename_in)
- except:
+ except Exception:
print('Could not delete {}'.format(filename_in))
time.sleep(1)
try:
os.remove(filename_out)
- except:
+ except Exception:
print('Could not delete {}'.format(filename_out))
def test_big_to_small_3d_output_given_crop():
@@ -172,13 +172,13 @@ def test_big_to_small_3d_output_given_crop():
time.sleep(1)
try:
os.remove(filename_in)
- except:
+ except Exception:
print('Could not delete {}'.format(filename_in))
time.sleep(1)
try:
os.remove(filename_out)
- except:
+ except Exception:
print('Could not delete {}'.format(filename_out))
def test_big_to_small_3d_output_given_crop_transpose_flips():
@@ -244,11 +244,11 @@ def test_big_to_small_3d_output_given_crop_transpose_flips():
time.sleep(1)
try:
os.remove(filename_in)
- except:
+ except Exception:
print('Could not delete {}'.format(filename_in))
time.sleep(1)
try:
os.remove(filename_out)
- except:
+ except Exception:
print('Could not delete {}'.format(filename_out))
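# The repeated `except:` -> `except Exception:` edits narrow the handlers so
# they no longer swallow BaseException subclasses such as KeyboardInterrupt and
# SystemExit. The cleanup pattern above, in isolation (hypothetical helper, not
# part of the diff):

import os

def try_remove(filename):
    """Remove a file, tolerating errors but still letting Ctrl-C through."""
    try:
        os.remove(filename)
    except Exception:  # a bare `except:` would also trap KeyboardInterrupt
        print('Could not delete {}'.format(filename))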
diff --git a/crikit/data/tests/test_spectra.py b/crikit/data/tests/test_spectra.py
index 61555d8..6b05ed2 100644
--- a/crikit/data/tests/test_spectra.py
+++ b/crikit/data/tests/test_spectra.py
@@ -3,12 +3,12 @@
import numpy.testing
import pytest
-import lazy5
+import crikit.io.lazy5 as lazy5
from crikit.io.hdf5 import hdf_import_data
-from crikit.data.spectrum import Spectrum
+from crikit.data.spectra import Spectrum
from crikit.data.spectra import Spectra
-from crikit.data.hsi import Hsi
+from crikit.data.spectra import Hsi
@pytest.fixture(scope="function")
def make_datasets():
diff --git a/crikit/data/tests/test_spectrum.py b/crikit/data/tests/test_spectrum.py
index 7c0a68d..4214f16 100644
--- a/crikit/data/tests/test_spectrum.py
+++ b/crikit/data/tests/test_spectrum.py
@@ -3,12 +3,12 @@
import numpy.testing
import pytest
-import lazy5
+import crikit.io.lazy5 as lazy5
from crikit.io.hdf5 import hdf_import_data
-from crikit.data.spectrum import Spectrum
+from crikit.data.spectra import Spectrum
from crikit.data.spectra import Spectra
-from crikit.data.hsi import Hsi
+from crikit.data.spectra import Hsi
@pytest.fixture(scope="function")
def make_datasets():
diff --git a/crikit/data/tests/~test_data_indexing.py b/crikit/data/tests/~test_data_indexing.py
index 77292ea..4e28a84 100644
--- a/crikit/data/tests/~test_data_indexing.py
+++ b/crikit/data/tests/~test_data_indexing.py
@@ -7,9 +7,9 @@
import numpy as np
import numpy.testing as np_testing
-from crikit.data.spectrum import Spectrum
+from crikit.data.spectra import Spectrum
from crikit.data.spectra import Spectra
-from crikit.data.hsi import Hsi
+from crikit.data.spectra import Hsi
from crikit.utils.general import find_nearest
def test_spectrum():
diff --git a/crikit/datasets/model.py b/crikit/datasets/model.py
index a179e03..603587c 100644
--- a/crikit/datasets/model.py
+++ b/crikit/datasets/model.py
@@ -19,7 +19,7 @@ class Model:
_M = 300
_N = 300
- def __init__(self, subsample=1, dtype=_np.complex):
+ def __init__(self, subsample=1, dtype=complex):
self.n_layers = 7 # Number of components
self.img_shape = [300, 300] # Spaital imaging shape
@@ -68,7 +68,7 @@ def __init__(self, subsample=1, dtype=_np.complex):
gd_spec = _get_data('crikit.datasets', '{}{}{}'.format(self.__spec_prefix,
num, '.csv'))
self.spec_list.append(_np.genfromtxt(_BytesIO(gd_spec), delimiter=','))
- except:
+ except Exception:
print('Failed to import model layer and/or spectral information')
else:
print('Model spatial size: {}'.format(self.img_shape))
@@ -115,7 +115,7 @@ def make_spectra(self, f):
self.n_peak_list.append(a_vec.size)
self.spectra[num, :] = _np.sum(a_vec[:,None] / (omega_vec [:,None] - f[None,:] - 1j*gamma_vec[:,None]), axis=0)
- except:
+ except Exception:
print('Failed to make model spectra')
else:
print('Model spectral size: {}'.format(self.f.size))
@@ -136,7 +136,7 @@ def make_hsi(self, f=None):
# self.hsi = _np.zeros(self.img_shape + [self._f.size], dtype=self.dtype)
self.hsi = _np.dot(self.layers, self.spectra)
print('Model HSI shape: {}'.format(self.hsi.shape))
- except:
+ except Exception:
print('Faled to make model HSI')
#%%
diff --git a/crikit/io/csv_nist.py b/crikit/io/csv_nist.py
index 7d152bb..bea8fb7 100644
--- a/crikit/io/csv_nist.py
+++ b/crikit/io/csv_nist.py
@@ -9,9 +9,9 @@
import numpy as _np
import copy as _copy
-from crikit.data.spectrum import Spectrum as _Spectrum
+from crikit.data.spectra import Spectrum as _Spectrum
from crikit.data.spectra import Spectra as _Spectra
-from crikit.data.hsi import Hsi as _Hsi
+from crikit.data.spectra import Hsi as _Hsi
from configparser import ConfigParser as _ConfigParser
#
@@ -35,7 +35,7 @@ def csv_nist_import_data(pth, filename_header, filename_data,
filename_data : str
File name of data
- output_cls_instance : crikit.data.spectrum.Spectrum (or subclass)
+ output_cls_instance : crikit.data.spectra.Spectrum (or subclass)
Spectrum class (or sub) object
Returns
@@ -56,7 +56,7 @@ def csv_nist_import_data(pth, filename_header, filename_data,
try:
with open(pfname_header,'r') as _:
pass
- except:
+ except Exception:
print('Invalid header filename')
else:
valid_import_locs += 1
@@ -64,7 +64,7 @@ def csv_nist_import_data(pth, filename_header, filename_data,
try:
with open(pfname_data,'r') as _:
pass
- except:
+ except Exception:
print('Invalid data filename')
else:
valid_import_locs += 1
@@ -126,10 +126,10 @@ def csv_nist_import_data(pth, filename_header, filename_data,
try: # int
v = int(each_val)
#print('Integer')
- except:
+ except Exception:
try: # float
v = float(each_val)
- except: # string
+ except Exception: # string
v = str.strip(each_val,'"')
#print('{}.{}: {}'.format(each_section,each_key, v))
attr.update({k:v})
@@ -160,7 +160,7 @@ def csv_nist_import_data(pth, filename_header, filename_data,
y_steps = config.getint('Y scan Paramters','Y steps')
y_step_size = config.getfloat('Y scan Paramters','Y step size (um)')
y = _np.linspace(y_start, y_start + y_step_size * (y_steps-1), y_steps)
- except: # In case typo is corrected in the future
+ except Exception: # In case typo is corrected in the future
y_start = config.getfloat('Y scan Parameters','Y start (um)')
y_steps = config.getint('Y scan Parameters','Y steps')
y_step_size = config.getfloat('Y scan Parameters','Y step size (um)')
@@ -175,10 +175,10 @@ def csv_nist_import_data(pth, filename_header, filename_data,
try: # int
v = int(each_val)
#print('Integer')
- except:
+ except Exception:
try: # float
v = float(each_val)
- except: # string
+ except Exception: # string
v = str.strip(each_val,'"')
#print('{}.{}: {}'.format(each_section,each_key, v))
attr.update({k:v})
@@ -231,14 +231,14 @@ def csv_nist_import_data(pth, filename_header, filename_data,
# Figure out fixed positions later
- except:
+ except Exception:
pass
else:
output_cls_instance.data = data
output_cls_instance.meta = attr
return True
- except:
+ except Exception:
print('Something failed in import')
if __name__ == '__main__':
diff --git a/crikit/io/hdf5.py b/crikit/io/hdf5.py
index cde9ed0..b88c547 100644
--- a/crikit/io/hdf5.py
+++ b/crikit/io/hdf5.py
@@ -7,17 +7,120 @@
@author: chc
"""
-import os as _os
+import numpy as np
-import numpy as _np
+from crikit.data.spectra import Spectrum
+from crikit.data.spectra import Spectra
+from crikit.data.spectra import Hsi
-from crikit.data.spectrum import Spectrum as _Spectrum
-from crikit.data.spectra import Spectra as _Spectra
-from crikit.data.hsi import Hsi as _Hsi
+import crikit.io.lazy5 as lazy5
+from crikit.utils.general import find_nearest
+from scipy.interpolate import interp1d
-import lazy5 as _lazy5
-__all__ = ['hdf_import_data']
+__all__ = ['hdf_import_data', 'hdf_import_data_macroraster']
+
+
+def hdf_import_data_macroraster(pth, filename, dset_list, output_cls_instance, config_dict=None):
+ """
+ Import dataset(s) from HDF file with each dset being a single line scan.
+
+ Parameters
+ ----------
+ pth : str
+ Path
+
+ filename : str
+ File name
+
+ dset_list : list
+ List of 1 or more datasets
+
+    output_cls_instance : crikit.data.spectra.Hsi
+        Hsi object to populate (this importer requires an Hsi instance)
+
+ Returns
+ -------
+ Success : bool
+ Success of import
+
+ """
+
+ config_dict_template = {'fast_start': 'Macro.Raster.Fast.Start',
+ 'fast_stop': 'Macro.Raster.Fast.Stop',
+ 'fast_steps': 'Macro.Raster.Fast.Steps',
+ 'fast_pos_sampled': 'MicroStage.raster.fast.pos_sample_vec',
+ 'n_imgs_at_sampled': 'MicroStage.raster.fast.n_images_at_pos_samples',
+ 'slow_start': 'Macro.Raster.Slow.Start',
+ 'slow_stop': 'Macro.Raster.Slow.Stop',
+ 'slow_steps': 'Macro.Raster.Slow.Steps',
+ 'slow_current_pos': 'MicroStage.raster.slow.pos',
+ 'n_pix': 'Spectrometer.calib.n_pix'}
+ config = {}
+ config.update(config_dict_template)
+ if config_dict is not None:
+ config.update(config_dict)
+
+    # Join path and filename in an os-independent way
+ pfname = lazy5.utils.fullpath(filename, pth=pth)
+
+ if not lazy5.inspect.valid_file(filename, pth=pth, verbose=True):
+ return False
+ elif not lazy5.inspect.valid_dsets(filename, dset_list, pth=pth, verbose=False):
+ return False
+ else:
+ fof = lazy5.utils.FidOrFile(pfname, mode='r')
+ fid = fof.fid
+
+ assert isinstance(output_cls_instance, Hsi)
+ assert isinstance(dset_list, list)
+
+ all_xs = []
+ all_ys = []
+
+ for num, dname in enumerate(dset_list):
+        # Convert to hardware-oriented dtype (endianness)
+ # dset_dtype_import = fid[dname].dtype.newbyteorder('=')
+ # dset_shp = fid[dname].shape
+
+ curr_slice = fid[dname][:]
+ meta = lazy5.inspect.get_attrs_dset(fid, dname)
+ if num == 0:
+ dset_dtype_import = fid[dname].dtype.newbyteorder('=')
+ dset_shp = (meta[config['slow_steps']], meta[config['fast_steps']],
+ meta[config['n_pix']])
+ output_cls_instance.data = np.zeros(dset_shp, dtype=dset_dtype_import)
+
+ x_vec = np.linspace(meta[config['fast_start']], meta[config['fast_stop']],
+ meta[config['fast_steps']])
+ y_vec = np.linspace(meta[config['slow_start']], meta[config['slow_stop']],
+ meta[config['slow_steps']])
+
+ curr_y_pos = meta[config['slow_current_pos']]
+
+ curr_x_vec = meta[config['fast_pos_sampled']]
+ curr_n_imgs_vec = meta[config['n_imgs_at_sampled']]
+
+ all_xs.extend(curr_x_vec.tolist())
+ all_ys.extend([curr_y_pos])
+
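+            # Map frame indices to fast-axis stage positions (intfcn), then
+            # resample the unevenly spaced line scan onto the uniform x_vec
+            # grid, extrapolating at the edges so every column is filled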
+ intfcn = interp1d(curr_n_imgs_vec, curr_x_vec, kind='linear')
+
+ int_fcn_intensity = interp1d(intfcn(np.arange(curr_slice.shape[0])),
+ curr_slice, axis=0, bounds_error=False, kind='linear', fill_value='extrapolate')
+
+ y_idx = find_nearest(y_vec, curr_y_pos)[1]
+ output_cls_instance.data[y_idx, ...] = int_fcn_intensity(x_vec)
+
+ # extent = [x_vec.min(), x_vec.max(), y_vec.min(), y_vec.max()]
+
+ fid.close()
+ output_cls_instance.meta = meta
+ return True
+
def hdf_import_data(pth, filename, dset_list, output_cls_instance=None):
"""
@@ -34,7 +137,7 @@ def hdf_import_data(pth, filename, dset_list, output_cls_instance=None):
dset_list : list
List of 1 or more datasets
- output_cls_instance : crikit.data.spectrum.Spectrum (or subclass)
+ output_cls_instance : crikit.data.spectra.Spectrum (or subclass)
Spectrum class (or sub) object
Returns
@@ -47,27 +150,27 @@ def hdf_import_data(pth, filename, dset_list, output_cls_instance=None):
"""
# Join path and filename in an os-independant way
- pfname = _lazy5.utils.fullpath(filename, pth=pth)
+ pfname = lazy5.utils.fullpath(filename, pth=pth)
- if not _lazy5.inspect.valid_file(filename, pth=pth, verbose=True):
+ if not lazy5.inspect.valid_file(filename, pth=pth, verbose=True):
return False
- elif not _lazy5.inspect.valid_dsets(filename, dset_list, pth=pth, verbose=True):
+ elif not lazy5.inspect.valid_dsets(filename, dset_list, pth=pth, verbose=True):
return False
else:
- fof = _lazy5.utils.FidOrFile(pfname, mode='r')
+ fof = lazy5.utils.FidOrFile(pfname, mode='r')
fid = fof.fid
- if type(output_cls_instance) == _Hsi:
+ if type(output_cls_instance) == Hsi:
print('Type Hsi')
if isinstance(dset_list, str):
# Convert to hardware-oriented dtype (endianess)
dset_dtype_import = fid[dset_list].dtype.newbyteorder('=')
dset_shp = fid[dset_list].shape
- output_cls_instance.data = _np.zeros(dset_shp, dtype=dset_dtype_import)
+ output_cls_instance.data = np.zeros(dset_shp, dtype=dset_dtype_import)
fid[dset_list].read_direct(output_cls_instance.data)
# output_cls_instance.data = fid[dset_list].value
- output_cls_instance.meta = _lazy5.inspect.get_attrs_dset(fid, dset_list)
+ output_cls_instance.meta = lazy5.inspect.get_attrs_dset(fid, dset_list)
elif isinstance(dset_list, list):
if len(dset_list) > 1:
print('Cannot accept more than 1 HSI image at this time')
@@ -77,30 +180,28 @@ def hdf_import_data(pth, filename, dset_list, output_cls_instance=None):
dset_dtype_import = fid[dname].dtype.newbyteorder('=')
dset_shp = fid[dname].shape
if num == 0:
- output_cls_instance.data = _np.zeros(dset_shp,
- dtype=dset_dtype_import)
+ output_cls_instance.data = np.zeros(dset_shp, dtype=dset_dtype_import)
fid[dname].read_direct(output_cls_instance.data)
# output_cls_instance.data = fid[dname][:]
- output_cls_instance.meta = _lazy5.inspect.get_attrs_dset(fid, dname)
+ output_cls_instance.meta = lazy5.inspect.get_attrs_dset(fid, dname)
else:
- output_cls_instance.data = _np.vstack((output_cls_instance.data,
- fid[dname][:].astype(dset_dtype_import)))
+ output_cls_instance.data = np.vstack((output_cls_instance.data, fid[dname][:].astype(dset_dtype_import)))
ret = True
- elif type(output_cls_instance) == _Spectra:
+ elif type(output_cls_instance) == Spectra:
print('Type Spectra')
if isinstance(dset_list, str):
# Convert to hardware-oriented dtype (endianess)
dset_dtype_import = fid[dset_list].dtype.newbyteorder('=')
if fid[dset_list].ndim == 2: # Compatible dimensions-- use read-direct
dset_shp = fid[dset_list].shape
- output_cls_instance.data = _np.zeros(dset_shp, dtype=dset_dtype_import)
+ output_cls_instance.data = np.zeros(dset_shp, dtype=dset_dtype_import)
fid[dset_list].read_direct(output_cls_instance.data)
else:
output_cls_instance.data = fid[dset_list].value.astype(dset_dtype_import)
# output_cls_instance.data = fid[dset_list].value
- output_cls_instance.meta = _lazy5.inspect.get_attrs_dset(fid, dset_list)
+ output_cls_instance.meta = lazy5.inspect.get_attrs_dset(fid, dset_list)
elif isinstance(dset_list, list):
for num, dname in enumerate(dset_list):
@@ -108,22 +209,21 @@ def hdf_import_data(pth, filename, dset_list, output_cls_instance=None):
dset_dtype_import = fid[dname].dtype.newbyteorder('=')
if num == 0:
output_cls_instance.data = fid[dname][:].astype(dset_dtype_import)
- output_cls_instance.meta = _lazy5.inspect.get_attrs_dset(fid, dname)
+ output_cls_instance.meta = lazy5.inspect.get_attrs_dset(fid, dname)
else:
- output_cls_instance.data = _np.vstack((output_cls_instance.data,
- fid[dname][:].astype(dset_dtype_import)))
+ output_cls_instance.data = np.vstack((output_cls_instance.data, fid[dname][:].astype(dset_dtype_import)))
ret = True
- elif type(output_cls_instance) == _Spectrum:
+ elif type(output_cls_instance) == Spectrum:
print('Type Spectrum')
if isinstance(dset_list, str):
# Convert to hardware-oriented dtype (endianess)
dset_dtype_import = fid[dset_list].dtype.newbyteorder('=')
dset_shp = fid[dset_list].shape
- output_cls_instance.data = _np.zeros(dset_shp, dtype=dset_dtype_import)
+ output_cls_instance.data = np.zeros(dset_shp, dtype=dset_dtype_import)
fid[dset_list].read_direct(output_cls_instance.data)
# output_cls_instance.data = fid[dset_list].value
- output_cls_instance.meta = _lazy5.inspect.get_attrs_dset(fid, dset_list)
+ output_cls_instance.meta = lazy5.inspect.get_attrs_dset(fid, dset_list)
elif isinstance(dset_list, list):
if len > 1:
print('Will average spectra into a single spectrum')
@@ -133,46 +233,44 @@ def hdf_import_data(pth, filename, dset_list, output_cls_instance=None):
dset_dtype_import = fid[dname].dtype.newbyteorder('=')
dset_shp = fid[dname].shape
if num == 0:
- output_cls_instance.data = _np.zeros(dset_shp,
- dtype=dset_dtype_import)
+ output_cls_instance.data = np.zeros(dset_shp, dtype=dset_dtype_import)
fid[dname].read_direct(output_cls_instance.data)
# output_cls_instance.data = fid[dname][:]
- output_cls_instance.meta = _lazy5.inspect.get_attrs_dset(fid, dname)
+ output_cls_instance.meta = lazy5.inspect.get_attrs_dset(fid, dname)
else:
output_cls_instance.data += fid[dname][:].astype(dset_dtype_import)
- output_cls_instance.data /= num+1
+ output_cls_instance.data /= num + 1
ret = True
elif output_cls_instance is None:
if isinstance(dset_list, str):
# Convert to hardware-oriented dtype (endianess)
dset_dtype_import = fid[dset_list].dtype.newbyteorder('=')
dset_shp = fid[dset_list].shape
- data = _np.zeros(dset_shp, dtype=dset_dtype_import)
+ data = np.zeros(dset_shp, dtype=dset_dtype_import)
fid[dset_list].read_direct(data)
# data = fid[dset_list].value
- meta = _lazy5.inspect.get_attrs_dset(fid, dset_list)
+ meta = lazy5.inspect.get_attrs_dset(fid, dset_list)
elif isinstance(dset_list, list):
for num, dname in enumerate(dset_list):
# Convert to hardware-oriented dtype (endianess)
dset_dtype_import = fid[dname].dtype.newbyteorder('=')
dset_shp = fid[dname].shape
if num == 0:
- data = _np.zeros(dset_shp,
- dtype=dset_dtype_import)
+ data = np.zeros(dset_shp, dtype=dset_dtype_import)
fid[dname].read_direct(data)
# data = fid[dname][:]
- meta = _lazy5.inspect.get_attrs_dset(fid, dname)
+ meta = lazy5.inspect.get_attrs_dset(fid, dname)
else:
- data = _np.vstack((data, fid[dname][:].astype(dset_dtype_import)))
+ data = np.vstack((data, fid[dname][:].astype(dset_dtype_import)))
ret = [data, meta]
else:
raise TypeError('output_cls must be Spectrum, Spectra, or Hsi')
-
fid.close()
return ret
+
if __name__ == '__main__': # pragma: no cover
from crikit.io.meta_configs import (special_nist_bcars2 as _snb)
@@ -181,35 +279,34 @@ def hdf_import_data(pth, filename, dset_list, output_cls_instance=None):
pth = '../'
filename = 'mP2_w_small.h5'
-
dset = '/Spectra/Dark_3_5ms_2'
- tester = _lazy5.inspect.valid_dsets(pth=pth, file='fake.h5', dset_list='fake')
+ tester = lazy5.inspect.valid_dsets(pth=pth, file='fake.h5', dset_list='fake')
assert not tester
- tester = _lazy5.inspect.valid_dsets(pth=pth, file='fake.h5', dset_list='fake_dset')
+ tester = lazy5.inspect.valid_dsets(pth=pth, file='fake.h5', dset_list='fake_dset')
assert not tester
- tester = _lazy5.inspect.valid_dsets(pth=pth, file='fake.h5',
- dset_list=['fake_dset1', 'fake_dset2'])
+ tester = lazy5.inspect.valid_dsets(pth=pth, file='fake.h5',
+ dset_list=['fake_dset1', 'fake_dset2'])
assert not tester
print('Path: {}'.format(pth))
- tester = _lazy5.inspect.valid_dsets(pth=pth, file=filename, dset_list=dset, verbose=True)
+ tester = lazy5.inspect.valid_dsets(pth=pth, file=filename, dset_list=dset, verbose=True)
assert tester
print('--------------\n\n')
- spect_dark = _Spectra()
- tester = _lazy5.inspect.valid_dsets(pth=pth, file=filename,
- dset_list=['/Spectra/Dark_3_5ms_2'])
+ spect_dark = Spectra()
+ tester = lazy5.inspect.valid_dsets(pth=pth, file=filename,
+ dset_list=['/Spectra/Dark_3_5ms_2'])
hdf_import_data(pth, filename, '/Spectra/Dark_3_5ms_2', spect_dark)
- #hdf_process_attr(rosetta, spect_dark)
+ # hdf_process_attr(rosetta, spect_dark)
print('Shape of dark spectra: {}'.format(spect_dark.shape))
print('Shape of dark spectra.mean(): {}'.format(spect_dark.mean().shape))
print('Dtype of dark spectra: {}'.format(spect_dark._data.dtype))
print('')
- img = _Hsi()
+ img = Hsi()
hdf_import_data(pth, filename, '/BCARSImage/mP2_3_5ms_Pos_2_0/mP2_3_5ms_Pos_2_0_small', img)
print('Shape of img: {}'.format(img.shape))
print('Shape of img.mean(): {}'.format(img.mean().shape))
@@ -221,9 +318,9 @@ def hdf_import_data(pth, filename, dset_list, output_cls_instance=None):
pth = 'C:/Users/chc/Documents/Data/2018/OliverJonas/180629/'
filename = 'L1d1_pos0.h5'
dsetname = '/BCARSImage/L1d1_pos0_0/NRB_Post_0'
- spect_nrb = _Spectra()
- tester = _lazy5.inspect.valid_dsets(pth=pth, file=filename,
- dset_list=[dsetname])
+ spect_nrb = Spectra()
+ tester = lazy5.inspect.valid_dsets(pth=pth, file=filename,
+ dset_list=[dsetname])
out = hdf_import_data(pth, filename, dsetname, spect_nrb)
print('HDF_import_data returned: {}'.format(out))
# hdf_process_attr(rosetta, spect_nrb)
@@ -232,7 +329,7 @@ def hdf_import_data(pth, filename, dset_list, output_cls_instance=None):
# print('Shape of dark spectra.mean(): {}'.format(spect_dark.mean().shape))
# print('Dtype of dark spectra: {}'.format(spect_dark._data.dtype))
# print('')
- # img = _Hsi()
+ # img = Hsi()
# hdf_import_data(pth, filename, '/BCARSImage/mP2_3_5ms_Pos_2_0/mP2_3_5ms_Pos_2_0_small', img)
# print('Shape of img: {}'.format(img.shape))
# print('Shape of img.mean(): {}'.format(img.mean().shape))
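# A minimal usage sketch for the new macro-raster importer, assuming an HDF5
# file laid out as one dataset per line scan (file and dataset names here are
# hypothetical):

from crikit.data.spectra import Hsi
from crikit.io.hdf5 import hdf_import_data_macroraster

img = Hsi()
dsets = ['/MacroRaster/line_{}'.format(n) for n in range(10)]
if hdf_import_data_macroraster('./', 'macro_scan.h5', dsets, img):
    print('Imported raster of shape {}'.format(img.shape))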
diff --git a/crikit/io/lazy5/__init__.py b/crikit/io/lazy5/__init__.py
new file mode 100644
index 0000000..55fe5fe
--- /dev/null
+++ b/crikit/io/lazy5/__init__.py
@@ -0,0 +1,13 @@
+"""Macros for h5py... because I'm lazy"""
+
+# ! Inside try-except so setup can still grab the __version__ prior to install
+try:
+ from . import config
+ from . import utils
+ from . import inspect
+ from . import alter
+ from . import create
+except Exception as e:
+ print(e)
+
+__version__ = '0.3.0'
\ No newline at end of file
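# LazyHDF5 is now vendored as crikit.io.lazy5, so the external lazy5 dependency
# is gone. Call sites keep their spelling via an aliased import (sketch,
# assuming the vendored API matches the old package; filename is hypothetical):

import crikit.io.lazy5 as lazy5

dsets = lazy5.inspect.get_datasets('my_file.h5', pth='./')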
diff --git a/crikit/io/lazy5/alter.py b/crikit/io/lazy5/alter.py
new file mode 100644
index 0000000..ca01635
--- /dev/null
+++ b/crikit/io/lazy5/alter.py
@@ -0,0 +1,159 @@
+""" Macros for inspection of HDF5 files """
+import h5py as _h5py
+
+from .utils import (FidOrFile as _FidOrFile, fullpath as _fullpath)
+from .nonh5utils import (check_type_compat as _check_type_compat)
+
+from .config import DefaultConfig
+_h5py.get_config().complex_names = DefaultConfig().complex_names
+
+def alter_attr(dset, attr_key, attr_val, file=None, pth=None, verbose=False,
+ check_same_type=False, must_exist=False):
+ """
+ Alter attribute dset['attr_key'] with attr_val.
+
+ Parameters
+ ----------
+ dset : str or h5py.Dataset
+ String to or Dataset-object for dataset in HDF5 file. If string,
+ file/fid must be provided.
+
+ attr_key : str
+ Attribute name (key)
+
+ attr_val : str
+ Attribute value to write (replace old)
+
+ file : str or h5py.File
+ Filename or File-object for open HDF5 file
+
+ pth : str
+ Path
+
+ verbose : bool
+ Verbose output to stdout
+
+ check_same_type : bool
+ Check that the inputs are compatible types as defined in
+ lazy5.nonh5utils.check_type_compat or lazy5.utils.return_family_type
+
+ must_exist : bool
+ The attribute must already exist.
+
+ Notes
+ -----
+ None
+ """
+
+ if file is not None:
+ fp = _fullpath(file, pth)
+ # Get fid for a file (str or open fid)
+ fof = _FidOrFile(fp, mode='r+') # Read/write, file must exist
+ fid = fof.fid
+ if isinstance(dset, str):
+ dset_object = fid[dset]
+ elif isinstance(dset, _h5py.Dataset):
+ if isinstance(file, str):
+ raise TypeError('Cannot provide h5py.Dataset dset and a filename str.')
+ dset_object = dset
+ else:
+ raise TypeError('dset unknown')
+ else:
+ fof = None
+ if isinstance(dset, _h5py.Dataset):
+ dset_object = dset
+ else:
+ raise TypeError('With no file or fid given, dset must be an h5py.Dataset object')
+
+ if must_exist:
+ if dset_object.attrs.get(attr_key) is None:
+ err_str1 = 'Attribute {} does not exist and '.format(attr_key)
+ raise KeyError(err_str1 + 'must_exist set to True')
+
+ if check_same_type & (dset_object.attrs.get(attr_key) is not None):
+ if not _check_type_compat(dset_object.attrs[attr_key], attr_val):
+ err_str1 = 'New attribute value type ({}) '.format(type(attr_val))
+ err_str2 = 'must be of the same type as the original '
+ err_str3 = '({})'.format(type(dset_object.attrs[attr_key]))
+ raise TypeError(err_str1 + err_str2 + err_str3)
+
+ if verbose:
+ if dset_object.attrs.get(attr_key) is None:
+ print('Attribute {} does not exist. Creating.'.format(attr_key))
+ else:
+ print('Dataset[{}] = {} -> {}'.format(attr_key, dset_object.attrs[attr_key],
+ attr_val))
+
+ dset_object.attrs[attr_key] = attr_val
+
+ if fof is not None:
+ fof.close_if_file_not_fid()
+
+def alter_attr_same(dset, attr_key, attr_val, file=None, pth=None, verbose=True,
+ must_exist=False):
+ """
+    Alter attribute dset['attr_key'] with attr_val, checking to make sure that
+    the original and new attribute values are of similar type, e.g., int and
+    np.int32.
+
+ Parameters
+ ----------
+ dset : str or h5py.Dataset
+ String to or Dataset-object for dataset in HDF5 file. If string,
+ file/fid must be provided.
+
+ attr_key : str
+ Attribute name (key)
+
+ attr_val : str
+ Attribute value to write (replace old)
+
+ file : str or h5py.File
+ Filename or File-object for open HDF5 file
+
+ pth : str
+ Path
+
+ verbose : bool
+ Verbose output to stdout
+
+ Notes
+ -----
+ None
+ """
+ return alter_attr(dset, attr_key, attr_val, file, pth, verbose,
+ check_same_type=True, must_exist=must_exist)
+
+def write_attr_dict(dset, attr_dict, fid=None, sort_attrs=False, verbose=False):
+ """
+    Write an entire dictionary of attributes to a dataset.
+
+ Parameters
+ ----------
+ dset : str or h5py.Dataset
+ String to or Dataset-object for dataset in HDF5 file. If string,
+ fid must be provided.
+
+ attr_dict : dict
+ Attribute dictionary
+
+ fid : h5py.File
+ If dset is a string, file-object for open HDF5 file must be provided.
+
+ sort_attrs : bool
+ Sort attribute keys alphabetically prior to writing
+
+ verbose : bool
+ Verbose output to stdout
+ """
+
+ attr_key_list = list(attr_dict)
+ if sort_attrs:
+ attr_key_list.sort()
+
+ for attr_key in attr_key_list:
+ attr_val = attr_dict[attr_key]
+ alter_attr(dset, attr_key, attr_val, file=fid, verbose=verbose,
+ check_same_type=False, must_exist=False)
+
+ return True
\ No newline at end of file
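# A sketch of the two attribute-editing entry points added above: alter_attr
# writes (and may retype) an attribute; alter_attr_same additionally enforces
# that old and new values share a type family. The filename is hypothetical:

import h5py
from crikit.io.lazy5.alter import alter_attr, alter_attr_same

with h5py.File('temp.h5', 'a') as fid:
    dset = fid.require_dataset('data', shape=(10,), dtype=float)
    alter_attr(dset, 'exposure_ms', 3.5)       # creates or overwrites freely
    alter_attr_same(dset, 'exposure_ms', 5.0)  # ok: float -> float
    # alter_attr_same(dset, 'exposure_ms', '5') would raise TypeError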
diff --git a/crikit/io/lazy5/config.py b/crikit/io/lazy5/config.py
new file mode 100644
index 0000000..fe57c46
--- /dev/null
+++ b/crikit/io/lazy5/config.py
@@ -0,0 +1,5 @@
+""" Default configuration class """
+
+class DefaultConfig:
+ def __init__(self):
+ self.complex_names = ('Re', 'Im')
\ No newline at end of file
diff --git a/crikit/io/lazy5/create.py b/crikit/io/lazy5/create.py
new file mode 100644
index 0000000..c545b87
--- /dev/null
+++ b/crikit/io/lazy5/create.py
@@ -0,0 +1,81 @@
+""" Macros for creation of HDF5 files and/or datasets"""
+import h5py as _h5py
+
+from .config import DefaultConfig
+from .utils import (FidOrFile as _FidOrFile, fullpath as _fullpath)
+from .inspect import (valid_dsets as _valid_dsets)
+from .alter import (write_attr_dict as _write_attr_dict)
+
+_h5py.get_config().complex_names = DefaultConfig().complex_names
+
+def save(file, dset, data, pth=None, attr_dict=None, mode='a',
+ dset_overwrite=False, sort_attrs=False,
+ chunks=True, verbose=False):
+ """
+ Save an HDF5 file
+
+ Parameters
+ ----------
+
+ file : str or h5py.File object (fid)
+ Filename
+
+ dset : str
+ Dataset name (including groups if any)
+
+ data : ndarray
+ Data to write
+
+ pth : str
+ Path to file. Otherwise, will use present working directory (PWD)
+
+ attr_dict : dict
+ Attribute dictionary. Will be Ordered.
+
+ mode : str
+ h5py file mode.
+
+ dset_overwrite : bool
+ If a dset already exists, overwrite or raise error?
+
+ sort_attrs : bool
+ Sort the attribute dictionary (alphabetically) prior to saving
+
+ chunks : str or tuple or list
+ Chunking shape or True for auto-chunking
+
+ verbose : bool
+ Verbose output
+
+ Returns
+ -------
+
+ bool : Saved with no errors
+
+ """
+
+ if isinstance(file, str):
+ fp = _fullpath(file, pth)
+ fof = _FidOrFile(fp, mode=mode)
+ elif isinstance(file, _h5py.File):
+ fof = _FidOrFile(file, mode=mode)
+ else:
+ raise TypeError('file needs to be a str or h5py.File object.')
+
+ fid = fof.fid
+
+ if not dset_overwrite:
+ if _valid_dsets(fid, dset, pth=pth, verbose=False):
+ err_str1 = 'Dataset {} exists. '.format(dset)
+ err_str2 = 'Param dset_overwrite=False. Will not overwrite'
+ raise IOError(err_str1 + err_str2)
+
+ dset_id = fid.require_dataset(name=dset, data=data, shape=data.shape,
+ dtype=data.dtype, chunks=chunks)
+
+ if attr_dict:
+ _write_attr_dict(dset_id, attr_dict, sort_attrs=sort_attrs)
+
+ fof.close_if_file_not_fid()
+
+ return True
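# Minimal use of the save() macro above (filename and dataset name are
# hypothetical):

import numpy as np
from crikit.io.lazy5.create import save

data = np.random.randn(8, 8)
save('example.h5', '/Group1/Dset', data, attr_dict={'units': 'au'}, mode='w')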
diff --git a/crikit/io/lazy5/inspect.py b/crikit/io/lazy5/inspect.py
new file mode 100644
index 0000000..7dbf496
--- /dev/null
+++ b/crikit/io/lazy5/inspect.py
@@ -0,0 +1,288 @@
+""" Macros for inspection of HDF5 files """
+import os as _os
+from collections import OrderedDict as _OrderedDict
+
+import h5py as _h5py
+import numpy as _np
+
+from .utils import (FidOrFile as _FidOrFile, hdf_is_open as _hdf_is_open,
+ fullpath as _fullpath)
+
+from .config import DefaultConfig
+_h5py.get_config().complex_names = DefaultConfig().complex_names
+
+__all__ = ['get_groups', 'get_datasets', 'get_hierarchy',
+ 'get_attrs_dset', 'valid_dsets', 'valid_file']
+
+def get_groups(file, pth=None):
+ """
+ Parameters
+ ----------
+
+ file : str or h5py.File
+ Filename or File-object for open HDF5 file
+
+ Notes
+ -----
+ Gets groups in a hierarchical list starting from the base '/'. Thus if
+ Group2 is INSIDE Group1, it will return Group1, Group1/Group2 -- NOT Group2
+    individually.
+ """
+
+ fp = _fullpath(file, pth)
+ # Get fid for a file (str or open fid)
+ fof = _FidOrFile(fp)
+ fid = fof.fid
+
+ all_items_list = []
+ fid.visit(lambda x: all_items_list.append('/{}'.format(x)))
+
+ # list-set-list removes duplicates
+ grp_list = list(set([item for item in all_items_list if isinstance(fid[item], _h5py.Group)]))
+
+ grp_list.append('/') # Add in base level group
+ grp_list.sort()
+
+ fof.close_if_file_not_fid()
+
+ return grp_list
+
+def get_datasets(file, pth=None, fulldsetpath=True):
+ """
+ Parameters
+ ----------
+
+ file : str or _h5py.File
+ Filename or File-object for open HDF5 file
+
+    fulldsetpath : bool
+        If True, return dataset names prepended with their group path;
+        otherwise, return just the dataset names.
+ """
+
+ if isinstance(file, str):
+ fp = _fullpath(file, pth)
+ fof = _FidOrFile(fp)
+
+ else:
+ fof = _FidOrFile(file)
+
+ fid = fof.fid
+
+ all_items_list = []
+ fid.visit(lambda x: all_items_list.append('/{}'.format(x)))
+ dset_list = []
+
+ # list-set-list removes duplicates
+ dset_list = list(set([item for item in all_items_list if isinstance(fid[item], _h5py.Dataset)]))
+ dset_list.sort()
+
+ if not fulldsetpath:
+ for num, dset in enumerate(dset_list):
+ split_out = dset.rsplit('/', maxsplit=1)
+ if len(split_out) == 1:
+ pass
+ else:
+ dset_list[num] = split_out[-1]
+
+ fof.close_if_file_not_fid()
+
+ return dset_list
+
+def get_hierarchy(file, pth=None, fulldsetpath=False, grp_w_dset=False):
+ """
+ Return an ordered dictionary, where the keys are groups and the items are
+ the datasets
+
+ Parameters
+ ----------
+
+ file : str or h5py.File
+ Filename or File-object for open HDF5 file
+
+ fulldsetpath : bool
+ If True, a dataset name will be prepended with the group down to the
+ base level, '/'. If False, it will just be the dset name.
+
+ grp_w_dset : bool
+ If True, only return groups that contain datasets. If False, include
+ empty groups
+
+ Returns
+ -------
+ OrderedDict : (group, [dataset list])
+ Group and dataset names
+
+ """
+ fp = _fullpath(file, pth)
+
+ # Get fid for a file (str or open fid)
+ fof = _FidOrFile(fp)
+ fid = fof.fid
+
+ grp_list = get_groups(fid)
+ dset_list = get_datasets(fid, fulldsetpath=True)
+
+ grp_dict = _OrderedDict([[grp, []] for grp in grp_list])
+
+ for dset in dset_list:
+ split_out = dset.rsplit('/', maxsplit=1)
+ if (len(split_out) == 1) or (split_out[0] == ''):
+ if dset[0] == '/':
+ grp_dict['/'].append(dset[1:])
+ else:
+ grp_dict['/'].append(dset)
+ else:
+ if fulldsetpath:
+ grp_dict[split_out[0]].append(dset)
+ else:
+ grp_dict[split_out[0]].append(split_out[1])
+
+ # Only keep groups with datasets
+ if grp_w_dset:
+ to_pop = []
+ for k in grp_dict:
+ if not grp_dict[k]: # is empty
+ to_pop.append(k)
+
+ for empty_grp in to_pop:
+ grp_dict.pop(empty_grp)
+
+ fof.close_if_file_not_fid()
+
+ return grp_dict
+
+def get_attrs_dset(file, dset, pth=None, convert_to_str=True, convert_sgl_np_to_num=False):
+ """
+ Get dictionary of attribute values for a given dataset
+
+ Parameters
+ ----------
+
+ file : str or h5py.File
+ Filename or File-object for open HDF5 file
+
+ dset : str
+        Full dataset name with prepended group names. E.g., '/Group1/Dataset'
+
+ convert_to_str : bool
+ If an attribute is a numpy.bytes_ string-like object, but not a str, try
+ to decode into utf-8.
+
+ convert_sgl_np_to_num : bool
+ If an attribute is a numpy array with a single entry, convert to non-numpy
+ numeric type. E.g. np.array([1.0]) -> 1.0
+
+ Returns
+ -------
+ OrderedDict : (key, value)
+
+ """
+ fp = _fullpath(file, pth)
+
+ # Get fid for a file (str or open fid)
+ fof = _FidOrFile(fp)
+ fid = fof.fid
+
+ ds_attrs = fid[dset].attrs
+
+ attr_keys_list = list(ds_attrs)
+ attr_keys_list.sort()
+
+ attr_list = []
+ for k in attr_keys_list:
+ try:
+ attr_val = ds_attrs[k]
+ except (TypeError, ValueError):
+ print('Could not get value for attribute: {}. Set to None'.format(k))
+ attr_list.append([k, None])
+ else:
+ if isinstance(attr_val, _np.ndarray):
+ if (isinstance(attr_val, _np.bytes_) | (attr_val.dtype.type == _np.bytes_)) & convert_to_str: # pylint: disable=no-member
+                    # * tostring() appended \x00 to the end of the string; thus, a list comprehension is used
+ np_byte_to_str = [q for q in attr_val][0].decode()
+ attr_list.append([k, np_byte_to_str])
+ elif (_np.issubdtype(attr_val.dtype, _np.number) & (attr_val.size == 1)) & convert_sgl_np_to_num:
+ attr_list.append([k, attr_val.item()])
+ else:
+ attr_list.append([k, attr_val])
+ elif isinstance(attr_val, bytes) & convert_to_str:
+ attr_list.append([k, attr_val.decode()])
+ else:
+ attr_list.append([k, attr_val])
+
+ attr_dict = _OrderedDict(attr_list)
+
+ fof.close_if_file_not_fid()
+
+ return attr_dict
+
+def valid_file(file, pth=None, verbose=False):
+ """ Validate whether a file exists (or if a fid, is-open """
+
+ if isinstance(file, str):
+ fp = _fullpath(file, pth)
+ isvalid = _os.path.isfile(fp)
+
+ if verbose:
+ if isvalid:
+ print('{} is a valid file.'.format(fp))
+ else:
+                print('{} is not a valid file.'.format(fp))
+
+ elif isinstance(file, _h5py.File):
+ isvalid = _hdf_is_open(file)
+ else:
+        raise TypeError('file needs to be a str or h5py.File object.')
+
+ return isvalid
+
+def valid_dsets(file, dset_list, pth=None, verbose=False):
+ """ Check whether 1 or more datasets are valid """
+
+ def _add_leading_slash(str_to_check):
+ """ Return string sans leading '/' if there is one """
+ if str_to_check[0] == '/':
+ return str_to_check
+ else:
+ return '/' + str_to_check
+
+ file_is_valid = valid_file(file, pth=pth, verbose=verbose)
+
+ if not file_is_valid:
+ return False
+
+ dset_in_file = get_datasets(file, pth=pth, fulldsetpath=True)
+
+ if isinstance(dset_list, (list, tuple)):
+ hits = 0
+ for dset in dset_list:
+ dset_to_test = _add_leading_slash(dset)
+ if dset_in_file.count(dset_to_test) > 0:
+ hits += 1
+ if verbose:
+ print('{} : VALID'.format(dset_to_test))
+ else:
+ if verbose:
+ print('{} : NOT VALID'.format(dset_to_test))
+ if hits == len(dset_list):
+ if verbose:
+ print('All datasets are valid')
+ return True
+ else:
+ if verbose:
+ print('Some or all datasets are NOT valid')
+ return False
+ elif isinstance(dset_list, str):
+ if dset_in_file.count(_add_leading_slash(dset_list)) > 0:
+ if verbose:
+ print('{} : VALID'.format(dset_list))
+ return True
+ else:
+ if verbose:
+ print('{} : NOT VALID'.format(dset_list))
+ return False
+ else:
+ err_str1 = 'dset_list: {} of type {} '.format(dset_list, type(dset_list))
+ err_str2 = 'is not a str, list, or tuple'
+ raise TypeError(err_str1 + err_str2)
+
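# A quick tour of the inspection macros above, against a hypothetical file:

import crikit.io.lazy5 as lazy5

fname = 'example.h5'
print(lazy5.inspect.get_groups(fname))                      # e.g. ['/', '/Group1']
print(lazy5.inspect.get_hierarchy(fname, grp_w_dset=True))  # OrderedDict: group -> [dsets]
if lazy5.inspect.valid_dsets(fname, '/Group1/Dset'):
    attrs = lazy5.inspect.get_attrs_dset(fname, '/Group1/Dset')
    print(attrs.get('units'))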
diff --git a/crikit/io/lazy5/nonh5utils.py b/crikit/io/lazy5/nonh5utils.py
new file mode 100644
index 0000000..4ee2c98
--- /dev/null
+++ b/crikit/io/lazy5/nonh5utils.py
@@ -0,0 +1,97 @@
+""" Non-HDF5 utility functions """
+import copy as _copy
+from collections import OrderedDict as _OrderedDict
+
+import numpy as _np
+
+__all__ = ['filterlist', 'check_type_compat', 'return_family_type']
+
+def filterlist(in_list, filters, keep_filtered_items=True, exclusive=True):
+ """
+ Parameters
+ ----------
+ in_list : list
+ List of strings to filter
+
+ filters : str, list, tuple
+ Find filters (or entries of filters) in in_list
+
+ keep_filtered_items : bool
+ Returns entries from in_list that DO have filters (INCLUDE filter).
+ If False, EXCLUDE filter
+
+ exclusive : bool
+        Filter is exclusive, i.e. includes/excludes only entries that match
+        ALL filters. Otherwise, non-exclusive, and any entry matching ANY
+        filter is included/excluded.
+
+ Returns
+ -------
+ list : filtered list
+
+ """
+ if isinstance(filters, (tuple, list)):
+ filter_list = filters
+ elif isinstance(filters, str):
+ filter_list = [filters]
+ else:
+ raise TypeError('filters must be of type str, tuple, or list')
+
+ def condition(keep_it, item):
+ """ Keep or don't keep item depending on keep_it bool """
+ if keep_it:
+ return item
+ else:
+ return not item
+
+ if exclusive:
+ out_list = _copy.deepcopy(in_list)
+ for current_filt in filter_list:
+ out_list = [entry for entry in out_list if condition(keep_filtered_items,
+ entry.count(current_filt))]
+ else:
+ out_list = []
+ for current_filt in filter_list:
+ out_list.extend([entry for entry in in_list if condition(keep_filtered_items,
+ entry.count(current_filt))])
+ # Removes duplicates
+ out_list = list(_OrderedDict.fromkeys(out_list))
+
+ return out_list
+
+def check_type_compat(input_a, input_b):
+ """
+ Check the compatibility of types. E.g. np.float32 IS compatible with
+ float
+ """
+ return return_family_type(input_a) is return_family_type(input_b)
+
+def return_family_type(input_a):
+ """ Return family of type input_a. int, float, complex, str, bytes, bool """
+ a_type = None
+
+ # Have to do numpy first, bc np.str_ is subtype of str also
+ if isinstance(input_a, _np.generic): # Is input_a numpy-type
+ if isinstance(input_a, _np.bool_):
+ a_type = bool
+ elif isinstance(input_a, _np.bytes_): # pylint: disable=E1101
+ a_type = bytes
+ elif isinstance(input_a, _np.str_): # pylint: disable=E1101
+ a_type = str
+ elif isinstance(input_a, _np.integer):
+ a_type = int
+ elif isinstance(input_a, _np.floating): # pylint: disable=E1101
+ a_type = float
+ elif isinstance(input_a, _np.complexfloating): # pylint: disable=E1101
+ a_type = complex
+ elif isinstance(input_a, _np.ndarray):
+ # Cute trick: Send 1 as type from the dtype for testing
+ a_type = return_family_type(input_a.dtype.type(1))
+ elif isinstance(input_a, (int, float, complex, str, bytes, bool)):
+ a_type = type(input_a)
+
+ if a_type is None:
+        err_str1 = 'input_a is not int, float, complex, str, bytes, or bool; '
+ raise TypeError(err_str1 + 'or a numpy-equivalent: {}'.format(type(input_a)))
+
+ return a_type
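# The non-HDF5 helpers above, in isolation:

import numpy as np
from crikit.io.lazy5.nonh5utils import filterlist, check_type_compat

names = ['/Spectra/Dark', '/Spectra/NRB', '/BCARSImage/img']
print(filterlist(names, 'Spectra'))                             # entries containing 'Spectra'
print(filterlist(names, 'Spectra', keep_filtered_items=False))  # entries without it

assert check_type_compat(np.float32(1.0), 2.5)  # both in the float family
assert not check_type_compat(np.int16(1), 2.5)  # int vs float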
diff --git a/crikit/io/lazy5/tests/__init__.py b/crikit/io/lazy5/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/crikit/io/lazy5/tests/test_alter.py b/crikit/io/lazy5/tests/test_alter.py
new file mode 100644
index 0000000..e56004f
--- /dev/null
+++ b/crikit/io/lazy5/tests/test_alter.py
@@ -0,0 +1,347 @@
+""" Test inspection of HDF5 files """
+import os
+import time
+
+from collections import OrderedDict as _OrderedDict
+
+import h5py
+import pytest
+
+import numpy as np
+
+from crikit.io.lazy5.utils import hdf_is_open
+from crikit.io.lazy5.alter import alter_attr_same, alter_attr, write_attr_dict
+
+@pytest.fixture(scope="function")
+def hdf_dataset():
+ """ Setups and tears down a sample HDF5 file """
+ filename = 'temp_test.h5'
+ fid = h5py.File(filename, 'w')
+ data_m, data_n, data_p = [20, 22, 24]
+ data = np.random.randn(data_m, data_n, data_p)
+
+ fid.create_dataset('base', data=data)
+
+ grp1 = fid.create_group('Group1')
+ grp3 = fid.create_group('Group2/Group3')
+ grp6 = fid.create_group('Group4/Group5/Group6')
+
+ grp1.create_dataset('ingroup1_1', data=data)
+ grp1.create_dataset('ingroup1_2', data=data)
+ fid.create_dataset('Group2/ingroup2', data=data)
+ grp3.create_dataset('ingroup3', data=data)
+
+ grp6.create_dataset('ingroup6', data=data)
+
+ fid['base'].attrs['Attribute_str'] = 'Test'
+ fid['base'].attrs['Attribute_bytes'] = b'Test'
+ fid['base'].attrs['Attribute_np_bytes'] = np.bytes_('Test') # pylint: disable=no-member
+ fid['base'].attrs.create('Attribute_int', 1)
+ fid['base'].attrs.create('Attribute_float', 1.1)
+ fid['base'].attrs.create('Attribute_np_1d', np.array([1, 2, 3]))
+ fid['base'].attrs.create('Attribute_np_2d', np.array([[1, 2, 3], [4, 5, 6]]))
+
+ yield filename, fid
+
+ # Tear-down
+ if hdf_is_open(fid):
+ fid.close()
+
+ time.sleep(1)
+ try:
+ os.remove(filename)
+ except Exception:
+ print('Could not delete {}'.format(filename))
+
+
+def test_attr_alter_same(hdf_dataset):
+ """ Try altering an attribute with the same type of value type """
+
+ _, fid = hdf_dataset
+ dset_obj = fid['base']
+ attr_obj = dset_obj.attrs
+
+    # Try new attribute
+ orig_key = 'Attribute_new'
+ new_val = 'Test2'
+ alter_attr_same(dset_obj, orig_key, new_val)
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_new2'
+ new_val = 'Test2'
+ with pytest.raises(KeyError):
+ alter_attr_same(dset_obj, orig_key, new_val, must_exist=True)
+
+ # Try same-type writes first
+ orig_key = 'Attribute_str'
+ orig_val = attr_obj[orig_key]
+ new_val = 'Test2'
+ alter_attr_same(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+    # NOTE: looks like hdf5 has changed how byte strings are dealt with, maybe
+ # orig_key = 'Attribute_bytes'
+ # orig_val = attr_obj[orig_key]
+ # new_val = b'Test2'
+ # alter_attr_same(dset_obj, orig_key, new_val)
+ # assert orig_val != attr_obj[orig_key]
+ # assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_np_bytes'
+ orig_val = attr_obj[orig_key]
+ new_val = np.bytes_('Test2') # pylint: disable=E1101
+ alter_attr_same(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_int'
+ orig_val = attr_obj[orig_key]
+ new_val = 2
+ alter_attr_same(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_float'
+ orig_val = attr_obj[orig_key]
+ new_val = 2.2
+ alter_attr_same(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_np_1d'
+ orig_val = attr_obj[orig_key]
+ new_val = np.array([4, 5, 6])
+ alter_attr_same(dset_obj, orig_key, new_val)
+ assert np.allclose(attr_obj[orig_key], new_val)
+
+ orig_key = 'Attribute_np_2d'
+ orig_val = attr_obj[orig_key]
+ new_val = np.array([[7, 8, 9], [10, 11, 12]])
+ alter_attr_same(dset_obj, orig_key, new_val)
+ assert np.allclose(attr_obj[orig_key], new_val)
+
+    # Try DIFFERENT-type writes
+ orig_key = 'Attribute_str'
+ orig_val = attr_obj[orig_key]
+ new_val = 1
+ with pytest.raises(TypeError):
+ alter_attr_same(dset_obj, orig_key, new_val)
+
+    # NOTE: looks like hdf5 has changed how byte strings are dealt with, maybe
+ # orig_key = 'Attribute_bytes'
+ # orig_val = attr_obj[orig_key]
+ # new_val = 'Test2'
+ # with pytest.raises(TypeError):
+ # alter_attr_same(dset_obj, orig_key, new_val)
+
+ orig_key = 'Attribute_np_bytes'
+ orig_val = attr_obj[orig_key]
+ new_val = 'Test2'
+ with pytest.raises(TypeError):
+ alter_attr_same(dset_obj, orig_key, new_val)
+
+ orig_key = 'Attribute_int'
+ orig_val = attr_obj[orig_key]
+ new_val = True
+ with pytest.raises(TypeError):
+ alter_attr_same(dset_obj, orig_key, new_val)
+
+ orig_key = 'Attribute_float'
+ orig_val = attr_obj[orig_key]
+ new_val = 2
+ with pytest.raises(TypeError):
+ alter_attr_same(dset_obj, orig_key, new_val)
+
+ orig_key = 'Attribute_np_1d'
+ orig_val = attr_obj[orig_key]
+ new_val = np.array([4.1, 5.1, 6.1])
+ with pytest.raises(TypeError):
+ alter_attr_same(dset_obj, orig_key, new_val)
+
+ orig_key = 'Attribute_np_2d'
+ orig_val = attr_obj[orig_key]
+ new_val = np.array([[7, 8.1, 9], [10, 11, 12.1]])
+ with pytest.raises(TypeError):
+ alter_attr_same(dset_obj, orig_key, new_val)
+
+
+def test_attr_alter(hdf_dataset):
+ """ Try altering an attribute with the same or different type of value"""
+
+ filename, fid = hdf_dataset
+ dset_obj = fid['base']
+ attr_obj = dset_obj.attrs
+
+    # Try new attribute (dset_obj)
+ orig_key = 'Attribute_new'
+ new_val = 'Test2'
+ assert attr_obj.get(orig_key) is None
+ alter_attr(dset_obj, orig_key, new_val)
+ assert attr_obj[orig_key] == new_val
+
+    # Try new attribute (dset_obj, with open file handle)
+ orig_key = 'Attribute_new2'
+ new_val = 'Test2'
+ assert attr_obj.get(orig_key) is None
+ with pytest.raises(TypeError):
+ alter_attr([], orig_key, new_val, file=fid)
+ alter_attr(dset_obj, orig_key, new_val, file=fid)
+ assert attr_obj[orig_key] == new_val
+
+    # Try new attribute (given filename)
+ orig_key = 'Attribute_new3'
+ new_val = 'Test3'
+ with pytest.raises(TypeError):
+ alter_attr(dset_obj, orig_key, new_val, file=filename)
+ alter_attr('base', orig_key, new_val, file=filename)
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_new4'
+ new_val = 'Test2'
+ with pytest.raises(KeyError):
+ alter_attr(dset_obj, orig_key, new_val, must_exist=True)
+
+ # Try same-type writes
+ orig_key = 'Attribute_str'
+ orig_val = attr_obj[orig_key]
+ new_val = 'Test2'
+ alter_attr(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+    # NOTE: looks like hdf5 has changed how byte strings are dealt with, maybe
+ # orig_key = 'Attribute_bytes'
+ # orig_val = attr_obj[orig_key]
+ # new_val = b'Test2'
+ # alter_attr(dset_obj, orig_key, new_val)
+ # assert orig_val != attr_obj[orig_key]
+ # assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_np_bytes'
+ orig_val = attr_obj[orig_key]
+ new_val = np.bytes_('Test2') # pylint: disable=E1101
+ alter_attr(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_int'
+ orig_val = attr_obj[orig_key]
+ new_val = 2
+ alter_attr(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_float'
+ orig_val = attr_obj[orig_key]
+ new_val = 2.2
+ alter_attr(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_np_1d'
+ orig_val = attr_obj[orig_key]
+ new_val = np.array([4, 5, 6])
+ alter_attr(dset_obj, orig_key, new_val)
+ assert np.allclose(attr_obj[orig_key], new_val)
+
+ orig_key = 'Attribute_np_2d'
+ orig_val = attr_obj[orig_key]
+ new_val = np.array([[7, 8, 9], [10, 11, 12]])
+ alter_attr(dset_obj, orig_key, new_val)
+ assert np.allclose(attr_obj[orig_key], new_val)
+
+    # Try DIFFERENT-type writes
+ orig_key = 'Attribute_str'
+ orig_val = attr_obj[orig_key]
+ new_val = 1
+ alter_attr(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_bytes'
+ orig_val = attr_obj[orig_key]
+ new_val = 'Test2'
+ alter_attr(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_np_bytes'
+ orig_val = attr_obj[orig_key]
+ new_val = 'Test2'
+ alter_attr(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_int'
+ orig_val = attr_obj[orig_key]
+ new_val = True
+ alter_attr(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_float'
+ orig_val = attr_obj[orig_key]
+ new_val = 2
+ alter_attr(dset_obj, orig_key, new_val)
+ assert orig_val != attr_obj[orig_key]
+ assert attr_obj[orig_key] == new_val
+
+ orig_key = 'Attribute_np_1d'
+ orig_val = attr_obj[orig_key]
+ new_val = np.array([4.1, 5.1, 6.1])
+ alter_attr(dset_obj, orig_key, new_val)
+ assert not np.allclose(orig_val, attr_obj[orig_key])
+ assert np.allclose(attr_obj[orig_key], new_val)
+
+ orig_key = 'Attribute_np_2d'
+ orig_val = attr_obj[orig_key]
+ new_val = np.array([[7, 8.1, 9], [10, 11, 12.1]])
+ alter_attr(dset_obj, orig_key, new_val)
+ assert not np.allclose(orig_val, attr_obj[orig_key])
+ assert np.allclose(attr_obj[orig_key], new_val)
+
+ # Try providing dset as str but no file
+ orig_key = 'Attribute_int'
+ new_val = 3
+ with pytest.raises(TypeError):
+ alter_attr('base', orig_key, new_val)
+
+def test_write_attr_dict(hdf_dataset):
+ """ Try writing dictionary of attributes """
+
+ filename, fid = hdf_dataset
+ dset_obj = fid['base']
+
+ attr_dict = _OrderedDict([['WDA2', 2], ['WDA1', 1]])
+
+ # Write via dset obj
+ assert write_attr_dict(dset_obj, attr_dict, fid=None, sort_attrs=False, verbose=False)
+ assert dset_obj.attrs['WDA2'] == 2
+ assert dset_obj.attrs['WDA1'] == 1
+ assert dset_obj.attrs['Attribute_str'] == 'Test'
+ l_attr = list(dset_obj.attrs.keys())
+
+    # The attrs were written WDA2 then WDA1, but h5py iterates attributes in
+    # name order by default, so the written order is not asserted here
+
+ # Write via dset str MISSING fid
+ with pytest.raises(TypeError):
+ write_attr_dict('base', attr_dict, fid=None, sort_attrs=False, verbose=False)
+
+ # Write via dset str
+ assert write_attr_dict('base', attr_dict, fid=fid, sort_attrs=False, verbose=False)
+ assert dset_obj.attrs['WDA2'] == 2
+ assert dset_obj.attrs['WDA1'] == 1
+ assert dset_obj.attrs['Attribute_str'] == 'Test'
+
+ # Write via dset str and SORT attr
+ assert write_attr_dict('base', attr_dict, fid=fid, sort_attrs=True, verbose=False)
+ assert dset_obj.attrs['WDA2'] == 2
+ assert dset_obj.attrs['WDA1'] == 1
+
+    # Order should be sorted. The WDA* keys sort last alphanumerically in this test file
+    l_attr = list(dset_obj.attrs.keys())
+    assert l_attr[-1] == 'WDA2'
+    assert l_attr[-2] == 'WDA1'
\ No newline at end of file
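A standalone sketch of `write_attr_dict` as exercised above; the module path is assumed from this diff's layout and the file name is hypothetical:

```python
import h5py
from collections import OrderedDict

# Module path assumed from this diff's layout (lazy5's alter module)
from crikit.io.lazy5.alter import write_attr_dict

with h5py.File('attrs_demo.h5', 'w') as fid:  # hypothetical scratch file
    dset = fid.create_dataset('base', data=[1, 2, 3])
    # sort_attrs=True writes the attributes in sorted key order
    write_attr_dict(dset, OrderedDict([('B', 2), ('A', 1)]), sort_attrs=True)
    print(dict(dset.attrs))  # -> {'A': 1, 'B': 2}
```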
diff --git a/crikit/io/lazy5/tests/test_create.py b/crikit/io/lazy5/tests/test_create.py
new file mode 100644
index 0000000..51fe82e
--- /dev/null
+++ b/crikit/io/lazy5/tests/test_create.py
@@ -0,0 +1,103 @@
+""" Test creation of HDF5 files """
+import os
+import time
+
+import pytest
+
+import numpy as np
+import h5py
+
+from crikit.io.lazy5.create import save
+from crikit.io.lazy5.utils import FidOrFile
+
+
+def test_save_no_attrs():
+ data = np.random.randn(20,20)
+ filename = 'temp_create.h5'
+ dset_name = '/Group1/Dset'
+ save(filename, dset_name, data, mode='w')
+
+ fof = FidOrFile(filename)
+ fid = fof.fid
+ assert np.allclose(fid[dset_name], data)
+ fof.close_if_file_not_fid()
+
+ # Test re-write
+ data = np.random.randn(20,20)
+ save(filename, dset_name, data, mode='w')
+ fof = FidOrFile(filename)
+ fid = fof.fid
+ assert np.allclose(fid[dset_name], data)
+ fof.close_if_file_not_fid()
+
+ # Test re-write when overwrite of dset set to False
+ data = np.random.randn(20,20)
+ with pytest.raises(IOError):
+ save(filename, dset_name, data, dset_overwrite=False)
+
+ # Test re-write with attributes
+ data = np.random.randn(20,20)
+ attr_dict = {'AT1':1, 'AT2':2}
+ save(filename, dset_name, data, attr_dict=attr_dict, mode='w')
+
+ fof = FidOrFile(filename)
+ fid = fof.fid
+ assert fid[dset_name].attrs['AT1'] == 1
+ assert fid[dset_name].attrs['AT2'] == 2
+    with pytest.raises(KeyError):
+        _ = fid[dset_name].attrs['DOESNOTEXIST']
+
+ fof.close_if_file_not_fid()
+
+ time.sleep(1)
+ try:
+ os.remove(filename)
+ except Exception:
+ print('Could not delete {}'.format(filename))
+
+
+def test_save_diff_path():
+ data = np.random.randn(20,20)
+ filename = 'temp_create2.h5'
+ dset_name = '/Group1/Dset'
+
+ pth = './temp_test'
+ os.mkdir(pth)
+
+ save(filename, dset_name, data, pth=pth, mode='w')
+
+ fp = os.path.join(pth, filename)
+
+ assert os.path.isdir(pth)
+ assert os.path.isfile(fp)
+ assert os.path.getsize(fp) >= data.nbytes
+
+ os.remove(fp)
+ os.rmdir(pth)
+
+def test_save_to_open_h5_file():
+ """ Test saving to an H5 file where the H5 file id is passed """
+
+ data = np.random.randn(20,20)
+ filename = 'temp_create2.h5'
+ dset_name = '/Group1/Dset'
+
+ pth = './temp_test'
+ os.mkdir(pth)
+ assert os.path.isdir(pth)
+
+ fp = os.path.join(pth, filename)
+ with h5py.File(fp, 'w') as fid:
+ save(fid, dset_name, data, pth=pth, mode='w')
+
+ assert os.path.isfile(fp)
+ assert os.path.getsize(fp) >= data.nbytes
+
+ os.remove(fp)
+ os.rmdir(pth)
+
+def test_save_to_open_wrong_type():
+ """ Test saving to an inappripriate input (not string or h5 file fid) """
+
+ with pytest.raises(TypeError):
+ save(123, 'Name', np.random.rand(10,10), pth=None, mode='w')
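As a quick reference, the `save` signature exercised above, in one call (the filename is hypothetical):

```python
import numpy as np
from crikit.io.lazy5.create import save

data = np.random.randn(8, 8)
# attr_dict and mode follow the calls in these tests
save('demo_save.h5', '/Group1/Dset', data, attr_dict={'units': 'a.u.'}, mode='w')
```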
diff --git a/crikit/io/lazy5/tests/test_inspect.py b/crikit/io/lazy5/tests/test_inspect.py
new file mode 100644
index 0000000..fba3297
--- /dev/null
+++ b/crikit/io/lazy5/tests/test_inspect.py
@@ -0,0 +1,348 @@
+""" Test inspection of HDF5 files """
+import os
+import time
+
+import h5py
+import pytest
+
+import numpy as np
+from numpy.testing import assert_array_almost_equal
+
+from crikit.io.lazy5.inspect import (get_groups, get_datasets, get_hierarchy,
+ get_attrs_dset, valid_dsets, valid_file)
+
+from crikit.io.lazy5.utils import hdf_is_open
+
+@pytest.fixture(scope="module")
+def hdf_dataset():
+ """ Setups and tears down a sample HDF5 file """
+ filename = 'temp_test.h5'
+ fid = h5py.File(filename, 'w')
+ data_m, data_n, data_p = [20, 22, 24]
+ data = np.random.randn(data_m, data_n, data_p)
+
+ fid.create_dataset('base', data=data)
+
+ grp1 = fid.create_group('Group1')
+ grp3 = fid.create_group('Group2/Group3')
+ grp6 = fid.create_group('Group4/Group5/Group6')
+
+ grp1.create_dataset('ingroup1_1', data=data)
+ grp1.create_dataset('ingroup1_2', data=data)
+ fid.create_dataset('Group2/ingroup2', data=data)
+ grp3.create_dataset('ingroup3', data=data)
+
+ grp6.create_dataset('ingroup6', data=data)
+
+ fid['base'].attrs['Attribute_str'] = 'Test'
+ fid['base'].attrs['Attribute_bytes'] = b'Test'
+ fid['base'].attrs['Attribute_np_bytes'] = np.bytes_('Test') # pylint: disable=no-member
+ fid['base'].attrs['Attribute_np_bytes_inarray'] = np.array(b'Test') # pylint: disable=no-member
+ fid['base'].attrs['Attribute_np_bytes_inarray2'] = np.array([b'Test']) # pylint: disable=no-member
+ fid['base'].attrs.create('Attribute_int', 1)
+ fid['base'].attrs.create('Attribute_float', 1.1)
+ fid['base'].attrs.create('Attribute_np_1d', np.array([1, 2, 3]))
+ fid['base'].attrs.create('Attribute_np_2d', np.array([[1, 2, 3], [4, 5, 6]]))
+ fid['base'].attrs.create('Attribute_np_sgl_int', np.array([1]))
+ fid['base'].attrs.create('Attribute_np_sgl_float', np.array([1.0]))
+ fid['base'].attrs.create('Attribute_np_array_float', np.array([1.0, 2.0]))
+ fid['base'].attrs.create('Attribute_np_sgl_complex', np.array([1.0 + 1j]))
+
+ yield filename, fid
+
+ # Tear-down
+ if hdf_is_open(fid):
+ fid.close()
+
+ time.sleep(1)
+ try:
+ os.remove(filename)
+ except Exception:
+ print('Could not delete {}'.format(filename))
+
+def test_valid_file(hdf_dataset): # pylint:disable=redefined-outer-name
+ """ Test whether a file is valid or not """
+
+ filename, fid = hdf_dataset
+
+ assert valid_file(filename, verbose=True)
+ assert not valid_file('NOT_A_REAL_FILE.XYZ', verbose=True)
+
+def test_valid_dsets(hdf_dataset): # pylint:disable=redefined-outer-name
+ """ Test whether a dset or list of dsets are valid or not """
+
+ filename, fid = hdf_dataset
+
+ # NOT valid filename
+ assert not valid_dsets('NOTAFILE.XYZ', 'base', verbose=True)
+
+ # Single dataset
+ dset_list = 'base'
+ assert valid_dsets(filename, dset_list, verbose=True)
+
+ # Single dataset by fid
+ dset_list = 'base'
+ assert valid_dsets(fid, dset_list, verbose=True)
+
+ # Single dataset is WRONG
+ dset_list = 'NOTADSET'
+ assert not valid_dsets(filename, dset_list, verbose=True)
+
+ # Single dataset in list
+ dset_list = ['base']
+ assert valid_dsets(filename, dset_list, verbose=True)
+
+ # Datasets in list -- NOTE some have leading slashes, some don't
+ dset_list = ['base', '/Group1/ingroup1_1', '/Group1/ingroup1_2',
+ 'Group2/ingroup2']
+ assert valid_dsets(filename, dset_list, verbose=True)
+
+ # Datasets in list -- 1 IS WRONG
+ dset_list = ['base', '/Group1/ingroup1_1', '/Group1/ingroup1_2',
+ 'Group2/DOESNOTEXIST']
+ assert not valid_dsets(filename, dset_list, verbose=True)
+
+ # Dataset with wrong-type
+ dset_list = 1
+ with pytest.raises(TypeError):
+ valid_dsets(filename, dset_list, verbose=True)
+
+ # Dataset with wrong-type
+ dset_list = ['base', 1]
+ with pytest.raises(TypeError):
+ valid_dsets(filename, dset_list, verbose=True)
+
+def test_get_groups(hdf_dataset): # pylint:disable=redefined-outer-name
+ """ Get an HDF5 file's group list """
+
+ filename, fid = hdf_dataset
+
+ # Passing fid
+ grp_list = get_groups(fid)
+ assert set(grp_list) == {'/', '/Group1', '/Group2', '/Group2/Group3', '/Group4', '/Group4/Group5',
+ '/Group4/Group5/Group6'}
+
+ # Passing filename
+ grp_list = get_groups(filename)
+ assert set(grp_list) == {'/', '/Group1', '/Group2', '/Group2/Group3', '/Group4', '/Group4/Group5',
+ '/Group4/Group5/Group6'}
+
+def test_get_datasets_fullpath(hdf_dataset): # pylint:disable=redefined-outer-name
+ """ Get an HDF5 file's dataset list with groupnames prepended"""
+ filename, fid = hdf_dataset
+
+ # Passing fid
+ dataset_list = get_datasets(fid, fulldsetpath=True)
+ assert set(dataset_list) == {'/base', '/Group1/ingroup1_1', '/Group1/ingroup1_2',
+ '/Group2/ingroup2', '/Group2/Group3/ingroup3',
+ '/Group4/Group5/Group6/ingroup6'}
+
+ # Passing filename
+ dataset_list = get_datasets(filename, fulldsetpath=True)
+ assert set(dataset_list) == {'/base', '/Group1/ingroup1_1', '/Group1/ingroup1_2',
+ '/Group2/ingroup2', '/Group2/Group3/ingroup3',
+ '/Group4/Group5/Group6/ingroup6'}
+
+def test_get_datasets_nopath(hdf_dataset): # pylint:disable=redefined-outer-name
+ """ Get an HDF5 file's dataset list with no groupnames prepended """
+ filename, fid = hdf_dataset
+
+ # Passing fid
+ dataset_list = get_datasets(fid, fulldsetpath=False)
+ assert set(dataset_list) == {'base', 'ingroup1_1', 'ingroup1_2', 'ingroup2',
+ 'ingroup3', 'ingroup6'}
+
+ # Passing filename
+ dataset_list = get_datasets(filename, fulldsetpath=False)
+ assert set(dataset_list) == {'base', 'ingroup1_1', 'ingroup1_2', 'ingroup2',
+ 'ingroup3', 'ingroup6'}
+
+def test_get_hierarchy_fullpath(hdf_dataset): # pylint:disable=redefined-outer-name
+ """
+ OrderedDict describing HDF5 file hierarchy. Testing with full paths in
+ the dataset names. Keys are groups, values are datasets.
+ """
+ filename, fid = hdf_dataset
+
+ # Passing fid
+ hierarchy = get_hierarchy(fid, fulldsetpath=True, grp_w_dset=False)
+ assert hierarchy == {'/':['base'],
+ '/Group1':['/Group1/ingroup1_1', '/Group1/ingroup1_2'],
+ '/Group2':['/Group2/ingroup2'],
+ '/Group2/Group3':['/Group2/Group3/ingroup3'],
+ '/Group4':[],
+ '/Group4/Group5':[],
+ '/Group4/Group5/Group6':['/Group4/Group5/Group6/ingroup6']
+ }
+
+ # Passing filename
+ hierarchy = get_hierarchy(filename, fulldsetpath=True, grp_w_dset=False)
+ assert hierarchy == {'/':['base'],
+ '/Group1':['/Group1/ingroup1_1', '/Group1/ingroup1_2'],
+ '/Group2':['/Group2/ingroup2'],
+ '/Group2/Group3':['/Group2/Group3/ingroup3'],
+ '/Group4':[],
+ '/Group4/Group5':[],
+ '/Group4/Group5/Group6':['/Group4/Group5/Group6/ingroup6']
+ }
+
+def test_get_hierarchy_grp_w_dset(hdf_dataset): # pylint:disable=redefined-outer-name
+ """
+ OrderedDict describing HDF5 file hierarchy. Testing empty sets are NOT
+ returned. Keys are groups, values are datasets.
+ """
+
+ filename, fid = hdf_dataset
+
+ # Passing fid
+ hierarchy = get_hierarchy(fid, fulldsetpath=True, grp_w_dset=True)
+ assert hierarchy == {'/':['base'],
+ '/Group1':['/Group1/ingroup1_1', '/Group1/ingroup1_2'],
+ '/Group2':['/Group2/ingroup2'],
+ '/Group2/Group3':['/Group2/Group3/ingroup3'],
+ '/Group4/Group5/Group6':['/Group4/Group5/Group6/ingroup6']
+ }
+
+ # Passing filename
+ hierarchy = get_hierarchy(filename, fulldsetpath=True, grp_w_dset=True)
+ assert hierarchy == {'/':['base'],
+ '/Group1':['/Group1/ingroup1_1', '/Group1/ingroup1_2'],
+ '/Group2':['/Group2/ingroup2'],
+ '/Group2/Group3':['/Group2/Group3/ingroup3'],
+ '/Group4/Group5/Group6':['/Group4/Group5/Group6/ingroup6']
+ }
+
+def test_get_hierarchy_nopath(hdf_dataset): # pylint:disable=redefined-outer-name
+ """
+ OrderedDict describing HDF5 file hierarchy. Testing with no full paths in
+ the dataset names. Keys are groups, values are datasets.
+ """
+ filename, fid = hdf_dataset
+
+ # Passing fid
+ hierarchy = get_hierarchy(fid, fulldsetpath=False, grp_w_dset=False)
+ assert hierarchy == {'/':['base'],
+ '/Group1':['ingroup1_1', 'ingroup1_2'],
+ '/Group2':['ingroup2'],
+ '/Group2/Group3':['ingroup3'],
+ '/Group4':[],
+ '/Group4/Group5':[],
+ '/Group4/Group5/Group6':['ingroup6']
+ }
+
+
+ # Passing filename
+ hierarchy = get_hierarchy(filename, fulldsetpath=False, grp_w_dset=False)
+ assert hierarchy == {'/':['base'],
+ '/Group1':['ingroup1_1', 'ingroup1_2'],
+ '/Group2':['ingroup2'],
+ '/Group2/Group3':['ingroup3'],
+ '/Group4':[],
+ '/Group4/Group5':[],
+ '/Group4/Group5/Group6':['ingroup6']}
+
+
+def test_get_dset_attrs(hdf_dataset): # pylint:disable=redefined-outer-name
+ """ Get an HDF5 file's dataset list with groupnames prepended"""
+
+ filename, fid = hdf_dataset
+
+ # Passing fid
+ # DO NOT CONVERT-to-STR
+ dset_attrs = get_attrs_dset(fid, 'base', convert_to_str=False)
+ assert dset_attrs['Attribute_str'] == 'Test'
+
+    # NOTE: h5py appears to have changed how byte strings are dealt with, maybe
+ # assert dset_attrs['Attribute_bytes'] == b'Test'
+
+ assert dset_attrs['Attribute_np_bytes'] == b'Test'
+ assert np.allclose(dset_attrs['Attribute_np_bytes_inarray'] == b'Test', True)
+ assert np.allclose(dset_attrs['Attribute_np_bytes_inarray2'] == b'Test', True)
+ assert dset_attrs['Attribute_int'] == 1
+ assert dset_attrs['Attribute_float'] == 1.1
+ assert np.allclose(dset_attrs['Attribute_np_1d'], np.array([1, 2, 3]))
+ assert np.allclose(dset_attrs['Attribute_np_2d'], np.array([[1, 2, 3], [4, 5, 6]]))
+ assert dset_attrs['Attribute_np_sgl_int'] == np.array([1])
+ assert dset_attrs['Attribute_np_sgl_float'] == np.array([1.0])
+ assert dset_attrs['Attribute_np_sgl_complex'] == np.array([1.0 + 1j])
+ assert_array_almost_equal(dset_attrs['Attribute_np_array_float'], np.array([1.0, 2.0]))
+
+ # DO CONVERT-to-STR
+ dset_attrs = get_attrs_dset(fid, 'base', convert_to_str=True)
+ assert dset_attrs['Attribute_str'] == 'Test'
+ assert dset_attrs['Attribute_bytes'] == 'Test'
+ assert dset_attrs['Attribute_np_bytes'] == 'Test'
+ assert np.allclose(dset_attrs['Attribute_np_bytes_inarray'] == 'Test', True)
+ assert np.allclose(dset_attrs['Attribute_np_bytes_inarray2'] == 'Test', True)
+ assert dset_attrs['Attribute_int'] == 1
+ assert dset_attrs['Attribute_float'] == 1.1
+ assert np.allclose(dset_attrs['Attribute_np_1d'], np.array([1, 2, 3]))
+ assert np.allclose(dset_attrs['Attribute_np_2d'], np.array([[1, 2, 3], [4, 5, 6]]))
+ assert dset_attrs['Attribute_np_sgl_int'] == np.array([1])
+ assert dset_attrs['Attribute_np_sgl_float'] == np.array([1.0])
+ assert dset_attrs['Attribute_np_sgl_complex'] == np.array([1.0 + 1j])
+ assert_array_almost_equal(dset_attrs['Attribute_np_array_float'], np.array([1.0, 2.0]))
+
+ # Passing filename
+ # DO NOT CONVERT-to-STR
+ dset_attrs = get_attrs_dset(filename, 'base', convert_to_str=False)
+ assert dset_attrs['Attribute_str'] == 'Test'
+
+    # NOTE: h5py appears to have changed how byte strings are dealt with, maybe
+ # assert dset_attrs['Attribute_bytes'] == b'Test'
+
+ assert dset_attrs['Attribute_np_bytes'] == b'Test'
+ assert np.allclose(dset_attrs['Attribute_np_bytes_inarray'] == b'Test', True)
+ assert np.allclose(dset_attrs['Attribute_np_bytes_inarray2'] == b'Test', True)
+ assert dset_attrs['Attribute_int'] == 1
+ assert dset_attrs['Attribute_float'] == 1.1
+ assert np.allclose(dset_attrs['Attribute_np_1d'], np.array([1, 2, 3]))
+ assert np.allclose(dset_attrs['Attribute_np_2d'], np.array([[1, 2, 3], [4, 5, 6]]))
+ assert dset_attrs['Attribute_np_sgl_int'] == np.array([1])
+ assert dset_attrs['Attribute_np_sgl_float'] == np.array([1.0])
+ assert dset_attrs['Attribute_np_sgl_complex'] == np.array([1.0 + 1j])
+ assert_array_almost_equal(dset_attrs['Attribute_np_array_float'], np.array([1.0, 2.0]))
+
+ # DO CONVERT-to-STR
+ dset_attrs = get_attrs_dset(filename, 'base', convert_to_str=True)
+ assert dset_attrs['Attribute_str'] == 'Test'
+ assert dset_attrs['Attribute_bytes'] == 'Test'
+ assert dset_attrs['Attribute_np_bytes'] == 'Test'
+ assert np.allclose(dset_attrs['Attribute_np_bytes_inarray'] == 'Test', True)
+ assert np.allclose(dset_attrs['Attribute_np_bytes_inarray2'] == 'Test', True)
+ assert dset_attrs['Attribute_int'] == 1
+ assert dset_attrs['Attribute_float'] == 1.1
+ assert np.allclose(dset_attrs['Attribute_np_1d'], np.array([1, 2, 3]))
+ assert np.allclose(dset_attrs['Attribute_np_2d'], np.array([[1, 2, 3], [4, 5, 6]]))
+ assert dset_attrs['Attribute_np_sgl_int'] == np.array([1])
+ assert dset_attrs['Attribute_np_sgl_float'] == np.array([1.0])
+ assert dset_attrs['Attribute_np_sgl_complex'] == np.array([1.0 + 1j])
+ assert_array_almost_equal(dset_attrs['Attribute_np_array_float'], np.array([1.0, 2.0]))
+
+ # CONVERT Single Numpy Value to Numeric
+ dset_attrs = get_attrs_dset(filename, 'base', convert_sgl_np_to_num=True)
+ assert dset_attrs['Attribute_np_sgl_int'] == 1
+ assert isinstance(dset_attrs['Attribute_np_sgl_int'], int)
+ assert not isinstance(dset_attrs['Attribute_np_sgl_int'], np.ndarray)
+ assert dset_attrs['Attribute_np_sgl_float'] == 1.0
+ assert isinstance(dset_attrs['Attribute_np_sgl_float'], float)
+ assert not isinstance(dset_attrs['Attribute_np_sgl_float'], np.ndarray)
+ assert dset_attrs['Attribute_np_sgl_complex'] == 1.0 + 1j
+ assert isinstance(dset_attrs['Attribute_np_sgl_complex'], complex)
+ assert not isinstance(dset_attrs['Attribute_np_sgl_complex'], np.ndarray)
+ assert_array_almost_equal(dset_attrs['Attribute_np_array_float'], np.array([1.0, 2.0]))
+ assert isinstance(dset_attrs['Attribute_np_array_float'], np.ndarray)
+
+ # DO NOT CONVERT Single Numpy Value to Numeric
+ dset_attrs = get_attrs_dset(filename, 'base', convert_sgl_np_to_num=False)
+ assert dset_attrs['Attribute_np_sgl_int'] == 1
+ assert not isinstance(dset_attrs['Attribute_np_sgl_int'], int)
+ assert isinstance(dset_attrs['Attribute_np_sgl_int'], np.ndarray)
+ assert dset_attrs['Attribute_np_sgl_float'] == 1.0
+ assert not isinstance(dset_attrs['Attribute_np_sgl_float'], float)
+ assert isinstance(dset_attrs['Attribute_np_sgl_float'], np.ndarray)
+ assert dset_attrs['Attribute_np_sgl_complex'] == 1.0 + 1j
+ assert not isinstance(dset_attrs['Attribute_np_sgl_complex'], complex)
+ assert isinstance(dset_attrs['Attribute_np_sgl_complex'], np.ndarray)
+ assert_array_almost_equal(dset_attrs['Attribute_np_array_float'], np.array([1.0, 2.0]))
+ assert isinstance(dset_attrs['Attribute_np_array_float'], np.ndarray)
\ No newline at end of file
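Condensed, the inspect helpers exercised above are used like this (assuming a file laid out like the fixture):

```python
from crikit.io.lazy5.inspect import (get_groups, get_datasets, get_hierarchy,
                                     get_attrs_dset)

fname = 'temp_test.h5'  # a file like the fixture above
print(get_groups(fname))                        # e.g. ['/', '/Group1', ...]
print(get_datasets(fname, fulldsetpath=True))   # e.g. ['/base', '/Group1/ingroup1_1', ...]
print(get_hierarchy(fname, grp_w_dset=True))    # {'/': ['base'], ...}
print(get_attrs_dset(fname, 'base', convert_to_str=True))
```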
diff --git a/crikit/io/lazy5/tests/test_nonh5utils.py b/crikit/io/lazy5/tests/test_nonh5utils.py
new file mode 100644
index 0000000..2428c5e
--- /dev/null
+++ b/crikit/io/lazy5/tests/test_nonh5utils.py
@@ -0,0 +1,130 @@
+""" Test non-HDF-related utilities """
+import pytest
+
+import numpy as np
+
+import crikit.io.lazy5 as lazy5
+from crikit.io.lazy5.nonh5utils import (filterlist, check_type_compat, return_family_type)
+
+def test_filter_list():
+ """ Test filtering of lists """
+ list_to_filter = ['Keep1', 'Keep2', 'KeepExclude', 'Exclude1', 'Exclude2']
+
+ # Keep, filter=str, exclusive
+ filters = 'Keep'
+ out_list = filterlist(list_to_filter, filters, keep_filtered_items=True,
+ exclusive=True)
+ assert out_list == ['Keep1', 'Keep2', 'KeepExclude']
+
+ # Exclude, filter=str, exclusive
+ filters = 'Exclude'
+ out_list = filterlist(list_to_filter, filters, keep_filtered_items=False,
+ exclusive=True)
+ assert out_list == ['Keep1', 'Keep2']
+
+ # Keep, filter=list, exclusive
+ filters = ['Keep']
+ out_list = filterlist(list_to_filter, filters, keep_filtered_items=True,
+ exclusive=True)
+ assert out_list == ['Keep1', 'Keep2', 'KeepExclude']
+
+ # Keep, filter=tuple, exclusive
+    filters = ('Keep',)
+ out_list = filterlist(list_to_filter, filters, keep_filtered_items=True,
+ exclusive=True)
+ assert out_list == ['Keep1', 'Keep2', 'KeepExclude']
+
+    # Keep, filter=list (two filters), exclusive
+ filters = ['Keep', '1']
+ out_list = filterlist(list_to_filter, filters, keep_filtered_items=True,
+ exclusive=True)
+ assert out_list == ['Keep1']
+
+ # Keep, filter=list, NOT-exclusive
+ filters = ['Keep', '1']
+ out_list = filterlist(list_to_filter, filters, keep_filtered_items=True,
+ exclusive=False)
+ assert out_list == ['Keep1', 'Keep2', 'KeepExclude', 'Exclude1']
+
+ # Exclude, filter=list, exclusive
+ filters = ['Exclude', '2']
+ out_list = filterlist(list_to_filter, filters, keep_filtered_items=False,
+ exclusive=True)
+ assert out_list == ['Keep1']
+
+ # Exclude, filter=list, NON-exclusive
+ filters = ['Exclude', '2']
+ out_list = filterlist(list_to_filter, filters, keep_filtered_items=False,
+ exclusive=False)
+
+ assert out_list == ['Keep1', 'Keep2', 'KeepExclude', 'Exclude1']
+
+ # Wrong type of filter
+ filters = 1
+ with pytest.raises(TypeError):
+ out_list = filterlist(list_to_filter, filters, keep_filtered_items=False,
+ exclusive=False)
+
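The assertions above pin down the `exclusive` semantics. As a sketch reproducing that behavior (not the lazy5 implementation): with `keep_filtered_items=True`, exclusive keeps items matching all filters, non-exclusive any; with `keep_filtered_items=False`, exclusive drops items matching any filter, non-exclusive only items matching all:

```python
def filterlist_sketch(items, filters, keep=True, exclusive=True):
    """Keep or drop items by substring filters (behavioral sketch)."""
    if isinstance(filters, str):
        filters = [filters]
    if keep:
        match = all if exclusive else any
        return [s for s in items if match(f in s for f in filters)]
    drop = any if exclusive else all
    return [s for s in items if not drop(f in s for f in filters)]

assert filterlist_sketch(['Keep1', 'Keep2', 'KeepExclude', 'Exclude1', 'Exclude2'],
                         ['Keep', '1'], keep=True, exclusive=True) == ['Keep1']
```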
+def test_return_family_type():
+ """ Test return_family_type """
+ assert return_family_type(1) is int
+ assert return_family_type(1.1) is float
+ assert return_family_type(1 + 1j*3) is complex
+ assert return_family_type('Test') is str
+ assert return_family_type(b'Test') is bytes
+ assert return_family_type(True) is bool
+
+ assert return_family_type(np.int32(1)) is int
+ assert return_family_type(int(1)) is int
+ assert return_family_type(np.float32(1.1)) is float
+ assert return_family_type(float(1.1)) is float
+ assert return_family_type(np.complex64(1 + 1j*3)) is complex
+ assert return_family_type(complex(1 + 1j*3)) is complex
+ assert return_family_type(str('Test')) is str
+ assert return_family_type(np.str_('Test')) is str # pylint: disable=E1101
+ assert return_family_type(np.bytes_('Test')) is bytes # pylint: disable=E1101
+ assert return_family_type(bool(True)) is bool
+ assert return_family_type(np.bool_(True)) is bool
+
+ with pytest.raises(TypeError):
+ return_family_type([1, 2, 3])
+
+ with pytest.raises(TypeError):
+ return_family_type((1, 2, 3))
+
+ with pytest.raises(TypeError):
+ return_family_type({'a':1})
+
+
+def test_check_type_compat():
+ """ Test check_type_compat[ibility] """
+
+ # Positive tests
+ assert check_type_compat(1, 2)
+ assert check_type_compat(1.1, 2.1)
+ assert check_type_compat(1.1+1j*3, 2.1+1j*8)
+ assert check_type_compat('Test', 'Test2')
+ assert check_type_compat(b'Test', b'Test2')
+ assert check_type_compat(True, False)
+
+ assert check_type_compat(1, np.int32(2))
+ assert check_type_compat(1.1, np.float32(2.1))
+ assert check_type_compat(1.1+1j*3, np.complex64(2.1+1j*8))
+ assert check_type_compat('Test', np.str_('Test2')) # pylint: disable=E1101
+ assert check_type_compat(b'Test', np.bytes_('Test2')) # pylint: disable=E1101
+ assert check_type_compat(True, np.bool_(False))
+
+ # Negative checks
+ assert not check_type_compat(1, 2.1)
+ assert not check_type_compat(1.1, 2)
+ assert not check_type_compat(1.1+1j*3, 2.1)
+ assert not check_type_compat('Test', 1)
+ assert not check_type_compat('Test', b'Test2')
+ assert not check_type_compat(True, 1)
+
+ assert not check_type_compat(1.1, np.int32(2))
+ assert not check_type_compat(1, np.float32(2.1))
+ assert not check_type_compat(1, np.complex64(2.1+1j*8))
+ assert not check_type_compat(1, np.str_('Test2')) # pylint: disable=E1101
+ assert not check_type_compat('Test', np.bytes_('Test2')) # pylint: disable=E1101
+ assert not check_type_compat(1, np.bool_(False))
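A short illustration of where the family-type check fits (hypothetical usage, not code from this diff): guarding an attribute overwrite that would change type family:

```python
import numpy as np
from crikit.io.lazy5.nonh5utils import check_type_compat

old_val, new_val = np.float32(1.1), 2   # float family vs int family
if not check_type_compat(old_val, new_val):
    print('Type family changed: {} -> {}'.format(type(old_val).__name__,
                                                 type(new_val).__name__))
```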
diff --git a/crikit/io/lazy5/tests/test_ui.py b/crikit/io/lazy5/tests/test_ui.py
new file mode 100644
index 0000000..1d6592a
--- /dev/null
+++ b/crikit/io/lazy5/tests/test_ui.py
@@ -0,0 +1,163 @@
+""" Test inspection of HDF5 files """
+import os
+import sys
+import h5py
+
+import numpy as np
+import pytest
+
+try:
+ from PyQt5.QtWidgets import QApplication
+ from PyQt5.QtTest import QTest
+ import PyQt5.QtCore
+ from PyQt5.QtCore import Qt
+except Exception:
+ HAS_PYQT5 = False
+else:
+ HAS_PYQT5 = True
+ import crikit.io.lazy5 as lazy5
+ from crikit.io.lazy5.ui.QtHdfLoad import HdfLoad
+
+from crikit.io.lazy5.utils import hdf_is_open
+
+@pytest.mark.skipif(not HAS_PYQT5, reason='PyQt5 not installed, skipping.')
+class TestUI:
+ """ Test the HDF5 PyQt5 Viewer """
+
+ @pytest.fixture(scope="module")
+ def hdf_dataset(self):
+ """ Setups and tears down a sample HDF5 file """
+ filename = 'temp_test_ui.h5'
+ fid = h5py.File(filename, 'w')
+ data_m, data_n, data_p = [20, 22, 24]
+ data = np.random.randn(data_m, data_n, data_p)
+
+ fid.create_dataset('base', data=data)
+
+ grp1 = fid.create_group('Group1')
+ grp3 = fid.create_group('Group2/Group3')
+ grp6 = fid.create_group('Group4/Group5/Group6')
+
+ grp1.create_dataset('ingroup1_1', data=data)
+ grp1.create_dataset('ingroup1_2', data=data)
+ fid.create_dataset('Group2/ingroup2', data=data)
+ grp3.create_dataset('ingroup3', data=data)
+
+ grp6.create_dataset('ingroup6', data=data)
+
+ fid['base'].attrs['Attribute_str'] = 'Test'
+ fid['base'].attrs['Attribute_bytes'] = b'Test'
+ fid['base'].attrs['Attribute_np_bytes'] = np.bytes_('Test') # pylint: disable=no-member
+ fid['base'].attrs.create('Attribute_int', 1)
+ fid['base'].attrs.create('Attribute_float', 1.1)
+ fid['base'].attrs.create('Attribute_np_1d', np.array([1, 2, 3]))
+ fid['base'].attrs.create('Attribute_np_2d', np.array([[1, 2, 3], [4, 5, 6]]))
+
+ app = QApplication(sys.argv) # pylint: disable=C0103, W0612
+ yield filename
+
+ # Tear-down
+ if hdf_is_open(fid):
+ fid.close()
+ os.remove(filename)
+ # sys.exit()
+
+ def test_ui_win_title_empty_load_dataset(self, hdf_dataset):
+ """ Test whether load dataset dialog is titled properly with no title provided"""
+ self.filename = hdf_dataset
+ dialog = HdfLoad()
+ _ = dialog.fileOpen(self.filename)
+
+ assert dialog.windowTitle() == 'Select a dataset...'
+
+ def test_ui_win_title_load_dataset(self, hdf_dataset):
+ """ Test whether load dataset dialog is titled properly """
+ self.filename = hdf_dataset
+ dialog = HdfLoad(title='TEST')
+ _ = dialog.fileOpen(self.filename)
+
+ assert dialog.windowTitle() == 'TEST: Select a dataset...'
+
+ def test_ui_load_file(self, hdf_dataset):
+ """ Load test file and check groups """
+ self.filename = hdf_dataset
+ dialog = HdfLoad()
+ _ = dialog.fileOpen(self.filename)
+
+ list_dsets = [dialog.ui.listDataSet.item(num).text() for num in
+ range(dialog.ui.listDataSet.count())]
+
+ list_grps = [dialog.ui.comboBoxGroupSelect.itemText(num) for num in
+ range(dialog.ui.comboBoxGroupSelect.count())]
+
+ assert list_dsets == ['base']
+ assert '/Group1' in list_grps
+ assert '/Group2/Group3' in list_grps
+ assert '/Group4/Group5/Group6' in list_grps
+ assert '/Group5' not in list_grps
+
+ def test_ui_change_grp_and_filter_include(self, hdf_dataset):
+ """ Load test file, change to Group1, filter for _1 """
+ self.filename = hdf_dataset
+ dialog = HdfLoad()
+ _ = dialog.fileOpen(self.filename)
+
+ # Change group to Group1
+ dialog.ui.comboBoxGroupSelect.setCurrentIndex(1)
+ list_dsets = [dialog.ui.listDataSet.item(num).text() for num in
+ range(dialog.ui.listDataSet.count())]
+ assert dialog.ui.comboBoxGroupSelect.currentText() == '/Group1'
+ assert list_dsets == ['ingroup1_1', 'ingroup1_2']
+
+ dialog.ui.filterIncludeString.setText('_1')
+ QTest.mouseClick(dialog.ui.pushButtonFilter, Qt.LeftButton)
+ list_dsets = [dialog.ui.listDataSet.item(num).text() for num in
+ range(dialog.ui.listDataSet.count())]
+ assert list_dsets == ['ingroup1_1']
+
+ def test_ui_change_grp_and_filter_exclude(self, hdf_dataset):
+ """ Load test file, change to Group1, filter for _1 """
+ self.filename = hdf_dataset
+ dialog = HdfLoad()
+ _ = dialog.fileOpen(self.filename)
+
+ # Change group to Group1
+ dialog.ui.comboBoxGroupSelect.setCurrentIndex(1)
+ list_dsets = [dialog.ui.listDataSet.item(num).text() for num in
+ range(dialog.ui.listDataSet.count())]
+ assert dialog.ui.comboBoxGroupSelect.currentText() == '/Group1'
+ assert list_dsets == ['ingroup1_1', 'ingroup1_2']
+
+ dialog.ui.filterExcludeString.setText('_1')
+ QTest.mouseClick(dialog.ui.pushButtonFilter, Qt.LeftButton)
+ list_dsets = [dialog.ui.listDataSet.item(num).text() for num in
+ range(dialog.ui.listDataSet.count())]
+ assert list_dsets == ['ingroup1_2']
+
+ def test_ui_attrs(self, hdf_dataset):
+ """ Load test file, change to base group (/), check attributes """
+ self.filename = hdf_dataset
+ dialog = HdfLoad()
+ _ = dialog.fileOpen(self.filename)
+
+        # Change group to the base group '/'
+ dialog.ui.comboBoxGroupSelect.setCurrentIndex(0)
+ list_dsets = [dialog.ui.listDataSet.item(num).text() for num in
+ range(dialog.ui.listDataSet.count())]
+ assert dialog.ui.comboBoxGroupSelect.currentText() == '/'
+ assert list_dsets == ['base']
+
+ # Select dataset base
+ dialog.ui.listDataSet.item(0).setSelected(True)
+ QTest.mouseClick(dialog.ui.listDataSet.viewport(), Qt.LeftButton)
+
+ assert (dialog.ui.tableAttributes.findItems('Attribute_str', Qt.MatchExactly)[0].text() ==
+ 'Attribute_str')
+ assert not dialog.ui.tableAttributes.findItems('fake', Qt.MatchExactly) # Empty
+
+ def test_ui_wrongfile(self, hdf_dataset):
+ """ Load test file, change to base group (/), check attributes """
+ self.filename = hdf_dataset
+ dialog = HdfLoad()
+ with pytest.raises(FileNotFoundError):
+ _ = dialog.fileOpen('does_not_exist.h5')
diff --git a/crikit/io/lazy5/tests/test_utils.py b/crikit/io/lazy5/tests/test_utils.py
new file mode 100644
index 0000000..506fac7
--- /dev/null
+++ b/crikit/io/lazy5/tests/test_utils.py
@@ -0,0 +1,106 @@
+""" Test HDF-related utilities """
+import os
+
+import pytest
+
+import h5py
+import numpy as np
+
+import crikit.io.lazy5 as lazy5
+from crikit.io.lazy5.utils import (FidOrFile, hdf_is_open, fullpath)
+
+@pytest.fixture(scope="module")
+def hdf_dataset():
+ """ Setups and tears down a sample HDF5 file """
+ filename = 'temp_test_utils.h5'
+ fid = h5py.File(filename, 'w')
+ data_m, data_n, data_p = [20, 22, 24]
+ data = np.random.randn(data_m, data_n, data_p)
+ fid.create_dataset('base', data=data)
+
+ yield filename, fid
+
+ # Tear-down
+ if hdf_is_open(fid):
+ fid.close()
+ os.remove(filename)
+
+def test_fid_or_file_filename_provided(hdf_dataset):
+ """ Test FidOrFile Class with provided filename """
+ filename, _ = hdf_dataset
+
+ fof = FidOrFile(filename)
+
+ # ! New h5py v 2.9.*: id instead of fid
+ try:
+ status = fof.fid.id.valid
+ except AttributeError:
+ status = fof.fid.fid.valid
+
+ assert status == 1
+ assert fof.fid is not None
+ assert not fof.is_fid
+
+ fof.fid.close()
+
+def test_fid_or_file_fid_provided(hdf_dataset):
+ """ Test FidOrFile Class with provided fid """
+ _, fid = hdf_dataset
+
+ fof = FidOrFile(fid)
+ # ! New h5py v 2.9.*: id instead of fid
+ try:
+ status = fof.fid.id.valid
+ except AttributeError:
+ status = fof.fid.fid.valid
+
+ assert status == 1
+ assert fof.fid is not None
+ assert fof.is_fid
+
+def test_fid_or_file_close_if_not_fid(hdf_dataset):
+ """ Test close if filename was provided """
+ filename, fid = hdf_dataset
+
+ fof = FidOrFile(fid)
+ fof.close_if_file_not_fid()
+ # ! New h5py v 2.9.*: id instead of fid
+ try:
+ status = fof.fid.id.valid
+ except AttributeError:
+ status = fof.fid.fid.valid
+
+ assert status == 1
+
+ fof = FidOrFile(filename)
+ fof.close_if_file_not_fid()
+ # ! New h5py v 2.9.*: id instead of fid
+ try:
+ status = fof.fid.id.valid
+ except AttributeError:
+ status = fof.fid.fid.valid
+
+ assert status == 0
+
+def test_hdf_is_open(hdf_dataset):
+ """ Test hdf_is_open function """
+ _, fid = hdf_dataset
+
+ assert hdf_is_open(fid)
+ fid.close()
+
+ assert not hdf_is_open(fid)
+
+def test_fullpath():
+ """ Test full path """
+    fp = fullpath(filename=None, pth=None)
+ assert fp is None
+
+ fn = 'test.XYZ'
+ p = 'Dir1/Dir2'
+
+    fp = fullpath(filename=fn, pth=None)
+ assert fp == fn
+
+ fp = fullpath(filename=fn, pth=p)
+ assert fp == os.path.join(p, fn)
\ No newline at end of file
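The `id`/`fid` try/except above appears four times in this file; it is the same version guard wrapped by `crikit.io.lazy5.utils.hdf_is_open` (added later in this diff). A consolidated helper, as a sketch:

```python
def fid_is_valid(fid):
    """Version-robust validity check: h5py >= 2.9 exposes the low-level
    identifier as File.id; older releases used File.fid."""
    try:
        return bool(fid.id.valid)
    except AttributeError:
        return bool(fid.fid.valid)
```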
diff --git a/crikit/io/lazy5/ui/QtHdfLoad.py b/crikit/io/lazy5/ui/QtHdfLoad.py
new file mode 100644
index 0000000..3f3713c
--- /dev/null
+++ b/crikit/io/lazy5/ui/QtHdfLoad.py
@@ -0,0 +1,242 @@
+"""
+HDF5 LOAD DATA QDialog (crikit.io.lazy5.ui.QtHdfLoad)
+======================================================
+
+ HdfLoad : A graphical user interface (GUI) to select HDF5 dataset(s)
+
+ Method : HdfLoad.getFileDataSets()
+
+ Return (tuple) : (path [str], filename [str], dataset(s) [list], selection_made [bool])
+
+ Notes
+ -----
+ Methods that interact with Qt follow the Qt naming convention:
+ firstSecondThird
+"""
+
+
+# Standard-library imports
+import sys as _sys
+import os as _os
+
+try:
+ # Generic imports for QT-based programs
+    from PyQt5.QtWidgets import (QApplication as _QApplication,
+                                 QDialog as _QDialog, QFileDialog as _QFileDialog,
+                                 QTableWidgetItem as _QTableWidgetItem)
+except Exception:
+ HAS_PYQT5 = False
+else:
+ HAS_PYQT5 = True
+from crikit.io.lazy5.ui.qt_HdfLoad import Ui_Dialog
+
+from crikit.io.lazy5.inspect import get_hierarchy, get_attrs_dset
+from crikit.io.lazy5.nonh5utils import filterlist
+
+class HdfLoad(_QDialog):
+ """ GUI Loader Class for H5 Files """
+
+ # Default configuration
+ config = {'only_show_grp_w_dset': True, # Only show groups with datasets
+ 'attr_description': 'Memo', # Description attribute key (optional)
+ 'excl_filtering' : True # Filtering is exclusive (filters are AND'd)
+ }
+
+ def __init__(self, title=None, parent=None):
+
+ # Generic load/init designer-based GUI
+ super(HdfLoad, self).__init__(parent)
+ self.ui = Ui_Dialog() # pylint: disable=C0103
+ self.ui.setupUi(self)
+
+ self.path = None
+ self.filename = None
+ self.all_selected = None
+ self.group_dset_dict = None
+
+ if title:
+ self.setWindowTitle('{}: Select a dataset...'.format(title))
+ else:
+ self.setWindowTitle('Select a dataset...')
+
+ self.ui.pushButtonOk.clicked.connect(self.accept)
+ self.ui.pushButtonCancel.clicked.connect(self.reject)
+ self.ui.comboBoxGroupSelect.currentTextChanged.connect(self.dataGroupChange)
+ self.ui.listDataSet.itemClicked.connect(self.datasetSelected)
+ self.ui.pushButtonFilter.clicked.connect(self.filterDatasets)
+ self.ui.pushButtonResetFilter.clicked.connect(self.dataGroupChange)
+
+
+ @staticmethod
+ def getFileDataSets(pth='./', title=None, parent=None): # pylint: disable=C0103; # pragma: no cover
+ """
+ Retrieve the filename and datasets selected by the user (via GUI)
+
+ Parameters
+ ----------
+ pth : str
+ Home directory to start in OR the relative pth to a file
+
+ Returns
+ ----------
+ Tuple (str, str, list[str]) as (path, filename, [dataset(s)])
+
+ """
+
+        dialog = HdfLoad(title=title, parent=parent)
+
+ ret_fileopen = True
+ if pth is None:
+ pth = './'
+ else:
+ pth = _os.path.abspath(pth)
+
+ while True:
+ ret_fileopen = dialog.fileOpen(pth, title=title)
+
+ ret = None
+ if ret_fileopen:
+ ret_dset_select = dialog.exec_()
+ if ret_dset_select == _QDialog.Rejected:
+ pth = dialog.path
+ elif dialog.all_selected is None:
+ pass
+ else:
+ ret = (dialog.path, dialog.filename, dialog.all_selected)
+ break
+ else:
+ break
+ return ret
+
+ def fileOpen(self, pth='./', title=None): # Qt-related pylint: disable=C0103
+ """ Select HDF5 File via QDialog built-in."""
+
+ if pth is None:
+ pth = './'
+
+ if title is None:
+            title_file = 'Select a file...'
+        else:
+            title_file = '{}: Select a file...'.format(title)
+
+ if _os.path.isdir(pth): # No file provided, use QFileDialog; # pragma: no cover
+ filetype_options = 'HDF5 Files (*.h5 *.hdf);;All Files (*.*)'
+ full_pth_fname, _ = _QFileDialog.getOpenFileName(self, title_file, pth,
+ filetype_options)
+ elif _os.path.isfile(pth): # Is a valid file
+ full_pth_fname = pth
+ else:
+            raise FileNotFoundError('{} is neither a valid directory nor a file'.format(pth))
+
+ ret = None
+ if full_pth_fname:
+ full_pth_fname = _os.path.abspath(full_pth_fname) # Ensure correct /'s for each OS
+ self.filename = _os.path.basename(full_pth_fname)
+ self.path = _os.path.dirname(full_pth_fname)
+ self.populateGroups()
+ ret = True
+ return ret
+
+ def populateGroups(self): # Qt-related pylint: disable=C0103
+ """ Populate dropdown box of group ui.comboBoxGroupSelect """
+ self.group_dset_dict = get_hierarchy(_os.path.join(self.path, self.filename),
+ grp_w_dset=HdfLoad.config['only_show_grp_w_dset'])
+ # Load Group dropdown box
+ self.ui.comboBoxGroupSelect.clear()
+        for grp_name in self.group_dset_dict:
+            self.ui.comboBoxGroupSelect.addItem(grp_name)
+ return [self.path, self.filename]
+
+ def dataGroupChange(self): # Qt-related pylint: disable=C0103
+ """ Action : ComboBox containing Groups with DataSets has changed"""
+
+ self.ui.listDataSet.clear()
+
+ if self.ui.comboBoxGroupSelect.currentText() != '':
+ self.ui.listDataSet.addItems(self.group_dset_dict[self.ui.comboBoxGroupSelect.currentText()])
+
+ def populate_attrs(self, attr_dict=None):
+ """ Populate attribute and memo boxes for currently selected dataset """
+
+ self.ui.tableAttributes.setRowCount(0)
+ self.ui.tableAttributes.setColumnCount(2)
+ self.ui.tableAttributes.setSortingEnabled(False)
+ self.ui.textDescription.setText('')
+
+ if attr_dict:
+ try:
+ self.ui.textDescription.setText(attr_dict[HdfLoad.config['attr_description']])
+ except (KeyError, AttributeError) as error_msg:
+ print('{}\nNo memo at key {}'.format(error_msg, HdfLoad.config['attr_description']))
+
+ for num, key in enumerate(attr_dict):
+ self.ui.tableAttributes.insertRow(self.ui.tableAttributes.rowCount())
+ self.ui.tableAttributes.setItem(num, 0, _QTableWidgetItem(key))
+ self.ui.tableAttributes.setItem(num, 1, _QTableWidgetItem(str(attr_dict[key])))
+
+ def datasetSelected(self): # Qt-related pylint: disable=C0103
+ """ Action : One or more DataSets were selected from the list """
+
+ all_selected = self.ui.listDataSet.selectedItems()
+ n_selected = len(all_selected)
+
+ self.ui.textCurrentDataset.setText('')
+ self.all_selected = []
+ attrs = {}
+
+ if n_selected > 0:
+ current_selection = all_selected[-1].text()
+ current_grp = self.ui.comboBoxGroupSelect.currentText()
+
+ selection_str = '{} + ({} others)'.format(current_selection, n_selected - 1)
+ self.ui.textCurrentDataset.setText(selection_str)
+ if current_grp == '/':
+ current_dset_fullpath = '{}{}'.format(current_grp, current_selection)
+ else:
+ current_dset_fullpath = '{}/{}'.format(current_grp, current_selection)
+ # TODO: Figure out a better way to deal with base-group datasets
+ # Bug when dsets are in base group '/'
+ current_dset_fullpath = current_dset_fullpath.replace('//','/')
+ attrs = get_attrs_dset(_os.path.join(self.path, self.filename),
+ current_dset_fullpath, convert_to_str=True)
+ self.all_selected = [('{}/{}'.format(current_grp, selection.text())).replace('//','/')
+ for selection in all_selected]
+
+ # Fill-in attribute table
+ self.populate_attrs(attr_dict=attrs)
+
+ def filterDatasets(self): # Qt-related pylint: disable=C0103
+ """ Filter list of datasets based on include and exclude strings """
+ incl_str = self.ui.filterIncludeString.text()
+ excl_str = self.ui.filterExcludeString.text()
+
+ # From string with comma separation to list-of-strings
+ incl_list = [q.strip() for q in incl_str.split(',') if q.strip()]
+ excl_list = [q.strip() for q in excl_str.split(',') if q.strip()]
+
+ dset_list = [self.ui.listDataSet.item(num).text() for num in
+ range(self.ui.listDataSet.count())]
+
+ if incl_list: # Include list is not empty
+ dset_list = filterlist(dset_list, incl_list,
+ keep_filtered_items=True,
+ exclusive=HdfLoad.config['excl_filtering'])
+
+ if excl_list: # Exclude list is not empty
+ dset_list = filterlist(dset_list, excl_list,
+ keep_filtered_items=False,
+ exclusive=HdfLoad.config['excl_filtering'])
+
+ self.ui.listDataSet.clear()
+ self.ui.listDataSet.addItems(dset_list)
+
+if __name__ == '__main__': # pragma: no cover
+ app = _QApplication(_sys.argv) # pylint: disable=C0103
+ result = HdfLoad.getFileDataSets(pth='.', title='Test title') # pylint: disable=C0103
+ print('Result: {}'.format(result))
+
+ _sys.exit()
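For completeness, a hedged usage sketch of the static entry point. It needs a display and, as in the `__main__` block above, a QApplication; the path is hypothetical:

```python
import sys
from PyQt5.QtWidgets import QApplication
from crikit.io.lazy5.ui.QtHdfLoad import HdfLoad

app = QApplication(sys.argv)
ret = HdfLoad.getFileDataSets(pth='./data', title='My experiment')  # './data' is hypothetical
if ret is not None:
    path, filename, dsets = ret
    print('Selected {} dataset(s) from {}'.format(len(dsets), filename))
```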
diff --git a/crikit/io/lazy5/ui/__init__.py b/crikit/io/lazy5/ui/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/crikit/io/lazy5/ui/qt5/ui_HDFLoad.ui b/crikit/io/lazy5/ui/qt5/ui_HDFLoad.ui
new file mode 100644
index 0000000..6e1ac90
--- /dev/null
+++ b/crikit/io/lazy5/ui/qt5/ui_HDFLoad.ui
@@ -0,0 +1,279 @@
+[Qt Designer XML for this dialog; the markup was lost to extraction and only
+ text nodes survive. Recoverable content: window title "HDF File Inspector";
+ geometry 845x588; stylesheet font: 10pt "Arial"; labels "Groups Containing
+ Datasets", "Datasets", "Include Entries with Substring (separate by ','
+ [comma])", "Exclude Entries with Substring (separate by ',' [comma])",
+ "Filter List", "Reset List", "Current Selection", "Attribute Table",
+ "Dataset Information (Memo)"; table headers "Attribute" and "Value"; buttons
+ "OK" and "Cancel". The generated Python equivalent, qt_HdfLoad.py below,
+ fully specifies the widget tree.]
diff --git a/crikit/io/lazy5/ui/qt_HdfLoad.py b/crikit/io/lazy5/ui/qt_HdfLoad.py
new file mode 100644
index 0000000..3d8be62
--- /dev/null
+++ b/crikit/io/lazy5/ui/qt_HdfLoad.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+
+# Form implementation generated from reading ui file '.\ui_HdfLoad.ui'
+#
+# Created by: PyQt5 UI code generator 5.8.1
+#
+# WARNING! All changes made in this file will be lost!
+
+from PyQt5 import QtCore, QtGui, QtWidgets
+
+class Ui_Dialog(object):
+ def setupUi(self, Dialog):
+ Dialog.setObjectName("Dialog")
+ Dialog.resize(845, 588)
+ Dialog.setStyleSheet("font: 10pt \"Arial\";")
+ self.verticalLayout_2 = QtWidgets.QVBoxLayout(Dialog)
+ self.verticalLayout_2.setObjectName("verticalLayout_2")
+ self.verticalLayout = QtWidgets.QVBoxLayout()
+ self.verticalLayout.setSizeConstraint(QtWidgets.QLayout.SetNoConstraint)
+ self.verticalLayout.setObjectName("verticalLayout")
+ self.topHL = QtWidgets.QHBoxLayout()
+ self.topHL.setSizeConstraint(QtWidgets.QLayout.SetNoConstraint)
+ self.topHL.setObjectName("topHL")
+ self.dataSetVL = QtWidgets.QVBoxLayout()
+ self.dataSetVL.setObjectName("dataSetVL")
+ self.label_1 = QtWidgets.QLabel(Dialog)
+ self.label_1.setObjectName("label_1")
+ self.dataSetVL.addWidget(self.label_1)
+ self.comboBoxGroupSelect = QtWidgets.QComboBox(Dialog)
+ self.comboBoxGroupSelect.setObjectName("comboBoxGroupSelect")
+ self.dataSetVL.addWidget(self.comboBoxGroupSelect)
+ self.label_2 = QtWidgets.QLabel(Dialog)
+ self.label_2.setObjectName("label_2")
+ self.dataSetVL.addWidget(self.label_2)
+ self.listDataSet = QtWidgets.QListWidget(Dialog)
+ self.listDataSet.setEditTriggers(QtWidgets.QAbstractItemView.DoubleClicked|QtWidgets.QAbstractItemView.EditKeyPressed|QtWidgets.QAbstractItemView.SelectedClicked)
+ self.listDataSet.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
+ self.listDataSet.setObjectName("listDataSet")
+ self.dataSetVL.addWidget(self.listDataSet)
+ self.label_3 = QtWidgets.QLabel(Dialog)
+ self.label_3.setObjectName("label_3")
+ self.dataSetVL.addWidget(self.label_3)
+ self.filterIncludeString = QtWidgets.QLineEdit(Dialog)
+ self.filterIncludeString.setObjectName("filterIncludeString")
+ self.dataSetVL.addWidget(self.filterIncludeString)
+ self.label_4 = QtWidgets.QLabel(Dialog)
+ self.label_4.setObjectName("label_4")
+ self.dataSetVL.addWidget(self.label_4)
+ self.filterExcludeString = QtWidgets.QLineEdit(Dialog)
+ self.filterExcludeString.setObjectName("filterExcludeString")
+ self.dataSetVL.addWidget(self.filterExcludeString)
+ self.horizontalLayout = QtWidgets.QHBoxLayout()
+ self.horizontalLayout.setObjectName("horizontalLayout")
+ self.pushButtonFilter = QtWidgets.QPushButton(Dialog)
+ self.pushButtonFilter.setObjectName("pushButtonFilter")
+ self.horizontalLayout.addWidget(self.pushButtonFilter)
+ self.pushButtonResetFilter = QtWidgets.QPushButton(Dialog)
+ self.pushButtonResetFilter.setObjectName("pushButtonResetFilter")
+ self.horizontalLayout.addWidget(self.pushButtonResetFilter)
+ self.dataSetVL.addLayout(self.horizontalLayout)
+ self.topHL.addLayout(self.dataSetVL)
+ self.attribVL = QtWidgets.QVBoxLayout()
+ self.attribVL.setObjectName("attribVL")
+ self.label_5 = QtWidgets.QLabel(Dialog)
+ self.label_5.setObjectName("label_5")
+ self.attribVL.addWidget(self.label_5)
+ self.textCurrentDataset = QtWidgets.QTextBrowser(Dialog)
+ sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Maximum)
+ sizePolicy.setHorizontalStretch(0)
+ sizePolicy.setVerticalStretch(0)
+ sizePolicy.setHeightForWidth(self.textCurrentDataset.sizePolicy().hasHeightForWidth())
+ self.textCurrentDataset.setSizePolicy(sizePolicy)
+ self.textCurrentDataset.setMaximumSize(QtCore.QSize(16777215, 100))
+ self.textCurrentDataset.setObjectName("textCurrentDataset")
+ self.attribVL.addWidget(self.textCurrentDataset)
+ self.label_6 = QtWidgets.QLabel(Dialog)
+ self.label_6.setObjectName("label_6")
+ self.attribVL.addWidget(self.label_6)
+ self.tableAttributes = QtWidgets.QTableWidget(Dialog)
+ self.tableAttributes.setStyleSheet("")
+ self.tableAttributes.setColumnCount(2)
+ self.tableAttributes.setObjectName("tableAttributes")
+ self.tableAttributes.setRowCount(0)
+ item = QtWidgets.QTableWidgetItem()
+ font = QtGui.QFont()
+ font.setPointSize(10)
+ item.setFont(font)
+ item.setBackground(QtGui.QColor(121, 121, 121))
+ brush = QtGui.QBrush(QtGui.QColor(91, 91, 91))
+ brush.setStyle(QtCore.Qt.SolidPattern)
+ item.setForeground(brush)
+ self.tableAttributes.setHorizontalHeaderItem(0, item)
+ item = QtWidgets.QTableWidgetItem()
+ item.setBackground(QtGui.QColor(123, 123, 123))
+ brush = QtGui.QBrush(QtGui.QColor(91, 91, 91))
+ brush.setStyle(QtCore.Qt.SolidPattern)
+ item.setForeground(brush)
+ self.tableAttributes.setHorizontalHeaderItem(1, item)
+ self.tableAttributes.horizontalHeader().setCascadingSectionResizes(True)
+ self.tableAttributes.horizontalHeader().setDefaultSectionSize(200)
+ self.tableAttributes.horizontalHeader().setHighlightSections(False)
+ self.tableAttributes.horizontalHeader().setSortIndicatorShown(True)
+ self.tableAttributes.horizontalHeader().setStretchLastSection(True)
+ self.tableAttributes.verticalHeader().setVisible(False)
+ self.tableAttributes.verticalHeader().setHighlightSections(False)
+ self.attribVL.addWidget(self.tableAttributes)
+ self.label_7 = QtWidgets.QLabel(Dialog)
+ self.label_7.setObjectName("label_7")
+ self.attribVL.addWidget(self.label_7)
+ self.textDescription = QtWidgets.QTextBrowser(Dialog)
+ sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Maximum)
+ sizePolicy.setHorizontalStretch(0)
+ sizePolicy.setVerticalStretch(0)
+ sizePolicy.setHeightForWidth(self.textDescription.sizePolicy().hasHeightForWidth())
+ self.textDescription.setSizePolicy(sizePolicy)
+ self.textDescription.setMaximumSize(QtCore.QSize(16777215, 100))
+ self.textDescription.setObjectName("textDescription")
+ self.attribVL.addWidget(self.textDescription)
+ self.topHL.addLayout(self.attribVL)
+ self.verticalLayout.addLayout(self.topHL)
+ self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
+ self.horizontalLayout_2.setContentsMargins(-1, 10, -1, -1)
+ self.horizontalLayout_2.setObjectName("horizontalLayout_2")
+ spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
+ self.horizontalLayout_2.addItem(spacerItem)
+ self.pushButtonOk = QtWidgets.QPushButton(Dialog)
+ self.pushButtonOk.setObjectName("pushButtonOk")
+ self.horizontalLayout_2.addWidget(self.pushButtonOk)
+ self.pushButtonCancel = QtWidgets.QPushButton(Dialog)
+ self.pushButtonCancel.setObjectName("pushButtonCancel")
+ self.horizontalLayout_2.addWidget(self.pushButtonCancel)
+ self.verticalLayout.addLayout(self.horizontalLayout_2)
+ self.verticalLayout_2.addLayout(self.verticalLayout)
+
+ self.retranslateUi(Dialog)
+ QtCore.QMetaObject.connectSlotsByName(Dialog)
+
+ def retranslateUi(self, Dialog):
+ _translate = QtCore.QCoreApplication.translate
+ Dialog.setWindowTitle(_translate("Dialog", "HDF File Inspector"))
+ self.label_1.setText(_translate("Dialog", "Groups Containing Datasets"))
+ self.label_2.setText(_translate("Dialog", "Datasets"))
+        self.label_3.setText(_translate("Dialog", "<html><head/><body><p><span style=\" font-weight:600;\">Include</span> Entries with Substring (separate by \',\' [comma])</p></body></html>"))
+        self.label_4.setText(_translate("Dialog", "<html><head/><body><p><span style=\" font-weight:600;\">Exclude</span> Entries with Substring (separate by \',\' [comma])</p></body></html>"))
+ self.pushButtonFilter.setText(_translate("Dialog", "Filter List"))
+ self.pushButtonResetFilter.setText(_translate("Dialog", "Reset List"))
+ self.label_5.setText(_translate("Dialog", "Current Selection"))
+        self.label_6.setText(_translate("Dialog", "Attribute Table"))
+ self.tableAttributes.setSortingEnabled(True)
+ item = self.tableAttributes.horizontalHeaderItem(0)
+ item.setText(_translate("Dialog", "Attribute"))
+ item = self.tableAttributes.horizontalHeaderItem(1)
+ item.setText(_translate("Dialog", "Value"))
+ self.label_7.setText(_translate("Dialog", "Dataset Information (Memo)"))
+ self.pushButtonOk.setText(_translate("Dialog", "OK"))
+ self.pushButtonCancel.setText(_translate("Dialog", "Cancel"))
+
diff --git a/crikit/io/lazy5/utils.py b/crikit/io/lazy5/utils.py
new file mode 100644
index 0000000..499defa
--- /dev/null
+++ b/crikit/io/lazy5/utils.py
@@ -0,0 +1,93 @@
+""" Utility functions """
+import os as _os
+
+import h5py as _h5py
+import numpy as _np
+
+from .config import DefaultConfig
+_h5py.get_config().complex_names = DefaultConfig().complex_names
+
+__all__ = ['FidOrFile', 'hdf_is_open', 'fullpath']
+
+class FidOrFile:
+ """
+ Class for opening an HDF5 file and returning a file ID (fid) or if passed
+ and already open fid, passing it along (pass-thru). Primarily for enabling
+ functions and methods to operate on open and closed files.
+
+ Parameters
+ ----------
+ file : str or h5py.File
+ Filename or File-object for open HDF5 file
+
+ mode : str
+ If opening a file, open with mode. Available: r,r+,w,w-,x,a
+
+ Attributes
+ ----------
+ is_fid : bool
+ Was the input file actually an fid.
+
+ fid : h5py.File object
+ File ID
+ """
+ def __init__(self, file=None, mode='r'):
+ self.is_fid = None
+ self.fid = None
+ if file is not None:
+ self.return_fid_from_file(file, mode=mode)
+
+ def return_fid_from_file(self, file, mode='r'):
+ """
+ Return an open fid (h5py.File). If provided a string, open file, else
+ pass-thru given fid.
+
+ Parameters
+ ----------
+ file : str or h5py.File
+ Filename or File-object for open HDF5 file
+
+ mode : str
+ If opening a file, open with mode. Available: r,r+,w,w-,x,a
+
+ Returns
+ -------
+ fid : h5py.File object
+ File ID
+
+ """
+ self.is_fid = isinstance(file, _h5py.File)
+ if not self.is_fid:
+ self.fid = _h5py.File(file, mode=mode)
+ else:
+ self.fid = file
+ return self.fid
+
+ def close_if_file_not_fid(self):
+ """ Close the file if originally a filename (not a fid) was passed """
+ if not self.is_fid:
+ return self.fid.close()
+ else:
+ return None
+
+def hdf_is_open(fid):
+ """ Is an HDF file open via fid """
+ # ! New h5py v 2.9.*: id instead of fid
+ try:
+ status = fid.id.valid
+ except AttributeError:
+ status = fid.fid.valid
+
+ if status == 0:
+ return False
+ elif status == 1:
+ return True
+ else:
+ return None
+
+def fullpath(filename, pth=None):
+ """ Return a full path by joining a pth and filename """
+ if not pth:
+ return filename
+ else:
+ return _os.path.join(pth, filename)
\ No newline at end of file
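A minimal usage sketch of the pass-thru pattern FidOrFile enables (file and dataset names hypothetical):

```python
import h5py
from crikit.io.lazy5.utils import FidOrFile

def dataset_shape(file, dset_name):
    """Accept an open h5py.File or a filename; close only what we opened."""
    fof = FidOrFile(file)          # opens the file if given a str; pass-thru if a fid
    shape = fof.fid[dset_name].shape
    fof.close_if_file_not_fid()    # no-op when the caller supplied an open fid
    return shape

# Equivalent calls ('example.h5' is hypothetical):
# dataset_shape('example.h5', '/Group1/Dset')
# with h5py.File('example.h5', 'r') as fid:
#     dataset_shape(fid, '/Group1/Dset')
```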
diff --git a/crikit/io/macros.py b/crikit/io/macros.py
index 8c5eb0b..69e1b77 100644
--- a/crikit/io/macros.py
+++ b/crikit/io/macros.py
@@ -3,19 +3,28 @@
@author: chc
"""
-import os as _os
-import lazy5
+import crikit.io.lazy5 as lazy5
from crikit.io.meta_configs import (special_nist_bcars2 as _snb,
special_nist_bcars1_sample_scan as _snb1ss)
from crikit.io.meta_process import meta_process as _meta_process
-from crikit.io.hdf5 import hdf_import_data as _hdf_import_data
+from crikit.io.hdf5 import (hdf_import_data as _hdf_import_data, hdf_import_data_macroraster as _hdf_import_data_macroraster)
from crikit.io.csv_nist import csv_nist_import_data as _csv_nist_import_data
__all__ = ['import_hdf_nist_special', 'import_csv_nist_special1']
+def hdf_nist_special_macroraster(pth, filename, dset_list, output_cls_instance):
+    """
+    Import macrostage raster data from an HDF file as specified by NIST-specific settings
+
+    Returns
+    -------
+    Success : bool
+        Whether import was successful
+    """
+    print('\n')
+    import_success = _hdf_import_data_macroraster(pth, filename, dset_list, output_cls_instance)
+    if import_success is False:
+        raise ValueError('hdf_import_data_macroraster failed')
+    _meta_process(_snb(), output_cls_instance)
+    return True
+
+
def import_hdf_nist_special(pth, filename, dset, output_cls_instance):
"""
Import data from HDF File as specified by NIST-specific settings
@@ -25,7 +34,7 @@ def import_hdf_nist_special(pth, filename, dset, output_cls_instance):
Success : bool
Whether import was successful
"""
-
+
print('\n')
import_success = _hdf_import_data(pth, filename, dset, output_cls_instance)
if import_success is False:
@@ -34,6 +43,7 @@ def import_hdf_nist_special(pth, filename, dset, output_cls_instance):
_meta_process(_snb(), output_cls_instance)
return True
+
def import_hdf_nist_special_ooc(pth, filename, dset, output_cls_instance):
"""
Import data from HDF File (OUT-OF-CORE) as specified by NIST-specific settings
@@ -43,7 +53,7 @@ def import_hdf_nist_special_ooc(pth, filename, dset, output_cls_instance):
Success : bool
Whether import was successful
"""
-
+
print('\n')
try:
@@ -51,12 +61,13 @@ def import_hdf_nist_special_ooc(pth, filename, dset, output_cls_instance):
output_cls_instance._data = fid[dset]
output_cls_instance.meta = lazy5.inspect.get_attrs_dset(fid, dset)
_meta_process(_snb(), output_cls_instance)
- except:
+ except Exception:
raise ValueError('hdf_import_data failed')
return False
else:
- return fid
-
+ return fid
+
+
def import_csv_nist_special1(pth, filename_header, filename_data,
output_cls_instance):
"""
@@ -74,31 +85,31 @@ def import_csv_nist_special1(pth, filename_header, filename_data,
if import_success is None or import_success is False:
raise ValueError('csv_import_data returned None')
_meta_process(_snb1ss(), output_cls_instance)
- except:
+ except Exception:
print('Something failed in import_csv_nist_special')
return False
else:
return True
+
if __name__ == '__main__': # pragma: no cover
- from crikit.data.hsi import Hsi as _Hsi
+ from crikit.data.spectra import Hsi as _Hsi
pth = '../'
filename = 'mP2_w_small.h5'
img = _Hsi()
- import_hdf_nist_special(pth, filename,'/BCARSImage/mP2_3_5ms_Pos_2_0/mP2_3_5ms_Pos_2_0_small',img)
+ import_hdf_nist_special(pth, filename, '/BCARSImage/mP2_3_5ms_Pos_2_0/mP2_3_5ms_Pos_2_0_small', img)
print('Shape of img: {}'.format(img.shape))
print('Shape of img.mean(): {}'.format(img.mean().shape))
print(img.y_rep.data)
-
- # from crikit.data.spectrum import Spectrum as _Spectrum
-
+
+ # from crikit.data.spectra import Spectrum as _Spectrum
+
# sp = _Spectrum()
# pth = '../../../Young_150617/'
# filename_header = 'SH-03.h'
# filename_data = 'base061715_152213_60ms.txt'
-
+
# import_csv_nist_special1(pth, filename_header, filename_data,
# output_cls_instance=sp)
-
\ No newline at end of file
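For orientation, the new macro-raster entry point mirrors the `__main__` example above. A hedged sketch: the file and dataset names are hypothetical, and a real NIST macro-raster HDF5 file is required for it to run:

```python
from crikit.data.spectra import Hsi
from crikit.io.macros import hdf_nist_special_macroraster

img = Hsi()
# dset_list: one entry per macrostage position dataset (names hypothetical)
hdf_nist_special_macroraster('./', 'macro_scan.h5',
                             ['/Macro/Pos0/data', '/Macro/Pos1/data'], img)
print('Shape of img: {}'.format(img.shape))
```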
diff --git a/crikit/io/meta_configs.py b/crikit/io/meta_configs.py
index e64982a..6af213f 100644
--- a/crikit/io/meta_configs.py
+++ b/crikit/io/meta_configs.py
@@ -9,50 +9,47 @@
@author: chc
"""
+
def special_nist_bcars2():
"""
Return import attributes particular to the "BCARS 2" system at NIST
"""
rosetta = {}
- rosetta['XPixelSize'] = ['RasterScanParams.FastAxisStepSize',
- 'Raster.Fast.StepSize','!',1]
- rosetta['XStart'] = ['RasterScanParams.FastAxisStart', 'Raster.Fast.Start']
- rosetta['XStop'] = ['RasterScanParams.FastAxisStop', 'Raster.Fast.Stop']
- rosetta['XLength'] = ['RasterScanParams.FastAxisSteps', 'Raster.Fast.Steps']
- rosetta['XLabel'] = ['RasterScanParams.FastAxis','Raster.Fast.Axis','!','X']
- rosetta['XUnits'] = ['RasterScanParams.FastAxisUnits','!','$\\mu$m']
-
- rosetta['YPixelSize'] = ['RasterScanParams.SlowAxisStepSize',
- 'Raster.Slow.StepSize','!',1]
- rosetta['YStart'] = ['RasterScanParams.SlowAxisStart', 'Raster.Slow.Start']
- rosetta['YStop'] = ['RasterScanParams.SlowAxisStop', 'Raster.Slow.Stop']
- rosetta['YLength'] = ['RasterScanParams.SlowAxisSteps', 'Raster.Slow.Steps']
- rosetta['YLabel'] = ['RasterScanParams.SlowAxis','Raster.Slow.Axis','!','Y']
- rosetta['YUnits'] = ['RasterScanParams.SlowAxisUnits','!','$\\mu$m']
-
- rosetta['ZPosition'] = ['RasterScanParams.FixedAxisPosition',
- 'Raster.Stack.Position','!',0]
- rosetta['ZLabel'] = ['RasterScanParams.FixedAxis', 'Raster.Stack.Axis','!','Z']
+ rosetta['XPixelSize'] = ['Macro.Raster.Fast.StepSize', 'RasterScanParams.FastAxisStepSize',
+ 'Raster.Fast.StepSize', '!', 1]
+ rosetta['XStart'] = ['Macro.Raster.Fast.Start', 'RasterScanParams.FastAxisStart', 'Raster.Fast.Start']
+ rosetta['XStop'] = ['Macro.Raster.Fast.Stop', 'RasterScanParams.FastAxisStop', 'Raster.Fast.Stop']
+ rosetta['XLength'] = ['Macro.Raster.Fast.Steps', 'RasterScanParams.FastAxisSteps', 'Raster.Fast.Steps']
+ rosetta['XLabel'] = ['Macro.Raster.Fast.Axis', 'RasterScanParams.FastAxis', 'Raster.Fast.Axis', '!', 'X']
+ rosetta['XUnits'] = ['MicroStage.raster.fast.units', 'RasterScanParams.FastAxisUnits', '!', '$\\mu$m']
+
+ rosetta['YPixelSize'] = ['Macro.Raster.Slow.StepSize', 'RasterScanParams.SlowAxisStepSize', 'Raster.Slow.StepSize', '!', 1]
+ rosetta['YStart'] = ['Macro.Raster.Slow.Start', 'RasterScanParams.SlowAxisStart', 'Raster.Slow.Start']
+ rosetta['YStop'] = ['Macro.Raster.Slow.Stop', 'RasterScanParams.SlowAxisStop', 'Raster.Slow.Stop']
+ rosetta['YLength'] = ['Macro.Raster.Slow.Steps', 'RasterScanParams.SlowAxisSteps', 'Raster.Slow.Steps']
+ rosetta['YLabel'] = ['Macro.Raster.Slow.Axis', 'RasterScanParams.SlowAxis', 'Raster.Slow.Axis', '!', 'Y']
+ rosetta['YUnits'] = ['MicroStage.raster.slow.units', 'RasterScanParams.SlowAxisUnits', '!', '$\\mu$m']
+
+ # TODO: Set an actual Z fixed position for Macro raster scan
+ rosetta['ZPosition'] = ['Macro.Raster.Fixed.Start', 'Raster.Fixed.Position','RasterScanParams.FixedAxisPosition',
+ 'Raster.Stack.Position', '!', 0]
+ rosetta['ZLabel'] = ['Macro.Raster.Fixed.Axis', 'RasterScanParams.FixedAxis', 'Raster.Fixed.Axis', 'Raster.Stack.Axis', '!', 'Z']
# Color Calibration
- rosetta['ColorCenterWL'] = ['Spectro.CenterWavelength',
- 'Spectro.CurrentWavelength', 'Calib.ctr_wl',
- '!', 729.994]
+ rosetta['ColorCenterWL'] = ['Spectrometer.calib.ctr_wl', 'Spectro.CenterWavelength', 'Spectro.CurrentWavelength', 'Calib.ctr_wl', '!', 729.994]
rosetta['ColorUnits'] = ['Calib.units', '!', 'nm']
- rosetta['ColorChannels'] = ['Calib.n_pix', 'Spectro.SpectralPixels','!', 1600]
- rosetta['ColorCalibWL'] = ['Calib.ctr_wl0', 'Spectro.CalibWavelength', '!', 729.994]
- rosetta['ColorPolyVals'] = ['Calib.a_vec', 'Spectro.Avec', '!', [-0.167740721307557,
- 863.8736708961577]]
+ rosetta['ColorChannels'] = ['Spectrometer.calib.n_pix', 'Calib.n_pix', 'Spectro.SpectralPixels', '!', 1600]
+ rosetta['ColorCalibWL'] = ['Spectrometer.calib.ctr_wl0', 'Calib.ctr_wl0', 'Spectro.CalibWavelength', '!', 729.994]
+ rosetta['ColorPolyVals'] = ['Spectrometer.calib.a_vec', 'Calib.a_vec', 'Spectro.Avec', '!', [-0.167740721307557, 863.8736708961577]]
- rosetta['ColorProbe'] = ['Calib.probe', 'Spectro.ProbeWavelength', '!', 771.461]
+ rosetta['ColorProbe'] = ['Spectrometer.calib.probe', 'Calib.probe', 'Spectro.ProbeWavelength', '!', 771.461]
rosetta['ColorWnMode'] = ['!', True]
# Color Calibration Original
rosetta['OrigColorCenterWL'] = ['CalibOrig.ctr_wl',
'Spectro.CenterWavelength',
- 'Spectro.CurrentWavelength',
- ]
+ 'Spectro.CurrentWavelength']
rosetta['OrigColorUnits'] = ['CalibOrig.units']
rosetta['OrigColorChannels'] = ['CalibOrig.n_pix']
rosetta['OrigColorCalibWL'] = ['CalibOrig.ctr_wl']
@@ -64,6 +61,7 @@ def special_nist_bcars2():
return rosetta
+
def special_nist_bcars1_sample_scan():
"""
Return import attributes particular to the "BCARS 1" system at NIST
@@ -88,7 +86,7 @@ def special_nist_bcars1_sample_scan():
rosetta['ZLabel'] = 'RasterScanParams.FixedAxis'
rosetta['ColorCenterWL'] = ['Frequency Calibration.CenterWavelength', '!', 696.831]
- rosetta['ColorUnits'] = ['!','nm']
+ rosetta['ColorUnits'] = ['!', 'nm']
rosetta['ColorChannels'] = ['Frequency Calibration.freq index length', '!', 512]
rosetta['ColorCalibWL'] = ['Frequency Calibration.CenterWavelength', '!', 696.831]
@@ -99,8 +97,8 @@ def special_nist_bcars1_sample_scan():
rosetta['ColorPolyVals'] = ['Frequency Calibration.Polyvals', '!',
[-0.50418919, 825.651318]]
- rosetta['ColorProbe'] = ['Frequency Calibration.probe(nm)','!', 830.0]
+ rosetta['ColorProbe'] = ['Frequency Calibration.probe(nm)', '!', 830.0]
rosetta['ColorWnMode'] = ['!', True]
- # rosetta['ColorCalibWN'] = ['Processing.WNCalib','Processing.WNCalibOrig']
+ # rosetta['ColorCalibWN'] = ['Processing.WNCalib', 'Processing.WNCalibOrig']
- return rosetta
\ No newline at end of file
+ return rosetta
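
Each rosetta entry above is an ordered list of candidate metadata keys; anything after the ``'!'`` sentinel is a default value used when no key matches. A minimal resolver in the spirit of ``crikit.io.meta_configs`` / ``rosetta_query`` (a simplified sketch, not the shipped implementation)::

    def resolve(entry, meta):
        """Return (value, source_key) for a rosetta entry against a meta dict.

        Keys are tried in order; on reaching '!', the following item is
        returned as the default. Simplified sketch of rosetta_query.
        """
        seq = entry if isinstance(entry, list) else [entry]
        for n, key in enumerate(seq):
            if key == '!':
                return (seq[n + 1], 'default')
            if key in meta:
                return (meta[key], key)
        return None

    meta = {'Raster.Fast.StepSize': 0.5}
    print(resolve(['Macro.Raster.Fast.StepSize',
                   'RasterScanParams.FastAxisStepSize',
                   'Raster.Fast.StepSize', '!', 1], meta))
    # -> (0.5, 'Raster.Fast.StepSize')
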
diff --git a/crikit/io/meta_process.py b/crikit/io/meta_process.py
index 8d55af3..a4db4a0 100644
--- a/crikit/io/meta_process.py
+++ b/crikit/io/meta_process.py
@@ -7,9 +7,9 @@
from crikit.data.frequency import (calib_pix_wn as _calib_pix_wn,
calib_pix_wl as _calib_pix_wl)
-from crikit.data.spectrum import Spectrum as _Spectrum
+from crikit.data.spectra import Spectrum as _Spectrum
from crikit.data.spectra import Spectra as _Spectra
-from crikit.data.hsi import Hsi as _Hsi
+from crikit.data.spectra import Hsi as _Hsi
import numpy as _np
@@ -38,7 +38,7 @@ def rosetta_query(key, rosetta, output_cls_instance):
break
else:
pass
- except:
+ except Exception:
temp_val = None
temp_key = None
@@ -50,7 +50,7 @@ def rosetta_query(key, rosetta, output_cls_instance):
elif isinstance(rosetta[key],str):
try:
temp = output_cls_instance._meta[rosetta[key]]
- except:
+ except Exception:
return None
else:
return (temp, rosetta[key])
@@ -72,7 +72,7 @@ def meta_process(rosetta, output_cls_instance):
calib_dict['a_vec'] = temp[0]
del temp
- temp = rosetta_query('ColorChannels',rosetta, output_cls_instance)
+ temp = rosetta_query('ColorChannels', rosetta, output_cls_instance)
print('Color/Frequency-Channels: {} from {}'.format(temp[0], temp[1]))
if temp[0] != output_cls_instance.shape[-1]:
print('WARNING: Number of color channels assigned in meta data ({}) disagrees with datacube size ({})'.format(temp[0], output_cls_instance.shape[-1]))
@@ -136,7 +136,7 @@ def meta_process(rosetta, output_cls_instance):
calib_orig_dict['probe'] = rosetta_query('OrigColorProbe',rosetta, output_cls_instance)[0]
calib_orig_dict['units'] = rosetta_query('OrigColorUnits',rosetta, output_cls_instance)[0]
- except:
+ except Exception:
print('Original calibration not found.')
else:
print('Original calibration found.')
@@ -256,7 +256,7 @@ def meta_process(rosetta, output_cls_instance):
output_cls_instance.reps.data = _np.arange(output_cls_instance.data.shape[0])
# print(output_cls_instance.reps.data.shape)
output_cls_instance.reps.update_calib_from_data()
- except:
+ except Exception:
print('Something failed in meta_process: Spectra rep-calib')
elif type(output_cls_instance) == _Spectrum:
diff --git a/crikit/io/tests/test_hdf5.py b/crikit/io/tests/test_hdf5.py
index f0c7acd..5600d1c 100644
--- a/crikit/io/tests/test_hdf5.py
+++ b/crikit/io/tests/test_hdf5.py
@@ -4,12 +4,12 @@
import pytest
import h5py
-import lazy5
+import crikit.io.lazy5 as lazy5
from crikit.io.hdf5 import hdf_import_data
-from crikit.data.spectrum import Spectrum
+from crikit.data.spectra import Spectrum
from crikit.data.spectra import Spectra
-from crikit.data.hsi import Hsi
+from crikit.data.spectra import Hsi
@pytest.fixture(scope="module")
def hdf_dataset():
diff --git a/crikit/measurement/peakfind.py b/crikit/measurement/peakfind.py
index 0c4e1cd..c8d448a 100644
--- a/crikit/measurement/peakfind.py
+++ b/crikit/measurement/peakfind.py
@@ -129,7 +129,7 @@ def cwt_diff(signal, wv_width, order=1, method='auto'):
try:
deriv = _convolve(deriv, PeakFinder.haar(wv_width), mode='same',
method=method)
- except:
+ except Exception:
print('peakfind.py | cwt_diff: Likely using an old version of SciPy (no convolve method parameter)')
deriv = _convolve(deriv, PeakFinder.haar(wv_width), mode='same')
return deriv
@@ -185,7 +185,7 @@ def calculate(self, y, x=None, recalc_cutoff=True, method='auto'):
amps_retr = []
for l, s in zip(omegas_retr_locs, sigma_retr_locs):
- dl = _np.abs(_np.ceil((l - s)/10)).astype(_np.integer)
+ dl = _np.abs(_np.ceil((l - s)/10)).astype(_np.int32)
amps_retr.append(_np.median(y[l-dl:l+dl+1]))
amps_retr = _np.array(amps_retr)
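
The ``_np.integer`` to ``_np.int32`` substitutions here (and in the hunks below) track NumPy's deprecation of abstract scalar types as dtype arguments: a ``DeprecationWarning`` since roughly NumPy 1.20, and an error on newer releases. An illustration, assuming a recent NumPy::

    import numpy as np

    x = np.ceil(np.array([3.2, 7.9]))

    # Deprecated/removed: np.integer is an abstract base class, not a dtype
    # x.astype(np.integer)   # warns on NumPy >= 1.20, fails on newer releases

    # A concrete dtype works everywhere
    idx = x.astype(np.int32)
    print(idx, idx.dtype)  # [4 8] int32
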
diff --git a/crikit/measurement/tests/test_measurement.py b/crikit/measurement/tests/test_measurement.py
index b899a9a..34976be 100644
--- a/crikit/measurement/tests/test_measurement.py
+++ b/crikit/measurement/tests/test_measurement.py
@@ -3,7 +3,7 @@
import pytest
-from crikit.data.hsi import Hsi
+from crikit.data.spectra import Hsi
from crikit.measurement.peakamps import (MeasurePeak, MeasurePeakAdd, MeasurePeakMinus,
MeasurePeakMultiply, MeasurePeakDivide,
MeasurePeakMax, MeasurePeakMin,
diff --git a/crikit/measurement/tests/test_measurements_complex.py b/crikit/measurement/tests/test_measurements_complex.py
index 2974dbe..d2f74cc 100644
--- a/crikit/measurement/tests/test_measurements_complex.py
+++ b/crikit/measurement/tests/test_measurements_complex.py
@@ -3,7 +3,7 @@
import pytest
-from crikit.data.hsi import Hsi
+from crikit.data.spectra import Hsi
from crikit.measurement.peakamps import (MeasurePeak, MeasurePeakAdd, MeasurePeakMinus,
MeasurePeakMultiply, MeasurePeakDivide,
MeasurePeakMax, MeasurePeakMin,
diff --git a/crikit/preprocess/algorithms/abstract_als.py b/crikit/preprocess/algorithms/abstract_als.py
index 96edfd2..15fe796 100644
--- a/crikit/preprocess/algorithms/abstract_als.py
+++ b/crikit/preprocess/algorithms/abstract_als.py
@@ -63,7 +63,7 @@ def calculate(self, signal):
# Dummy indep variable
x = _np.arange(self.rng.size)
x_sub = _np.linspace(x[0], x[-1], _np.round(x.size /
- self.redux).astype(_np.integer))
+ self.redux).astype(_np.int32))
self.redux_sig_shape = list(self.full_sig_shape)
self.redux_sig_shape[-1] = x_sub.size
self.redux_sig_spectral_size = self.redux_sig_shape[-1]
@@ -82,7 +82,7 @@ def calculate(self, signal):
# Spline interpolation/super-sampling
for coords in _np.ndindex(output_sampled.shape[0:-1]):
spl2 = _USpline(x_sub,output_sampled[coords],s=0)
- output[[*coords, self.rng]] = spl2(x)
+ output[(*coords, self.rng)] = spl2(x)
tmr -= _timeit.default_timer()
self.t = -tmr
@@ -99,5 +99,5 @@ def fix_rng(self):
return self._fix_rng
else:
redux_fix_rng = self._fix_rng / self.redux
- redux_fix_rng = _np.unique(redux_fix_rng).astype(_np.integer)
+ redux_fix_rng = _np.unique(redux_fix_rng).astype(_np.int32)
return redux_fix_rng
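
For reference, the redux logic these hunks touch solves on a grid decimated by the ``redux`` factor and then spline-interpolates back to the full spectral axis. A toy round-trip under that assumption (variable names here are illustrative)::

    import numpy as np
    from scipy.interpolate import UnivariateSpline

    redux = 4
    x = np.arange(1000)                    # full spectral axis
    x_sub = np.linspace(x[0], x[-1],
                        np.round(x.size / redux).astype(np.int32))

    y_sub = np.cos(x_sub / 150.)           # stand-in for a fit on the coarse grid
    spl = UnivariateSpline(x_sub, y_sub, s=0)  # s=0 -> interpolating spline
    y_full = spl(x)                        # super-sample back to the full axis
    print(x_sub.size, y_full.shape)        # 250 (1000,)
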
diff --git a/crikit/preprocess/algorithms/als.py b/crikit/preprocess/algorithms/als.py
index abbe12c..fda37bd 100644
--- a/crikit/preprocess/algorithms/als.py
+++ b/crikit/preprocess/algorithms/als.py
@@ -88,7 +88,7 @@ def asym_param(self):
elif self.redux > 1:
x = _np.arange(self.rng.size)
x_sub = _np.linspace(x[0], x[-1], _np.round(x.size /
- self.redux).astype(_np.integer))
+ self.redux).astype(_np.int32))
spl = _USpline(x,self._asym_param[self.rng],s=0)
return spl(x_sub)
@@ -159,7 +159,7 @@ def _calc(self, signal):
# Solve A * baseline_current = w_sp * Signal
_cholmod.linsolve(minimazation_matrix,x,uplo='U')
- except:
+ except Exception:
print('Failure in Cholesky factorization')
break
else:
diff --git a/crikit/preprocess/crop.py b/crikit/preprocess/crop.py
index 62dc856..2bd07a2 100644
--- a/crikit/preprocess/crop.py
+++ b/crikit/preprocess/crop.py
@@ -32,7 +32,7 @@ def _calc(self, data, ret_obj):
self.zero_col = _np.nonzero(row_sums)[0][self.fol]
ret_obj[:, self.zero_col, :] *= 0
- except:
+ except Exception:
return False
else:
return True
@@ -74,7 +74,7 @@ def _calc(self, data, ret_obj):
self.zero_row = _np.nonzero(col_sums)[0][self.fol]
ret_obj[self.zero_row, :, :] *= 0
- except:
+ except Exception:
return False
else:
return True
diff --git a/crikit/preprocess/denoise.py b/crikit/preprocess/denoise.py
index 4a28ba9..a631d57 100644
--- a/crikit/preprocess/denoise.py
+++ b/crikit/preprocess/denoise.py
@@ -75,7 +75,7 @@ def _calc(self, data, ret_obj):
else:
self._U, self._s, self._Vh = _svd(data[..., self.rng],
full_matrices=False)
- except:
+ except Exception:
return False
else:
return True
@@ -180,7 +180,7 @@ def _calc(self, U, s, Vh, ret_obj):
# shp)
ret_obj[..., self.rng] += _np.reshape(_np.dot(U[:, self.svs], _np.dot(_np.diag(s[self.svs]),Vh[self.svs, :])), shp)
- except:
+ except Exception:
return False
else:
return True
diff --git a/crikit/preprocess/standardize.py b/crikit/preprocess/standardize.py
index d3193e1..e099279 100644
--- a/crikit/preprocess/standardize.py
+++ b/crikit/preprocess/standardize.py
@@ -156,7 +156,7 @@ def _calc(self, data, ret_obj):
try:
ret_obj *= 0
ret_obj[..., self.rng] = out
- except:
+ except Exception:
return False
else:
return True
@@ -283,7 +283,7 @@ def _calc(self, data, ret_obj):
try:
ret_obj *= 0
ret_obj[..., self.rng] = out
- except:
+ except Exception:
return False
else:
return True
@@ -330,7 +330,7 @@ def calculate(self, data):
if __name__ == '__main__': # pragma: no cover
- from crikit.data.spectrum import Spectrum as _Spectrum
+ from crikit.data.spectra import Spectrum as _Spectrum
stddev = 20
gain = 1
diff --git a/crikit/preprocess/subtract_baseline.py b/crikit/preprocess/subtract_baseline.py
index fa99f33..c749e8d 100644
--- a/crikit/preprocess/subtract_baseline.py
+++ b/crikit/preprocess/subtract_baseline.py
@@ -83,7 +83,7 @@ def _calc(self, data, ret_obj, **kwargs):
# ret_obj[idx] -= self._inst_als.calculate(data[idx].real)
# else:
ret_obj[idx] -= self._inst_als.calculate(data[idx].real)
- except:
+ except Exception:
return False
else:
# print(self._inst_als.__dict__)
diff --git a/crikit/preprocess/subtract_dark.py b/crikit/preprocess/subtract_dark.py
index 6db990c..25cfeff 100644
--- a/crikit/preprocess/subtract_dark.py
+++ b/crikit/preprocess/subtract_dark.py
@@ -7,9 +7,9 @@
import numpy as _np
import copy as _copy
-from crikit.data.spectrum import Spectrum as _Spectrum
+from crikit.data.spectra import Spectrum as _Spectrum
from crikit.data.spectra import Spectra as _Spectra
-from crikit.data.hsi import Hsi as _Hsi
+from crikit.data.spectra import Hsi as _Hsi
from crikit.utils.general import (expand_1d_to_ndim as _expand_1d_to_ndim,
mean_nd_to_1d as _mean_nd_to_1d)
diff --git a/crikit/preprocess/subtract_mean.py b/crikit/preprocess/subtract_mean.py
index 762d85a..86fa73b 100644
--- a/crikit/preprocess/subtract_mean.py
+++ b/crikit/preprocess/subtract_mean.py
@@ -25,7 +25,7 @@ def _calc(self, data, ret_obj):
try:
ret_obj -= meaner[..., None]
- except:
+ except Exception:
return False
else:
return True
@@ -75,9 +75,9 @@ def calculate(self, data):
if __name__ == '__main__': # pragma: no cover
- from crikit.data.spectrum import Spectrum as _Spectrum
+ from crikit.data.spectra import Spectrum as _Spectrum
from crikit.data.spectra import Spectra as _Spectra
- from crikit.data.hsi import Hsi as _Hsi
+ from crikit.data.spectra import Hsi as _Hsi
diff --git a/crikit/preprocess/tests/test_subtract_dark.py b/crikit/preprocess/tests/test_subtract_dark.py
index a560d74..069c188 100644
--- a/crikit/preprocess/tests/test_subtract_dark.py
+++ b/crikit/preprocess/tests/test_subtract_dark.py
@@ -234,8 +234,8 @@ def test_sub_hsi_int_from_hsi_float(make_datasets):
""" Dark is HSI. Data is HSI. """
spectrum, spectra, hsi, shape = make_datasets
- data = (1*hsi).astype(np.float)
- dark = (1*hsi).astype(np.int)
+ data = (1*hsi).astype(float)
+ dark = (1*hsi).astype(int)
subdark = SubtractDark(dark)
@@ -250,7 +250,7 @@ def test_sub_hsi_int_from_hsi_float(make_datasets):
np.testing.assert_array_almost_equal(out, data - dark.mean(axis=(0,1))[None,None,:])
# Transform
- dark = (1*hsi).astype(np.int)
+ dark = (1*hsi).astype(int)
subdark = SubtractDark(dark)
success = subdark.transform(data)
np.testing.assert_array_almost_equal(data, out)
@@ -268,59 +268,59 @@ def test_transform_incompatible_dtypes(make_datasets):
spectrum, spectra, hsi, shape = make_datasets
# DOES NOT RAISE ERROR
- data = (1*hsi).astype(np.int)
- dark = (1*spectrum).astype(np.int)
+ data = (1*hsi).astype(int)
+ dark = (1*spectrum).astype(int)
subdark = SubtractDark(dark)
subdark.transform(data)
# DOES NOT RAISE ERROR
- data = (1*hsi).astype(np.float)
- dark = (1*spectrum).astype(np.int)
+ data = (1*hsi).astype(float)
+ dark = (1*spectrum).astype(int)
subdark = SubtractDark(dark)
subdark.transform(data)
# DOES NOT RAISE ERROR
- data = (1*hsi).astype(np.complex)
- dark = (1*spectrum).astype(np.int)
+ data = (1*hsi).astype(complex)
+ dark = (1*spectrum).astype(int)
subdark = SubtractDark(dark)
subdark.transform(data)
# DOES RAISE ERROR
- data = (1*hsi).astype(np.int)
- dark = (1*spectrum).astype(np.float)
+ data = (1*hsi).astype(int)
+ dark = (1*spectrum).astype(float)
subdark = SubtractDark(dark)
with pytest.raises(TypeError):
subdark.transform(data)
# DOES NOT RAISE ERROR
- data = (1*hsi).astype(np.float)
- dark = (1*spectrum).astype(np.float)
+ data = (1*hsi).astype(float)
+ dark = (1*spectrum).astype(float)
subdark = SubtractDark(dark)
subdark.transform(data)
# DOES NOT RAISE ERROR
- data = (1*hsi).astype(np.complex)
- dark = (1*spectrum).astype(np.float)
+ data = (1*hsi).astype(complex)
+ dark = (1*spectrum).astype(float)
subdark = SubtractDark(dark)
subdark.transform(data)
# DOES RAISE ERROR
- data = (1*hsi).astype(np.int)
- dark = (1*spectrum).astype(np.complex)
+ data = (1*hsi).astype(int)
+ dark = (1*spectrum).astype(complex)
subdark = SubtractDark(dark)
with pytest.raises(TypeError):
subdark.transform(data)
# DOES RAISE ERROR
- data = (1*hsi).astype(np.float)
- dark = (1*spectrum).astype(np.complex)
+ data = (1*hsi).astype(float)
+ dark = (1*spectrum).astype(complex)
subdark = SubtractDark(dark)
with pytest.raises(TypeError):
subdark.transform(data)
# DOES NOT RAISE ERROR
- data = (1*hsi).astype(np.complex)
- dark = (1*spectrum).astype(np.complex)
+ data = (1*hsi).astype(complex)
+ dark = (1*spectrum).astype(complex)
subdark = SubtractDark(dark)
subdark.transform(data)
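
The ``np.float``, ``np.int``, and ``np.complex`` replacements in these tests follow NumPy 1.20's deprecation, and 1.24's removal, of the builtin-type aliases; the Python builtins are the supported dtype spellings::

    import numpy as np

    a = np.ones(3)
    print(a.astype(float).dtype)    # float64
    print(a.astype(int).dtype)      # platform default int (e.g. int64)
    print(a.astype(complex).dtype)  # complex128
    # a.astype(np.float)            # AttributeError on NumPy >= 1.24
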
diff --git a/crikit/ui/classes_ui.py b/crikit/ui/classes_ui.py
index b8c2bac..d25f520 100644
--- a/crikit/ui/classes_ui.py
+++ b/crikit/ui/classes_ui.py
@@ -80,7 +80,7 @@ def std(self):
try:
temp = self.grayscaleimage.std()
return temp
- except:
+ except Exception:
return None
@property
@@ -88,7 +88,7 @@ def mean(self):
try:
temp = self.grayscaleimage.mean()
return temp
- except:
+ except Exception:
return None
@property
@@ -119,7 +119,7 @@ def grayscaleimage(self, value):
else:
pass
- except:
+ except Exception:
print('Set grayscaleimage error')
@grayscaleimage.deleter
diff --git a/crikit/ui/dialog_AbstractFactorization.py b/crikit/ui/dialog_AbstractFactorization.py
index 9d070bf..f85e4e6 100644
--- a/crikit/ui/dialog_AbstractFactorization.py
+++ b/crikit/ui/dialog_AbstractFactorization.py
@@ -144,7 +144,7 @@ def applyCheckBoxes(self):
else:
try:
self.selected_factors.remove(self._first_factor_visible+count)
- except:
+ except Exception:
pass
#print('Self.S: {}'.format(self.svddata.S[0:3]))
diff --git a/crikit/ui/dialog_SVD.py b/crikit/ui/dialog_SVD.py
index a0b1c25..43934b6 100644
--- a/crikit/ui/dialog_SVD.py
+++ b/crikit/ui/dialog_SVD.py
@@ -75,7 +75,7 @@ def max_factors(self):
"""
try:
return self.s.size
- except:
+ except Exception:
return None
def combiner(self, selections=None):
diff --git a/crikit/ui/dialog_kkOptions.py b/crikit/ui/dialog_kkOptions.py
index 4b249bb..65067a0 100644
--- a/crikit/ui/dialog_kkOptions.py
+++ b/crikit/ui/dialog_kkOptions.py
@@ -142,7 +142,7 @@ def dialogKKOptions(parent=None, data=None, conjugate=False):
# winDark = DialogDarkOptions.dialogDarkOptions(darkloaded=True)
- from crikit.data.hsi import Hsi as _Hsi
+ from crikit.data.spectra import Hsi as _Hsi
temp = _Hsi()
diff --git a/crikit/ui/dialog_ploteffect.py b/crikit/ui/dialog_ploteffect.py
index 0f6dc7f..8c05663 100644
--- a/crikit/ui/dialog_ploteffect.py
+++ b/crikit/ui/dialog_ploteffect.py
@@ -141,7 +141,7 @@ def make_plots(self, canvas, data, lw=1, ls='-'):
self.plot_labels()
try:
canvas.fig.tight_layout()
- except:
+ except Exception:
print('Tight layout failed (dialog_ploteffect')
diff --git a/crikit/ui/main_Mosaic.py b/crikit/ui/main_Mosaic.py
index 90de4d4..434bb64 100644
--- a/crikit/ui/main_Mosaic.py
+++ b/crikit/ui/main_Mosaic.py
@@ -30,9 +30,9 @@
from sciplot.ui.widget_mpl import MplCanvas as _MplCanvas
-import lazy5
-from lazy5.utils import FidOrFile, fullpath
-from lazy5.ui.QtHdfLoad import HdfLoad
+import crikit.io.lazy5 as lazy5
+from crikit.io.lazy5.utils import FidOrFile, fullpath
+from crikit.io.lazy5.ui.QtHdfLoad import HdfLoad
class DnDReorderListWidget(_QListWidget):
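
Since LazyHDF5 is now vendored as ``crikit.io.lazy5``, downstream code only swaps the import path; the names used throughout this patch (``FidOrFile``, ``fullpath``, ``HdfLoad``) are unchanged::

    # Before (external package):
    # import lazy5
    # from lazy5.utils import FidOrFile, fullpath
    # from lazy5.ui.QtHdfLoad import HdfLoad

    # After (vendored copy, same names):
    import crikit.io.lazy5 as lazy5
    from crikit.io.lazy5.utils import FidOrFile, fullpath
    from crikit.io.lazy5.ui.QtHdfLoad import HdfLoad
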
diff --git a/crikit/ui/qt5/make_uis.bat b/crikit/ui/qt5/make_uis.bat
new file mode 100644
index 0000000..24db8e5
--- /dev/null
+++ b/crikit/ui/qt5/make_uis.bat
@@ -0,0 +1 @@
+call pyuic5.bat ./crikit/ui/qt5/ui_CRIkit.ui -o ./crikit/ui/qt_CRIkit.py --from-imports
\ No newline at end of file
diff --git a/crikit/ui/qt5/ui_CRIkit.ui b/crikit/ui/qt5/ui_CRIkit.ui
index b64986e..6a4567d 100644
--- a/crikit/ui/qt5/ui_CRIkit.ui
+++ b/crikit/ui/qt5/ui_CRIkit.ui
@@ -396,7 +396,7 @@ This will be saved to the processed file.
   <x>0</x>
   <y>0</y>
   <width>1200</width>
-  <height>19</height>
+  <height>20</height>
diff --git a/crikit/ui/qt_CRIkit.py b/crikit/ui/qt_CRIkit.py
index 4175e9a..5fb1041 100644
--- a/crikit/ui/qt_CRIkit.py
+++ b/crikit/ui/qt_CRIkit.py
@@ -1,15 +1,13 @@
# -*- coding: utf-8 -*-
-# Form implementation generated from reading ui file '.\ui_CRIkit.ui'
+# Form implementation generated from reading ui file './crikit/ui/qt5/ui_CRIkit.ui'
#
-# Created by: PyQt5 UI code generator 5.13.0
+# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
-
from PyQt5 import QtCore, QtGui, QtWidgets
-
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
@@ -187,7 +185,7 @@ def setupUi(self, MainWindow):
self.gridLayout_7.addLayout(self.verticalLayout_3, 0, 0, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
- self.menubar.setGeometry(QtCore.QRect(0, 0, 1200, 19))
+ self.menubar.setGeometry(QtCore.QRect(0, 0, 1200, 20))
self.menubar.setObjectName("menubar")
self.menuFile = QtWidgets.QMenu(self.menubar)
self.menuFile.setObjectName("menuFile")
@@ -542,8 +540,12 @@ def setupUi(self, MainWindow):
self.actionAutoCropDarkSpectra.setObjectName("actionAutoCropDarkSpectra")
self.actionAutoCropNRBSpectra = QtWidgets.QAction(MainWindow)
self.actionAutoCropNRBSpectra.setObjectName("actionAutoCropNRBSpectra")
+ self.actionOpen_HDF_Macro_Raster_NIST = QtWidgets.QAction(MainWindow)
+ self.actionOpen_HDF_Macro_Raster_NIST.setIcon(icon)
+ self.actionOpen_HDF_Macro_Raster_NIST.setObjectName("actionOpen_HDF_Macro_Raster_NIST")
self.menuFile.addAction(self.actionOpenHDFNIST)
self.menuFile.addAction(self.actionOpenDLMNIST)
+ self.menuFile.addAction(self.actionOpen_HDF_Macro_Raster_NIST)
self.menuFile.addAction(self.actionSave)
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionOpenHDFNISTOOC)
@@ -770,4 +772,6 @@ def retranslateUi(self, MainWindow):
self.actionCropNRBSpectra.setText(_translate("MainWindow", "Crop NRB Spectra"))
self.actionAutoCropDarkSpectra.setText(_translate("MainWindow", "Auto-Crop Dark Spectra"))
self.actionAutoCropNRBSpectra.setText(_translate("MainWindow", "Auto-Crop NRB Spectra"))
+ self.actionOpen_HDF_Macro_Raster_NIST.setText(_translate("MainWindow", "Open HDF Macro Raster (NIST)..."))
+
from . import icons_all_rc
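
The generated code above follows the standard PyQt5 recipe for a new menu entry: create the ``QAction``, add it to the menu, set its text in ``retranslateUi``, and connect ``triggered`` in the application class. A minimal standalone sketch with illustrative names::

    import sys
    from PyQt5 import QtWidgets

    app = QtWidgets.QApplication(sys.argv)
    win = QtWidgets.QMainWindow()

    menu_file = win.menuBar().addMenu('&File')
    action_open_macro = QtWidgets.QAction(win)                  # create the action
    action_open_macro.setText('Open HDF Macro Raster (NIST)...')
    menu_file.addAction(action_open_macro)                      # add it to the menu
    action_open_macro.triggered.connect(lambda: print('open'))  # connect the slot

    win.show()
    # sys.exit(app.exec_())  # uncomment to run the event loop
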
diff --git a/crikit/ui/utils/check_requirements.py b/crikit/ui/utils/check_requirements.py
index d5b5f7e..8322be2 100644
--- a/crikit/ui/utils/check_requirements.py
+++ b/crikit/ui/utils/check_requirements.py
@@ -16,8 +16,7 @@ def check_requirements():
'matplotlib':None,
'scipy':None,
'sciplot': '0.2.2',
- 'cvxopt':None,
- 'lazy5':'0.2.2'}
+ 'cvxopt':None}
output = []
for r in requirement_dict:
diff --git a/crikit/ui/widget_DeTrending.py b/crikit/ui/widget_DeTrending.py
index 662ac3a..355d312 100644
--- a/crikit/ui/widget_DeTrending.py
+++ b/crikit/ui/widget_DeTrending.py
@@ -23,7 +23,7 @@
# Try to add arPLS to detrend options
try:
from crikit.ui.widget_ArPLS import widgetArPLS
-except:
+except Exception:
pass
else:
_widget_list_names.append('ArPLS')
@@ -72,13 +72,13 @@ def __init__(self, parent = None):
# to this container's version of the change-signal
try:
wdgt.changed.connect(self.widgetOptionsChanged)
- except:
+ except Exception:
pass
else:
wdgt.setVisible(False)
try:
wdgt.changed.disconnect(self.widgetOptionsChanged)
- except:
+ except Exception:
pass
# SIGNALS & SLOTS
@@ -113,7 +113,7 @@ def changeWidget(self, buttonId):
# to this container's version of the change-signal
try:
wdgt.changed.connect(self.widgetOptionsChanged)
- except:
+ except Exception:
pass
else:
wdgt.setVisible(False)
@@ -121,7 +121,7 @@ def changeWidget(self, buttonId):
# Disconnect non-active widget's changed-signal
try:
wdgt.changed.disconnect(self.widgetOptionsChanged)
- except:
+ except Exception:
pass
self.changed.emit()
diff --git a/crikit/ui/widget_KK.py b/crikit/ui/widget_KK.py
index 17e5c62..3face4d 100644
--- a/crikit/ui/widget_KK.py
+++ b/crikit/ui/widget_KK.py
@@ -136,7 +136,7 @@ def fcn(self, data_in):
nrb = data_in[0]
cars = data_in[1]
- data_out = _np.zeros(cars.shape, dtype=_np.complex)
+ data_out = _np.zeros(cars.shape, dtype=complex)
cars_amp_offset = self.parameters['cars_amp_offset']
nrb_amp_offset = self.parameters['nrb_amp_offset']
diff --git a/crikit/ui/widget_images.py b/crikit/ui/widget_images.py
index e7679f4..dc86a55 100644
--- a/crikit/ui/widget_images.py
+++ b/crikit/ui/widget_images.py
@@ -246,7 +246,7 @@ def __init__(self, parent = None, **kwargs):
cmap=self.colormode.ui.comboBoxColormap.currentText())
try:
self.mpl.fig.tight_layout()
- except:
+ except Exception:
print('tight_layout failed (widget_images 1')
# Insert canvas widget into this widget
@@ -337,7 +337,7 @@ def spinBoxMinMaxSet(self):
cmap=self.colormode.ui.comboBoxColormap.currentText())
self.mpl.draw()
- except:
+ except Exception:
print('Error in spinBoxMinMaxSet')
def checkBoxRemOutliers(self):
@@ -380,7 +380,7 @@ def checkBoxFixed(self):
self.data.compress_low = False
else:
self.data.compress_low = True
- except:
+ except Exception:
pass
else:
self.data.setmin = None
@@ -456,7 +456,7 @@ def __init__(self, parent=None, **kwargs):
self.popimage.ui.pushButtonGSPop.setVisible(True)
try:
self.popimage.ui.pushButtonPop.pressed.disconnect()
- except:
+ except Exception:
pass
self.popimage.ui.pushButtonPop.pressed.connect(lambda: self.createImg_Ext(img = self.data.image,
showcbar=False,
@@ -570,7 +570,7 @@ def __init__(self, sgl_color_widget_list = None, parent = None, **kwargs):
extent = winextent)
try:
self.mpl.fig.tight_layout()
- except:
+ except Exception:
print('tight_layout failed (widget_image: 3')
# Insert mpl widget into this widget
@@ -659,7 +659,7 @@ def changeMode(self):
extent = self.data.winextent)
try:
self.mpl.fig.tight_layout()
- except:
+ except Exception:
print('tight_layout failed (widget_image: 3')
self.mpl.draw()
diff --git a/crikit/ui/widget_mergeNRBs.py b/crikit/ui/widget_mergeNRBs.py
index 70f1f51..99d8cf0 100644
--- a/crikit/ui/widget_mergeNRBs.py
+++ b/crikit/ui/widget_mergeNRBs.py
@@ -156,12 +156,12 @@ def fcn(self, data_in):
left_side_scale=self.parameters['scale_left'])
if self.fullRange:
- pix = _np.arange(self.wn.size, dtype=_np.integer)
+ pix = _np.arange(self.wn.size, dtype=_np.int32)
else:
list_rng_pix = _find_nearest(self.wn, self.rng)[1]
pix = _np.arange(list_rng_pix[0],list_rng_pix[1]+1,
- dtype=_np.integer)
+ dtype=_np.int32)
nrb_merged = inst_nrb_merge.calculate()
kkd = _np.zeros(data_in.shape)
diff --git a/crikit/utils/breadcrumb.py b/crikit/utils/breadcrumb.py
index 88c4236..bf344ba 100644
--- a/crikit/utils/breadcrumb.py
+++ b/crikit/utils/breadcrumb.py
@@ -125,14 +125,14 @@ def dset_name_suffix(self):
#print(step[0])
temp = temp + '_' + step[0]
return temp
- except:
+ except Exception:
return None
# METHODS
@staticmethod
def backup_pickle(data, fname, addl_attr = None):
"""
- Dump current state of data (class of type crikit.data.spectrum or
+ Dump current state of data (class of type crikit.data.spectra or
subclass)to pickle file (filename= fname).
Can append additional attributes (addl_attr) to \
@@ -250,14 +250,14 @@ def pop_to_last(self, all=False):
test = BCPre(offset=10)
try:
test.add_step('Test1')
- except:
+ except Exception:
print('Expected Error\n')
else:
print('Should have raised an error')
try:
test.add_step(['Test',1])
- except:
+ except Exception:
print('Expected Error\n')
else:
print('Should have raised an error')
diff --git a/crikit/utils/general.py b/crikit/utils/general.py
index 716c59b..25eed87 100644
--- a/crikit/utils/general.py
+++ b/crikit/utils/general.py
@@ -46,7 +46,7 @@ def pad(y, pad_width, mode):
shaper_out = list(y.shape)
shaper_out[-1] += 2*pad_width
y_pad = _np.zeros(shaper_out, dtype=y.dtype)
- window = _np.zeros(shaper_out[-1], dtype=_np.integer)
+ window = _np.zeros(shaper_out[-1], dtype=_np.int32)
y_pad[...,pad_width:shaper[-1]+pad_width] = 1*y
window[pad_width:shaper[-1]+pad_width] = 1
@@ -120,14 +120,14 @@ def pad_edge_mean(y, pad_width, n_edge=1, axis=-1):
"""
if pad_width == 0: # No padding
- window = _np.ones((y.shape[axis]), dtype=_np.integer)
+ window = _np.ones((y.shape[axis]), dtype=_np.int32)
y_pad = y
elif pad_width > 0:
orig_shape = y.shape
pad_shape = list(orig_shape)
pad_shape[axis] += pad_width*2
- window = _np.zeros((pad_shape[axis]), dtype=_np.integer)
+ window = _np.zeros((pad_shape[axis]), dtype=_np.int32)
window[pad_width:-pad_width] = 1
y_pad = _np.zeros(pad_shape, dtype=y.dtype)
@@ -183,7 +183,7 @@ def std_nd_to_1d(data, axis=-1):
return vec
-def arange_nonzero(start, stop, dtype=_np.float):
+def arange_nonzero(start, stop, dtype=float):
"""
Similar to numpy arange but only returns non-zero elements
"""
diff --git a/docs/build/doctrees/algorithms.doctree b/docs/build/doctrees/algorithms.doctree
index dac9ad9..1ca1f2f 100644
Binary files a/docs/build/doctrees/algorithms.doctree and b/docs/build/doctrees/algorithms.doctree differ
diff --git a/docs/build/doctrees/api.doctree b/docs/build/doctrees/api.doctree
index f19fe69..da04ece 100644
Binary files a/docs/build/doctrees/api.doctree and b/docs/build/doctrees/api.doctree differ
diff --git a/docs/build/doctrees/cri_walkthru.doctree b/docs/build/doctrees/cri_walkthru.doctree
index 32ab372..1b49e45 100644
Binary files a/docs/build/doctrees/cri_walkthru.doctree and b/docs/build/doctrees/cri_walkthru.doctree differ
diff --git a/docs/build/doctrees/crikit.cri.algorithms.doctree b/docs/build/doctrees/crikit.cri.algorithms.doctree
index f887f67..a465135 100644
Binary files a/docs/build/doctrees/crikit.cri.algorithms.doctree and b/docs/build/doctrees/crikit.cri.algorithms.doctree differ
diff --git a/docs/build/doctrees/crikit.cri.doctree b/docs/build/doctrees/crikit.cri.doctree
index 5ff7b53..29f7f44 100644
Binary files a/docs/build/doctrees/crikit.cri.doctree and b/docs/build/doctrees/crikit.cri.doctree differ
diff --git a/docs/build/doctrees/crikit.cri.tests.doctree b/docs/build/doctrees/crikit.cri.tests.doctree
index 996a093..26f5f3c 100644
Binary files a/docs/build/doctrees/crikit.cri.tests.doctree and b/docs/build/doctrees/crikit.cri.tests.doctree differ
diff --git a/docs/build/doctrees/crikit.data.doctree b/docs/build/doctrees/crikit.data.doctree
index 92c6778..3372ea7 100644
Binary files a/docs/build/doctrees/crikit.data.doctree and b/docs/build/doctrees/crikit.data.doctree differ
diff --git a/docs/build/doctrees/crikit.data.tests.doctree b/docs/build/doctrees/crikit.data.tests.doctree
index 94ec82f..2d1f76b 100644
Binary files a/docs/build/doctrees/crikit.data.tests.doctree and b/docs/build/doctrees/crikit.data.tests.doctree differ
diff --git a/docs/build/doctrees/crikit.datasets.doctree b/docs/build/doctrees/crikit.datasets.doctree
index f5edc6d..9723d79 100644
Binary files a/docs/build/doctrees/crikit.datasets.doctree and b/docs/build/doctrees/crikit.datasets.doctree differ
diff --git a/docs/build/doctrees/crikit.doctree b/docs/build/doctrees/crikit.doctree
index e825bf8..5437c4f 100644
Binary files a/docs/build/doctrees/crikit.doctree and b/docs/build/doctrees/crikit.doctree differ
diff --git a/docs/build/doctrees/crikit.io.doctree b/docs/build/doctrees/crikit.io.doctree
index 49ea8cf..34062d0 100644
Binary files a/docs/build/doctrees/crikit.io.doctree and b/docs/build/doctrees/crikit.io.doctree differ
diff --git a/docs/build/doctrees/crikit.io.lazy5.doctree b/docs/build/doctrees/crikit.io.lazy5.doctree
new file mode 100644
index 0000000..1e38f45
Binary files /dev/null and b/docs/build/doctrees/crikit.io.lazy5.doctree differ
diff --git a/docs/build/doctrees/crikit.io.lazy5.tests.doctree b/docs/build/doctrees/crikit.io.lazy5.tests.doctree
new file mode 100644
index 0000000..9b97cae
Binary files /dev/null and b/docs/build/doctrees/crikit.io.lazy5.tests.doctree differ
diff --git a/docs/build/doctrees/crikit.io.lazy5.ui.doctree b/docs/build/doctrees/crikit.io.lazy5.ui.doctree
new file mode 100644
index 0000000..2a69670
Binary files /dev/null and b/docs/build/doctrees/crikit.io.lazy5.ui.doctree differ
diff --git a/docs/build/doctrees/crikit.io.tests.doctree b/docs/build/doctrees/crikit.io.tests.doctree
index f44a16a..3948bb6 100644
Binary files a/docs/build/doctrees/crikit.io.tests.doctree and b/docs/build/doctrees/crikit.io.tests.doctree differ
diff --git a/docs/build/doctrees/crikit.measurement.doctree b/docs/build/doctrees/crikit.measurement.doctree
index d220697..1b8386a 100644
Binary files a/docs/build/doctrees/crikit.measurement.doctree and b/docs/build/doctrees/crikit.measurement.doctree differ
diff --git a/docs/build/doctrees/crikit.measurement.tests.doctree b/docs/build/doctrees/crikit.measurement.tests.doctree
index 784ca6a..3170e97 100644
Binary files a/docs/build/doctrees/crikit.measurement.tests.doctree and b/docs/build/doctrees/crikit.measurement.tests.doctree differ
diff --git a/docs/build/doctrees/crikit.preprocess.algorithms.doctree b/docs/build/doctrees/crikit.preprocess.algorithms.doctree
index 5b4c89f..f09204c 100644
Binary files a/docs/build/doctrees/crikit.preprocess.algorithms.doctree and b/docs/build/doctrees/crikit.preprocess.algorithms.doctree differ
diff --git a/docs/build/doctrees/crikit.preprocess.doctree b/docs/build/doctrees/crikit.preprocess.doctree
index 253f9f1..e27227b 100644
Binary files a/docs/build/doctrees/crikit.preprocess.doctree and b/docs/build/doctrees/crikit.preprocess.doctree differ
diff --git a/docs/build/doctrees/crikit.preprocess.tests.doctree b/docs/build/doctrees/crikit.preprocess.tests.doctree
index 1149be0..4510327 100644
Binary files a/docs/build/doctrees/crikit.preprocess.tests.doctree and b/docs/build/doctrees/crikit.preprocess.tests.doctree differ
diff --git a/docs/build/doctrees/crikit.transform.doctree b/docs/build/doctrees/crikit.transform.doctree
index 804a03c..437a714 100644
Binary files a/docs/build/doctrees/crikit.transform.doctree and b/docs/build/doctrees/crikit.transform.doctree differ
diff --git a/docs/build/doctrees/crikit.ui.doctree b/docs/build/doctrees/crikit.ui.doctree
index 744327b..b8d0467 100644
Binary files a/docs/build/doctrees/crikit.ui.doctree and b/docs/build/doctrees/crikit.ui.doctree differ
diff --git a/docs/build/doctrees/crikit.ui.utils.doctree b/docs/build/doctrees/crikit.ui.utils.doctree
index 84b2698..ede53cf 100644
Binary files a/docs/build/doctrees/crikit.ui.utils.doctree and b/docs/build/doctrees/crikit.ui.utils.doctree differ
diff --git a/docs/build/doctrees/crikit.utils.doctree b/docs/build/doctrees/crikit.utils.doctree
index 35ef262..7f89dea 100644
Binary files a/docs/build/doctrees/crikit.utils.doctree and b/docs/build/doctrees/crikit.utils.doctree differ
diff --git a/docs/build/doctrees/crikit.utils.tests.doctree b/docs/build/doctrees/crikit.utils.tests.doctree
index cdb70ff..91ffb76 100644
Binary files a/docs/build/doctrees/crikit.utils.tests.doctree and b/docs/build/doctrees/crikit.utils.tests.doctree differ
diff --git a/docs/build/doctrees/index.doctree b/docs/build/doctrees/index.doctree
index b3998f9..e372c15 100644
Binary files a/docs/build/doctrees/index.doctree and b/docs/build/doctrees/index.doctree differ
diff --git a/docs/build/doctrees/installing.doctree b/docs/build/doctrees/installing.doctree
index 2957df6..9f2ded4 100644
Binary files a/docs/build/doctrees/installing.doctree and b/docs/build/doctrees/installing.doctree differ
diff --git a/docs/build/doctrees/io.doctree b/docs/build/doctrees/io.doctree
index 0021039..60121c2 100644
Binary files a/docs/build/doctrees/io.doctree and b/docs/build/doctrees/io.doctree differ
diff --git a/docs/build/doctrees/license.doctree b/docs/build/doctrees/license.doctree
index 8ff6c5c..0a50841 100644
Binary files a/docs/build/doctrees/license.doctree and b/docs/build/doctrees/license.doctree differ
diff --git a/docs/build/doctrees/modules.doctree b/docs/build/doctrees/modules.doctree
index 6e6622e..0a96a98 100644
Binary files a/docs/build/doctrees/modules.doctree and b/docs/build/doctrees/modules.doctree differ
diff --git a/docs/build/doctrees/running.doctree b/docs/build/doctrees/running.doctree
index e390fb9..32cd40d 100644
Binary files a/docs/build/doctrees/running.doctree and b/docs/build/doctrees/running.doctree differ
diff --git a/docs/build/html/.buildinfo b/docs/build/html/.buildinfo
index 2f65d65..4f7216a 100644
--- a/docs/build/html/.buildinfo
+++ b/docs/build/html/.buildinfo
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: b094f045ad6f469eb719c812c762f46b
+config: 79acee3da732a2077d4e910c14203444
tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/docs/build/html/_modules/crikit/CRIkitUI.html b/docs/build/html/_modules/crikit/CRIkitUI.html
index 41f3628..8b9477e 100644
--- a/docs/build/html/_modules/crikit/CRIkitUI.html
+++ b/docs/build/html/_modules/crikit/CRIkitUI.html
@@ -5,15 +5,13 @@
- crikit.CRIkitUI — CRIKit2 0.4.1 documentation
-
-
-
+ crikit.CRIkitUI — CRIKit2 0.4.3 documentation
+
+
+
-
-
@@ -26,7 +24,7 @@ Navigation
modules |
- CRIKit2 0.4.1 documentation »
+ CRIKit2 0.4.3 documentation »
Module code »
crikit.CRIkitUI
@@ -100,14 +98,14 @@ Source code for crikit.CRIkitUI
from crikit.data.frequency import ( calib_pix_wn as _calib_pix_wn ,
calib_pix_wl as _calib_pix_wl )
-from crikit.data.hsi import Hsi
+from crikit.data.spectra import Hsi
from crikit.data.spectra import Spectra
-from crikit.data.spectrum import Spectrum
+from crikit.data.spectra import Spectrum
from crikit.datasets.model import Model as _Model
from crikit.io.macros import import_csv_nist_special1 as io_nist_dlm
-from crikit.io.macros import import_hdf_nist_special as io_nist
+from crikit.io.macros import ( import_hdf_nist_special as io_nist , hdf_nist_special_macroraster as io_nist_macro )
from crikit.io.macros import import_hdf_nist_special_ooc as io_nist_ooc
# from crikit.io.meta_configs import special_nist_bcars2 as _snb2
@@ -153,14 +151,14 @@ Source code for crikit.CRIkitUI
from sciplot.sciplotUI import SciPlotUI as _SciPlotUI
-import lazy5
-from lazy5.ui.QtHdfLoad import HdfLoad
+from crikit.io.lazy5.ui.QtHdfLoad import HdfLoad
+import crikit.io.lazy5 as lazy5
force_not_sw = False
try :
import crikit2_sw
-except :
+except Exception :
__sw_installed = False
# print('SW package not installed, using standard')
from crikit.ui.dialog_SVD import DialogSVD
@@ -183,7 +181,7 @@ Source code for crikit.CRIkitUI
try :
from crikit.ui.widget_Jupyter import QJupyterWidget
jupyter_flag = 1
-except :
+except Exception :
print ( 'No appropriate Jupyter/IPython installation found. Console will not be available' )
jupyter_flag = - 1
@@ -268,7 +266,7 @@ Source code for crikit.CRIkitUI
self . ui . sweeperVL . insertWidget ( 0 , self . img_BW , stretch = 1 , alignment = _QtCore . Qt . AlignHCenter )
try :
self . img_BW . mpl . fig . tight_layout ( pad = 2 )
- except :
+ except Exception :
print ( 'tight_layout failed (CrikitUI: 1' )
# ID used for matplotlib to connect to a figure
@@ -330,6 +328,7 @@ Source code for crikit.CRIkitUI
# Load Data
self . ui . actionOpenHDFNIST . triggered . connect ( self . fileOpenHDFNIST )
self . ui . actionOpenHDFNISTOOC . triggered . connect ( self . fileOpenHDFNISTOOC )
+ self . ui . actionOpen_HDF_Macro_Raster_NIST . triggered . connect ( self . fileOpenHDFMacroRasterNIST )
self . ui . actionLoadNRB . triggered . connect ( self . loadNRB )
self . ui . actionLoadDark . triggered . connect ( self . loadDark )
@@ -462,7 +461,7 @@ Source code for crikit.CRIkitUI
try :
str_banner = 'Welcome to the embedded ipython console \n\n '
self . jupyterConsole = QJupyterWidget ( customBanner = str_banner )
- except :
+ except Exception :
print ( 'Error loading embedded IPython Notebook' )
else :
self . ui . tabMain . addTab ( self . jupyterConsole , 'Jupyter/IPython Console' )
@@ -495,7 +494,7 @@ Source code for crikit.CRIkitUI
if temp is not None :
try :
self . fileOpenSuccess ( True )
- except :
+ except Exception :
print ( 'Error in input hsi' )
self . hsi = Hsi ()
@@ -506,7 +505,7 @@ Source code for crikit.CRIkitUI
self . hsi . x = temp
self . hsi . _x_rep . units = kwargs . get ( 'x_units' )
self . hsi . _x_rep . label = kwargs . get ( 'x_label' )
- except :
+ except Exception :
print ( 'Error in input x-array' )
self . hsi . x = None
@@ -517,7 +516,7 @@ Source code for crikit.CRIkitUI
self . hsi . y = temp
self . hsi . _y_rep . units = kwargs . get ( 'y_units' )
self . hsi . _y_rep . label = kwargs . get ( 'y_label' )
- except :
+ except Exception :
print ( 'Error in input y-array' )
self . hsi . y = None
@@ -528,7 +527,7 @@ Source code for crikit.CRIkitUI
self . hsi . freq . _data = temp
self . hsi . freq . _units = kwargs . get ( 'f_units' )
self . hsi . freq . _label = kwargs . get ( 'f_label' )
- except :
+ except Exception :
print ( 'Error in input freq-array (f)' )
self . hsi . freq . _data = None
@@ -538,7 +537,7 @@ Source code for crikit.CRIkitUI
self . hsi . data = kwargs . get ( 'data' )
self . hsi . check ()
self . fileOpenSuccess ( True )
- except :
+ except Exception :
print ( 'Error in input data' )
self . hsi = Hsi ()
@@ -685,7 +684,7 @@ Source code for crikit.CRIkitUI
for count in self . bcpre . cut_list :
try :
_os . remove ( count + '.pickle' )
- except :
+ except Exception :
print ( 'Error in deleting old pickle files' )
else :
del_flag += 1
@@ -698,11 +697,54 @@ Source code for crikit.CRIkitUI
print ( 'Closing HDF File' )
try :
self . fid . close ()
- except :
+ except Exception :
print ( 'Something failed in closing the file' )
else :
print ( 'Successfully closed HDF File' )
+[docs] def fileOpenHDFMacroRasterNIST(self, *args, dialog=True):
+    """
+    Open and load multiple datasets from an HDF file that describe a single image.
+    Used for a macrostage rastering mode at NIST.
+
+    dialog : bool
+        Present a GUI for file and dataset selection.
+    """
+    # Get data and load into CRI_HSI class
+    # This will need to change to accommodate multiple-file selection
+    if dialog:
+        try:
+            if (self.filename is not None) & (self.path is not None):
+                to_open = HdfLoad.getFileDataSets(_os.path.join(self.path, self.filename),
+                                                  parent=self, title='Hyperspectral Image')
+            else:
+                to_open = HdfLoad.getFileDataSets(self.path, parent=self, title='Hyperspectral Image')
+
+            print('to_open: {}'.format(to_open))
+            if to_open is not None:
+                self.path, self.filename, self.dataset_name = to_open
+        except Exception as e:
+            _traceback.print_exc(limit=1)
+            print('Could not open file. Corrupt or not appropriate file format: {}'.format(e))
+        else:
+            if to_open is not None:
+                self.hsi = Hsi()
+                print('Path: {}'.format(self.path))
+                print('filename: {}'.format(self.filename))
+                print('dset name: {}'.format(self.dataset_name))
+                success = io_nist_macro(self.path, self.filename, self.dataset_name,
+                                        self.hsi)
+                print('Was successful: {}'.format(success))
+                print('HSI shape: {}'.format(self.hsi.shape))
+                print('Success: {}'.format(success))
+                self.fileOpenSuccess(success)
+    else:
+        self.hsi = Hsi()
+        success = io_nist_macro(self.path, self.filename, self.dataset_name,
+                                self.hsi)
+        self.fileOpenSuccess(success)
[docs] def fileOpenHDFNIST ( self , * args , dialog = True ):
"""
Open and load HDF5 File
@@ -765,7 +807,7 @@
Source code for crikit.CRIkitUI
if to_open is not None :
self . path , self . filename , self . dataset_name = to_open
self . dataset_name = self . dataset_name [ 0 ]
- except :
+ except Exception :
print ( 'Could not open file. Corrupt or not appropriate file format.' )
else :
if to_open is not None :
@@ -814,9 +856,9 @@ Source code for crikit.CRIkitUI
"""
if success :
# * If HSI is integer dtype, convert to float
- if ( self . hsi . data . dtype . kind == 'i' ) & isinstance ( self . hsi . data , _np . ndarray ):
+ if ( self . hsi . data . dtype . kind in [ 'i' , 'u' ]) & isinstance ( self . hsi . data , _np . ndarray ):
print ( 'Converting HSI from int to float' )
- self . hsi . data = 1.0 * self . hsi . data
+ self . hsi . data = 1.0 * self . hsi . data
self . setWindowTitle ( ' {} : {} ' . format ( self . windowTitle (), self . filename ))
# FILE
@@ -980,7 +1022,7 @@ Source code for crikit.CRIkitUI
# signal then reconnect (or could have ignored, but this is easier)
try :
rgb_img . popimage . ui . pushButtonSpectrum . pressed . disconnect ()
- except :
+ except Exception :
pass
rgb_img . popimage . ui . pushButtonSpectrum . pressed . connect ( self . spectrumColorImg )
@@ -1015,7 +1057,7 @@ Source code for crikit.CRIkitUI
if success :
# If Dark is integer dtype, convert to float
- if self . dark . data . dtype . kind == 'i' :
+ if self . dark . data . dtype . kind in [ 'u' , 'i' ]:
print ( 'Converting Dark from int to float' )
self . dark . data = 1.0 * self . dark . data
@@ -1060,7 +1102,7 @@ Source code for crikit.CRIkitUI
if success :
# If Dark is integer dtype, convert to float
- if self . dark . data . dtype . kind == 'i' :
+ if self . dark . data . dtype . kind in [ 'u' , 'i' ]:
print ( 'Converting Dark from int to float' )
self . dark . data = 1.0 * self . dark . data
@@ -1104,7 +1146,7 @@ Source code for crikit.CRIkitUI
success = io_nist ( pth , filename , datasets , nrb )
if success :
# If NRB is integer dtype, convert to float
- if nrb . data . dtype . kind == 'i' :
+ if nrb . data . dtype . kind in [ 'u' , 'i' ]:
print ( 'Converting NRB from int to float' )
nrb . data = 1.0 * nrb . data
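
The widened dtype checks in this file use NumPy's one-character kind codes: ``'i'`` for signed and ``'u'`` for unsigned integers, so unsigned camera data is now also promoted to float. For example::

    import numpy as np

    for arr in (np.ones(2, np.int16), np.ones(2, np.uint16), np.ones(2, np.float32)):
        if arr.dtype.kind in ['i', 'u']:    # signed or unsigned integer
            arr = 1.0 * arr                 # promote to float
        print(arr.dtype)                    # int16 -> float64, uint16 -> float64, float32 stays
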
@@ -1239,7 +1281,7 @@ Source code for crikit.CRIkitUI
self . img_BW . mpl . ax . add_artist ( lg )
try :
self . img_BW . mpl . fig . tight_layout ( pad = 1 )
- except :
+ except Exception :
print ( 'tight_layout failed (CrikitUI: 2' )
self . img_BW . mpl . draw ()
@@ -1259,8 +1301,8 @@ Source code for crikit.CRIkitUI
if len ( self . x_loc_list ) == 1 :
self . img_BW . mpl . ax . plot ( self . x_loc_list , self . y_loc_list ,
- markerfacecolor = [ . 9 , . 9 , 0 ],
- markeredgecolor = [ . 9 , . 9 , 0 ],
+ markerfacecolor = [ .9 , .9 , 0 ],
+ markeredgecolor = [ .9 , .9 , 0 ],
marker = '+' ,
markersize = 10 ,
linestyle = 'None' )
@@ -1273,9 +1315,9 @@ Source code for crikit.CRIkitUI
linewidth = 2 ,
marker = '+' ,
markersize = 10 ,
- color = [ . 9 , . 9 , 0 ],
- markerfacecolor = [ . 9 , . 9 , 0 ],
- markeredgecolor = [ . 9 , . 9 , 0 ])
+ color = [ .9 , .9 , 0 ],
+ markerfacecolor = [ .9 , .9 , 0 ],
+ markeredgecolor = [ .9 , .9 , 0 ])
self . img_BW . mpl . ax . set_xlim ( getx )
self . img_BW . mpl . ax . set_ylim ( gety )
@@ -1290,9 +1332,9 @@ Source code for crikit.CRIkitUI
linewidth = 2 ,
marker = '+' ,
markersize = 10 ,
- color = [ . 9 , . 9 , 0 ],
- markerfacecolor = [ . 9 , . 9 , 0 ],
- markeredgecolor = [ . 9 , . 9 , 0 ])
+ color = [ .9 , .9 , 0 ],
+ markerfacecolor = [ .9 , .9 , 0 ],
+ markeredgecolor = [ .9 , .9 , 0 ])
self . img_BW . mpl . ax . set_xlim ( getx )
self . img_BW . mpl . ax . set_ylim ( gety )
self . img_BW . mpl . draw ()
@@ -1651,7 +1693,7 @@ Source code for crikit.CRIkitUI
if self . ui . actionUndo_Backup_Enabled . isChecked ():
try :
_BCPre . backup_pickle ( self . hsi , self . bcpre . id_list [ - 1 ])
- except :
+ except Exception :
print ( 'Error in pickle backup (Undo functionality)' )
else :
self . bcpre . backed_up ()
@@ -1955,8 +1997,8 @@ Source code for crikit.CRIkitUI
if len ( self . x_loc_list ) == 1 :
self . img_BW . mpl . ax . plot ( self . x_loc_list , self . y_loc_list ,
- markerfacecolor = [ . 9 , . 9 , 0 ],
- markeredgecolor = [ . 9 , . 9 , 0 ],
+ markerfacecolor = [ .9 , .9 , 0 ],
+ markeredgecolor = [ .9 , .9 , 0 ],
marker = '+' ,
markersize = 10 ,
linestyle = 'None' )
@@ -1969,9 +2011,9 @@ Source code for crikit.CRIkitUI
linewidth = 2 ,
marker = '+' ,
markersize = 10 ,
- color = [ . 9 , . 9 , 0 ],
- markerfacecolor = [ . 9 , . 9 , 0 ],
- markeredgecolor = [ . 9 , . 9 , 0 ])
+ color = [ .9 , .9 , 0 ],
+ markerfacecolor = [ .9 , .9 , 0 ],
+ markeredgecolor = [ .9 , .9 , 0 ])
self . img_BW . mpl . ax . set_xlim ( getx )
self . img_BW . mpl . ax . set_ylim ( gety )
@@ -2027,7 +2069,7 @@ Source code for crikit.CRIkitUI
self . ui . freqSlider . setSliderPosition ( pos )
self . changeSlider ()
- except :
+ except Exception :
pass
[docs] def lineEditPixChanged ( self ):
@@ -2100,7 +2142,7 @@
Source code for crikit.CRIkitUI
try :
currentop = self . img_RGB_list [ rgbnum ] . math . ui . comboBoxOperations . currentText ()
self . img_RGB_list [ rgbnum ] . data . operation = currentop
- except :
+ except Exception :
pass
[docs] def condOpChange ( self ):
@@ -2112,7 +2154,7 @@
Source code for crikit.CRIkitUI
try :
currentop = self . img_RGB_list [ rgbnum ] . math . ui . comboBoxCondOps . currentText ()
self . img_RGB_list [ rgbnum ] . data . condoperation = currentop
- except :
+ except Exception :
pass
[docs] def condInEqualityChange ( self ):
@@ -2124,7 +2166,7 @@
Source code for crikit.CRIkitUI
try :
currentop = self . img_RGB_list [ rgbnum ] . math . ui . comboBoxCondInEquality . currentText ()
self . img_RGB_list [ rgbnum ] . data . inequality = currentop
- except :
+ except Exception :
pass
[docs] def spinBoxInEqualityChange ( self ):
@@ -2136,7 +2178,7 @@
Source code for crikit.CRIkitUI
try :
self . img_RGB_list [ rgbnum ] . data . inequalityval = \
self . img_RGB_list [ rgbnum ] . math . ui . spinBoxInEquality . value ()
- except :
+ except Exception :
pass
[docs] def doKK ( self ):
@@ -2162,7 +2204,7 @@
Source code for crikit.CRIkitUI
conj = True
else :
conj = False
- except :
+ except Exception :
conj = False
out = DialogKKOptions . dialogKKOptions ( data = [ self . hsi . f ,
@@ -2206,7 +2248,7 @@ Source code for crikit.CRIkitUI
if self . ui . actionUndo_Backup_Enabled . isChecked ():
try :
_BCPre . backup_pickle ( self . hsi , self . bcpre . id_list [ - 1 ])
- except :
+ except Exception :
print ( 'Error in pickle backup (Undo functionality)' )
else :
self . bcpre . backed_up ()
@@ -2307,7 +2349,7 @@ Source code for crikit.CRIkitUI
if self . ui . actionUndo_Backup_Enabled . isChecked ():
try :
_BCPre . backup_pickle ( self . hsi , self . bcpre . id_list [ - 1 ])
- except :
+ except Exception :
print ( 'Error in pickle backup (Undo functionality)' )
else :
self . bcpre . backed_up ()
@@ -2378,7 +2420,7 @@ Source code for crikit.CRIkitUI
if self . ui . actionUndo_Backup_Enabled . isChecked ():
try :
_BCPre . backup_pickle ( self . hsi , self . bcpre . id_list [ - 1 ])
- except :
+ except Exception :
print ( 'Error in pickle backup (Undo functionality)' )
else :
self . bcpre . backed_up ()
@@ -2424,7 +2466,7 @@ Source code for crikit.CRIkitUI
if self . ui . actionUndo_Backup_Enabled . isChecked ():
try :
_BCPre . backup_pickle ( self . hsi , self . bcpre . id_list [ - 1 ])
- except :
+ except Exception :
print ( 'Error in pickle backup (Undo functionality)' )
else :
self . bcpre . backed_up ()
@@ -2499,7 +2541,7 @@ Source code for crikit.CRIkitUI
if self . ui . actionUndo_Backup_Enabled . isChecked ():
try :
_BCPre . backup_pickle ( self . hsi , self . bcpre . id_list [ - 1 ])
- except :
+ except Exception :
print ( 'Error in pickle backup (Undo functionality)' )
else :
self . bcpre . backed_up ()
@@ -2519,7 +2561,7 @@ Source code for crikit.CRIkitUI
for count in self . bcpre . cut_list :
try :
_os . remove ( count + '.pickle' )
- except :
+ except Exception :
print ( 'Error in deleting old pickle files' )
else :
del_flag += 1
@@ -2565,7 +2607,7 @@ Source code for crikit.CRIkitUI
if self . ui . actionUndo_Backup_Enabled . isChecked ():
try :
_BCPre . backup_pickle ( self . hsi , self . bcpre . id_list [ - 1 ])
- except :
+ except Exception :
print ( 'Error in pickle backup (Undo functionality)' )
else :
self . bcpre . backed_up ()
@@ -2700,7 +2742,7 @@ Source code for crikit.CRIkitUI
if self . ui . actionUndo_Backup_Enabled . isChecked ():
try :
_BCPre . backup_pickle ( self . hsi , self . bcpre . id_list [ - 1 ])
- except :
+ except Exception :
print ( 'Error in pickle backup (Undo functionality)' )
else :
self . bcpre . backed_up ()
@@ -2741,7 +2783,7 @@ Source code for crikit.CRIkitUI
if self . ui . actionUndo_Backup_Enabled . isChecked ():
try :
_BCPre . backup_pickle ( self . hsi , self . bcpre . id_list [ - 1 ])
- except :
+ except Exception :
print ( 'Error in pickle backup (Undo functionality)' )
else :
self . bcpre . backed_up ()
@@ -2773,7 +2815,7 @@ Source code for crikit.CRIkitUI
if self . ui . actionUndo_Backup_Enabled . isChecked ():
try :
_BCPre . backup_pickle ( self . hsi , self . bcpre . id_list [ - 1 ])
- except :
+ except Exception :
print ( 'Error in pickle backup (Undo functionality)' )
else :
self . bcpre . backed_up ()
@@ -3056,7 +3098,7 @@ Source code for crikit.CRIkitUI
self . img_RGB_list [ rgbnum ] . mpl . draw ()
- except :
+ except Exception :
print ( 'Error' )
self . doComposite ()
@@ -3071,7 +3113,7 @@ Source code for crikit.CRIkitUI
self . img_RGB_list [ rgbnum ] . data . opfreq2 = currentfreq
self . img_RGB_list [ rgbnum ] . math . ui . pushButtonOpFreq2 . setText ( str ( round ( currentfreq , 1 )))
- except :
+ except Exception :
pass
[docs] def setOpFreq3 ( self ):
@@ -3086,7 +3128,7 @@
Source code for crikit.CRIkitUI
self . img_RGB_list [ rgbnum ] . data . opfreq3 = currentfreq
self . img_RGB_list [ rgbnum ] . math . ui . pushButtonOpFreq3 . setText ( str ( round ( currentfreq , 1 )))
- except :
+ except Exception :
pass
[docs] def setCondFreq1 ( self ):
@@ -3101,7 +3143,7 @@
Source code for crikit.CRIkitUI
self . img_RGB_list [ rgbnum ] . data . condfreq1 = currentfreq
self . img_RGB_list [ rgbnum ] . math . ui . pushButtonCondFreq1 . setText ( str ( round ( currentfreq , 1 )))
- except :
+ except Exception :
print ( 'Error' )
[docs] def setCondFreq2 ( self ):
@@ -3116,7 +3158,7 @@
Source code for crikit.CRIkitUI
self . img_RGB_list [ rgbnum ] . data . condfreq2 = currentfreq
self . img_RGB_list [ rgbnum ] . math . ui . pushButtonCondFreq2 . setText ( str ( round ( currentfreq , 1 )))
- except :
+ except Exception :
print ( 'Error' )
[docs] def setCondFreq3 ( self ):
@@ -3131,7 +3173,7 @@
Source code for crikit.CRIkitUI
self . img_RGB_list [ rgbnum ] . data . condfreq3 = currentfreq
self . img_RGB_list [ rgbnum ] . math . ui . pushButtonCondFreq3 . setText ( str ( round ( currentfreq , 1 )))
- except :
+ except Exception :
print ( 'Error' )
[docs] def spectrumColorImg ( self ):
@@ -3154,7 +3196,7 @@
Source code for crikit.CRIkitUI
else :
Mask = Mask > 0
- Mask = Mask . astype ( _np . integer )
+ Mask = Mask . astype ( _np . int32 )
mask_hits = Mask . sum ()
@@ -3296,7 +3338,7 @@ Source code for crikit.CRIkitUI
if self . _mpl_v1 :
self . img_BW . mpl . ax . hold ( True )
- except :
+ except Exception :
print ( 'Error in changeSlider: display img_BW' )
try :
@@ -3338,11 +3380,11 @@ Source code for crikit.CRIkitUI
self . img_BW . mpl . ax . add_artist ( lg )
try :
self . img_BW . mpl . fig . tight_layout ( pad = 1 )
- except :
+ except Exception :
print ( 'tight_layout failed (CrikitUI: 3' )
- except :
+ except Exception :
print ( 'Error in showing overlay legend' )
- except :
+ except Exception :
print ( 'Error in changeSlider: display overlays' )
self . img_BW . mpl . draw ()
@@ -3418,7 +3460,7 @@ Source code for crikit.CRIkitUI
yunits = self . img_Composite2 . data . yunits ,
extent = self . img_BW . data . winextent )
self . img_Composite2 . mpl . draw ()
- except :
+ except Exception :
print ( 'Error in doComposite' )
[docs] def updateOverlays ( self ):
@@ -3703,7 +3745,7 @@
Source code for crikit.CRIkitUI
img_shape = fid [ dataset_name ] . shape
self . _mosaic_mask = _np . zeros ( img_shape )
fid [ dataset_name ] . read_direct ( self . _mosaic_mask )
- n_imgs = self . _mosaic_mask . max () . astype ( _np . int )
+ n_imgs = self . _mosaic_mask . max () . astype ( int )
fid . close ()
msg = _QMessageBox ( self )
@@ -3750,7 +3792,7 @@ Source code for crikit.CRIkitUI
Input kwargs (Optional)
------------------------
- hsi : crikit.data.Hsi
+ hsi : crikit.data.spectra.Hsi
Hsi instance
data : ndarray (3D)
@@ -3847,7 +3889,7 @@ Source code for crikit.CRIkitUI
\ No newline at end of file
diff --git a/docs/build/html/_modules/crikit/cri/algorithms/kk.html b/docs/build/html/_modules/crikit/cri/algorithms/kk.html
index 08d6348..eed0de9 100644
--- a/docs/build/html/_modules/crikit/cri/algorithms/kk.html
+++ b/docs/build/html/_modules/crikit/cri/algorithms/kk.html
@@ -5,15 +5,13 @@
- crikit.cri.algorithms.kk — CRIKit2 0.4.1 documentation
-
-
-
+ crikit.cri.algorithms.kk — CRIKit2 0.4.3 documentation
+
+
+
-
-
@@ -26,7 +24,7 @@ Navigation
modules |
-
CRIKit2 0.4.1 documentation »
+
CRIKit2 0.4.3 documentation »
Module code »
crikit.cri.algorithms.kk
@@ -136,7 +134,7 @@
Source code for crikit.cri.algorithms.kk
ratio [ ratio <= 0 ] = bad_value
if ( ratio . ndim == 3 ) & (( axis == - 1 ) | ( axis == 2 )) & ( not no_iter ):
- ph = _np . zeros ( ratio . shape , dtype = _np . complex )
+ ph = _np . zeros ( ratio . shape , dtype = complex )
for num in range ( ratio . shape [ 0 ]):
ph [ num , ... ] = _np . exp ( 1 j * ( hilbertfft ( 0.5 * _np . log ( ratio [ num , ... ]), ** hilb_kwargs ) + phase_offset ))
else :
@@ -257,7 +255,7 @@ Source code for crikit.cri.algorithms.kk