From 699111d7abc495d5aee537a55e51e0ccd430e899 Mon Sep 17 00:00:00 2001 From: Mathieu Doucet Date: Sun, 5 Jan 2025 11:46:46 -0500 Subject: [PATCH 01/16] add doc --- reduction/lr_reduction/event_reduction.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/reduction/lr_reduction/event_reduction.py b/reduction/lr_reduction/event_reduction.py index 9b876b1..9f65571 100644 --- a/reduction/lr_reduction/event_reduction.py +++ b/reduction/lr_reduction/event_reduction.py @@ -43,7 +43,22 @@ def get_wl_range(ws): def get_q_binning(q_min=0.001, q_max=0.15, q_step=-0.02): """ - Determine Q binning + Determine Q binning. + + This function calculates the binning for Q values based on the provided minimum, maximum, and step values. + If the step value is positive, it generates a linear binning. If the step value is negative, it generates + a logarithmic binning. + + :param q_min: float, optional + The minimum Q value (default is 0.001). + :param q_max: float, optional + The maximum Q value (default is 0.15). + :param q_step: float, optional + The step size for Q binning. If positive, linear binning is used. If negative, logarithmic binning + is used (default is -0.02). + + :return: numpy.ndarray + An array of Q values based on the specified binning. """ if q_step > 0: n_steps = int((q_max-q_min)/q_step) From ff4e69efb19ec104e237b61e551077bfb1a9474c Mon Sep 17 00:00:00 2001 From: Mathieu Doucet Date: Sun, 5 Jan 2025 11:49:42 -0500 Subject: [PATCH 02/16] add doc --- reduction/lr_reduction/event_reduction.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/reduction/lr_reduction/event_reduction.py b/reduction/lr_reduction/event_reduction.py index 9f65571..1ea44d8 100644 --- a/reduction/lr_reduction/event_reduction.py +++ b/reduction/lr_reduction/event_reduction.py @@ -71,8 +71,15 @@ def get_q_binning(q_min=0.001, q_max=0.15, q_step=-0.02): def get_attenuation_info(ws): """ - Retrieve information about attenuation. - Returns the attenuator thickness found in the meta data + Retrieve information about attenuation from a Mantid workspace. + This function calculates the total thickness of all attenuators that are + in the path of the beam by summing up the thicknesses of the attenuators + specified in the global variable `CD_ATTENUATORS`. + + :param ws: mantid.api.Workspace + Mantid workspace from which to retrieve the attenuation information. + :return: float + The total thickness of the attenuators in the path of the beam. 
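+
+    A minimal usage sketch (illustrative only; the run number below is
+    hypothetical, and the workspace is assumed to carry the BL4B actuator
+    logs listed in ``CD_ATTENUATORS``):
+
+    .. code-block:: python
+
+        import mantid.simpleapi as api
+        ws = api.LoadEventNexus("REF_L_123456")
+        thickness = get_attenuation_info(ws)  # summed attenuator thickness, in cm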
""" run_info = ws.getRun() attenuator_thickness = 0 From 34825813fb39877baebcc28e196eccb499a79650 Mon Sep 17 00:00:00 2001 From: Mathieu Doucet Date: Mon, 6 Jan 2025 12:19:41 -0500 Subject: [PATCH 03/16] update docs --- docs/conf.py | 2 +- reduction/lr_reduction/event_reduction.py | 95 +++++++++++++---------- 2 files changed, 56 insertions(+), 41 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 6dc63c6..f47f909 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,7 +12,7 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information project = "lr_reduction" -copyright = "2024, ORNL" # noqa A001 +copyright = "2025, ORNL" # noqa A001 author = "ORNL" version = versioningit.get_version("../") # The full version, including alpha/beta/rc tags diff --git a/reduction/lr_reduction/event_reduction.py b/reduction/lr_reduction/event_reduction.py index 1ea44d8..354e4c7 100644 --- a/reduction/lr_reduction/event_reduction.py +++ b/reduction/lr_reduction/event_reduction.py @@ -27,7 +27,10 @@ def get_wl_range(ws): """ Determine TOF range from the data - :param workspace ws: workspace to work with + + :param ws: (Mantid workspace) workspace to work with + + :return: (list) [min, max] wavelength range """ run_object = ws.getRun() @@ -49,13 +52,10 @@ def get_q_binning(q_min=0.001, q_max=0.15, q_step=-0.02): If the step value is positive, it generates a linear binning. If the step value is negative, it generates a logarithmic binning. - :param q_min: float, optional - The minimum Q value (default is 0.001). - :param q_max: float, optional - The maximum Q value (default is 0.15). - :param q_step: float, optional - The step size for Q binning. If positive, linear binning is used. If negative, logarithmic binning - is used (default is -0.02). + :param q_min: (float, optional) The minimum Q value (default is 0.001). + :param q_max: (float, optional) The maximum Q value (default is 0.15). + :param q_step: (float, optional) The step size for Q binning. If positive, + linear binning is used. If negative, logarithmic binning is used (default is -0.02). :return: numpy.ndarray An array of Q values based on the specified binning. @@ -76,7 +76,7 @@ def get_attenuation_info(ws): in the path of the beam by summing up the thicknesses of the attenuators specified in the global variable `CD_ATTENUATORS`. - :param ws: mantid.api.Workspace + :param ws: (Mantid workspace) Mantid workspace from which to retrieve the attenuation information. :return: float The total thickness of the attenuators in the path of the beam. @@ -95,7 +95,9 @@ def get_attenuation_info(ws): def read_settings(ws): """ Read settings file and return values for the given timestamp + :param ws: Mantid workspace + :return: (dict) dictionary with settings """ settings = dict() package_dir, _ = os.path.split(__file__) @@ -121,8 +123,11 @@ def read_settings(ws): def process_attenuation(ws, thickness=0): """ Correct for absorption by assigning weight to each neutron event - :param ws: workspace to correct - :param thickness: attenuator thickness in cm + + :param ws: (Mantid workspace) workspace to correct + :param thickness: (float) attenuator thickness in cm (default is 0). + + :return: (Mantid workspace) corrected workspace """ settings = read_settings(ws) if "source-det-distance" in settings: @@ -156,8 +161,12 @@ def get_dead_time_correction(ws, template_data): Compute dead time correction to be applied to the reflectivity curve. 
    The method will also try to load the error events from each of the
    data files to ensure that we properly estimate the dead time correction.
-    :param ws: workspace with raw data to compute correction for
-    :param template_data: reduction parameters
+
+    :param ws: (Mantid workspace) workspace with raw data to compute correction for
+    :param template_data: (reduction_template_reader.ReductionParameters)
+        reduction parameters
+
+    :return: (Mantid workspace) workspace with dead time correction to apply
     """
     tof_min = ws.getTofMin()
     tof_max = ws.getTofMax()
@@ -179,8 +188,12 @@ def apply_dead_time_correction(ws, template_data):
     """
         Apply dead time correction, and ensure that it is done only once
        per workspace.
-        :param ws: workspace with raw data to compute correction for
-        :param template_data: reduction parameters
+
+    :param ws: (Mantid workspace) workspace with raw data to compute correction for
+    :param template_data: (reduction_template_reader.ReductionParameters)
+        reduction parameters
+
+    :return: (Mantid workspace) workspace with dead time correction applied
     """
     if 'dead_time_applied' not in ws.getRun():
         corr_ws = get_dead_time_correction(ws, template_data)

 class EventReflectivity(object):
-    r"""
-        Event based reflectivity calculation.
+    """
+    Data reduction for the Liquids Reflectometer.

     List of items to be taken care of outside this class:
+
     - Edge points cropping
     - Angle offset
     - Putting runs together in one R(q) curve
     - Scaling factors
+
+    Pixel ranges include the min and max pixels.
+
+    :param scattering_workspace: Mantid workspace containing the reflected data
+    :param direct_workspace: Mantid workspace containing the direct beam data [if None, normalization won't be applied]
+    :param signal_peak: pixel min and max for the specular peak
+    :param signal_bck: pixel range of the background [if None, the background won't be subtracted]
+    :param norm_peak: pixel range of the direct beam peak
+    :param norm_bck: direct background subtraction is not used [deprecated]
+    :param specular_pixel: pixel of the specular peak
+    :param signal_low_res: pixel range of the specular peak out of the scattering plane
+    :param norm_low_res: pixel range of the direct beam out of the scattering plane
+    :param q_min: value of lowest q point
+    :param q_step: step size in Q. Enter a negative value to get a log scale
+    :param q_max: value of largest q point
+    :param tof_range: TOF range, or None
+    :param theta: theta scattering angle in radians
+    :param dead_time: if not zero, dead time correction will be used
+    :param paralyzable: if True, the dead time calculation will use the paralyzable approach
+    :param dead_time_value: value of the dead time in microseconds
+    :param dead_time_tof_step: TOF bin size in microseconds
+    :param use_emission_time: if True, the emission time delay will be computed
     """
+
     QX_VS_QZ = 0
     KZI_VS_KZF = 1
     DELTA_KZ_VS_QZ = 3
@@ -215,29 +252,7 @@ def __init__(self, scattering_workspace, direct_workspace,
                  functional_background=False, dead_time=False,
                  paralyzable=True, dead_time_value=4.2,
                  dead_time_tof_step=100, use_emission_time=True):
-        """
-            Pixel ranges include the min and max pixels.
- - :param scattering_workspace: Mantid workspace containing the reflected data - :param direct_workspace: Mantid workspace containing the direct beam data [if None, normalization won't be applied] - :param signal_peak: pixel min and max for the specular peak - :param signal_bck: pixel range of the background [if None, the background won't be subtracted] - :param norm_peak: pixel range of the direct beam peak - :param norm_bck: direct background subtraction is not used [deprecated] - :param specular_pixel: pixel of the specular peak - :param signal_low_res: pixel range of the specular peak out of the scattering plane - :param norm_low_res: pixel range of the direct beam out of the scattering plane - :param q_min: value of lowest q point - :param q_step: step size in Q. Enter a negative value to get a log scale - :param q_min: value of largest q point - :param tof_range: TOF range,or None - :param theta: theta scattering angle in radians - :param dead_time: if not zero, dead time correction will be used - :param paralyzable: if True, the dead time calculation will use the paralyzable approach - :param dead_time_value: value of the dead time in microsecond - :param dead_time_tof_step: TOF bin size in microsecond - :param use_emmission_time: if True, the emission time delay will be computed - """ + if instrument in [self.INSTRUMENT_4A, self.INSTRUMENT_4B]: self.instrument = instrument else: From da9475342332cce20b285c46f560c69a4e198e69 Mon Sep 17 00:00:00 2001 From: Mathieu Doucet Date: Mon, 6 Jan 2025 12:40:48 -0500 Subject: [PATCH 04/16] ruff it --- reduction/lr_reduction/event_reduction.py | 574 +++++++++++++--------- reduction/lr_reduction/workflow.py | 178 +++---- 2 files changed, 431 insertions(+), 321 deletions(-) diff --git a/reduction/lr_reduction/event_reduction.py b/reduction/lr_reduction/event_reduction.py index 354e4c7..ed69866 100644 --- a/reduction/lr_reduction/event_reduction.py +++ b/reduction/lr_reduction/event_reduction.py @@ -1,6 +1,7 @@ """ - Event based reduction for the Liquids Reflectometer +Event based reduction for the Liquids Reflectometer """ + import datetime import json import os @@ -17,29 +18,30 @@ NEUTRON_MASS = 1.675e-27 # kg # Attenuators: PV name and thickness in cm -CD_ATTENUATORS = [['BL4B:Actuator:50MRb', 0.0058], - ['BL4B:Actuator:100MRb', 0.0122], - ['BL4B:Actuator:200MRb', 0.0244], # Uncalibrated - ['BL4B:Actuator:400MRb', 0.0488], # Uncalibrated - ] +CD_ATTENUATORS = [ + ["BL4B:Actuator:50MRb", 0.0058], + ["BL4B:Actuator:100MRb", 0.0122], + ["BL4B:Actuator:200MRb", 0.0244], # Uncalibrated + ["BL4B:Actuator:400MRb", 0.0488], # Uncalibrated +] def get_wl_range(ws): """ - Determine TOF range from the data + Determine TOF range from the data - :param ws: (Mantid workspace) workspace to work with + :param ws: (Mantid workspace) workspace to work with - :return: (list) [min, max] wavelength range + :return: (list) [min, max] wavelength range """ run_object = ws.getRun() - wl = run_object.getProperty('LambdaRequest').value[0] - chopper_speed = run_object.getProperty('SpeedRequest1').value[0] + wl = run_object.getProperty("LambdaRequest").value[0] + chopper_speed = run_object.getProperty("SpeedRequest1").value[0] # Cut the edges by using a width of 2.6 A - wl_min = (wl - 1.3 * 60.0 / chopper_speed) - wl_max = (wl + 1.3 * 60.0 / chopper_speed) + wl_min = wl - 1.3 * 60.0 / chopper_speed + wl_max = wl + 1.3 * 60.0 / chopper_speed return [wl_min, wl_max] @@ -61,11 +63,11 @@ def get_q_binning(q_min=0.001, q_max=0.15, q_step=-0.02): An array of Q values based on 
the specified binning. """ if q_step > 0: - n_steps = int((q_max-q_min)/q_step) + n_steps = int((q_max - q_min) / q_step) return q_min + np.asarray([q_step * i for i in range(n_steps)]) else: - _step = 1.0+np.abs(q_step) - n_steps = int(np.log(q_max/q_min)/np.log(_step)) + _step = 1.0 + np.abs(q_step) + n_steps = int(np.log(q_max / q_min) / np.log(_step)) return q_min * np.asarray([_step**i for i in range(n_steps)]) @@ -94,40 +96,40 @@ def get_attenuation_info(ws): def read_settings(ws): """ - Read settings file and return values for the given timestamp + Read settings file and return values for the given timestamp - :param ws: Mantid workspace - :return: (dict) dictionary with settings + :param ws: Mantid workspace + :return: (dict) dictionary with settings """ settings = dict() package_dir, _ = os.path.split(__file__) - t = ws.getRun()['start_time'].value.split('T')[0] + t = ws.getRun()["start_time"].value.split("T")[0] timestamp = datetime.date.fromisoformat(t) - with open(os.path.join(package_dir, 'settings.json'), 'r') as fd: + with open(os.path.join(package_dir, "settings.json"), "r") as fd: data = json.load(fd) for key in data.keys(): chosen_value = None delta_time = None for item in data[key]: - valid_from = datetime.date.fromisoformat(item['from']) + valid_from = datetime.date.fromisoformat(item["from"]) delta = valid_from - timestamp if delta_time is None or (delta.total_seconds() < 0 and delta > delta_time): delta_time = delta - chosen_value = item['value'] + chosen_value = item["value"] settings[key] = chosen_value return settings def process_attenuation(ws, thickness=0): """ - Correct for absorption by assigning weight to each neutron event + Correct for absorption by assigning weight to each neutron event - :param ws: (Mantid workspace) workspace to correct - :param thickness: (float) attenuator thickness in cm (default is 0). + :param ws: (Mantid workspace) workspace to correct + :param thickness: (float) attenuator thickness in cm (default is 0). - :return: (Mantid workspace) corrected workspace + :return: (Mantid workspace) corrected workspace """ settings = read_settings(ws) if "source-det-distance" in settings: @@ -138,19 +140,22 @@ def process_attenuation(ws, thickness=0): constant = 1e-4 * NEUTRON_MASS * SDD / PLANCK_CONSTANT package_dir, _ = os.path.split(__file__) - mu_abs = np.loadtxt(os.path.join(package_dir, 'Cd-abs-factors.txt')).T + mu_abs = np.loadtxt(os.path.join(package_dir, "Cd-abs-factors.txt")).T wl_model = mu_abs[0] # Turn model into a histogram wl_step = wl_model[-1] - wl_model[-2] final_wl = wl_model[-1] + wl_step - wl_model = np.append(wl_model, [final_wl,]) + wl_model = np.append( + wl_model, + [ + final_wl, + ], + ) mu_model = mu_abs[1] tof_model = constant * wl_model - transmission = 1/np.exp(-mu_model * thickness) - transmission_ws = api.CreateWorkspace(OutputWorkspace='transmission', - DataX=tof_model, DataY=transmission, - UnitX='TOF', NSpec=1) + transmission = 1 / np.exp(-mu_model * thickness) + transmission_ws = api.CreateWorkspace(OutputWorkspace="transmission", DataX=tof_model, DataY=transmission, UnitX="TOF", NSpec=1) ws = api.Multiply(ws, transmission_ws, OutputWorkspace=str(ws)) return ws @@ -158,47 +163,50 @@ def process_attenuation(ws, thickness=0): def get_dead_time_correction(ws, template_data): """ - Compute dead time correction to be applied to the reflectivity curve. - The method will also try to load the error events from each of the - data files to ensure that we properly estimate the dead time correction. 
+    Compute dead time correction to be applied to the reflectivity curve.
+    The method will also try to load the error events from each of the
+    data files to ensure that we properly estimate the dead time correction.

-    :param ws: (Mantid workspace) workspace with raw data to compute correction for
-    :param template_data: (reduction_template_reader.ReductionParameters)
-        reduction parameters
+    :param ws: (Mantid workspace) workspace with raw data to compute correction for
+    :param template_data: (reduction_template_reader.ReductionParameters)
+        reduction parameters

-    :return: (Mantid workspace) workspace with dead time correction to apply
+    :return: (Mantid workspace) workspace with dead time correction to apply
     """
     tof_min = ws.getTofMin()
     tof_max = ws.getTofMax()
     run_number = ws.getRun().getProperty("run_number").value
     error_ws = api.LoadErrorEventsNexus("REF_L_%s" % run_number)
-    corr_ws = mantid_algorithm_exec(DeadTimeCorrection.SingleReadoutDeadTimeCorrection,
-                                    InputWorkspace=ws,
-                                    InputErrorEventsWorkspace=error_ws,
-                                    Paralyzable=template_data.paralyzable,
-                                    DeadTime=template_data.dead_time_value,
-                                    TOFStep=template_data.dead_time_tof_step,
-                                    TOFRange=[tof_min, tof_max],
-                                    OutputWorkspace="corr")
+    corr_ws = mantid_algorithm_exec(
+        DeadTimeCorrection.SingleReadoutDeadTimeCorrection,
+        InputWorkspace=ws,
+        InputErrorEventsWorkspace=error_ws,
+        Paralyzable=template_data.paralyzable,
+        DeadTime=template_data.dead_time_value,
+        TOFStep=template_data.dead_time_tof_step,
+        TOFRange=[tof_min, tof_max],
+        OutputWorkspace="corr",
+    )
     corr_ws = api.Rebin(corr_ws, [tof_min, 10, tof_max])
     return corr_ws

+
 def apply_dead_time_correction(ws, template_data):
     """
     Apply dead time correction, and ensure that it is done only once
     per workspace.
- :param ws: (Mantid workspace) workspace with raw data to compute correction for - :param template_data: (reduction_template_reader.ReductionParameters) - reduction parameters + :param ws: (Mantid workspace) workspace with raw data to compute correction for + :param template_data: (reduction_template_reader.ReductionParameters) + reduction parameters - :return: (Mantid workspace) workspace with dead time correction applied + :return: (Mantid workspace) workspace with dead time correction applied """ - if 'dead_time_applied' not in ws.getRun(): + if "dead_time_applied" not in ws.getRun(): corr_ws = get_dead_time_correction(ws, template_data) ws = api.Multiply(ws, corr_ws, OutputWorkspace=str(ws)) - api.AddSampleLog(Workspace=ws, LogName="dead_time_applied", LogText='1', LogType="Number") + api.AddSampleLog(Workspace=ws, LogName="dead_time_applied", LogText="1", LogType="Number") return ws @@ -244,15 +252,30 @@ class EventReflectivity(object): DEFAULT_4B_SAMPLE_DET_DISTANCE = 1.83 DEFAULT_4B_SOURCE_DET_DISTANCE = 15.75 - def __init__(self, scattering_workspace, direct_workspace, - signal_peak, signal_bck, norm_peak, norm_bck, - specular_pixel, signal_low_res, norm_low_res, - q_min=None, q_step=-0.02, q_max=None, - tof_range=None, theta=1.0, instrument=None, - functional_background=False, dead_time=False, - paralyzable=True, dead_time_value=4.2, - dead_time_tof_step=100, use_emission_time=True): - + def __init__( + self, + scattering_workspace, + direct_workspace, + signal_peak, + signal_bck, + norm_peak, + norm_bck, + specular_pixel, + signal_low_res, + norm_low_res, + q_min=None, + q_step=-0.02, + q_max=None, + tof_range=None, + theta=1.0, + instrument=None, + functional_background=False, + dead_time=False, + paralyzable=True, + dead_time_value=4.2, + dead_time_tof_step=100, + use_emission_time=True, + ): if instrument in [self.INSTRUMENT_4A, self.INSTRUMENT_4B]: self.instrument = instrument else: @@ -285,13 +308,13 @@ def __init__(self, scattering_workspace, direct_workspace, # Process workspaces if self.tof_range is not None: - self._ws_sc = api.CropWorkspace(InputWorkspace=scattering_workspace, - XMin=tof_range[0], XMax=tof_range[1], - OutputWorkspace='_'+str(scattering_workspace)) + self._ws_sc = api.CropWorkspace( + InputWorkspace=scattering_workspace, XMin=tof_range[0], XMax=tof_range[1], OutputWorkspace="_" + str(scattering_workspace) + ) if direct_workspace is not None: - self._ws_db = api.CropWorkspace(InputWorkspace=direct_workspace, - XMin=tof_range[0], XMax=tof_range[1], - OutputWorkspace='_'+str(direct_workspace)) + self._ws_db = api.CropWorkspace( + InputWorkspace=direct_workspace, XMin=tof_range[0], XMax=tof_range[1], OutputWorkspace="_" + str(direct_workspace) + ) else: self._ws_db = None else: @@ -303,7 +326,7 @@ def __init__(self, scattering_workspace, direct_workspace, def extract_meta_data(self): """ - Extract meta data from the data file. + Extract meta data from the loaded data file. 
""" # Get instrument parameters settings = read_settings(self._ws_sc) @@ -324,12 +347,12 @@ def extract_meta_data(self): if self.tof_range is None: self.wl_range = get_wl_range(self._ws_sc) else: - self.wl_range = [self.tof_range[0] / self.constant, self.tof_range[1] / self.constant] + self.wl_range = [self.tof_range[0] / self.constant, self.tof_range[1] / self.constant] # q_min and q_max are the boundaries for the final Q binning # We also hold on to the true Q range covered by the measurement - self.q_min_meas = 4.0*np.pi/self.wl_range[1] * np.fabs(np.sin(self.theta)) - self.q_max_meas = 4.0*np.pi/self.wl_range[0] * np.fabs(np.sin(self.theta)) + self.q_min_meas = 4.0 * np.pi / self.wl_range[1] * np.fabs(np.sin(self.theta)) + self.q_max_meas = 4.0 * np.pi / self.wl_range[0] * np.fabs(np.sin(self.theta)) if self.q_min is None: self.q_min = self.q_min_meas @@ -341,29 +364,29 @@ def extract_meta_data(self): # Catch options that can be turned off if self.signal_low_res is None: - self.signal_low_res = [1, self.n_x-1] + self.signal_low_res = [1, self.n_x - 1] if self.norm_low_res is None: - self.norm_low_res = [1, self.n_x-1] + self.norm_low_res = [1, self.n_x - 1] def extract_meta_data_4A(self): """ - 4A-specific meta data + 4A-specific meta data """ run_object = self._ws_sc.getRun() - self.det_distance = run_object['SampleDetDis'].getStatistics().mean - source_sample_distance = run_object['ModeratorSamDis'].getStatistics().mean - if run_object['SampleDetDis'].units not in ['m', 'meter']: + self.det_distance = run_object["SampleDetDis"].getStatistics().mean + source_sample_distance = run_object["ModeratorSamDis"].getStatistics().mean + if run_object["SampleDetDis"].units not in ["m", "meter"]: self.det_distance /= 1000.0 - if run_object['ModeratorSamDis'].units not in ['m', 'meter']: + if run_object["ModeratorSamDis"].units not in ["m", "meter"]: source_sample_distance /= 1000.0 self.source_detector_distance = source_sample_distance + self.det_distance def extract_meta_data_4B(self): """ - 4B-specific meta data + 4B-specific meta data - Distance from source to sample was 13.63 meters prior to the source - to detector distance being determined with Bragg edges to be 15.75 m. + Distance from source to sample was 13.63 meters prior to the source + to detector distance being determined with Bragg edges to be 15.75 m. """ settings = read_settings(self._ws_sc) @@ -390,6 +413,11 @@ def extract_meta_data_4B(self): self.source_detector_distance = self.DEFAULT_4B_SOURCE_DET_DISTANCE def __repr__(self): + """ + Generate a string representation of the reduction settings. + + :return: (str) string representation of the reduction settings + """ output = "Reduction settings:\n" output += " sample-det: %s\n" % self.det_distance output += " source-det: %s\n" % self.source_detector_distance @@ -403,12 +431,14 @@ def __repr__(self): def to_dict(self): """ - Returns meta-data to be used/stored. + Returns meta-data to be used/stored. 
+ + :return: (dict) dictionary with meta-data """ if self._ws_sc.getRun().hasProperty("start_time"): start_time = self._ws_sc.getRun().getProperty("start_time").value else: - start_time = 'live' + start_time = "live" experiment = self._ws_sc.getRun().getProperty("experiment_identifier").value run_number = self._ws_sc.getRun().getProperty("run_number").value sequence_number = int(self._ws_sc.getRun().getProperty("sequence_number").value[0]) @@ -421,25 +451,42 @@ def to_dict(self): norm_run = 0 dq0 = 0 - return dict(wl_min=self.wl_range[0], wl_max=self.wl_range[1], - q_min=self.q_min_meas, q_max=self.q_max_meas, theta=self.theta, - start_time=start_time, experiment=experiment, run_number=run_number, - run_title=run_title, norm_run=norm_run, time=time.ctime(), - dq0=dq0, dq_over_q=self.dq_over_q, sequence_number=sequence_number, - sequence_id=sequence_id, q_summing=self.q_summing) - - def specular(self, q_summing=False, tof_weighted=False, bck_in_q=False, - clean=False, normalize=True): + return dict( + wl_min=self.wl_range[0], + wl_max=self.wl_range[1], + q_min=self.q_min_meas, + q_max=self.q_max_meas, + theta=self.theta, + start_time=start_time, + experiment=experiment, + run_number=run_number, + run_title=run_title, + norm_run=norm_run, + time=time.ctime(), + dq0=dq0, + dq_over_q=self.dq_over_q, + sequence_number=sequence_number, + sequence_id=sequence_id, + q_summing=self.q_summing, + ) + + def specular(self, q_summing=False, tof_weighted=False, bck_in_q=False, clean=False, normalize=True): """ - Compute specular reflectivity. + Compute specular reflectivity. + + For constant-Q binning, it's preferred to use tof_weighted=True. + + :param q_summing: turns on constant-Q binning + :param tof_weighted: if True, binning will be done by weighting each event to the DB distribution + :param bck_in_q: if True, the background will be estimated in Q space using the constant-Q binning approach + :param clean: if True, and Q summing is True, then leading artifact will be removed + :param normalize: if True, and tof_weighted is False, normalization will be skipped - For constant-Q binning, it's preferred to use tof_weighted=True. + :return: A tuple containing: - :param q_summing: turns on constant-Q binning - :param tof_weighted: if True, binning will be done by weighting each event to the DB distribution - :param bck_in_q: if True, the background will be estimated in Q space using the constant-Q binning approach - :param clean: if True, and Q summing is True, then leading artifact will be removed - :param normalize: if True, and tof_weighted is False, normalization will be skipped + - q_bins: The Q bin boundaries + - refl: The reflectivity values + - d_refl: The uncertainties in the reflectivity values """ if tof_weighted: self.specular_weighted(q_summing=q_summing, bck_in_q=bck_in_q) @@ -447,7 +494,7 @@ def specular(self, q_summing=False, tof_weighted=False, bck_in_q=False, self.specular_unweighted(q_summing=q_summing, normalize=normalize) # Remove leading zeros - r = np.trim_zeros(self.refl, 'f') + r = np.trim_zeros(self.refl, "f") trim = len(self.refl) - len(r) self.refl = self.refl[trim:] self.d_refl = self.d_refl[trim:] @@ -471,15 +518,30 @@ def specular(self, q_summing=False, tof_weighted=False, bck_in_q=False, def specular_unweighted(self, q_summing=False, normalize=True): """ - Simple specular reflectivity calculation. This is the same approach as the - original LR reduction, which sums up pixels without constant-Q binning. 
- The original approach bins in TOF, then rebins the final results after - transformation to Q. This approach bins directly to Q. + Simple specular reflectivity calculation. This is the same approach as the + original LR reduction, which sums up pixels without constant-Q binning. + The original approach bins in TOF, then rebins the final results after + transformation to Q. This approach bins directly to Q. + + :param q_summing: (bool, optional) If True, sum the data in Q-space (default is False). + :param normalize: (bool, optional) If True, normalize the reflectivity by the direct + beam (default is True). + + :return: A tuple containing: + + - q_bins: The Q bin boundaries + - refl: The reflectivity values + - d_refl: The uncertainties in the reflectivity values """ # Scattering data - refl, d_refl = self._reflectivity(self._ws_sc, peak_position=self.specular_pixel, - peak=self.signal_peak, low_res=self.signal_low_res, - theta=self.theta, q_summing=q_summing) + refl, d_refl = self._reflectivity( + self._ws_sc, + peak_position=self.specular_pixel, + peak=self.signal_peak, + low_res=self.signal_low_res, + theta=self.theta, + q_summing=q_summing, + ) # Remove background if self.signal_bck is not None: @@ -493,19 +555,21 @@ def specular_unweighted(self, q_summing=False, normalize=True): # we can bin the DB according to the same transform instead of binning and dividing in TOF. # This is mathematically equivalent and convenient in terms of abstraction for later # use for the constant-Q calculation elsewhere in the code. - norm, d_norm = self._reflectivity(self._ws_db, peak_position=0, - peak=self.norm_peak, low_res=self.norm_low_res, - theta=self.theta, q_summing=False) + norm, d_norm = self._reflectivity( + self._ws_db, peak_position=0, peak=self.norm_peak, low_res=self.norm_low_res, theta=self.theta, q_summing=False + ) # Direct beam background could be added here. The effect will be negligible. if self.norm_bck is not None: norm_bck, d_norm_bck = self.norm_bck_subtraction() norm -= norm_bck d_norm = np.sqrt(d_norm**2 + d_norm_bck**2) - db_bins = norm>0 + db_bins = norm > 0 - refl[db_bins] = refl[db_bins]/norm[db_bins] - d_refl[db_bins] = np.sqrt(d_refl[db_bins]**2 / norm[db_bins]**2 + refl[db_bins]**2 * d_norm[db_bins]**2 / norm[db_bins]**4) + refl[db_bins] = refl[db_bins] / norm[db_bins] + d_refl[db_bins] = np.sqrt( + d_refl[db_bins] ** 2 / norm[db_bins] ** 2 + refl[db_bins] ** 2 * d_norm[db_bins] ** 2 / norm[db_bins] ** 4 + ) # Hold on to normalization to be able to diagnose issues later self.norm = norm[db_bins] @@ -523,25 +587,40 @@ def specular_unweighted(self, q_summing=False, normalize=True): def specular_weighted(self, q_summing=True, bck_in_q=False): """ - Compute reflectivity by weighting each event by flux. - This allows for summing in Q and to estimate the background in either Q - or pixels next to the peak. + Compute reflectivity by weighting each event by flux. + This allows for summing in Q and to estimate the background in either Q + or pixels next to the peak. + + :param q_summing: (bool, optional) If True, sum the data in Q-space (default is False). + :param bck_in_q: (bool, optional) If True, subtract background along Q lines (default is False). 
+ + :return: A tuple containing: + + - q_bins: The Q bin boundaries + - refl: The reflectivity values + - d_refl: The uncertainties in the reflectivity values """ # Event weights for normalization db_charge = self._ws_db.getRun().getProtonCharge() wl_events, wl_weights = self._get_events(self._ws_db, self.norm_peak, self.norm_low_res) wl_dist, wl_bins = np.histogram(wl_events, bins=100, weights=wl_weights) _bin_width = wl_bins[1:] - wl_bins[:-1] - wl_dist = wl_dist/db_charge/_bin_width - wl_middle = [(wl_bins[i+1]+wl_bins[i])/2.0 for i in range(len(wl_bins)-1)] - - refl, d_refl = self._reflectivity(self._ws_sc, peak_position=self.specular_pixel, - peak=self.signal_peak, low_res=self.signal_low_res, - theta=self.theta, q_summing=q_summing, wl_dist=wl_dist, wl_bins=wl_middle) + wl_dist = wl_dist / db_charge / _bin_width + wl_middle = [(wl_bins[i + 1] + wl_bins[i]) / 2.0 for i in range(len(wl_bins) - 1)] + + refl, d_refl = self._reflectivity( + self._ws_sc, + peak_position=self.specular_pixel, + peak=self.signal_peak, + low_res=self.signal_low_res, + theta=self.theta, + q_summing=q_summing, + wl_dist=wl_dist, + wl_bins=wl_middle, + ) if self.signal_bck is not None: - refl_bck, d_refl_bck = self.bck_subtraction(wl_dist=wl_dist, wl_bins=wl_middle, - q_summing=bck_in_q) + refl_bck, d_refl_bck = self.bck_subtraction(wl_dist=wl_dist, wl_bins=wl_middle, q_summing=bck_in_q) refl -= refl_bck d_refl = np.sqrt(d_refl**2 + d_refl_bck**2) @@ -551,28 +630,34 @@ def specular_weighted(self, q_summing=True, bck_in_q=False): def _roi_integration(self, ws, peak, low_res, q_bins=None, wl_dist=None, wl_bins=None, q_summing=False): """ - Integrate a region of interest and normalize by the number of included pixels. + Integrate a region of interest and normalize by the number of included pixels. - The options are the same as for the reflectivity calculation. - If wl_dist and wl_bins are supplied, the events will be weighted by flux. - If q_summing is True, the angle of each neutron will be recalculated according to - their position on the detector and place in the proper Q bin. + The options are the same as for the reflectivity calculation. + If wl_dist and wl_bins are supplied, the events will be weighted by flux. + If q_summing is True, the angle of each neutron will be recalculated according to + their position on the detector and place in the proper Q bin. """ q_bins = self.q_bins if q_bins is None else q_bins - refl_bck, d_refl_bck = self._reflectivity(ws, peak_position=0, q_bins=q_bins, - peak=peak, low_res=low_res, - theta=self.theta, q_summing=q_summing, - wl_dist=wl_dist, wl_bins=wl_bins) - - _pixel_area = (peak[1]-peak[0]+1.0) + refl_bck, d_refl_bck = self._reflectivity( + ws, + peak_position=0, + q_bins=q_bins, + peak=peak, + low_res=low_res, + theta=self.theta, + q_summing=q_summing, + wl_dist=wl_dist, + wl_bins=wl_bins, + ) + + _pixel_area = peak[1] - peak[0] + 1.0 refl_bck /= _pixel_area d_refl_bck /= _pixel_area return refl_bck, d_refl_bck - def bck_subtraction(self, normalize_to_single_pixel=False, q_bins=None, wl_dist=None, wl_bins=None, - q_summing=False): + def bck_subtraction(self, normalize_to_single_pixel=False, q_bins=None, wl_dist=None, wl_bins=None, q_summing=False): """ - Higher-level call for background subtraction. Hides the ranges needed to define the ROI. + Higher-level call for background subtraction. Hides the ranges needed to define the ROI. 
""" # Sanity check if len(self.signal_bck) == 2 and self.use_functional_bck: @@ -582,63 +667,87 @@ def bck_subtraction(self, normalize_to_single_pixel=False, q_bins=None, wl_dist= self.use_functional_bck = False if self.use_functional_bck: - return background.functional_background(self._ws_sc, self, self.signal_peak, - self.signal_bck, self.signal_low_res, - normalize_to_single_pixel=normalize_to_single_pixel, - q_bins=q_bins, wl_dist=wl_dist, wl_bins=wl_bins, - q_summing=q_summing) + return background.functional_background( + self._ws_sc, + self, + self.signal_peak, + self.signal_bck, + self.signal_low_res, + normalize_to_single_pixel=normalize_to_single_pixel, + q_bins=q_bins, + wl_dist=wl_dist, + wl_bins=wl_bins, + q_summing=q_summing, + ) else: - return background.side_background(self._ws_sc, self, self.signal_peak, self.signal_bck, - self.signal_low_res, - normalize_to_single_pixel=normalize_to_single_pixel, - q_bins=q_bins, wl_dist=wl_dist, wl_bins=wl_bins, - q_summing=q_summing) + return background.side_background( + self._ws_sc, + self, + self.signal_peak, + self.signal_bck, + self.signal_low_res, + normalize_to_single_pixel=normalize_to_single_pixel, + q_bins=q_bins, + wl_dist=wl_dist, + wl_bins=wl_bins, + q_summing=q_summing, + ) def norm_bck_subtraction(self): """ - Higher-level call for background subtraction for the normalization run. + Higher-level call for background subtraction for the normalization run. """ - return background.side_background(self._ws_db, self, self.norm_peak, self.norm_bck, - self.norm_low_res, normalize_to_single_pixel=False) - - def slice(self, x_min=0.002, x_max=0.004, x_bins=None, z_bins=None, # noqa A003 - refl=None, d_refl=None, normalize=False): + return background.side_background( + self._ws_db, self, self.norm_peak, self.norm_bck, self.norm_low_res, normalize_to_single_pixel=False + ) + + def slice( + self, + x_min=0.002, + x_max=0.004, + x_bins=None, + z_bins=None, # noqa A003 + refl=None, + d_refl=None, + normalize=False, + ): """ - Retrieve a slice from the off-specular data. + Retrieve a slice from the off-specular data. """ x_bins = self._offspec_x_bins if x_bins is None else x_bins z_bins = self._offspec_z_bins if z_bins is None else z_bins refl = self._offspec_refl if refl is None else refl d_refl = self._offspec_d_refl if d_refl is None else d_refl - i_min = len(x_bins[x_bins= seq_list[i]: - data_sets[seq_list[i]-1].data_files = [run_list[i]] - new_data_sets.append(data_sets[seq_list[i]-1]) + data_sets[seq_list[i] - 1].data_files = [run_list[i]] + new_data_sets.append(data_sets[seq_list[i] - 1]) else: print("Too few entries [%s] in template for sequence number %s" % (len(data_sets), seq_list[i])) # Save the template that was used xml_str = reduction_template_reader.to_xml(new_data_sets) - with open(os.path.join(output_dir, 'REF_L_%s_auto_template.xml' % run_list[0]), 'w') as fd: + with open(os.path.join(output_dir, "REF_L_%s_auto_template.xml" % run_list[0]), "w") as fd: fd.write(xml_str) def offset_from_first_run( - ws, - template_file :str, - output_dir: str, - ): + ws, + template_file: str, + output_dir: str, +): """ Find a theta offset from the first peak. Used when sample is misaligned. 
@@ -153,7 +150,7 @@ def offset_from_first_run( sequence_id = ws.getRun().getProperty("sequence_id").value[0] # Theta value that we are aiming for - ths_value = ws.getRun()['ths'].value[-1] + ths_value = ws.getRun()["ths"].value[-1] # Read template so we can load the direct beam run template_data = template.read_template(template_file, sequence_number) @@ -163,31 +160,30 @@ def offset_from_first_run( ws_db = mtd_api.LoadEventNexus("REF_L_%s" % template_data.norm_file) # Look for parameters that might have been determined earlier for this measurement - options_file = os.path.join(output_dir, 'REFL_%s_options.json' % sequence_id) + options_file = os.path.join(output_dir, "REFL_%s_options.json" % sequence_id) if sequence_number > 1 and os.path.isfile(options_file): - with open(options_file, 'r') as fd: + with open(options_file, "r") as fd: options = json.load(fd) - return options['theta_offset'] + return options["theta_offset"] else: # Fit direct beam position - x_min=template_data.norm_peak_range[0] - x_max=template_data.norm_peak_range[1] + x_min = template_data.norm_peak_range[0] + x_max = template_data.norm_peak_range[1] _, _x, _y = peak_finding.process_data(ws_db, summed=True, tof_step=200) peak_center = np.argmax(_y) - db_center, db_width, _ = peak_finding.fit_signal_flat_bck(_x, _y, x_min=x_min, x_max=x_max, - center=peak_center, - sigma=1.) - print(" DB center: %g\t Width: %g from [%g %g]" % (db_center, db_width, - template_data.norm_peak_range[0], - template_data.norm_peak_range[1])) + db_center, db_width, _ = peak_finding.fit_signal_flat_bck(_x, _y, x_min=x_min, x_max=x_max, center=peak_center, sigma=1.0) + print( + " DB center: %g\t Width: %g from [%g %g]" + % (db_center, db_width, template_data.norm_peak_range[0], template_data.norm_peak_range[1]) + ) # Fit the reflected beam position - x_min=template_data.data_peak_range[0] - x_max=template_data.data_peak_range[1] + x_min = template_data.data_peak_range[0] + x_max = template_data.data_peak_range[1] _, _x, _y = peak_finding.process_data(ws, summed=True, tof_step=200) peak_center = np.argmax(_y[x_min:x_max]) + x_min - sc_center, sc_width, _ = peak_finding.fit_signal_flat_bck(_x, _y, x_min=x_min, x_max=x_max, center=peak_center, sigma=3.) + sc_center, sc_width, _ = peak_finding.fit_signal_flat_bck(_x, _y, x_min=x_min, x_max=x_max, center=peak_center, sigma=3.0) pixel_offset = sc_center - peak_center print(" SC center: %g\t Width: %g" % (sc_center, sc_width)) @@ -195,36 +191,36 @@ def offset_from_first_run( sample_det_distance = settings["sample-det-distance"] pixel_width = settings["pixel-width"] / 1000.0 - theta = np.arctan((sc_center-db_center) * pixel_width / sample_det_distance) / 2.0 * 180 / np.pi + theta = np.arctan((sc_center - db_center) * pixel_width / sample_det_distance) / 2.0 * 180 / np.pi theta_offset = theta - ths_value # If this is the first angle, keep the value for later - options = dict(theta_offset = theta_offset, - pixel_offset = pixel_offset) - with open(options_file, 'w') as fp: + options = dict(theta_offset=theta_offset, pixel_offset=pixel_offset) + with open(options_file, "w") as fp: json.dump(options, fp) return theta_offset def reduce_explorer(ws, ws_db, theta_pv=None, center_pixel=145, db_center_pixel=145, peak_width=10): - """ - """ + """ """ from . 
import peak_finding if theta_pv is None: - if 'BL4B:CS:ExpPl:OperatingMode' in ws.getRun() \ - and ws.getRun().getProperty('BL4B:CS:ExpPl:OperatingMode').value[0] == 'Free Liquid': - theta_pv = 'thi' + if ( + "BL4B:CS:ExpPl:OperatingMode" in ws.getRun() + and ws.getRun().getProperty("BL4B:CS:ExpPl:OperatingMode").value[0] == "Free Liquid" + ): + theta_pv = "thi" else: - theta_pv = 'ths' + theta_pv = "ths" print("\nProcessing: %s" % ws.getRunNumber()) # Theta value that we are aiming for theta_value = np.fabs(ws.getRun()[theta_pv].value[0]) # Load normalization run - tthd_value = ws.getRun()['tthd'].value[0] + tthd_value = ws.getRun()["tthd"].value[0] # Fit direct beam position x_min = center_pixel - 25 @@ -235,8 +231,8 @@ def reduce_explorer(ws, ws_db, theta_pv=None, center_pixel=145, db_center_pixel= print(" DB center: %g\t Width: %g" % (db_center, db_width)) # Fit the reflected beam position - x_min=db_center_pixel-peak_width - x_max=db_center_pixel+peak_width + x_min = db_center_pixel - peak_width + x_max = db_center_pixel + peak_width tof, _x, _y = peak_finding.process_data(ws, summed=True, tof_step=200) peak_center = np.argmax(_y) sc_center, sc_width, _ = peak_finding.fit_signal_flat_bck(_x, _y, x_min=x_min, x_max=x_max, center=peak_center) @@ -244,41 +240,47 @@ def reduce_explorer(ws, ws_db, theta_pv=None, center_pixel=145, db_center_pixel= pixel_width = float(ws.getInstrument().getNumberParameter("pixel-width")[0]) / 1000.0 sample_det_distance = event_reduction.EventReflectivity.DEFAULT_4B_SAMPLE_DET_DISTANCE - twotheta = np.arctan((db_center-sc_center)*pixel_width / sample_det_distance) / 2.0 * 180 / np.pi + twotheta = np.arctan((db_center - sc_center) * pixel_width / sample_det_distance) / 2.0 * 180 / np.pi # Store the tthd of the direct beam and account for the fact that it may be # different from our reflected beam for this calibration data. # This will allow us to be compatible with both fixed and moving detector arm. - tthd_db = ws_db.getRun()['tthd'].value[0] + tthd_db = ws_db.getRun()["tthd"].value[0] twotheta = twotheta + tthd_value - tthd_db print(" Theta = %g Two-theta = %g" % (theta_value, twotheta)) # Perform the reduction width_mult = 2.5 - peak = [np.rint(sc_center - width_mult*sc_width).astype(int), np.rint(sc_center + width_mult*sc_width).astype(int)] - norm_peak = [np.rint(db_center - width_mult*db_width).astype(int), np.rint(db_center + width_mult*db_width).astype(int)] - peak_bck = [peak[0]-3, peak[1]+3] - norm_bck = [norm_peak[0]-3, norm_peak[1]+3] + peak = [np.rint(sc_center - width_mult * sc_width).astype(int), np.rint(sc_center + width_mult * sc_width).astype(int)] + norm_peak = [np.rint(db_center - width_mult * db_width).astype(int), np.rint(db_center + width_mult * db_width).astype(int)] + peak_bck = [peak[0] - 3, peak[1] + 3] + norm_bck = [norm_peak[0] - 3, norm_peak[1] + 3] tof_min = ws.getTofMin() tof_max = ws.getTofMax() - theta = theta_value * np.pi / 180. 
+ theta = theta_value * np.pi / 180.0 - #TODO: dead time correction should be applied here - event_refl = event_reduction.EventReflectivity(ws, ws_db, - signal_peak=peak, signal_bck=peak_bck, - norm_peak=norm_peak, norm_bck=norm_bck, - specular_pixel=sc_center.value, - signal_low_res=[65,180], norm_low_res=[65,180], - q_min=None, q_max=None, - tof_range = [tof_min, tof_max], - theta=theta) + # TODO: dead time correction should be applied here + event_refl = event_reduction.EventReflectivity( + ws, + ws_db, + signal_peak=peak, + signal_bck=peak_bck, + norm_peak=norm_peak, + norm_bck=norm_bck, + specular_pixel=sc_center.value, + signal_low_res=[65, 180], + norm_low_res=[65, 180], + q_min=None, + q_max=None, + tof_range=[tof_min, tof_max], + theta=theta, + ) # R(Q) - qz, refl, d_refl = event_refl.specular(q_summing=False, tof_weighted=False, - bck_in_q=False, clean=False, normalize=True) - qz_mid = (qz[:-1] + qz[1:])/2.0 + qz, refl, d_refl = event_refl.specular(q_summing=False, tof_weighted=False, bck_in_q=False, clean=False, normalize=True) + qz_mid = (qz[:-1] + qz[1:]) / 2.0 return qz_mid, refl, d_refl From b0e355df7dff9b7fe30b9c5cd0800c5c38a10851 Mon Sep 17 00:00:00 2001 From: Mathieu Doucet Date: Mon, 6 Jan 2025 15:57:44 -0500 Subject: [PATCH 05/16] prettify --- docs/conf.py | 6 + environment.yml | 1 + reduction/lr_reduction/event_reduction.py | 271 +++++++++++++++------- 3 files changed, 200 insertions(+), 78 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index f47f909..394da85 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,6 +29,9 @@ "sphinx.ext.intersphinx", "sphinx.ext.viewcode", "sphinx.ext.coverage", + "sphinx.ext.mathjax", + "sphinx.ext.napoleon", + "myst_parser" ] autodoc_mock_imports = [ @@ -72,3 +75,6 @@ html_theme_options = {"style_nav_header_background": "#472375"} epub_show_urls = "footnote" # pylint: disable=C0103 + +nitpicky = True +nitpick_ignore = [('py:class', 'type')] \ No newline at end of file diff --git a/environment.yml b/environment.yml index 482147e..e3cb32c 100644 --- a/environment.yml +++ b/environment.yml @@ -19,3 +19,4 @@ dependencies: - sphinx - sphinx-rtd-theme - versioningit + - myst-parser diff --git a/reduction/lr_reduction/event_reduction.py b/reduction/lr_reduction/event_reduction.py index ed69866..33dcce2 100644 --- a/reduction/lr_reduction/event_reduction.py +++ b/reduction/lr_reduction/event_reduction.py @@ -30,9 +30,15 @@ def get_wl_range(ws): """ Determine TOF range from the data - :param ws: (Mantid workspace) workspace to work with - - :return: (list) [min, max] wavelength range + Parameters + ---------- + ws + Mantid workspace to work with + + Returns + ------- + list + [min, max] wavelength range """ run_object = ws.getRun() @@ -54,13 +60,20 @@ def get_q_binning(q_min=0.001, q_max=0.15, q_step=-0.02): If the step value is positive, it generates a linear binning. If the step value is negative, it generates a logarithmic binning. - :param q_min: (float, optional) The minimum Q value (default is 0.001). - :param q_max: (float, optional) The maximum Q value (default is 0.15). - :param q_step: (float, optional) The step size for Q binning. If positive, - linear binning is used. If negative, logarithmic binning is used (default is -0.02). - - :return: numpy.ndarray - An array of Q values based on the specified binning. + Parameters + ---------- + q_min : float + The minimum Q value. + q_max : float + The maximum Q value. + q_step : float + The step size for Q binning. If positive, linear binning is used. 
+ If negative, logarithmic binning is used. + + Returns + ------- + ... + A numpy array of Q values based on the specified binning. """ if q_step > 0: n_steps = int((q_max - q_min) / q_step) @@ -78,9 +91,14 @@ def get_attenuation_info(ws): in the path of the beam by summing up the thicknesses of the attenuators specified in the global variable `CD_ATTENUATORS`. - :param ws: (Mantid workspace) + Parameters + ---------- + ws Mantid workspace from which to retrieve the attenuation information. - :return: float + + Returns + ------- + float The total thickness of the attenuators in the path of the beam. """ run_info = ws.getRun() @@ -98,8 +116,15 @@ def read_settings(ws): """ Read settings file and return values for the given timestamp - :param ws: Mantid workspace - :return: (dict) dictionary with settings + Parameters + ---------- + ws + Mantid workspace + + Returns + ------- + dict + Dictionary with settings """ settings = dict() package_dir, _ = os.path.split(__file__) @@ -126,10 +151,17 @@ def process_attenuation(ws, thickness=0): """ Correct for absorption by assigning weight to each neutron event - :param ws: (Mantid workspace) workspace to correct - :param thickness: (float) attenuator thickness in cm (default is 0). - - :return: (Mantid workspace) corrected workspace + Parameters + ---------- + ws + Mantid workspace to correct + thickness: float + Attenuator thickness in cm (default is 0). + + Returns + ------- + Mantid workspace + Corrected Mantid workspace """ settings = read_settings(ws) if "source-det-distance" in settings: @@ -167,11 +199,17 @@ def get_dead_time_correction(ws, template_data): The method will also try to load the error events from each of the data files to ensure that we properly estimate the dead time correction. - :param ws: (Mantid worksapce) workspace with raw data to compute correction for - :param template_data: (reduction_template_reader.ReductionParameters) - reduction parameters - - :return: (Mantid workspace) workspace with dead time correction to apply + Parameters + ---------- + ws + Workspace with raw data to compute correction for + template_data : reduction_template_reader.ReductionParameters + Reduction parameters + + Returns + ------- + ... + Workspace with dead time correction to apply """ tof_min = ws.getTofMin() tof_max = ws.getTofMax() @@ -197,11 +235,17 @@ def apply_dead_time_correction(ws, template_data): Apply dead time correction, and ensure that it is done only once per workspace. - :param ws: (Mantid workspace) workspace with raw data to compute correction for - :param template_data: (reduction_template_reader.ReductionParameters) - reduction parameters - - :return: (Mantid workspace) workspace with dead time correction applied + Parameters + ---------- + ws + Workspace with raw data to compute correction for + template_data : reduction_template_reader.ReductionParameters + Reduction parameters + + Returns + ------- + ... + Workspace with dead time correction applied """ if "dead_time_applied" not in ws.getRun(): corr_ws = get_dead_time_correction(ws, template_data) @@ -222,25 +266,46 @@ class EventReflectivity(object): Pixel ranges include the min and max pixels. 
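
    A typical construction, modeled on the call in workflow.reduce_explorer
    (the peak ranges, TOF limits and angle below are placeholder values)::

        event_refl = EventReflectivity(ws, ws_db,
                                       signal_peak=peak, signal_bck=peak_bck,
                                       norm_peak=norm_peak, norm_bck=norm_bck,
                                       specular_pixel=145,
                                       signal_low_res=[65, 180], norm_low_res=[65, 180],
                                       q_min=None, q_max=None,
                                       tof_range=[tof_min, tof_max],
                                       theta=theta)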
- :param scattering_workspace: Mantid workspace containing the reflected data - :param direct_workspace: Mantid workspace containing the direct beam data [if None, normalization won't be applied] - :param signal_peak: pixel min and max for the specular peak - :param signal_bck: pixel range of the background [if None, the background won't be subtracted] - :param norm_peak: pixel range of the direct beam peak - :param norm_bck: direct background subtraction is not used [deprecated] - :param specular_pixel: pixel of the specular peak - :param signal_low_res: pixel range of the specular peak out of the scattering plane - :param norm_low_res: pixel range of the direct beam out of the scattering plane - :param q_min: value of lowest q point - :param q_step: step size in Q. Enter a negative value to get a log scale - :param q_min: value of largest q point - :param tof_range: TOF range,or None - :param theta: theta scattering angle in radians - :param dead_time: if not zero, dead time correction will be used - :param paralyzable: if True, the dead time calculation will use the paralyzable approach - :param dead_time_value: value of the dead time in microsecond - :param dead_time_tof_step: TOF bin size in microsecond - :param use_emmission_time: if True, the emission time delay will be computed + Parameters + ---------- + scattering_workspace + Mantid workspace containing the reflected data + direct_workspace + Mantid workspace containing the direct beam data [if None, normalization won't be applied] + signal_peak : tuple + Pixel min and max for the specular peak + signal_bck : tuple + Pixel range of the background [if None, the background won't be subtracted] + norm_peak : tuple + Pixel range of the direct beam peak + norm_bck : tuple + Direct background subtraction is not used [deprecated] + specular_pixel : float + Pixel of the specular peak + signal_low_res : tuple + Pixel range of the specular peak out of the scattering plane + norm_low_res : tuple + Pixel range of the direct beam out of the scattering plane + q_min : float + Value of lowest q point + q_step : float + Step size in Q. Enter a negative value to get a log scale + q_min : float + Value of largest q point + tof_range : tuple, None + TOF range,or None + theta : float + Theta scattering angle in radians + dead_time : float + If not zero, dead time correction will be used + paralyzable : bool + If True, the dead time calculation will use the paralyzable approach + dead_time_value : float + value of the dead time in microsecond + dead_time_tof_step : float + TOF bin size in microsecond + use_emmission_time : bool + If True, the emission time delay will be computed """ QX_VS_QZ = 0 @@ -416,7 +481,10 @@ def __repr__(self): """ Generate a string representation of the reduction settings. - :return: (str) string representation of the reduction settings + Returns + ------- + str + String representation of the reduction settings """ output = "Reduction settings:\n" output += " sample-det: %s\n" % self.det_distance @@ -433,7 +501,10 @@ def to_dict(self): """ Returns meta-data to be used/stored. - :return: (dict) dictionary with meta-data + Returns + ------- + dict + Dictionary with meta-data """ if self._ws_sc.getRun().hasProperty("start_time"): start_time = self._ws_sc.getRun().getProperty("start_time").value @@ -476,17 +547,27 @@ def specular(self, q_summing=False, tof_weighted=False, bck_in_q=False, clean=Fa For constant-Q binning, it's preferred to use tof_weighted=True. 
- :param q_summing: turns on constant-Q binning - :param tof_weighted: if True, binning will be done by weighting each event to the DB distribution - :param bck_in_q: if True, the background will be estimated in Q space using the constant-Q binning approach - :param clean: if True, and Q summing is True, then leading artifact will be removed - :param normalize: if True, and tof_weighted is False, normalization will be skipped - - :return: A tuple containing: - - - q_bins: The Q bin boundaries - - refl: The reflectivity values - - d_refl: The uncertainties in the reflectivity values + Parameters + ---------- + q_summing : bool + Turns on constant-Q binning + tof_weighted : bool + If True, binning will be done by weighting each event to the DB distribution + bck_in_q : bool + If True, the background will be estimated in Q space using the constant-Q binning approach + clean : bool + If True, and Q summing is True, then leading artifact will be removed + normalize : bool + If True, and tof_weighted is False, normalization will be skipped + + Returns + ------- + q_bins + The Q bin boundaries + refl + The reflectivity values + d_refl + The uncertainties in the reflectivity values """ if tof_weighted: self.specular_weighted(q_summing=q_summing, bck_in_q=bck_in_q) @@ -523,15 +604,21 @@ def specular_unweighted(self, q_summing=False, normalize=True): The original approach bins in TOF, then rebins the final results after transformation to Q. This approach bins directly to Q. - :param q_summing: (bool, optional) If True, sum the data in Q-space (default is False). - :param normalize: (bool, optional) If True, normalize the reflectivity by the direct - beam (default is True). - - :return: A tuple containing: - - - q_bins: The Q bin boundaries - - refl: The reflectivity values - - d_refl: The uncertainties in the reflectivity values + Parameters + ---------- + q_summing : bool + If True, sum the data in Q-space. + normalize : bool + If True, normalize the reflectivity by the direct beam. + + Returns + ------- + q_bins + The Q bin boundaries + refl + The reflectivity values + d_refl + The uncertainties in the reflectivity values """ # Scattering data refl, d_refl = self._reflectivity( @@ -591,14 +678,21 @@ def specular_weighted(self, q_summing=True, bck_in_q=False): This allows for summing in Q and to estimate the background in either Q or pixels next to the peak. - :param q_summing: (bool, optional) If True, sum the data in Q-space (default is False). - :param bck_in_q: (bool, optional) If True, subtract background along Q lines (default is False). - - :return: A tuple containing: - - - q_bins: The Q bin boundaries - - refl: The reflectivity values - - d_refl: The uncertainties in the reflectivity values + Parameters + ---------- + q_summing : bool + If True, sum the data in Q-space. + bck_in_q : bool + If True, subtract background along Q lines. + + Returns + ------- + q_bins + The Q bin boundaries + refl + The reflectivity values + d_refl + The uncertainties in the reflectivity values """ # Event weights for normalization db_charge = self._ws_db.getRun().getProtonCharge() @@ -656,8 +750,29 @@ def _roi_integration(self, ws, peak, low_res, q_bins=None, wl_dist=None, wl_bins return refl_bck, d_refl_bck def bck_subtraction(self, normalize_to_single_pixel=False, q_bins=None, wl_dist=None, wl_bins=None, q_summing=False): + """ - Higher-level call for background subtraction. Hides the ranges needed to define the ROI. + Perform background subtraction on the signal. 
+        This method provides a higher-level call for background subtraction, hiding the ranges needed to define the Region of Interest (ROI).
+
+        Parameters
+        ----------
+        normalize_to_single_pixel : bool
+            If True, normalize the background to a single pixel.
+        q_bins
+            Array of bins for the momentum transfer (q) values.
+        wl_dist
+            Array of wavelength (wl) values.
+        wl_bins
+            Array of bins for the wavelength (wl) values.
+        q_summing : bool
+            If True, sum the counts in Q bins.
+
+        Returns
+        -------
+        mantid.api.Workspace
+            The workspace with the background subtracted.
+
         """
         # Sanity check
         if len(self.signal_bck) == 2 and self.use_functional_bck:

From 60558f3e289b1d4e1065852e1984836540e5923c Mon Sep 17 00:00:00 2001
From: Mathieu Doucet
Date: Mon, 6 Jan 2025 16:14:53 -0500
Subject: [PATCH 06/16] Done with main reduction code

---
 docs/conf.py                              |  5 +-
 reduction/lr_reduction/event_reduction.py | 75 ++++++++++++++++++-----
 2 files changed, 61 insertions(+), 19 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index 394da85..f0d238c 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -37,6 +37,7 @@
 autodoc_mock_imports = [
     "mantid",
     "mantid.api",
+    "mantid.api.Workspace",
     "mantid.kernel",
     "mantid.utils",
     "mantid.utils.logging",
@@ -52,6 +53,7 @@
     "mantid.plots.plotfunctions",
     "mantid.plots.datafunctions",
     "mantid.plots.utility",
+    "numpy.ndarray"
 ]

 master_doc = "index"
@@ -75,6 +77,3 @@
 html_theme_options = {"style_nav_header_background": "#472375"}

 epub_show_urls = "footnote"  # pylint: disable=C0103
-
-nitpicky = True
-nitpick_ignore = [('py:class', 'type')]
\ No newline at end of file
diff --git a/reduction/lr_reduction/event_reduction.py b/reduction/lr_reduction/event_reduction.py
index 33dcce2..4d2ed21 100644
--- a/reduction/lr_reduction/event_reduction.py
+++ b/reduction/lr_reduction/event_reduction.py
@@ -72,7 +72,7 @@ def get_q_binning(q_min=0.001, q_max=0.15, q_step=-0.02):

     Returns
     -------
-    ...
+    numpy.ndarray
         A numpy array of Q values based on the specified binning.
     """
     if q_step > 0:
@@ -208,7 +208,7 @@ def get_dead_time_correction(ws, template_data):

     Returns
     -------
-    ...
+    mantid.api.Workspace
         Workspace with dead time correction to apply
     """
     tof_min = ws.getTofMin()
@@ -244,7 +244,7 @@ def apply_dead_time_correction(ws, template_data):

     Returns
     -------
-    ...
+    mantid.api.Workspace
         Workspace with dead time correction applied
     """
     if "dead_time_applied" not in ws.getRun():
@@ -772,7 +772,6 @@ def bck_subtraction(self, normalize_to_single_pixel=False, q_bins=None, wl_dist=
         -------
         mantid.api.Workspace
             The workspace with the background subtracted.
-
         """
         # Sanity check
         if len(self.signal_bck) == 2 and self.use_functional_bck:
@@ -964,13 +963,23 @@ def _get_events(self, ws, peak, low_res):
     def off_specular(self, x_axis=None, x_min=-0.015, x_max=0.015, x_npts=50, z_min=None, z_max=None, z_npts=-120, bck_in_q=None):
         """
         Compute off-specular
-        :param x_axis: Axis selection
-        :param x_min: Min value on x-axis
-        :param x_max: Max value on x-axis
-        :param x_npts: Number of points in x (negative will produce a log scale)
-        :param z_min: Min value on z-axis (if none, default Qz will be used)
-        :param z_max: Max value on z-axis (if none, default Qz will be used)
-        :param z_npts: Number of points in z (negative will produce a log scale)
+
+        Parameters
+        ----------
+        x_axis : int
+            Axis selection from QX_VS_QZ, KZI_VS_KZF, DELTA_KZ_VS_QZ
+        x_min : float
+            Min value on x-axis
+        x_max : float
+            Max value on x-axis
+        x_npts : int
+            Number of points in x (negative will produce a log scale)
+        z_min : float
+            Min value on z-axis (if None, default Qz will be used)
+        z_max : float
+            Max value on z-axis (if None, default Qz will be used)
+        z_npts : int
+            Number of points in z (negative will produce a log scale)
         """
         # Z axis binning
         qz_bins = self.q_bins
@@ -1072,9 +1081,19 @@ def _off_specular(self, ws, wl_dist, wl_bins, x_bins, z_bins, peak_position, the

     def emission_time_correction(self, ws, tofs):
         """
-        Coorect TOF for emission time delay in the moderator
-        :param ws: Mantid workspace
-        :param tofs: list of TOF values
+        Correct TOF for emission time delay in the moderator.
+
+        Parameters
+        ----------
+        ws : mantid.api.Workspace
+            Mantid workspace to extract correction meta-data from
+        tofs : numpy.ndarray
+            Array of uncorrected TOF values
+
+        Returns
+        -------
+        numpy.ndarray
+            Array of corrected TOF values
         """
         mt_run = ws.getRun()
         use_emission_delay = False
@@ -1091,6 +1110,18 @@ def emission_time_correction(self, ws, tofs):
     def gravity_correction(self, ws, wl_list):
         """
         Gravity correction for each event
+
+        Parameters
+        ----------
+        ws : mantid.api.Workspace
+            Mantid workspace to extract correction meta-data from.
+        wl_list : numpy.ndarray
+            Array of wavelengths for each event.
+
+        Returns
+        -------
+        numpy.ndarray
+            Array of gravity-corrected theta values for each event, in radians.
         """
         # Xi reference would be the position of xi if the si slit were to be positioned
         # at the sample. The distance from the sample to si is then xi_reference - xi.
@@ -1146,8 +1177,20 @@ def gravity_correction(self, ws, wl_list):
 def compute_resolution(ws, default_dq=0.027, theta=None, q_summing=False):
     """
     Compute the Q resolution from the meta data.
-    :param theta: scattering angle in radians
-    :param q_summing: if True, the pixel size will be used for the resolution
+
+    Parameters
+    ----------
+    ws : mantid.api.Workspace
+        Mantid workspace to extract meta-data from.
+ theta : float + Scattering angle in radians + q_summing : bool + If True, the pixel size will be used for the resolution + + Returns + ------- + float + The dQ/Q resolution (FWHM) """ settings = read_settings(ws) From 1317193021ea488884ae720d0fafab16faabcb29 Mon Sep 17 00:00:00 2001 From: Mathieu Doucet Date: Mon, 6 Jan 2025 16:37:09 -0500 Subject: [PATCH 07/16] Add docs to workflow.py --- reduction/lr_reduction/workflow.py | 111 +++++++++++++++++++++++++---- 1 file changed, 98 insertions(+), 13 deletions(-) diff --git a/reduction/lr_reduction/workflow.py b/reduction/lr_reduction/workflow.py index 449c8f2..e5a7725 100644 --- a/reduction/lr_reduction/workflow.py +++ b/reduction/lr_reduction/workflow.py @@ -21,11 +21,28 @@ def reduce( If average_overlap is used, overlapping points will be averaged, otherwise they will be left in the final data file. - :param average_overlap: if True, the overlapping points will be averaged - :param q_summing: if True, constant-Q binning will be used - :param bck_in_q: if True, and constant-Q binning is used, the background will be estimated - along constant-Q lines rather than along TOF/pixel boundaries. - :param theta_offset: Theta offset to apply. If None, the template value will be used. + Parameters + ---------- + average_overlap : bool + If True, the overlapping points will be averaged + q_summing : bool + If True, constant-Q binning will be used + bck_in_q : bool + If True, and constant-Q binning is used, the background will be estimated + along constant-Q lines rather than along TOF/pixel boundaries. + theta_offset : float + Theta offset to apply. If None, the template value will be used. + is_live : bool + If True, the data is live and will be saved in a separate file to avoid conflict with auto-reduction + output_dir : str + Directory where the output files will be saved + template_file : str + Path to the template file containing the reduction parameters + + Returns + ------- + int + The sequence identifier for the run sequence """ # Get the sequence number sequence_number = 1 @@ -77,6 +94,24 @@ def reduce( def assemble_results(first_run, output_dir, average_overlap=False, is_live=False): """ Find related runs and assemble them in one R(q) data set + + Parameters + ---------- + first_run : int + The first run number in the sequence + output_dir : str + Directory where the output files are saved + average_overlap : bool + If True, the overlapping points will be averaged + is_live : bool + If True, the data is live and will be saved in a separate file to avoid conflict with auto-reduction + + Returns + ------- + seq_list : tuple + The sequence identifiers + run_list : tuple + The run numbers """ # Keep track of sequence IDs and run numbers so we can make a new template seq_list = [] @@ -111,6 +146,17 @@ def write_template(seq_list, run_list, template_file, output_dir): """ Read the appropriate entry in a template file and save an updated copy with the updated run number. + + Parameters + ---------- + seq_list : tuple + The sequence identifiers + run_list : tuple + The run numbers + template_file : str + Path to the template file + output_dir : str + Directory where the output files are saved """ with open(template_file, "r") as fd: xml_str = fd.read() @@ -130,14 +176,26 @@ def write_template(seq_list, run_list, template_file, output_dir): fd.write(xml_str) -def offset_from_first_run( - ws, - template_file: str, - output_dir: str, -): +def offset_from_first_run(ws, template_file: str, output_dir: str): """ - Find a theta offset from the first peak. 
-    Used when sample is misaligned.
+    Find a theta offset by comparing the peak locations of the reflected and direct beams against
+    the theta value in the meta data.
+
+    When processing the first run of a set, store that offset in a file so it can be used for later runs.
+
+    Parameters
+    ----------
+    ws : Mantid workspace
+        The workspace to process
+    template_file : str
+        Path to the template file
+    output_dir : str
+        Directory where the output files are saved
+
+    Returns
+    -------
+    float
+        The theta offset
     """
     from . import peak_finding

@@ -203,7 +261,34 @@ def offset_from_first_run(


 def reduce_explorer(ws, ws_db, theta_pv=None, center_pixel=145, db_center_pixel=145, peak_width=10):
-    """ """
+    """
+    Perform a very simple, rough reduction for quick exploration of the data.
+
+    Parameters
+    ----------
+    ws : Mantid workspace
+        The workspace to process
+    ws_db : Mantid workspace
+        The workspace with the direct beam data
+    theta_pv : str
+        The PV name for the theta value
+    center_pixel : int
+        The pixel number for the center of the reflected beam
+    db_center_pixel : int
+        The pixel number for the center of the direct beam
+    peak_width : int
+        The width of the peak to use for the reflected beam
+
+    Returns
+    -------
+    qz_mid : numpy.ndarray
+        The Q values
+    refl : numpy.ndarray
+        The reflectivity values
+    d_refl : numpy.ndarray
+        The uncertainty in the reflectivity
+
+    """
     from . import peak_finding

     if theta_pv is None:

From 316d24fd8d4a2be4c1c7a7f51fc697ac5fdc151c Mon Sep 17 00:00:00 2001
From: Mathieu Doucet
Date: Mon, 6 Jan 2025 16:58:10 -0500
Subject: [PATCH 08/16] Add coments

---
 reduction/lr_reduction/background.py      | 199 +++++++++++++++-------
 reduction/lr_reduction/event_reduction.py |  14 +-
 reduction/lr_reduction/workflow.py        |   8 +-
 3 files changed, 146 insertions(+), 75 deletions(-)

diff --git a/reduction/lr_reduction/background.py b/reduction/lr_reduction/background.py
index 3f97a16..89854e6 100644
--- a/reduction/lr_reduction/background.py
+++ b/reduction/lr_reduction/background.py
@@ -4,11 +4,23 @@

 def find_ranges_without_overlap(r1, r2):
     """
-    Returns the part of r1 that does not contain r2
-    When summing pixels for reflectivity, include the full range,
-    which means that for a range [a, b], b is included.
-    The range that we return must always exclude the pixels
-    included in r2.
+    Returns the part of r1 that does not contain r2.
+    When summing pixels for reflectivity, include the full range,
+    which means that for a range [a, b], b is included.
+    The range that we return must always exclude the pixels
+    included in r2.
+
+    Parameters
+    ----------
+    r1 : list
+        Range of pixels to consider
+    r2 : list
+        Range of pixels to exclude
+
+    Returns
+    -------
+    list
+        List of ranges that do not overlap with r2
     """
     x1, x2 = r1
     x3, x4 = r2
@@ -32,10 +44,42 @@ def find_ranges_without_overlap(r1, r2):
     return []  # no range without r2


-def functional_background(ws, event_reflectivity, peak, bck, low_res,
-                          normalize_to_single_pixel=False, q_bins=None,
-                          wl_dist=None, wl_bins=None, q_summing=False):
+def functional_background(
+    ws, event_reflectivity, peak, bck, low_res, normalize_to_single_pixel=False, q_bins=None, wl_dist=None, wl_bins=None, q_summing=False
+):
     """
+    Estimate background using a linear function over a background range that may include the specular peak.
+    In the case where the peak is included in the background range, the peak is excluded from the background.
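An illustrative aside on the linear background estimate described above: the fitting pattern can be sketched in a few lines with lmfit's LinearModel, which is also what the refactored code below uses. The pixel and count values here are invented for demonstration; only the fit/evaluate pattern mirrors the reduction code:

```python
import numpy as np
from lmfit.models import LinearModel

# Made-up demonstration data: normalized counts in background pixels on
# either side of a specular peak (pixels 135-145, excluded from the fit)
pixels = np.array([120, 121, 122, 158, 159, 160])
counts = np.array([4.1, 3.9, 4.0, 3.2, 3.1, 3.0])
errors = np.full_like(counts, 0.2)

model = LinearModel()
params = model.make_params(slope=0, intercept=counts.mean())
result = model.fit(counts, params, x=pixels, weights=1.0 / errors)

# Evaluate the fitted line under the excluded peak pixels to estimate
# the background sitting beneath the specular signal
peak_pixels = np.arange(135, 146)
print(result.eval(x=peak_pixels).sum())
```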
+
+    Parameters
+    ----------
+    ws : Mantid workspace
+        Workspace containing the data
+    event_reflectivity : EventReflectivity
+        EventReflectivity object
+    peak : list
+        Range of pixels that define the peak
+    bck : list
+        Range of pixels that define the background. It contains 4 pixels, defining up to two ranges.
+    low_res : list
+        Range in the x direction on the detector
+    normalize_to_single_pixel : bool
+        If True, the background is normalized to the number of pixels used to integrate the signal
+    q_bins : numpy.ndarray
+        Array of Q bins
+    wl_dist : numpy.ndarray
+        Wavelength distribution for the case where we use weighted events for normalization
+    wl_bins : numpy.ndarray
+        Array of wavelength bins for the case where we use weighted events for normalization
+    q_summing : bool
+        If True, sum the counts in Q bins
+
+    Returns
+    -------
+    numpy.ndarray
+        Reflectivity background
+    numpy.ndarray
+        Reflectivity background error
     """
     charge = ws.getRun().getProtonCharge()
     # For each background range, exclude the peak
@@ -53,13 +97,18 @@
             # which has been the default before implementing this more flexible
             # approach.
             if not r[0] == r[1]:
-                _b, _d_b = event_reflectivity._reflectivity(ws, peak_position=0,
-                                                            q_bins=q_bins,
-                                                            peak=r, low_res=low_res,
-                                                            theta=event_reflectivity.theta,
-                                                            q_summing=q_summing,
-                                                            wl_dist=wl_dist, wl_bins=wl_bins,
-                                                            sum_pixels=False)
+                _b, _d_b = event_reflectivity._reflectivity(
+                    ws,
+                    peak_position=0,
+                    q_bins=q_bins,
+                    peak=r,
+                    low_res=low_res,
+                    theta=event_reflectivity.theta,
+                    q_summing=q_summing,
+                    wl_dist=wl_dist,
+                    wl_bins=wl_bins,
+                    sum_pixels=False,
+                )
                 bck_counts.append(_b)
                 d_bck_counts.append(_d_b)
                 pixels.extend(list(range(r[0], r[1] + 1)))
@@ -79,103 +128,125 @@
         linear = LinearModel()
         pars = linear.make_params(slope=0, intercept=_estimate)

-        weights=1/_d_bck[:, i]
+        weights = 1 / _d_bck[:, i]
         # Here we have counts normalized by proton charge, so if we want to
         # assign an error of 1 on the counts, it should be 1/charge.
-        weights[_bck[:, i]==0]=charge
+        weights[_bck[:, i] == 0] = charge

-        fit = linear.fit(_bck[:, i], pars, method='leastsq', x=pixels, weights=weights)
+        fit = linear.fit(_bck[:, i], pars, method="leastsq", x=pixels, weights=weights)

-        slope = fit.params['slope'].value
-        intercept = fit.params['intercept'].value
+        slope = fit.params["slope"].value
+        intercept = fit.params["intercept"].value
         d_slope = np.sqrt(fit.covar[0][0])
         d_intercept = np.sqrt(fit.covar[1][1])

         # Compute background under the peak
         total_bck = 0
         total_err = 0
-        for k in range(peak[0], peak[1]+1):
+        for k in range(peak[0], peak[1] + 1):
             total_bck += intercept + k * slope
             total_err += d_intercept**2 + k**2 * d_slope**2

         _pixel_area = peak[1] - peak[0] + 1.0
         refl_bck[i] = (slope * (peak[1] + peak[0] + 1) + 2 * intercept) * _pixel_area / 2
-        d_refl_bck[i] = np.sqrt(d_slope**2 * (peak[1] + peak[0]+1)**2 + 4 * d_intercept**2
-                                + 4 * (peak[1] + peak[0]+1) * fit.covar[0][1]) * _pixel_area / 2
+        d_refl_bck[i] = (
+            np.sqrt(d_slope**2 * (peak[1] + peak[0] + 1) ** 2 + 4 * d_intercept**2 + 4 * (peak[1] + peak[0] + 1) * fit.covar[0][1])
+            * _pixel_area
+            / 2
+        )

     # In case we need the background per pixel as opposed to the total sum under the peak
     if normalize_to_single_pixel:
-        _pixel_area = peak[1] - peak[0]+1.0
+        _pixel_area = peak[1] - peak[0] + 1.0
         refl_bck /= _pixel_area
         d_refl_bck /= _pixel_area

     return refl_bck, d_refl_bck


-def side_background(ws, event_reflectivity, peak, bck, low_res,
-                    normalize_to_single_pixel=False, q_bins=None,
-                    wl_dist=None, wl_bins=None, q_summing=False):
+def side_background(
+    ws, event_reflectivity, peak, bck, low_res, normalize_to_single_pixel=False, q_bins=None,
+    wl_dist=None, wl_bins=None, q_summing=False
+):
     """
-    Original background substration done using two pixels defining the
-    area next to the specular peak that are considered background.
+    Original background subtraction done using two pixels defining the
+    area next to the specular peak that are considered background.
+
+    Parameters
+    ----------
+    ws : Mantid workspace
+        Workspace containing the data
+    event_reflectivity : EventReflectivity
+        EventReflectivity object
+    peak : list
+        Range of pixels that define the peak
+    bck : list
+        Range of pixels that define the background
+    low_res : list
+        Range in the x direction on the detector
+    normalize_to_single_pixel : bool
+        If True, the background is normalized to the number of pixels used to integrate the signal
+    q_bins : numpy.ndarray
+        Array of Q bins
+    wl_dist : numpy.ndarray
+        Wavelength distribution for the case where we use weighted events for normalization
+    wl_bins : numpy.ndarray
+        Array of wavelength bins for the case where we use weighted events for normalization
+    q_summing : bool
+        If True, sum the counts in Q bins
+
+    Returns
+    -------
+    numpy.ndarray
+        Reflectivity background
+    numpy.ndarray
+        Reflectivity background error
     """
     q_bins = event_reflectivity.q_bins if q_bins is None else q_bins

     # Background on the left of the peak only. We allow the user to overlap the peak
     # on the right, but only use the part left of the peak.
- if bck[0] < peak[0]-1 and bck[1] < peak[1]+1: - right_side = min(bck[1], peak[0]-1) + if bck[0] < peak[0] - 1 and bck[1] < peak[1] + 1: + right_side = min(bck[1], peak[0] - 1) _left = [bck[0], right_side] print("Left side background: [%s, %s]" % (_left[0], _left[1])) - refl_bck, d_refl_bck = event_reflectivity._roi_integration(ws, peak=_left, - low_res=low_res, - q_bins=q_bins, - wl_dist=wl_dist, - wl_bins=wl_bins, - q_summing=q_summing) + refl_bck, d_refl_bck = event_reflectivity._roi_integration( + ws, peak=_left, low_res=low_res, q_bins=q_bins, wl_dist=wl_dist, wl_bins=wl_bins, q_summing=q_summing + ) # Background on the right of the peak only. We allow the user to overlap the peak # on the left, but only use the part right of the peak. - elif bck[0] > peak[0]-1 and bck[1] > peak[1]+1: - left_side = max(bck[0], peak[1]+1) + elif bck[0] > peak[0] - 1 and bck[1] > peak[1] + 1: + left_side = max(bck[0], peak[1] + 1) _right = [left_side, bck[1]] print("Right side background: [%s, %s]" % (_right[0], _right[1])) - refl_bck, d_refl_bck = event_reflectivity._roi_integration(ws, peak=_right, - low_res=low_res, - q_bins=q_bins, - wl_dist=wl_dist, - wl_bins=wl_bins, - q_summing=q_summing) + refl_bck, d_refl_bck = event_reflectivity._roi_integration( + ws, peak=_right, low_res=low_res, q_bins=q_bins, wl_dist=wl_dist, wl_bins=wl_bins, q_summing=q_summing + ) # Background on both sides - elif bck[0] < peak[0]-1 and bck[1] > peak[1]+1: - _left = [bck[0], peak[0]-1] - refl_bck, d_refl_bck = event_reflectivity._roi_integration(ws, peak=_left, - low_res=low_res, - q_bins=q_bins, - wl_dist=wl_dist, - wl_bins=wl_bins, - q_summing=q_summing) - _right = [peak[1]+1, bck[1]] - _refl_bck, _d_refl_bck = event_reflectivity._roi_integration(ws, peak=_right, - low_res=low_res, - q_bins=q_bins, - wl_dist=wl_dist, - wl_bins=wl_bins, - q_summing=q_summing) + elif bck[0] < peak[0] - 1 and bck[1] > peak[1] + 1: + _left = [bck[0], peak[0] - 1] + refl_bck, d_refl_bck = event_reflectivity._roi_integration( + ws, peak=_left, low_res=low_res, q_bins=q_bins, wl_dist=wl_dist, wl_bins=wl_bins, q_summing=q_summing + ) + _right = [peak[1] + 1, bck[1]] + _refl_bck, _d_refl_bck = event_reflectivity._roi_integration( + ws, peak=_right, low_res=low_res, q_bins=q_bins, wl_dist=wl_dist, wl_bins=wl_bins, q_summing=q_summing + ) print("Background on both sides: [%s %s] [%s %s]" % (_left[0], _left[1], _right[0], _right[1])) - refl_bck = (refl_bck + _refl_bck)/2.0 - d_refl_bck = np.sqrt(d_refl_bck**2 + _d_refl_bck**2)/2.0 + refl_bck = (refl_bck + _refl_bck) / 2.0 + d_refl_bck = np.sqrt(d_refl_bck**2 + _d_refl_bck**2) / 2.0 else: print("Invalid background: [%s %s]" % (bck[0], bck[1])) - refl_bck = np.zeros(q_bins.shape[0]-1) + refl_bck = np.zeros(q_bins.shape[0] - 1) d_refl_bck = refl_bck # At this point we have integrated the region of interest and obtain the average per # pixel, so unless that's what we want we need to multiply by the number of pixels # used to integrate the signal. 
 if not normalize_to_single_pixel:
-        _pixel_area = peak[1] - peak[0]+1.0
+        _pixel_area = peak[1] - peak[0] + 1.0
         refl_bck *= _pixel_area
         d_refl_bck *= _pixel_area

diff --git a/reduction/lr_reduction/event_reduction.py b/reduction/lr_reduction/event_reduction.py
index 4d2ed21..da9154c 100644
--- a/reduction/lr_reduction/event_reduction.py
+++ b/reduction/lr_reduction/event_reduction.py
@@ -272,19 +272,19 @@ class EventReflectivity(object):
         Mantid workspace containing the reflected data
     direct_workspace
         Mantid workspace containing the direct beam data [if None, normalization won't be applied]
-    signal_peak : tuple
+    signal_peak : list
         Pixel min and max for the specular peak
-    signal_bck : tuple
+    signal_bck : list
         Pixel range of the background [if None, the background won't be subtracted]
-    norm_peak : tuple
+    norm_peak : list
         Pixel range of the direct beam peak
-    norm_bck : tuple
+    norm_bck : list
         Direct background subtraction is not used [deprecated]
     specular_pixel : float
         Pixel of the specular peak
-    signal_low_res : tuple
+    signal_low_res : list
         Pixel range of the specular peak out of the scattering plane
-    norm_low_res : tuple
+    norm_low_res : list
         Pixel range of the direct beam out of the scattering plane
     q_min : float
         Value of lowest Q point
     q_step : float
         Step size in Q. Enter a negative value to get a log scale
     q_max : float
         Value of largest Q point
-    tof_range : tuple, None
+    tof_range : list, None
         TOF range, or None
     theta : float
         Theta scattering angle in radians
diff --git a/reduction/lr_reduction/workflow.py b/reduction/lr_reduction/workflow.py
index e5a7725..2506978 100644
--- a/reduction/lr_reduction/workflow.py
+++ b/reduction/lr_reduction/workflow.py
@@ -108,9 +108,9 @@ def assemble_results(first_run, output_dir, average_overlap=False, is_live=False

     Returns
     -------
-    seq_list : tuple
+    seq_list : list
         The sequence identifiers
-    run_list : tuple
+    run_list : list
         The run numbers
     """
     # Keep track of sequence IDs and run numbers so we can make a new template
@@ -149,9 +149,9 @@ def write_template(seq_list, run_list, template_file, output_dir):

     Parameters
     ----------
-    seq_list : tuple
+    seq_list : list
         The sequence identifiers
-    run_list : tuple
+    run_list : list
         The run numbers
     template_file : str
         Path to the template file
     output_dir : str
         Directory where the output files are saved

From 11b62d1e13b9ed36d854678c8b894800cd906f8c Mon Sep 17 00:00:00 2001
From: glass-ships
Date: Mon, 6 Jan 2025 13:36:28 -0500
Subject: [PATCH 09/16] Migrate to mkdocs

---
 .github/dependabot.yml                        |  10 +
 .github/workflows/actions.yml                 |  37 ++--
 .github/workflows/deploy_docs.yaml            |  38 ++++
 .pre-commit-config.yaml                       |   8 +-
 Makefile                                      |  11 +-
 docs/Makefile                                 | 195 ------------------
 docs/api/background.md                        |   1 +
 docs/api/event_reduction.md                   |   1 +
 docs/api/index.md                             |  11 +
 docs/api/output.md                            |   1 +
 docs/api/peak_finding.md                      |   1 +
 docs/api/reduction_template_reader.md         |   1 +
 docs/api/template.md                          |   1 +
 docs/api/time_resolved.md                     |   1 +
 docs/api/utils.md                             |   1 +
 docs/api/workflow.md                          |   1 +
 docs/conf.py                                  |  79 -------
 .../contributing.md}                          |   9 +-
 docs/developer/developer.md                   | 137 ++++++++++++
 docs/index.md                                 |  27 +++
 docs/index.rst                                |  42 ----
 docs/make.bat                                 |  35 ----
 docs/{source/releases.rst => releases.md}     |  21 +-
 docs/source/api/lr_reduction.rst              |  81 --------
 docs/source/developer/developer.rst           | 141 -------------
 .../conda_environments.md}                    |  23 +--
 environment.yml                               |  10 +-
 mkdocs.yaml                                   |  42 ++++
 pyproject.toml                                |  75 ++++---
 29 files changed, 382 insertions(+), 659 deletions(-)
 create mode 100644 .github/dependabot.yml
 create mode 100644
.github/workflows/deploy_docs.yaml
 delete mode 100644 docs/Makefile
 create mode 100644 docs/api/background.md
 create mode 100644 docs/api/event_reduction.md
 create mode 100644 docs/api/index.md
 create mode 100644 docs/api/output.md
 create mode 100644 docs/api/peak_finding.md
 create mode 100644 docs/api/reduction_template_reader.md
 create mode 100644 docs/api/template.md
 create mode 100644 docs/api/time_resolved.md
 create mode 100644 docs/api/utils.md
 create mode 100644 docs/api/workflow.md
 delete mode 100644 docs/conf.py
 rename docs/{source/developer/contributing.rst => developer/contributing.md} (84%)
 create mode 100644 docs/developer/developer.md
 create mode 100644 docs/index.md
 delete mode 100644 docs/index.rst
 delete mode 100644 docs/make.bat
 rename docs/{source/releases.rst => releases.md} (80%)
 delete mode 100644 docs/source/api/lr_reduction.rst
 delete mode 100644 docs/source/developer/developer.rst
 rename docs/{source/user/conda_environments.rst => user/conda_environments.md} (73%)
 create mode 100644 mkdocs.yaml

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..f9c645b
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,10 @@
+# Set update schedule for GitHub Actions
+
+version: 2
+updates:
+
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      # Check for updates to GitHub Actions once a month
+      interval: "monthly"
\ No newline at end of file
diff --git a/.github/workflows/actions.yml b/.github/workflows/actions.yml
index 915d89d..4c16b0f 100644
--- a/.github/workflows/actions.yml
+++ b/.github/workflows/actions.yml
@@ -14,20 +14,23 @@ jobs:
       run:
         shell: bash -l {0}
     steps:
-    - uses: actions/checkout@v2
-    - name: Set up Miniconda
-      uses: conda-incubator/setup-miniconda@v2
-      with:
-        auto-update-conda: true
-        miniforge-version: latest
-        environment-file: environment.yml
-    - name: Test with pytest
-      working-directory: ./reduction
-      run: |
-        git submodule add --force https://code.ornl.gov/sns-hfir-scse/infrastructure/test-data/liquidsreflectometer-data.git tests/data/liquidsreflectometer-data
-        git submodule update --init
-        python -m pytest -vv --cov=. --cov-report=xml --cov-report=term test
-    - name: Upload coverage reports to Codecov
-      uses: codecov/codecov-action@v3
-      env:
-        CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+      - uses: actions/checkout@v4
+
+      - name: Set up Miniconda
+        uses: conda-incubator/setup-miniconda@v3
+        with:
+          auto-update-conda: true
+          miniforge-version: latest
+          environment-file: environment.yml
+
+      - name: Test with pytest
+        working-directory: ./reduction
+        run: |
+          git submodule add --force https://code.ornl.gov/sns-hfir-scse/infrastructure/test-data/liquidsreflectometer-data.git tests/data/liquidsreflectometer-data
+          git submodule update --init
+          python -m pytest -vv --cov=. 
--cov-report=xml --cov-report=term test
+
+      - name: Upload coverage reports to Codecov
+        uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
diff --git a/.github/workflows/deploy_docs.yaml b/.github/workflows/deploy_docs.yaml
new file mode 100644
index 0000000..6b6e858
--- /dev/null
+++ b/.github/workflows/deploy_docs.yaml
@@ -0,0 +1,38 @@
+name: Build and Deploy Docs to GitHub Pages
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - master
+
+env:
+  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+jobs:
+  build-docs:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0 # otherwise, you will fail to push refs to the dest repo
+
+      - name: Set up Miniconda
+        uses: conda-incubator/setup-miniconda@v3
+        with:
+          auto-update-conda: true
+          miniforge-version: latest
+          environment-file: environment.yml
+
+      - name: Install Dependencies
+        run: pip install .[dev]
+
+      - name: Build Documentation
+        run: make docs
+
+      - name: Deploy to gh-pages
+        uses: JamesIves/github-pages-deploy-action@v4
+        with:
+          folder: site
+          target-folder: docs
+          clean: true
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1f63629..d542a7a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -4,7 +4,7 @@ exclude: '(reduction/data|reduction/notebooks|scripts|scans)/.*'

 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v5.0.0
    hooks:
      - id: check-added-large-files
        args: [--maxkb=4096]
@@ -12,8 +12,12 @@ repos:
      - id: end-of-file-fixer
      - id: trailing-whitespace
  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.278
+    rev: v0.8.3
    hooks:
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
        types_or: [python, pyi, jupyter]
+
+ci:
+  autoupdate_schedule: monthly
+  skip: [eslint]
diff --git a/Makefile b/Makefile
index 795b416..42710e7 100644
--- a/Makefile
+++ b/Makefile
@@ -10,12 +10,12 @@ SHELL=/bin/bash
 .PHONY: help conda docs test install

 help:
-	# this nifty perl one-liner collects all commnents headed by the double "#" symbols next to each target and recycles them as comments
+	# This nifty perl one-liner collects all comments headed by the double "#" symbols next to each target and recycles them as comments
 	@perl -nle'print $& if m{^[a-zA-Z_-]+:.*?## .*$$}' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-25s\033[0m %s\n", $$1, $$2}'

 PREFIX := /SNS/REF_L/shared

-install: ## install the automated reduction code for LR
+install: ## Install the automated reduction code for LR
 	versioningit -w
 	cp -R scripts/livereduce/*.py $(PREFIX)/livereduce
 	cp -R scripts/autoreduce/*.py $(PREFIX)/autoreduce
@@ -32,9 +32,8 @@ conda-env: ## creates conda environment `lr_reduction` and installs package `lr
 	$(CONDA_ACTIVATE) lr_reduction
 	pip install -e .

-docs: ## generates HTML docs under `docs/_build/html/`, treating warnings as errors. Requires activation of the `lr_reduction` conda environment
-	# this will fail on a warning
-	@cd docs&& make html SPHINXOPTS="-W --keep-going -n" && echo -e "##########\n DOCS point your browser to file://$$(pwd)/build/html/index.html\n##########"
+docs: ## Build the documentation
+	mkdocs build

-test-all: ## run all tests
+test-all: ## Run all tests
 	pytest ./test
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index 96fe2a3..0000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,195 +0,0 @@
-# Makefile for Sphinx documentation
-#

-# You can set these variables from the command line.
- -# Options should be aligned with how the documentation is built on RTD -# (except for '--keep-going', failing fast is prefered on local builds) -# We also remove '-E' to reduce the time of rebuilding reference indexes -# on each build. -SPHINXOPTS = -T -j auto -W -SPHINXBUILD = sphinx-build -PAPER = -PROJECT ?= lr_reduction -BUILDDIR = build - -# Do not use local Django settings during the docs build -export DJANGO_SETTINGS_SKIP_LOCAL = True - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -auto: - sphinx-autobuild --port 8888 $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -livehtml: - sphinx-autobuild --port 4444 -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." 
- -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/sdf.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/sdf.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/sdf" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/sdf" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 
- -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs/api/background.md b/docs/api/background.md new file mode 100644 index 0000000..a344220 --- /dev/null +++ b/docs/api/background.md @@ -0,0 +1 @@ +::: lr_reduction.background \ No newline at end of file diff --git a/docs/api/event_reduction.md b/docs/api/event_reduction.md new file mode 100644 index 0000000..ef7f66f --- /dev/null +++ b/docs/api/event_reduction.md @@ -0,0 +1 @@ +::: lr_reduction.event_reduction \ No newline at end of file diff --git a/docs/api/index.md b/docs/api/index.md new file mode 100644 index 0000000..a4895cc --- /dev/null +++ b/docs/api/index.md @@ -0,0 +1,11 @@ +# Overview + +- [lr_reduction.background](./background.md) +- [lr_reduction.event_reduction](./event_reduction.md) +- [lr_reduction.output](./output.md) +- [lr_reduction.peak_finding](./peak_finding.md) +- [lr_reduction.reduction_template_reader](./reduction_template_reader.md) +- [lr_reduction.template](./template.md) +- [lr_reduction.time_resolved](./time_resolved.md) +- [lr_reduction.utils](./utils.md) +- [lr_reduction.workflow](./workflow.md) diff --git a/docs/api/output.md b/docs/api/output.md new file mode 100644 index 0000000..a49611b --- /dev/null +++ b/docs/api/output.md @@ -0,0 +1 @@ +::: lr_reduction.output \ No newline at end of file diff --git a/docs/api/peak_finding.md b/docs/api/peak_finding.md new file mode 100644 index 0000000..1c14c96 --- /dev/null +++ b/docs/api/peak_finding.md @@ -0,0 +1 @@ +::: lr_reduction.peak_finding \ No newline at end of file diff --git a/docs/api/reduction_template_reader.md b/docs/api/reduction_template_reader.md new file mode 100644 index 0000000..22cf862 --- /dev/null +++ b/docs/api/reduction_template_reader.md @@ -0,0 +1 @@ +::: lr_reduction.reduction_template_reader \ No newline at end of file diff --git a/docs/api/template.md b/docs/api/template.md new file mode 100644 index 0000000..9774ae4 --- /dev/null +++ b/docs/api/template.md @@ -0,0 +1 @@ +::: lr_reduction.template \ No newline at end of file diff --git a/docs/api/time_resolved.md b/docs/api/time_resolved.md new file mode 100644 index 0000000..1a1f1bb --- /dev/null +++ b/docs/api/time_resolved.md @@ -0,0 +1 @@ +::: lr_reduction.time_resolved \ No newline at end of file diff --git a/docs/api/utils.md b/docs/api/utils.md new file mode 100644 index 0000000..89529da --- /dev/null +++ b/docs/api/utils.md @@ -0,0 +1 @@ +::: lr_reduction.utils \ No newline at end of file diff --git a/docs/api/workflow.md b/docs/api/workflow.md new file mode 100644 index 0000000..663c631 --- /dev/null +++ b/docs/api/workflow.md @@ -0,0 +1 @@ +::: lr_reduction.workflow \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index f0d238c..0000000 --- a/docs/conf.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Configuration file for the Sphinx documentation builder. 
-For the full list of built-in configuration values, see the documentation: -https://www.sphinx-doc.org/en/master/usage/configuration.html -""" -import os -import sys -import versioningit - -sys.path.insert(0, os.path.abspath("../reduction")) - -# -- Project information ----------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information - -project = "lr_reduction" -copyright = "2025, ORNL" # noqa A001 -author = "ORNL" -version = versioningit.get_version("../") -# The full version, including alpha/beta/rc tags -release = "source/".join(version.split("source/")[:-1]) - -# -- General configuration --------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration - -extensions = [ - "sphinx.ext.duration", - "sphinx.ext.doctest", - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.viewcode", - "sphinx.ext.coverage", - "sphinx.ext.mathjax", - "sphinx.ext.napoleon", - "myst_parser" -] - -autodoc_mock_imports = [ - "mantid", - "mantid.api", - "mantid.api.Workspace", - "mantid.kernel", - "mantid.utils", - "mantid.utils.logging", - "mantid.simpleapi", - "mantid.geometry", - "mantidqt.widgets", - "mantidqt.widgets.algorithmprogress", - "qtpy", - "qtpy.uic", - "qtpy.QtWidgets", - "mantidqt", - "mantid.plots", - "mantid.plots.plotfunctions", - "mantid.plots.datafunctions", - "mantid.plots.utility", - "numpy,ndarray" -] - -master_doc = "index" - -intersphinx_mapping = { - "python": ("https://docs.python.org/3/", None), - "sphinx": ("https://www.sphinx-doc.org/en/master/", None), -} -intersphinx_disabled_domains = ["std"] - -templates_path = ["_templates"] -exclude_patterns = ["_build"] -pygments_style = "sphinx" - - -# -- Options for HTML output ------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output - -html_theme = "sphinx_rtd_theme" # pylint: disable=C0103 - -html_theme_options = {"style_nav_header_background": "#472375"} - -epub_show_urls = "footnote" # pylint: disable=C0103 diff --git a/docs/source/developer/contributing.rst b/docs/developer/contributing.md similarity index 84% rename from docs/source/developer/contributing.rst rename to docs/developer/contributing.md index 847e838..8bcb5f9 100644 --- a/docs/source/developer/contributing.rst +++ b/docs/developer/contributing.md @@ -1,7 +1,4 @@ -.. contributing - -Guide to Contributing -===================== +# Contributing Guide Contributions to this project are welcome. All contributors agree to the following: @@ -13,7 +10,7 @@ Contributions to this project are welcome. All contributors agree to the followi All contributions must be "signed off" in the commit log and by doing so you agree to the above. -Getting access to the main project ----------------------------------- +## Getting access to the main project + Direct commit access to the project is currently restricted to core developers. All other contributions should be done through pull requests. 
diff --git a/docs/developer/developer.md b/docs/developer/developer.md
new file mode 100644
index 0000000..61e76fb
--- /dev/null
+++ b/docs/developer/developer.md
@@ -0,0 +1,137 @@
+# Developer Documentation
+
+- [Local Environment](#local-environment)
+- [pre-commit Hooks](#pre-commit-hooks)
+- [Development procedure](#development-procedure)
+- [Updating mantid dependency](#updating-mantid-dependency)
+- [Using the Data Repository](#using-the-data-repository)
+- [Coverage reports](#coverage-reports)
+- [Building the documentation](#building-the-documentation)
+- [Creating a stable release](#creating-a-stable-release)
+
+## Local Environment
+
+For purposes of development, create conda environment `lr_reduction` with file `environment.yml`, and then
+install the package in development mode with `pip`:
+
+```bash
+$ cd /path/to/lr_reduction/
+$ conda env create --solver libmamba --file ./environment.yml
+$ conda activate lr_reduction
+(lr_reduction)$ pip install -e ./
+```
+
+By installing the package in development mode, one doesn't need to re-install package `lr_reduction` in conda
+environment `lr_reduction` after every change to the source code.
+
+## pre-commit Hooks
+
+Activate the hooks by typing in the terminal:
+
+```bash
+$ cd /path/to/lr_reduction/
+$ conda activate lr_reduction
+(lr_reduction)$ pre-commit install
+```
+
+## Development procedure
+
+1. A developer is assigned a task during the neutron status meeting and changes the task's status to **In Progress**.
+2. The developer creates a branch off _next_ and completes the task in this branch.
+3. The developer creates a pull request (PR) off _next_.
+4. Any new features or bugfixes must be covered by new and/or refactored automated tests.
+5. The developer asks for another developer as a reviewer to review the PR.
+   A PR can only be approved and merged by the reviewer.
+6. The developer changes the task’s status to **Complete** and closes the associated issue.
+
+## Updating mantid dependency
+
+The mantid version and the mantid conda channel (`mantid/label/main` or `mantid/label/nightly`) **must** be
+synchronized across these files:
+
+- environment.yml
+- conda.recipe/meta.yml
+- .github/workflows/package.yml
+
+## Using the Data Repository
+
+To run the integration tests in your local environment, it is necessary first to download the data files.
+Because of their size, the files are stored in the Git LFS repository
+`lr_reduction-data`.
+
+It is necessary to have package `git-lfs` installed on your machine.
+
+```bash
+$ sudo apt install git-lfs
+```
+
+After this step, initialize or update the data repository:
+
+```bash
+$ cd /path/to/lr_reduction
+$ git submodule update --init
+```
+
+This will either clone `liquidsreflectometer-data` into `/path/to/lr_reduction/tests/liquidsreflectometer-data` or
+bring the `liquidsreflectometer-data`'s refspec in sync with the refspec listed within file
+`/path/to/liquidsreflectometer/.gitmodules`.
+
+An intro to Git LFS in the context of the Neutron Data Project is found in the Confluence pages
+(login required).
+
+## Coverage reports
+
+GitHub Actions create reports for unit and integration tests, then combine them into one report and upload it to
+Codecov.
+
+## Building the documentation
+
+A repository webhook is set up to automatically trigger the latest documentation build by GitHub actions.
+To manually build the documentation:
+
+```bash
+$ conda activate lr_reduction
+(lr_reduction)$ make docs
+```
+
+After this, point your browser to
+`file:///path/to/lr_reduction/site/index.html`
+
+## Creating a stable release
+
+- For a _patch_ release, it may be acceptable to bypass the creation of a candidate release.
+  Still, we must update branch `qa` first, then create the release tag in branch `main`.
+  For instance, to create patch version "v2.1.1":
+
+```bash
+VERSION="v2.1.1"
+
+# update the local repository
+git fetch --all --prune
+git fetch --prune --prune-tags origin
+
+# update branch qa from next, possibly bringing work done in qa missing in next
+git switch next
+git rebase -v origin/next
+git merge --no-edit origin/qa # commit message is automatically generated
+git push origin next # required to "link" qa to next, for future fast-forward
+git switch qa
+git rebase -v origin/qa
+git merge --ff-only origin/next
+
+# update branch main from qa
+git merge --no-edit origin/main # commit message is automatically generated
+git push origin qa # required to "link" main to qa, for future fast-forward
+git switch main
+git rebase -v origin/main
+git merge --ff-only origin/qa
+git tag $VERSION
+git push origin --tags main
+```
+
+- For a _minor_ or _major_ release, we create a stable release _after_ we have created a candidate release.
+  For this customary procedure, follow:
+  - The [Software Maturity Model](https://ornl-neutrons.atlassian.net/wiki/spaces/NDPD/pages/23363585/Software+Maturity+Model) for continuous versioning as well as creating release candidates and stable releases.
+
+  - Update the [Release Notes](../releases.md) with major fixes, updates and additions since last stable release.
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..e4e4466
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,27 @@
+# Liquids Reflectometer Reduction
+
+## User Guide
+
+- [Conda Environments](./user/conda_environments.md)
+- [Releases](./releases.md)
+
+
+### Contacting the Team
+
+The best mechanism for a user to request a change or report a bug is to contact the Liquids Reflectometer CIS.
+Please email [Mathieu Doucet](mailto:doucetm@ornl.gov) with your request.
+
+A change needs to be in the form of a:
+
+- Story for any enhancement request
+- Defect for any bug fix request.
+
+## API
+
+- [lr_reduction](./api/index.md)
+
+
+## Developer Guide
+
+- [Contributing Guide](./developer/contributing.md)
+- [Developer Documentation](./developer/developer.md)
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index 2447dcf..0000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,42 +0,0 @@
-.. :lr_reduction:
-
-Welcome to lr_reduction (Liquids Reflectometer Reduction) documentation
-=======================================================================
-
-User Guide
-----------
-
-.. toctree::
-   :titlesonly:
-
-   /source/user/conda_environments
-   /source/releases
-
-Contacting the Team
-+++++++++++++++++++
-The best mechanism for a user to request a change or report a bug is to contact the SANS CIS.
-Please email `Mathieu Doucet`_ with your request.
-
-.. _Mathieu Doucet: doucetm@ornl.gov
-
-A change needs to be in the form of a:
-
-- Story for any enhancement request
-- Defect for any bug fix request.
-
-API
----
-
-.. toctree::
-   :titlesonly:
-
-   /source/api/lr_reduction
-
-Developer Guide
----------------
-
-.. 
toctree::
-   :titlesonly:
-
-   /source/developer/contributing
-   /source/developer/developer
diff --git a/docs/make.bat b/docs/make.bat
deleted file mode 100644
index 9534b01..0000000
--- a/docs/make.bat
+++ /dev/null
@@ -1,35 +0,0 @@
-@ECHO OFF
-
-pushd %~dp0
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
-	set SPHINXBUILD=sphinx-build
-)
-set SOURCEDIR=source
-set BUILDDIR=build
-
-if "%1" == "" goto help
-
-%SPHINXBUILD% >NUL 2>NUL
-if errorlevel 9009 (
-	echo.
-	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
-	echo.installed, then set the SPHINXBUILD environment variable to point
-	echo.to the full path of the 'sphinx-build' executable. Alternatively you
-	echo.may add the Sphinx directory to PATH.
-	echo.
-	echo.If you don't have Sphinx installed, grab it from
-	echo.http://sphinx-doc.org/
-	exit /b 1
-)
-
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-goto end
-
-:help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-
-:end
-popd
diff --git a/docs/source/releases.rst b/docs/releases.md
similarity index 80%
rename from docs/source/releases.rst
rename to docs/releases.md
index 47b3161..6f75297 100644
--- a/docs/source/releases.rst
+++ b/docs/releases.md
@@ -1,11 +1,13 @@
-.. _release_notes:
+# Release Notes

-Release Notes
-=============
-Notes for major or minor releases. Notes for patch releases are deferred
+Notes for major or minor releases. Notes for patch releases are deferred.
+
+Release notes are written in reverse chronological order, with the most recent release at the top,
+using the following format:
+
+```markdown
+## <Next Major or Minor Release> (date of release, format YYYY-MM-DD)
-<Next Major or Minor Release>
------------------------------
-(date of release, format YYYY-MM-DD)
 **Of interest to the User**:
@@ -14,10 +16,10 @@

 **Of interest to the Developer:**

-- PR #40 documentation to create a patch release
+- PR #XYZ one-liner description
+```

-2.1.0
------
+## 2.1.0

 **Of interest to the User**:

@@ -29,6 +31,7 @@

 **Of interest to the Developer:**

+- PR #40 documentation to create a patch release
 - PR #37 documentation conforming to that of the python project template
 - PR #36 versioning with versioningit
 - PR #25 Read in error events when computing correction
diff --git a/docs/source/api/lr_reduction.rst b/docs/source/api/lr_reduction.rst
deleted file mode 100644
index 213ae65..0000000
--- a/docs/source/api/lr_reduction.rst
+++ /dev/null
@@ -1,81 +0,0 @@
-.. api_lr_reduction:
-
-lr_reduction package
-====================
-
-.. contents::
-   :local:
-   :depth: 1
-
-lr_reduction.background
------------------------
-
-.. automodule:: lr_reduction.background
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-lr_reduction.event_reduction
-----------------------------
-
-.. automodule:: lr_reduction.event_reduction
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-lr_reduction.output
--------------------
-
-.. automodule:: lr_reduction.output
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-lr_reduction.peak_finding
--------------------------
-
-.. automodule:: lr_reduction.peak_finding
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-lr_reduction.reduction_template_reader
---------------------------------------
-
-.. automodule:: lr_reduction.reduction_template_reader
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-lr_reduction.template
----------------------
-
-.. 
automodule:: lr_reduction.template - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.time_resolved --------------------------- - -.. automodule:: lr_reduction.time_resolved - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.utils ------------------- - -.. automodule:: lr_reduction.utils - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.workflow ---------------------- - -.. automodule:: lr_reduction.workflow - :members: - :undoc-members: - :show-inheritance: - diff --git a/docs/source/developer/developer.rst b/docs/source/developer/developer.rst deleted file mode 100644 index 39e026b..0000000 --- a/docs/source/developer/developer.rst +++ /dev/null @@ -1,141 +0,0 @@ -.. _developer_documentation: - -Developer Documentation -======================= - -.. contents:: - :local: - :depth: 1 - -Local Environment ------------------ -For purposes of development, create conda environment `lr_reduction` with file `environment.yml`, and then -install the package in development mode with `pip`: - -.. code-block:: bash - - $> cd /path/to/lr_reduction/ - $> conda create env --solver libmamba --file ./environment.yml - $> conda activate lr_reduction - (lr_reduction)$> pip install -e ./ - -By installing the package in development mode, one doesn't need to re-install package `lr_reduction` in conda -environment `lr_reduction` after every change to the source code. - -pre-commit Hooks ----------------- - -Activate the hooks by typing in the terminal: - -.. code-block:: bash - - $> cd cd /path/to/mr_reduction/ - $> conda activate mr_reduction - (mr_reduction)$> pre-commit install - -Development procedure ---------------------- - -1. A developer is assigned with a task during neutron status meeting and changes the task's status to **In Progress**. -2. The developer creates a branch off *next* and completes the task in this branch. -3. The developer creates a pull request (PR) off *next*. -4. Any new features or bugfixes must be covered by new and/or refactored automated tests. -5. The developer asks for another developer as a reviewer to review the PR. - A PR can only be approved and merged by the reviewer. -6. The developer changes the task’s status to **Complete** and closes the associated issue. - -Updating mantid dependency --------------------------- -The mantid version and the mantid conda channel (`mantid/label/main` or `mantid/label/nightly`) **must** be -synchronized across these files: - -- environment.yml -- conda.recipe/meta.yml -- .github/workflows/package.yml - -Using the Data Repository liquidsreflectometer-data ---------------------------------------------------- -To run the integration tests in your local environment, it is necessary first to download the data files. -Because of their size, the files are stored in the Git LFS repository -`lr_reduction-data `_. - -It is necessary to have package `git-lfs` installed in your machine. - -.. code-block:: bash - - $> sudo apt install git-lfs - -After this step, initialize or update the data repository: - -.. code-block:: bash - - $> cd /path/to/lr_reduction - $> git submodule update --init - -This will either clone `liquidsreflectometer-data` into `/path/to/lr_reduction/tests/liquidsreflectometer-data` or -bring the `liquidsreflectometer-data`'s refspec in sync with the refspec listed within file -`/path/to/liquidsreflectometer/.gitmodules`. - -An intro to Git LFS in the context of the Neutron Data Project is found in the -`Confluence pages `_ -(login required). 
-
-
-Coverage reports
-----------------
-
-GitHuh actions create reports for unit and integration tests, then combine into one report and upload it to
-`Codecov `_.
-
-
-Building the documentation
---------------------------
-A repository webhook is setup to automatically trigger the latest documentation build by GitHub actions.
-To manually build the documentation:
-
-.. code-block:: bash
-
-   $> conda activate lr_reduction
-   (lr_reduction)$> make docs
-
-After this, point your browser to
-`file:///path/to/lr_reduction/docs/build/html/index.html`
-
-
-Creating a stable release
--------------------------
-
-- *patch* release, it may be allowed to bypass the creation of a candidate release.
-  Still, we must update branch `qa` first, then create the release tag in branch `main`.
-  For instance, to create patch version "v2.1.1":
-
-.. code-block:: bash
-
-   VERSION="v2.1.2"
-   # update the local repository
-   git fetch --all --prune
-   git fetch --prune --prune-tags origin
-   # update branch qa from next, possibly bringing work done in qa missing in next
-   git switch next
-   git rebase -v origin/next
-   git merge --no-edit origin/qa # commit message is automatically generated
-   git push origin next # required to "link" qa to next, for future fast-forward
-   git switch qa
-   git rebase -v origin/qa
-   git merge --ff-only origin/next
-   # update branch main from qa
-   git merge --no-edit origin/main # commit message is automatically generated
-   git push origin qa # required to "link" main to qa, for future fast-forward
-   git switch main
-   git rebase -v origin/main
-   git merge --ff-only origin/qa
-   git tag $VERSION
-   git push origin --tags main
-
-- *minor* or *major* release, we create a stable release *after* we have created a Candidate release.
-  For this customary procedure, follow:
-
-  + the `Software Maturity Model `_ for continous versioning as well as creating release candidates and stable releases.
-  + Update the :ref:`Release Notes ` with major fixes, updates and additions since last stable release.
-
-
diff --git a/docs/source/user/conda_environments.rst b/docs/user/conda_environments.md
similarity index 73%
rename from docs/source/user/conda_environments.rst
rename to docs/user/conda_environments.md
index 7f2ec71..ee418c3 100644
--- a/docs/source/user/conda_environments.rst
+++ b/docs/user/conda_environments.md
@@ -1,29 +1,26 @@
-.. conda_environments
-
-Conda Environments
-==================
+# Conda Environments

 Three conda environments are available in the analysis nodes, beamline machines, as well as the
 jupyter notebook servers. On a terminal:

-.. code-block:: bash
-
-   $> conda activate
+```bash
+$ conda activate
+```

 where `` is one of `lr_reduction`, `lr_reduction-qa`, and `lr_reduction-dev`

-lr_reduction Environment
-------------------------
+## lr_reduction Environment
+
 Activates the latest stable release of `lr_reduction`. Typically users will reduce their data in this environment.

-lr_reduction-qa Environment
----------------------------
+## lr_reduction-qa Environment
+
 Activates a release-candidate environment.
 Instrument scientists and computational instrument scientists will carry out testing on this environment
 to prevent bugs being introduced in the next stable release.

-lr_reduction-dev Environment
-----------------------------
+## lr_reduction-dev Environment
+
 Activates the environment corresponding to the latest changes in the source code.
 Instrument scientists and computational instrument scientists will test the latest changes to `lr_reduction` in this
 environment.
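The three environments above differ only in which build of `lr_reduction` they provide, so a quick sanity check after activation is to query the installed package version. The following is a minimal sketch, not part of the patch series itself; it relies on the `versioningit`-generated `_version.py` described in `pyproject.toml` and re-exported by `lr_reduction/__init__.py`:

```bash
# Activate the stable environment and report which lr_reduction build it carries
conda activate lr_reduction
python -c "import lr_reduction; print(lr_reduction.__version__)"
# Prints "unknown" if no _version.py was generated, e.g. when running from raw sources
```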
diff --git a/environment.yml b/environment.yml index e3cb32c..05e0de2 100644 --- a/environment.yml +++ b/environment.yml @@ -7,16 +7,20 @@ dependencies: - lmfit - mantid=6.10 - matplotlib -# development dependencies + # development dependencies - boa - codecov - conda-build - conda-verify + - mkdocs + - mkdocstrings + - mkdocstrings-python - pre-commit - pytest - pytest-cov - python-build - - sphinx - - sphinx-rtd-theme + - ruff + # - sphinx + # - sphinx-rtd-theme - versioningit - myst-parser diff --git a/mkdocs.yaml b/mkdocs.yaml new file mode 100644 index 0000000..f87f8d4 --- /dev/null +++ b/mkdocs.yaml @@ -0,0 +1,42 @@ +site_name: "Liquids Reflectometer" +repo_name: "LiquidsReflectometer" +repo_url: "https://github.com/neutrons/LiquidsReflectometer" + +theme: + name: "readthedocs" + docs_dir: docs/ + # logo: "" + +plugins: + - search + - mkdocstrings: + handlers: + python: + paths: [reduction/lr_reduction] + import: + - https://docs.python.org/3/objects.inv + - https://mkdocstrings.github.io/objects.inv + - https://mkdocstrings.github.io/griffe/objects.inv + options: + # See: https://mkdocstrings.github.io/usage/handlers/?h=docstring_style#selection-options + docstring_style: google + docstring_options: + docstring_section_style: table + ignore_init_summary: yes + merge_init_into_class: yes + # separate_signature: yes + show_signature: no + show_source: no + show_root_full_path: no + show_root_toc_entry: False + filters: + - "!^_[^_]" + # - '^_[^_]' + +extra: + social: + - icon: "fontawesome/solid/house" + link: "https://neutrons.ornl.gov" + - icon: "fontawesome/brands/github-alt" + link: "https://github.com/neutrons/LiquidsReflectometer" +# copyright: "Copyright © 2020 - 2023 [Glass](glass-ships.com)" diff --git a/pyproject.toml b/pyproject.toml index c7587ac..15462c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,38 +1,27 @@ [project] name = "lr_reduction" -authors = [{name = "Mat", email = "m2d@ornl.gov"}] +authors = [{ name = "Mat", email = "m2d@ornl.gov" }] description = "Reduction scripts for the Liquids Reflectometer. This includes both automated reduction scripts and useful scripts to reprocess data." 
-version = "0.0.1" # initial version, dynamically overriden
-license = {file = "LICENSE"}
+version = "0.0.1"  # initial version, dynamically overridden
+license = { file = "LICENSE" }
 readme = "README.md"
 requires-python = ">=3.8"
 classifiers = [
     "Operating System :: OS Independent",
     "Programming Language :: Python :: 3",
-    "Topic :: Scientific/Engineering :: Neutron reflectometry"
-]
-dependencies = [
-    "lmfit",
-    "numpy",
-    "matplotlib",
-    "mantid"
+    "Topic :: Scientific/Engineering :: Neutron reflectometry",
 ]
+dependencies = ["lmfit", "mantid", "matplotlib", "numpy"]
+
+[project.optional-dependencies]
+dev = ["mkdocs", "mkdocstrings[python]", "ruff"]
+tests = ["pytest"]

 # versioningit will generate file _version.py when installing this package, even if installing from sources
 [build-system]
-requires = [
-    "setuptools >= 40.6.0",
-    "wheel",
-    "toml",
-    "versioningit"
-]
+requires = ["setuptools >= 40.6.0", "wheel", "toml", "versioningit"]
 build-backend = "setuptools.build_meta"

-[project.optional-dependencies]
-tests = [
-    "pytest"
-]
-
 [project.urls]
 Homepage = "https://github.com/neutrons/LiquidsReflectometer"

@@ -47,11 +36,13 @@ exclude = ["tests*", "scripts*", "docs*", "notebooks*"]
 line-length = 140
 select = ["A", "ARG", "E", "W", "F", "I", "PT", "RUF100"]
 ignore = [
-    "E402", # module level import not at top of file
-    "E722", # bare except
-    "E741", "E743", # ambiguous function name, generally 'l'
-    "F403", # wild imports
-    "F405", "F821" # unknown names from probably from wild imports
+    "E402", # module level import not at top of file
+    "E722", # bare except
+    "E741",
+    "E743", # ambiguous function name, generally 'l'
+    "F403", # wild imports
+    "F405",
+    "F821", # unknown names, probably from wild imports
 ]

 [tool.versioningit.vcs]
@@ -77,10 +68,20 @@ skip_empty = true

 [tool.pytest.ini_options]
 pythonpath = [
-    ".", "reduction", "scripts", "tests/data/liquidsreflectometer-data/nexus/"
+    ".",
+    "reduction",
+    "scripts",
+    "tests/data/liquidsreflectometer-data/nexus/",
 ]
 testpaths = ["tests"]
-norecursedirs = [".git", "tmp*", "_tmp*", "__pycache__", "*dataset*", "*data_set*"]
+norecursedirs = [
+    ".git",
+    "tmp*",
+    "_tmp*",
+    "__pycache__",
+    "*dataset*",
+    "*data_set*",
+]
 markers = [
     "datarepo: mark a test as using LiquidsReflectometer-data repository",
     "scripts: mark a test as a script that should be run manually",
@@ -88,8 +89,22 @@ markers = [

 [tool.flake8]
 ignore = [
-    "E114", "E115", "E116", "E121", "E123", "E126", "E133",
-    "E2", "E704", "E722", "E741", "E743", "W503", "F403", "F405", "F999"
+    "E114",
+    "E115",
+    "E116",
+    "E121",
+    "E123",
+    "E126",
+    "E133",
+    "E2",
+    "E704",
+    "E722",
+    "E741",
+    "E743",
+    "W503",
+    "F403",
+    "F405",
+    "F999",
 ]
 doctests = false
 max-line-length = 130

From c662d601465ed3888b6b5027b2a4f21bf274d22d Mon Sep 17 00:00:00 2001
From: glass-ships
Date: Mon, 6 Jan 2025 13:44:25 -0500
Subject: [PATCH 10/16] ruff non source code files

---
 .github/dependabot.yml | 2 +-
 .github/pull_request_template.md | 1 -
 docs-sphinx/Makefile | 195 ++++++++++++++++++
 docs-sphinx/conf.py | 74 +++++++
 docs-sphinx/index.rst | 42 ++++
 docs-sphinx/make.bat | 35 ++++
 docs-sphinx/source/api/lr_reduction.rst | 81 ++++++++
 docs-sphinx/source/developer/contributing.rst | 19 ++
 docs-sphinx/source/developer/developer.rst | 141 +++++++++++++
 docs-sphinx/source/releases.rst | 38 ++++
 .../source/user/conda_environments.rst | 29 +++
 docs/api/background.md | 2 +-
 docs/api/event_reduction.md | 2 +-
 docs/api/output.md | 2 +-
 docs/api/peak_finding.md | 2 +-
docs/api/reduction_template_reader.md | 2 +- docs/api/template.md | 2 +- docs/api/time_resolved.md | 2 +- docs/api/utils.md | 2 +- docs/api/workflow.md | 2 +- docs/developer/developer.md | 3 +- docs/index.md | 5 +- docs/releases.md | 2 +- reduction/lr_reduction/DeadTimeCorrection.py | 8 +- reduction/lr_reduction/__init__.py | 2 +- .../scaling_factors/LRDirectBeamSort.py | 13 +- .../scaling_factors/LRScalingFactors.py | 3 +- reduction/test/test_dead_time.py | 41 ++-- reduction/test/test_scaling_factors.py | 11 +- 29 files changed, 706 insertions(+), 57 deletions(-) create mode 100644 docs-sphinx/Makefile create mode 100644 docs-sphinx/conf.py create mode 100644 docs-sphinx/index.rst create mode 100644 docs-sphinx/make.bat create mode 100644 docs-sphinx/source/api/lr_reduction.rst create mode 100644 docs-sphinx/source/developer/contributing.rst create mode 100644 docs-sphinx/source/developer/developer.rst create mode 100644 docs-sphinx/source/releases.rst create mode 100644 docs-sphinx/source/user/conda_environments.rst diff --git a/.github/dependabot.yml b/.github/dependabot.yml index f9c645b..674f488 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -7,4 +7,4 @@ updates: directory: "/" schedule: # Check for updates to GitHub Actions every week - interval: "monthly" \ No newline at end of file + interval: "monthly" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 526a52a..622e81d 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -23,4 +23,3 @@ Check all that apply: - [ ] All the tests are passing - [ ] The documentation is up to date - [ ] code comments added when explaining intent - diff --git a/docs-sphinx/Makefile b/docs-sphinx/Makefile new file mode 100644 index 0000000..96fe2a3 --- /dev/null +++ b/docs-sphinx/Makefile @@ -0,0 +1,195 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. + +# Options should be aligned with how the documentation is built on RTD +# (except for '--keep-going', failing fast is prefered on local builds) +# We also remove '-E' to reduce the time of rebuilding reference indexes +# on each build. +SPHINXOPTS = -T -j auto -W +SPHINXBUILD = sphinx-build +PAPER = +PROJECT ?= lr_reduction +BUILDDIR = build + +# Do not use local Django settings during the docs build +export DJANGO_SETTINGS_SKIP_LOCAL = True + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +auto: + sphinx-autobuild --port 8888 $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +livehtml: + sphinx-autobuild --port 4444 -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/sdf.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/sdf.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/sdf" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/sdf" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
+ +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs-sphinx/conf.py b/docs-sphinx/conf.py new file mode 100644 index 0000000..6dc63c6 --- /dev/null +++ b/docs-sphinx/conf.py @@ -0,0 +1,74 @@ +"""Configuration file for the Sphinx documentation builder. 
+For the full list of built-in configuration values, see the documentation: +https://www.sphinx-doc.org/en/master/usage/configuration.html +""" +import os +import sys +import versioningit + +sys.path.insert(0, os.path.abspath("../reduction")) + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "lr_reduction" +copyright = "2024, ORNL" # noqa A001 +author = "ORNL" +version = versioningit.get_version("../") +# The full version, including alpha/beta/rc tags +release = "source/".join(version.split("source/")[:-1]) + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "sphinx.ext.duration", + "sphinx.ext.doctest", + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.viewcode", + "sphinx.ext.coverage", +] + +autodoc_mock_imports = [ + "mantid", + "mantid.api", + "mantid.kernel", + "mantid.utils", + "mantid.utils.logging", + "mantid.simpleapi", + "mantid.geometry", + "mantidqt.widgets", + "mantidqt.widgets.algorithmprogress", + "qtpy", + "qtpy.uic", + "qtpy.QtWidgets", + "mantidqt", + "mantid.plots", + "mantid.plots.plotfunctions", + "mantid.plots.datafunctions", + "mantid.plots.utility", +] + +master_doc = "index" + +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), + "sphinx": ("https://www.sphinx-doc.org/en/master/", None), +} +intersphinx_disabled_domains = ["std"] + +templates_path = ["_templates"] +exclude_patterns = ["_build"] +pygments_style = "sphinx" + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "sphinx_rtd_theme" # pylint: disable=C0103 + +html_theme_options = {"style_nav_header_background": "#472375"} + +epub_show_urls = "footnote" # pylint: disable=C0103 diff --git a/docs-sphinx/index.rst b/docs-sphinx/index.rst new file mode 100644 index 0000000..2447dcf --- /dev/null +++ b/docs-sphinx/index.rst @@ -0,0 +1,42 @@ +.. :lr_reduction: + +Welcome to lr_reduction (Liquids Reflectometer Reduction) documentation +======================================================================= + +User Guide +---------- + +.. toctree:: + :titlesonly: + + /source/user/conda_environments + /source/releases + +Contacting the Team ++++++++++++++++++++ +The best mechanism for a user to request a change or report a bug is to contact the SANS CIS. +Please email `Mathieu Doucet`_ with your request. + +.. _Mathieu Doucet: doucetm@ornl.gov + +A change needs to be in the form of a: + +- Story for any enhancement request +- Defect for any bug fix request. + +API +--- + +.. toctree:: + :titlesonly: + + /source/api/lr_reduction + +Developer Guide +--------------- + +.. toctree:: + :titlesonly: + + /source/developer/contributing + /source/developer/developer diff --git a/docs-sphinx/make.bat b/docs-sphinx/make.bat new file mode 100644 index 0000000..9534b01 --- /dev/null +++ b/docs-sphinx/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs-sphinx/source/api/lr_reduction.rst b/docs-sphinx/source/api/lr_reduction.rst new file mode 100644 index 0000000..213ae65 --- /dev/null +++ b/docs-sphinx/source/api/lr_reduction.rst @@ -0,0 +1,81 @@ +.. api_lr_reduction: + +lr_reduction package +==================== + +.. contents:: + :local: + :depth: 1 + +lr_reduction.background +----------------------- + +.. automodule:: lr_reduction.background + :members: + :undoc-members: + :show-inheritance: + +lr_reduction.event_reduction +---------------------------- + +.. automodule:: lr_reduction.event_reduction + :members: + :undoc-members: + :show-inheritance: + +lr_reduction.output +------------------- + +.. automodule:: lr_reduction.output + :members: + :undoc-members: + :show-inheritance: + +lr_reduction.peak_finding +------------------------- + +.. automodule:: lr_reduction.peak_finding + :members: + :undoc-members: + :show-inheritance: + +lr_reduction.reduction_template_reader +-------------------------------------- + +.. automodule:: lr_reduction.reduction_template_reader + :members: + :undoc-members: + :show-inheritance: + +lr_reduction.template +--------------------- + +.. automodule:: lr_reduction.template + :members: + :undoc-members: + :show-inheritance: + +lr_reduction.time_resolved +-------------------------- + +.. automodule:: lr_reduction.time_resolved + :members: + :undoc-members: + :show-inheritance: + +lr_reduction.utils +------------------ + +.. automodule:: lr_reduction.utils + :members: + :undoc-members: + :show-inheritance: + +lr_reduction.workflow +--------------------- + +.. automodule:: lr_reduction.workflow + :members: + :undoc-members: + :show-inheritance: + diff --git a/docs-sphinx/source/developer/contributing.rst b/docs-sphinx/source/developer/contributing.rst new file mode 100644 index 0000000..847e838 --- /dev/null +++ b/docs-sphinx/source/developer/contributing.rst @@ -0,0 +1,19 @@ +.. contributing + +Guide to Contributing +===================== + +Contributions to this project are welcome. All contributors agree to the following: + +- It is assumed that the contributor is an ORNL employee and belongs to the development team. + Thus the following instructions are specific to ORNL development team's process. +- You have permission and any required rights to submit your contribution. +- Your contribution is provided under the license of this project and may be redistributed as such. +- All contributions to this project are public. + +All contributions must be "signed off" in the commit log and by doing so you agree to the above. + +Getting access to the main project +---------------------------------- +Direct commit access to the project is currently restricted to core developers. +All other contributions should be done through pull requests. diff --git a/docs-sphinx/source/developer/developer.rst b/docs-sphinx/source/developer/developer.rst new file mode 100644 index 0000000..39e026b --- /dev/null +++ b/docs-sphinx/source/developer/developer.rst @@ -0,0 +1,141 @@ +.. 
_developer_documentation: + +Developer Documentation +======================= + +.. contents:: + :local: + :depth: 1 + +Local Environment +----------------- +For purposes of development, create conda environment `lr_reduction` with file `environment.yml`, and then +install the package in development mode with `pip`: + +.. code-block:: bash + + $> cd /path/to/lr_reduction/ + $> conda create env --solver libmamba --file ./environment.yml + $> conda activate lr_reduction + (lr_reduction)$> pip install -e ./ + +By installing the package in development mode, one doesn't need to re-install package `lr_reduction` in conda +environment `lr_reduction` after every change to the source code. + +pre-commit Hooks +---------------- + +Activate the hooks by typing in the terminal: + +.. code-block:: bash + + $> cd cd /path/to/mr_reduction/ + $> conda activate mr_reduction + (mr_reduction)$> pre-commit install + +Development procedure +--------------------- + +1. A developer is assigned with a task during neutron status meeting and changes the task's status to **In Progress**. +2. The developer creates a branch off *next* and completes the task in this branch. +3. The developer creates a pull request (PR) off *next*. +4. Any new features or bugfixes must be covered by new and/or refactored automated tests. +5. The developer asks for another developer as a reviewer to review the PR. + A PR can only be approved and merged by the reviewer. +6. The developer changes the task’s status to **Complete** and closes the associated issue. + +Updating mantid dependency +-------------------------- +The mantid version and the mantid conda channel (`mantid/label/main` or `mantid/label/nightly`) **must** be +synchronized across these files: + +- environment.yml +- conda.recipe/meta.yml +- .github/workflows/package.yml + +Using the Data Repository liquidsreflectometer-data +--------------------------------------------------- +To run the integration tests in your local environment, it is necessary first to download the data files. +Because of their size, the files are stored in the Git LFS repository +`lr_reduction-data `_. + +It is necessary to have package `git-lfs` installed in your machine. + +.. code-block:: bash + + $> sudo apt install git-lfs + +After this step, initialize or update the data repository: + +.. code-block:: bash + + $> cd /path/to/lr_reduction + $> git submodule update --init + +This will either clone `liquidsreflectometer-data` into `/path/to/lr_reduction/tests/liquidsreflectometer-data` or +bring the `liquidsreflectometer-data`'s refspec in sync with the refspec listed within file +`/path/to/liquidsreflectometer/.gitmodules`. + +An intro to Git LFS in the context of the Neutron Data Project is found in the +`Confluence pages `_ +(login required). + + +Coverage reports +---------------- + +GitHuh actions create reports for unit and integration tests, then combine into one report and upload it to +`Codecov `_. + + +Building the documentation +-------------------------- +A repository webhook is setup to automatically trigger the latest documentation build by GitHub actions. +To manually build the documentation: + +.. code-block:: bash + + $> conda activate lr_reduction + (lr_reduction)$> make docs + +After this, point your browser to +`file:///path/to/lr_reduction/docs/build/html/index.html` + + +Creating a stable release +------------------------- + +- *patch* release, it may be allowed to bypass the creation of a candidate release. 
+ Still, we must update branch `qa` first, then create the release tag in branch `main`. + For instance, to create patch version "v2.1.1": + +.. code-block:: bash + + VERSION="v2.1.2" + # update the local repository + git fetch --all --prune + git fetch --prune --prune-tags origin + # update branch qa from next, possibly bringing work done in qa missing in next + git switch next + git rebase -v origin/next + git merge --no-edit origin/qa # commit message is automatically generated + git push origin next # required to "link" qa to next, for future fast-forward + git switch qa + git rebase -v origin/qa + git merge --ff-only origin/next + # update branch main from qa + git merge --no-edit origin/main # commit message is automatically generated + git push origin qa # required to "link" main to qa, for future fast-forward + git switch main + git rebase -v origin/main + git merge --ff-only origin/qa + git tag $VERSION + git push origin --tags main + +- *minor* or *major* release, we create a stable release *after* we have created a Candidate release. + For this customary procedure, follow: + + + the `Software Maturity Model `_ for continous versioning as well as creating release candidates and stable releases. + + Update the :ref:`Release Notes ` with major fixes, updates and additions since last stable release. + + diff --git a/docs-sphinx/source/releases.rst b/docs-sphinx/source/releases.rst new file mode 100644 index 0000000..47b3161 --- /dev/null +++ b/docs-sphinx/source/releases.rst @@ -0,0 +1,38 @@ +.. _release_notes: + +Release Notes +============= +Notes for major or minor releases. Notes for patch releases are deferred + + +----------------------------- +(date of release, format YYYY-MM-DD) + +**Of interest to the User**: + +- PR #XYZ one-liner description + +**Of interest to the Developer:** + +- PR #40 documentation to create a patch release + +2.1.0 +----- + +**Of interest to the User**: + +- PR #33 enable dead time correction for runs with skipped pulses +- PR #26 add dead time correction to the computation of scaling factors +- PR #23 add dead time correction +- PR #19 Functionality to use two backgrounds +- PR #15 Ability to fit a background with a polynomial function + +**Of interest to the Developer:** + +- PR #37 documentation conforming to that of the python project template +- PR #36 versioning with versioningit +- PR #25 Read in error events when computing correction +- PR #21 switch dependency from mantidworkbench to mantid +- PR #20 allow runtime initialization of new attributes for ReductionParameters +- PR #14 add first GitHub actions +- PR #12 switch from mantid to mantidworkbench conda package diff --git a/docs-sphinx/source/user/conda_environments.rst b/docs-sphinx/source/user/conda_environments.rst new file mode 100644 index 0000000..7f2ec71 --- /dev/null +++ b/docs-sphinx/source/user/conda_environments.rst @@ -0,0 +1,29 @@ +.. conda_environments + +Conda Environments +================== + +Three conda environments are available in the analysis nodes, beamline machines, as well as the +jupyter notebook severs. On a terminal: + +.. code-block:: bash + + $> conda activate + +where `` is one of `lr_reduction`, `lr_reduction-qa`, and `lr_reduction-dev` + +lr_reduction Environment +------------------------ +Activates the latest stable release of `lr_reduction`. Typically users will reduce their data in this environment. + +lr_reduction-qa Environment +--------------------------- +Activates a release-candidate environment. 
+
+Instrument scientists and computational instrument scientists will carry out testing on this environment
+to prevent bugs being introduce in the next stable release.
+
+lr_reduction-dev Environment
+----------------------------
+Activates the environment corresponding to the latest changes in the source code.
+Instrument scientists and computational instrument scientists will test the latest changes to `lr_reduction` in this
+environment.
diff --git a/docs/api/background.md b/docs/api/background.md
index a344220..5e54a78 100644
--- a/docs/api/background.md
+++ b/docs/api/background.md
@@ -1 +1 @@
-::: lr_reduction.background
\ No newline at end of file
+::: lr_reduction.background
diff --git a/docs/api/event_reduction.md b/docs/api/event_reduction.md
index ef7f66f..9cdbec8 100644
--- a/docs/api/event_reduction.md
+++ b/docs/api/event_reduction.md
@@ -1 +1 @@
-::: lr_reduction.event_reduction
\ No newline at end of file
+::: lr_reduction.event_reduction
diff --git a/docs/api/output.md b/docs/api/output.md
index a49611b..732dc54 100644
--- a/docs/api/output.md
+++ b/docs/api/output.md
@@ -1 +1 @@
-::: lr_reduction.output
\ No newline at end of file
+::: lr_reduction.output
diff --git a/docs/api/peak_finding.md b/docs/api/peak_finding.md
index 1c14c96..01b440f 100644
--- a/docs/api/peak_finding.md
+++ b/docs/api/peak_finding.md
@@ -1 +1 @@
-::: lr_reduction.peak_finding
\ No newline at end of file
+::: lr_reduction.peak_finding
diff --git a/docs/api/reduction_template_reader.md b/docs/api/reduction_template_reader.md
index 22cf862..607b495 100644
--- a/docs/api/reduction_template_reader.md
+++ b/docs/api/reduction_template_reader.md
@@ -1 +1 @@
-::: lr_reduction.reduction_template_reader
\ No newline at end of file
+::: lr_reduction.reduction_template_reader
diff --git a/docs/api/template.md b/docs/api/template.md
index 9774ae4..46c5766 100644
--- a/docs/api/template.md
+++ b/docs/api/template.md
@@ -1 +1 @@
-::: lr_reduction.template
\ No newline at end of file
+::: lr_reduction.template
diff --git a/docs/api/time_resolved.md b/docs/api/time_resolved.md
index 1a1f1bb..1e82b73 100644
--- a/docs/api/time_resolved.md
+++ b/docs/api/time_resolved.md
@@ -1 +1 @@
-::: lr_reduction.time_resolved
\ No newline at end of file
+::: lr_reduction.time_resolved
diff --git a/docs/api/utils.md b/docs/api/utils.md
index 89529da..972f7b9 100644
--- a/docs/api/utils.md
+++ b/docs/api/utils.md
@@ -1 +1 @@
-::: lr_reduction.utils
\ No newline at end of file
+::: lr_reduction.utils
diff --git a/docs/api/workflow.md b/docs/api/workflow.md
index 663c631..8269495 100644
--- a/docs/api/workflow.md
+++ b/docs/api/workflow.md
@@ -1 +1 @@
-::: lr_reduction.workflow
\ No newline at end of file
+::: lr_reduction.workflow
diff --git a/docs/developer/developer.md b/docs/developer/developer.md
index 61e76fb..ac96d83 100644
--- a/docs/developer/developer.md
+++ b/docs/developer/developer.md
@@ -131,7 +131,8 @@ git push origin --tags main
 ```

 - _minor_ or _major_ release, we create a stable release _after_ we have created a Candidate release.
-  For this customary procedure, follow:
+  For this customary procedure, follow:
+
   - The [Software Maturity Model](https://ornl-neutrons.atlassian.net/wiki/spaces/NDPD/pages/23363585/Software+Maturity+Model) for continuous versioning as well as creating release candidates and stable releases.
   - Update the :ref:`Release Notes ` with major fixes, updates and additions since last stable release.
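With the Sphinx sources replaced by the `mkdocs.yaml` configuration introduced earlier in this series, building the documentation reduces to the standard MkDocs commands. The lines below are a sketch of typical usage rather than anything prescribed by these patches, and they assume `mkdocs` plus the `mkdocstrings` plugins from `environment.yml` are installed in the active environment:

```bash
# Live-reloading preview of the site while editing the Markdown sources under docs/
conda activate lr_reduction
mkdocs serve
# One-shot build of the static site into ./site/
mkdocs build
```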
diff --git a/docs/index.md b/docs/index.md index e4e4466..d0f98aa 100644 --- a/docs/index.md +++ b/docs/index.md @@ -5,10 +5,10 @@ - [Conda Environments](./user/conda_environments.md) - [Releases](./releases.md) - ### Contacting the Team -The best mechanism for a user to request a change or report a bug is to contact the SANS CIS. +The best mechanism for a user to request a change or report a bug is to contact the SANS CIS. + Please email [Mathieu Doucet](mailto:doucetm@ornl.gov) with your request. A change needs to be in the form of a: @@ -20,7 +20,6 @@ A change needs to be in the form of a: - [lr_reduction](source/api/lr_reduction.md) - ## Developer Guide - [Contributing Guide](source/developer/contributing.md) diff --git a/docs/releases.md b/docs/releases.md index 6f75297..34873a3 100644 --- a/docs/releases.md +++ b/docs/releases.md @@ -2,7 +2,7 @@ Notes for major or minor releases. Notes for patch releases are deferred. -Release notes are written in reverse chronological order, with the most recent release at the top, +Release notes are written in reverse chronological order, with the most recent release at the top, using the following format: ```markdown diff --git a/reduction/lr_reduction/DeadTimeCorrection.py b/reduction/lr_reduction/DeadTimeCorrection.py index cdc19f2..f36ccab 100644 --- a/reduction/lr_reduction/DeadTimeCorrection.py +++ b/reduction/lr_reduction/DeadTimeCorrection.py @@ -2,12 +2,12 @@ Dead time correction algorithm for single-readout detectors. """ +import numpy as np +import scipy from mantid.api import * from mantid.api import AlgorithmFactory, PythonAlgorithm -from mantid.simpleapi import * from mantid.kernel import * -import numpy as np -import scipy +from mantid.simpleapi import * class SingleReadoutDeadTimeCorrection(PythonAlgorithm): @@ -94,4 +94,4 @@ def PyExec(self): self.setProperty('OutputWorkspace', counts_ws) -AlgorithmFactory.subscribe(SingleReadoutDeadTimeCorrection) \ No newline at end of file +AlgorithmFactory.subscribe(SingleReadoutDeadTimeCorrection) diff --git a/reduction/lr_reduction/__init__.py b/reduction/lr_reduction/__init__.py index f3716b6..cc5c453 100644 --- a/reduction/lr_reduction/__init__.py +++ b/reduction/lr_reduction/__init__.py @@ -1,4 +1,4 @@ try: - from ._version import __version__ # noqa: F401 + from ._version import __version__ except ImportError: __version__ = "unknown" diff --git a/reduction/lr_reduction/scaling_factors/LRDirectBeamSort.py b/reduction/lr_reduction/scaling_factors/LRDirectBeamSort.py index 80fd7be..e5742a5 100644 --- a/reduction/lr_reduction/scaling_factors/LRDirectBeamSort.py +++ b/reduction/lr_reduction/scaling_factors/LRDirectBeamSort.py @@ -5,14 +5,15 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * -import functools -import numpy as np -from typing import List, Tuple import datetime +import functools from math import ceil +from typing import List, Tuple + +import numpy as np +from mantid.api import * +from mantid.kernel import * +from mantid.simpleapi import * THI_TOLERANCE = 0.002 diff --git a/reduction/lr_reduction/scaling_factors/LRScalingFactors.py b/reduction/lr_reduction/scaling_factors/LRScalingFactors.py index 1a0d58d..2446a7c 100644 --- a/reduction/lr_reduction/scaling_factors/LRScalingFactors.py +++ b/reduction/lr_reduction/scaling_factors/LRScalingFactors.py @@ -8,9 +8,10 @@ import os import re import 
time
+
 from mantid.api import *
-from mantid.simpleapi import *
 from mantid.kernel import *
+from mantid.simpleapi import *

 import lr_reduction
 from lr_reduction import DeadTimeCorrection
diff --git a/reduction/test/test_dead_time.py b/reduction/test/test_dead_time.py
index 60ca998..f44862a 100644
--- a/reduction/test/test_dead_time.py
+++ b/reduction/test/test_dead_time.py
@@ -1,18 +1,16 @@
+import mantid.simpleapi as mtd_api
+from lr_reduction import template
 from lr_reduction.DeadTimeCorrection import SingleReadoutDeadTimeCorrection
+from lr_reduction.utils import amend_config

-import mantid
-import mantid.simpleapi as mtd_api
 mtd_api.config["default.facility"] = "SNS"
 mtd_api.config["default.instrument"] = "REF_L"

-from lr_reduction import event_reduction, template, workflow
-from lr_reduction.utils import amend_config
-

 def test_deadtime(nexus_dir):
     """
-    Test the time-resolved reduction that uses a measured reference.
-    It is generally used at 30 Hz but it also works at 60 Hz.
+    Test the dead-time correction on a measured run.
+    It is generally used at 30 Hz but it also works at 60 Hz.
     """
     with amend_config(data_dir=nexus_dir):
         ws = mtd_api.Load("REF_L_198409")
@@ -22,16 +20,17 @@ def test_deadtime(nexus_dir):
     algo.setProperty("InputWorkspace", ws)
     algo.setProperty("OutputWorkspace", "dead_time_corr")
     algo.PyExec()
-    corr_ws = algo.getProperty('OutputWorkspace').value
+    corr_ws = algo.getProperty("OutputWorkspace").value
     corr = corr_ws.readY(0)
     for c in corr:
-        assert(c>0)
-        assert(c<1.001)
+        assert c > 0
+        assert c < 1.001
+

 def test_deadtime_paralyzable(nexus_dir):
     """
-    Test the time-resolved reduction that uses a measured reference.
-    It is generally used at 30 Hz but it also works at 60 Hz.
+    Test the dead-time correction with the paralyzable-detector option.
+    It is generally used at 30 Hz but it also works at 60 Hz.
""" with amend_config(data_dir=nexus_dir): ws = mtd_api.Load("REF_L_198409") @@ -42,17 +41,18 @@ def test_deadtime_paralyzable(nexus_dir): algo.setProperty("Paralyzable", True) algo.setProperty("OutputWorkspace", "dead_time_corr") algo.PyExec() - corr_ws = algo.getProperty('OutputWorkspace').value + corr_ws = algo.getProperty("OutputWorkspace").value corr = corr_ws.readY(0) for c in corr: - assert(c>0) - assert(c<1.001) + assert c > 0 + assert c < 1.001 + def test_full_reduction(nexus_dir): """ - Test dead time from the reduction workflow + Test dead time from the reduction workflow """ - template_path = 'data/template.xml' + template_path = "data/template.xml" with amend_config(data_dir=nexus_dir): ws = mtd_api.Load("REF_L_198409") @@ -65,8 +65,7 @@ def test_full_reduction(nexus_dir): template_data.dead_time = False _, r2, _ = template.process_from_template_ws(ws, template_data) - corr = r1/r2 + corr = r1 / r2 for c in corr: - assert(c>0) - assert(c<1.001) - + assert c > 0 + assert c < 1.001 diff --git a/reduction/test/test_scaling_factors.py b/reduction/test/test_scaling_factors.py index a0992d3..fb1ba65 100644 --- a/reduction/test/test_scaling_factors.py +++ b/reduction/test/test_scaling_factors.py @@ -1,17 +1,13 @@ import os -import numpy as np - -from lr_reduction.DeadTimeCorrection import SingleReadoutDeadTimeCorrection -import mantid import mantid.simpleapi as mtd_api +import numpy as np + mtd_api.config["default.facility"] = "SNS" mtd_api.config["default.instrument"] = "REF_L" -from lr_reduction import event_reduction, template, workflow -from lr_reduction.utils import amend_config - from lr_reduction.scaling_factors import workflow as sf_workflow +from lr_reduction.utils import amend_config def check_results(data_file, reference): @@ -172,4 +168,3 @@ def test_compute_sf_with_deadtime_tof_200_sort(nexus_dir): assert output is True check_results(output_cfg, 'data/sf_197912_Si_dt_par_46_200.cfg') - From 3a8794cd3cec782fbe914e9273bf9db89d76345e Mon Sep 17 00:00:00 2001 From: glass-ships Date: Mon, 6 Jan 2025 13:47:46 -0500 Subject: [PATCH 11/16] remove docs-sphinx dir --- docs-sphinx/Makefile | 195 ------------------ docs-sphinx/conf.py | 74 ------- docs-sphinx/index.rst | 42 ---- docs-sphinx/make.bat | 35 ---- docs-sphinx/source/api/lr_reduction.rst | 81 -------- docs-sphinx/source/developer/contributing.rst | 19 -- docs-sphinx/source/developer/developer.rst | 141 ------------- docs-sphinx/source/releases.rst | 38 ---- .../source/user/conda_environments.rst | 29 --- 9 files changed, 654 deletions(-) delete mode 100644 docs-sphinx/Makefile delete mode 100644 docs-sphinx/conf.py delete mode 100644 docs-sphinx/index.rst delete mode 100644 docs-sphinx/make.bat delete mode 100644 docs-sphinx/source/api/lr_reduction.rst delete mode 100644 docs-sphinx/source/developer/contributing.rst delete mode 100644 docs-sphinx/source/developer/developer.rst delete mode 100644 docs-sphinx/source/releases.rst delete mode 100644 docs-sphinx/source/user/conda_environments.rst diff --git a/docs-sphinx/Makefile b/docs-sphinx/Makefile deleted file mode 100644 index 96fe2a3..0000000 --- a/docs-sphinx/Makefile +++ /dev/null @@ -1,195 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. - -# Options should be aligned with how the documentation is built on RTD -# (except for '--keep-going', failing fast is prefered on local builds) -# We also remove '-E' to reduce the time of rebuilding reference indexes -# on each build. 
-SPHINXOPTS = -T -j auto -W -SPHINXBUILD = sphinx-build -PAPER = -PROJECT ?= lr_reduction -BUILDDIR = build - -# Do not use local Django settings during the docs build -export DJANGO_SETTINGS_SKIP_LOCAL = True - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -auto: - sphinx-autobuild --port 8888 $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -livehtml: - sphinx-autobuild --port 4444 -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." 
- -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/sdf.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/sdf.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/sdf" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/sdf" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. 
The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs-sphinx/conf.py b/docs-sphinx/conf.py deleted file mode 100644 index 6dc63c6..0000000 --- a/docs-sphinx/conf.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Configuration file for the Sphinx documentation builder. -For the full list of built-in configuration values, see the documentation: -https://www.sphinx-doc.org/en/master/usage/configuration.html -""" -import os -import sys -import versioningit - -sys.path.insert(0, os.path.abspath("../reduction")) - -# -- Project information ----------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information - -project = "lr_reduction" -copyright = "2024, ORNL" # noqa A001 -author = "ORNL" -version = versioningit.get_version("../") -# The full version, including alpha/beta/rc tags -release = "source/".join(version.split("source/")[:-1]) - -# -- General configuration --------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration - -extensions = [ - "sphinx.ext.duration", - "sphinx.ext.doctest", - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.viewcode", - "sphinx.ext.coverage", -] - -autodoc_mock_imports = [ - "mantid", - "mantid.api", - "mantid.kernel", - "mantid.utils", - "mantid.utils.logging", - "mantid.simpleapi", - "mantid.geometry", - "mantidqt.widgets", - "mantidqt.widgets.algorithmprogress", - "qtpy", - "qtpy.uic", - "qtpy.QtWidgets", - "mantidqt", - "mantid.plots", - "mantid.plots.plotfunctions", - "mantid.plots.datafunctions", - "mantid.plots.utility", -] - -master_doc = "index" - -intersphinx_mapping = { - "python": ("https://docs.python.org/3/", None), - "sphinx": ("https://www.sphinx-doc.org/en/master/", None), -} -intersphinx_disabled_domains = ["std"] - -templates_path = ["_templates"] -exclude_patterns = ["_build"] -pygments_style = "sphinx" - - -# -- Options for HTML output ------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output - -html_theme = "sphinx_rtd_theme" # pylint: disable=C0103 - -html_theme_options = {"style_nav_header_background": "#472375"} - -epub_show_urls = "footnote" # pylint: disable=C0103 diff --git a/docs-sphinx/index.rst b/docs-sphinx/index.rst deleted file mode 100644 index 2447dcf..0000000 --- a/docs-sphinx/index.rst +++ /dev/null @@ -1,42 +0,0 @@ -.. :lr_reduction: - -Welcome to lr_reduction (Liquids Reflectometer Reduction) documentation -======================================================================= - -User Guide ----------- - -.. toctree:: - :titlesonly: - - /source/user/conda_environments - /source/releases - -Contacting the Team -+++++++++++++++++++ -The best mechanism for a user to request a change or report a bug is to contact the SANS CIS. -Please email `Mathieu Doucet`_ with your request. - -.. _Mathieu Doucet: doucetm@ornl.gov - -A change needs to be in the form of a: - -- Story for any enhancement request -- Defect for any bug fix request. - -API ---- - -.. toctree:: - :titlesonly: - - /source/api/lr_reduction - -Developer Guide ---------------- - -.. 
toctree:: - :titlesonly: - - /source/developer/contributing - /source/developer/developer diff --git a/docs-sphinx/make.bat b/docs-sphinx/make.bat deleted file mode 100644 index 9534b01..0000000 --- a/docs-sphinx/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=source -set BUILDDIR=build - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd diff --git a/docs-sphinx/source/api/lr_reduction.rst b/docs-sphinx/source/api/lr_reduction.rst deleted file mode 100644 index 213ae65..0000000 --- a/docs-sphinx/source/api/lr_reduction.rst +++ /dev/null @@ -1,81 +0,0 @@ -.. api_lr_reduction: - -lr_reduction package -==================== - -.. contents:: - :local: - :depth: 1 - -lr_reduction.background ------------------------ - -.. automodule:: lr_reduction.background - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.event_reduction ----------------------------- - -.. automodule:: lr_reduction.event_reduction - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.output -------------------- - -.. automodule:: lr_reduction.output - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.peak_finding -------------------------- - -.. automodule:: lr_reduction.peak_finding - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.reduction_template_reader --------------------------------------- - -.. automodule:: lr_reduction.reduction_template_reader - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.template ---------------------- - -.. automodule:: lr_reduction.template - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.time_resolved --------------------------- - -.. automodule:: lr_reduction.time_resolved - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.utils ------------------- - -.. automodule:: lr_reduction.utils - :members: - :undoc-members: - :show-inheritance: - -lr_reduction.workflow ---------------------- - -.. automodule:: lr_reduction.workflow - :members: - :undoc-members: - :show-inheritance: - diff --git a/docs-sphinx/source/developer/contributing.rst b/docs-sphinx/source/developer/contributing.rst deleted file mode 100644 index 847e838..0000000 --- a/docs-sphinx/source/developer/contributing.rst +++ /dev/null @@ -1,19 +0,0 @@ -.. contributing - -Guide to Contributing -===================== - -Contributions to this project are welcome. All contributors agree to the following: - -- It is assumed that the contributor is an ORNL employee and belongs to the development team. - Thus the following instructions are specific to ORNL development team's process. -- You have permission and any required rights to submit your contribution. -- Your contribution is provided under the license of this project and may be redistributed as such. -- All contributions to this project are public. 
- -All contributions must be "signed off" in the commit log and by doing so you agree to the above. - -Getting access to the main project ---------------------------------- -Direct commit access to the project is currently restricted to core developers. -All other contributions should be done through pull requests. diff --git a/docs-sphinx/source/developer/developer.rst b/docs-sphinx/source/developer/developer.rst deleted file mode 100644 index 39e026b..0000000 --- a/docs-sphinx/source/developer/developer.rst +++ /dev/null @@ -1,141 +0,0 @@ -.. _developer_documentation: - -Developer Documentation -======================= - -.. contents:: - :local: - :depth: 1 - -Local Environment ----------------- -For purposes of development, create conda environment `lr_reduction` with file `environment.yml`, and then -install the package in development mode with `pip`: - -.. code-block:: bash - - $> cd /path/to/lr_reduction/ - $> conda env create --solver libmamba --file ./environment.yml - $> conda activate lr_reduction - (lr_reduction)$> pip install -e ./ - -By installing the package in development mode, one doesn't need to re-install package `lr_reduction` in conda -environment `lr_reduction` after every change to the source code. - -pre-commit Hooks ---------------- - -Activate the hooks by typing in the terminal: - -.. code-block:: bash - - $> cd /path/to/lr_reduction/ - $> conda activate lr_reduction - (lr_reduction)$> pre-commit install - -Development procedure --------------------- - -1. A developer is assigned a task during the neutron status meeting and changes the task's status to **In Progress**. -2. The developer creates a branch off *next* and completes the task in this branch. -3. The developer creates a pull request (PR) off *next*. -4. Any new features or bugfixes must be covered by new and/or refactored automated tests. -5. The developer asks for another developer as a reviewer to review the PR. - A PR can only be approved and merged by the reviewer. -6. The developer changes the task’s status to **Complete** and closes the associated issue. - -Updating mantid dependency -------------------------- -The mantid version and the mantid conda channel (`mantid/label/main` or `mantid/label/nightly`) **must** be -synchronized across these files (a sketch of an automated consistency check is given below, after the Coverage reports section): - -- environment.yml -- conda.recipe/meta.yml -- .github/workflows/package.yml - -Using the Data Repository liquidsreflectometer-data --------------------------------------------------- -To run the integration tests in your local environment, it is necessary first to download the data files. -Because of their size, the files are stored in the Git LFS repository -`liquidsreflectometer-data `_. - -It is necessary to have package `git-lfs` installed on your machine. - -.. code-block:: bash - - $> sudo apt install git-lfs - -After this step, initialize or update the data repository: - -.. code-block:: bash - - $> cd /path/to/lr_reduction - $> git submodule update --init - -This will either clone `liquidsreflectometer-data` into `/path/to/lr_reduction/tests/liquidsreflectometer-data` or -bring `liquidsreflectometer-data`'s refspec in sync with the refspec listed within file -`/path/to/liquidsreflectometer/.gitmodules`. - -An intro to Git LFS in the context of the Neutron Data Project is found in the -`Confluence pages `_ -(login required). - - -Coverage reports ----------------- - -GitHub Actions create reports for unit and integration tests, then combine them into one report and upload it to -`Codecov `_. 
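To make the synchronization rule for the mantid pin concrete, a minimal consistency check could look like the following sketch. This is a hypothetical helper, not part of the repository; it assumes it is run from the repository root and that the channel string appears verbatim in each of the three files:

.. code-block:: python

    # Hypothetical helper (not in lr_reduction): verify that the mantid conda
    # channel spec is identical across the files that must stay synchronized.
    import re
    import sys

    FILES = [
        "environment.yml",
        "conda.recipe/meta.yml",
        ".github/workflows/package.yml",
    ]

    def channel_specs(path):
        """Collect every 'mantid/label/...' channel string found in the file."""
        with open(path, encoding="utf-8") as handle:
            return set(re.findall(r"mantid/label/\w+", handle.read()))

    specs = {path: channel_specs(path) for path in FILES}
    # Fail loudly if any file pins a different channel than the others
    if len({frozenset(found) for found in specs.values()}) > 1:
        sys.exit(f"mantid channel specs are out of sync: {specs}")
    print("mantid channel specs agree:", sorted(specs[FILES[0]]))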
- - -Building the documentation -------------------------- -A repository webhook is set up to automatically trigger the latest documentation build via GitHub Actions. -To manually build the documentation: - -.. code-block:: bash - - $> conda activate lr_reduction - (lr_reduction)$> make docs - -After this, point your browser to -`file:///path/to/lr_reduction/docs/build/html/index.html` - - -Creating a stable release ------------------------- - -- For a *patch* release, it may be acceptable to bypass the creation of a candidate release. - Still, we must update branch `qa` first, then create the release tag in branch `main`. - For instance, to create patch version "v2.1.2": - -.. code-block:: bash - - VERSION="v2.1.2" - # update the local repository - git fetch --all --prune - git fetch --prune --prune-tags origin - # update branch qa from next, possibly bringing work done in qa missing in next - git switch next - git rebase -v origin/next - git merge --no-edit origin/qa # commit message is automatically generated - git push origin next # required to "link" qa to next, for future fast-forward - git switch qa - git rebase -v origin/qa - git merge --ff-only origin/next - # update branch main from qa - git merge --no-edit origin/main # commit message is automatically generated - git push origin qa # required to "link" main to qa, for future fast-forward - git switch main - git rebase -v origin/main - git merge --ff-only origin/qa - git tag $VERSION - git push origin --tags main - -- For a *minor* or *major* release, we create a stable release *after* we have created a Candidate release. - For this customary procedure, follow: - - + the `Software Maturity Model `_ for continuous versioning as well as creating release candidates and stable releases. - + Update the :ref:`Release Notes ` with major fixes, updates and additions since the last stable release. - - diff --git a/docs-sphinx/source/releases.rst b/docs-sphinx/source/releases.rst deleted file mode 100644 index 47b3161..0000000 --- a/docs-sphinx/source/releases.rst +++ /dev/null @@ -1,38 +0,0 @@ -.. _release_notes: - -Release Notes -============= -Notes for major or minor releases. Notes for patch releases are deferred. - - ------------------------------ -(date of release, format YYYY-MM-DD) - -**Of interest to the User:** - -- PR #XYZ one-liner description - -**Of interest to the Developer:** - -- PR #40 documentation to create a patch release - -2.1.0 ------ - -**Of interest to the User:** - -- PR #33 enable dead time correction for runs with skipped pulses -- PR #26 add dead time correction to the computation of scaling factors -- PR #23 add dead time correction -- PR #19 Functionality to use two backgrounds -- PR #15 Ability to fit a background with a polynomial function - -**Of interest to the Developer:** - -- PR #37 documentation conforming to that of the python project template -- PR #36 versioning with versioningit -- PR #25 Read in error events when computing correction -- PR #21 switch dependency from mantidworkbench to mantid -- PR #20 allow runtime initialization of new attributes for ReductionParameters -- PR #14 add first GitHub actions -- PR #12 switch from mantid to mantidworkbench conda package diff --git a/docs-sphinx/source/user/conda_environments.rst b/docs-sphinx/source/user/conda_environments.rst deleted file mode 100644 index 7f2ec71..0000000 --- a/docs-sphinx/source/user/conda_environments.rst +++ /dev/null @@ -1,29 +0,0 @@ -.. 
conda_environments - -Conda Environments ================== - -Three conda environments are available on the analysis nodes, beamline machines, as well as the -Jupyter notebook servers. On a terminal: - -.. code-block:: bash - - $> conda activate <environment> - -where `<environment>` is one of `lr_reduction`, `lr_reduction-qa`, and `lr_reduction-dev`. - -lr_reduction Environment ------------------------ -Activates the latest stable release of `lr_reduction`. Typically users will reduce their data in this environment. - -lr_reduction-qa Environment --------------------------- -Activates a release-candidate environment. -Instrument scientists and computational instrument scientists will carry out testing on this environment -to prevent bugs from being introduced in the next stable release. - -lr_reduction-dev Environment ---------------------------- -Activates the environment corresponding to the latest changes in the source code. -Instrument scientists and computational instrument scientists will test the latest changes to `lr_reduction` in this -environment. From d6f3e266e6d0263aa72fccf6b100120064a5785d Mon Sep 17 00:00:00 2001 From: Mathieu Doucet Date: Mon, 6 Jan 2025 15:57:44 -0500 Subject: [PATCH 12/16] prettify --- docs/conf.py | 80 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 80 insertions(+) create mode 100644 docs/conf.py diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..394da85 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,80 @@ +"""Configuration file for the Sphinx documentation builder. +For the full list of built-in configuration values, see the documentation: +https://www.sphinx-doc.org/en/master/usage/configuration.html +""" +import os +import sys +import versioningit + +sys.path.insert(0, os.path.abspath("../reduction")) + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "lr_reduction" +copyright = "2025, ORNL" # noqa A001 +author = "ORNL" +version = versioningit.get_version("../") +# The full version, including alpha/beta/rc tags +release = "source/".join(version.split("source/")[:-1]) + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "sphinx.ext.duration", + "sphinx.ext.doctest", + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.viewcode", + "sphinx.ext.coverage", + "sphinx.ext.mathjax", + "sphinx.ext.napoleon", + "myst_parser" +] + +autodoc_mock_imports = [ + "mantid", + "mantid.api", + "mantid.kernel", + "mantid.utils", + "mantid.utils.logging", + "mantid.simpleapi", + "mantid.geometry", + "mantidqt.widgets", + "mantidqt.widgets.algorithmprogress", + "qtpy", + "qtpy.uic", + "qtpy.QtWidgets", + "mantidqt", + "mantid.plots", + "mantid.plots.plotfunctions", + "mantid.plots.datafunctions", + "mantid.plots.utility", +] + +master_doc = "index" + +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), + "sphinx": ("https://www.sphinx-doc.org/en/master/", None), +} +intersphinx_disabled_domains = ["std"] + +templates_path = ["_templates"] +exclude_patterns = ["_build"] +pygments_style = "sphinx" + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "sphinx_rtd_theme" # pylint: disable=C0103 + 
+html_theme_options = {"style_nav_header_background": "#472375"} + +epub_show_urls = "footnote" # pylint: disable=C0103 + +nitpicky = True +nitpick_ignore = [('py:class', 'type')] \ No newline at end of file From 437c2b19a9fd50a2573b672fe148455909293f60 Mon Sep 17 00:00:00 2001 From: Mathieu Doucet Date: Mon, 6 Jan 2025 16:14:53 -0500 Subject: [PATCH 13/16] Done with main reduction code --- docs/conf.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 394da85..f0d238c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -37,6 +37,7 @@ autodoc_mock_imports = [ "mantid", "mantid.api", + "mantid.api.Workspace", "mantid.kernel", "mantid.utils", "mantid.utils.logging", @@ -52,6 +53,7 @@ "mantid.plots.plotfunctions", "mantid.plots.datafunctions", "mantid.plots.utility", + "numpy.ndarray" ] master_doc = "index" @@ -75,6 +77,3 @@ html_theme_options = {"style_nav_header_background": "#472375"} epub_show_urls = "footnote" # pylint: disable=C0103 - -nitpicky = True -nitpick_ignore = [('py:class', 'type')] \ No newline at end of file From 33819cdba13ff69b30e26efd3e2d0ebe9fdc6fb3 Mon Sep 17 00:00:00 2001 From: Mathieu Doucet Date: Mon, 6 Jan 2025 17:06:52 -0500 Subject: [PATCH 14/16] remove conf --- docs/conf.py | 79 ---------------------------------------------------- 1 file changed, 79 deletions(-) delete mode 100644 docs/conf.py diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index f0d238c..0000000 --- a/docs/conf.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Configuration file for the Sphinx documentation builder. -For the full list of built-in configuration values, see the documentation: -https://www.sphinx-doc.org/en/master/usage/configuration.html -""" -import os -import sys -import versioningit - -sys.path.insert(0, os.path.abspath("../reduction")) - -# -- Project information ----------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information - -project = "lr_reduction" -copyright = "2025, ORNL" # noqa A001 -author = "ORNL" -version = versioningit.get_version("../") -# The full version, including alpha/beta/rc tags -release = "source/".join(version.split("source/")[:-1]) - -# -- General configuration --------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration - -extensions = [ - "sphinx.ext.duration", - "sphinx.ext.doctest", - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.viewcode", - "sphinx.ext.coverage", - "sphinx.ext.mathjax", - "sphinx.ext.napoleon", - "myst_parser" -] - -autodoc_mock_imports = [ - "mantid", - "mantid.api", - "mantid.api.Workspace", - "mantid.kernel", - "mantid.utils", - "mantid.utils.logging", - "mantid.simpleapi", - "mantid.geometry", - "mantidqt.widgets", - "mantidqt.widgets.algorithmprogress", - "qtpy", - "qtpy.uic", - "qtpy.QtWidgets", - "mantidqt", - "mantid.plots", - "mantid.plots.plotfunctions", - "mantid.plots.datafunctions", - "mantid.plots.utility", - "numpy.ndarray" -] - -master_doc = "index" - -intersphinx_mapping = { - "python": ("https://docs.python.org/3/", None), - "sphinx": ("https://www.sphinx-doc.org/en/master/", None), -} -intersphinx_disabled_domains = ["std"] - -templates_path = ["_templates"] -exclude_patterns = ["_build"] -pygments_style = "sphinx" - - -# -- Options for HTML output ------------------------------------------------- -# 
https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output - -html_theme = "sphinx_rtd_theme" # pylint: disable=C0103 - -html_theme_options = {"style_nav_header_background": "#472375"} - -epub_show_urls = "footnote" # pylint: disable=C0103 From 72a314d27800474e1de0dda5530557631f709240 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 7 Jan 2025 10:32:06 -0500 Subject: [PATCH 15/16] edit target branch for docs action --- .github/workflows/deploy_docs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy_docs.yaml b/.github/workflows/deploy_docs.yaml index 6b6e858..961a259 100644 --- a/.github/workflows/deploy_docs.yaml +++ b/.github/workflows/deploy_docs.yaml @@ -3,7 +3,7 @@ on: workflow_dispatch: push: branches: - - master + - next env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 5e7c0a33ebfa7508fa631401e782ca2cc22e0924 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 7 Jan 2025 11:44:49 -0500 Subject: [PATCH 16/16] update pointers in docs/index.md --- docs/index.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/index.md b/docs/index.md index d0f98aa..96d78d0 100644 --- a/docs/index.md +++ b/docs/index.md @@ -18,9 +18,9 @@ A change needs to be in the form of a: ## API -- [lr_reduction](source/api/lr_reduction.md) +- [lr_reduction](./api/index.md) ## Developer Guide -- [Contributing Guide](source/developer/contributing.md) -- [Developer Documentation](source/developer/developer.md) +- [Contributing Guide](./developer/contributing.md) +- [Developer Documentation](./developer/developer.md)
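A note on the `autodoc_mock_imports` entries edited in the conf.py patches above: at build time, Sphinx substitutes a stub module for each listed name, so autodoc can import `lr_reduction` in a docs environment where Mantid itself is not installed. The following is a minimal sketch of that mechanism using `unittest.mock`; Sphinx actually uses its own internal mock class, and the run name passed to `Load` is a made-up placeholder, but the effect is similar:

.. code-block:: python

    # Illustration only (not project code) of how mocked imports behave.
    import sys
    from unittest import mock

    # Roughly what Sphinx does for every name in autodoc_mock_imports:
    for name in ["mantid", "mantid.api", "mantid.simpleapi"]:
        sys.modules[name] = mock.MagicMock()

    import mantid.simpleapi  # now resolves to the stub, not real Mantid

    # Any attribute lookup or call on the stub succeeds and returns another
    # mock, which is all autodoc needs in order to import the package.
    ws = mantid.simpleapi.Load("REF_L_123456")  # hypothetical run name
    print(type(ws))  # <class 'unittest.mock.MagicMock'>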