From e57718c9e6c9fa12795e905cb707595230fc2f08 Mon Sep 17 00:00:00 2001 From: Pierre Thibault Date: Fri, 2 Oct 2015 21:36:16 +0100 Subject: [PATCH 001/363] gitignore update --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 9cd4354bb..3bff49593 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,5 @@ doc/_script2rst/*.png tutorial/*.png ghostdriver* ptypy/version.py +.idea/ + From a43485b3a3816cfd1761d7c4749f8e306501bb46 Mon Sep 17 00:00:00 2001 From: Pierre Thibault Date: Wed, 7 Oct 2015 14:37:34 +0100 Subject: [PATCH 002/363] Major reconfiguration of the data pipeline --- ptypy/core/data.py | 153 ++------ ptypy/core/manager.py | 861 ++++++++++++++++-------------------------- ptypy/core/ptycho.py | 8 +- 3 files changed, 366 insertions(+), 656 deletions(-) diff --git a/ptypy/core/data.py b/ptypy/core/data.py index 2ab28e384..b7096dd86 100644 --- a/ptypy/core/data.py +++ b/ptypy/core/data.py @@ -21,6 +21,7 @@ from ptypy import utils as u from ptypy import io from ptypy import resources + from ptypy.experiment import PtyScanTypes from ptypy.core import geometry from ptypy.utils.verbose import logger, log, headerline import numpy as np @@ -30,6 +31,7 @@ from .. import utils as u from .. import io from .. import resources + from ..experiment import PtyScanTypes from ..utils.verbose import logger, log, headerline import geometry import numpy as np @@ -79,7 +81,7 @@ CODES = {WAIT: 'Scan unfinished. More frames available after a pause', EOS: 'End of scan reached'} -__all__ = ['GENERIC','PtyScan','PTYD','PtydScan','MoonFlowerScan'] +__all__ = ['GENERIC', 'PtyScan', 'PTYD', 'PtydScan', 'MoonFlowerScan', 'makePtyScan'] class PtyScan(object): """\ @@ -1221,125 +1223,40 @@ def load(self, indices): return raw, {}, {} - -class DataSource(object): +def makePtyScan(scan_pars, scanmodel=None): """ - A source of data for ptychographic reconstructions. The important method is "feed_data", which returns - packets of diffraction patterns with their meta-data. + Factory for PtyScan object. Return a PtyScan instance (or subclass) according to the + input parameters (see ...). """ - def __init__(self, scans, frames_per_call=10000000, feed_format='dp'): - """ - DataSource initialization. - - Parameters - ---------- - scans : - a dictionnary of scan structures. - - frames_per_call : (optional) - number of frames to load in one call. By default, load as many as possible. - - feed_format : - the format in with the data is packaged. For now only 'dp' is implemented. 
- """ - - from ..experiment import PtyScanTypes - - self.frames_per_call = frames_per_call - self.feed_format = feed_format - self.scans = scans - - # sort after keys given - self.labels = sorted(scans.keys()) - - # empty list for the scans - self.PS = [] - - for label in self.labels: - # we are making a copy of the root as we want to fill it - scan = scans[label] - s = scan['pars'] - - # Copy other relevant information - prep = s.data.copy() - - # Assign label - prep.label = label - - source = prep.source - recipe = prep.get('recipe',{}) - if prep.get('positions_theory') is None: - prep.positions_theory = scan['pos_theory'] - - #prep.dfile = s.data_file - #prep.geometry = s.geometry.copy() - #prep.xy = s.xy.copy() - - if source is not None: - source = source.lower() - - if source in PtyScanTypes: - PS = PtyScanTypes[source] - logger.info('Scan %s will be prepared with the recipe "%s"' % (label, source)) - self.PS.append(PS(prep, recipe= recipe)) - elif source.endswith('.ptyd') or source.endswith('.pty') or str(source)=='file': - self.PS.append(PtydScan(prep, source=source)) - elif source=='test': - self.PS.append(MoonFlowerScan(prep)) - elif source=='sim': - from ..simulations import SimScan - logger.info('Scan %s will simulated' % (label)) - self.PS.append(SimScan(prep,s.copy())) - elif source=='empty' or source is None: - prep.recipe = None - logger.warning('Generating dummy PtyScan for scan `%s` - This label will source only zeros as data' % label) - self.PS.append(PtyScan(prep)) - else: - raise RuntimeError('Could not manage source "%s" for scan `%s`' % (str(source),label)) - - # Initialize flags - self.scan_current = -1 - self.data_available = True - self.scan_total = len(self.PS) - - @property - def scan_available(self): - return self.scan_current < (self.scan_total - 1) - - def feed_data(self): - """ - Yield data packages. 
- """ - # get PtyScan instance to scan_number - PS = self.PS[self.scan_current] - label = self.labels[self.scan_current] - - # initialize if that has not been done yet - if not PS.is_initialized: - PS.initialize() - - msg = PS.auto(self.frames_per_call, self.feed_format) - # if we catch a scan that has ended look for an unfinished scan - while msg == EOS and self.scan_available: - self.scan_current += 1 - PS = self.PS[self.scan_current] - label = self.labels[self.scan_current] - if not PS.is_initialized: - PS.initialize() - msg = PS.auto(self.frames_per_call, self.feed_format) - - self.data_available = (msg != EOS or self.scan_available) - - logger.debug(u.verbose.report(msg)) - if msg != WAIT and msg != EOS: - # ok that would be a data package - # attach inner label - msg['common']['ptylabel'] = label - #print u.verbose.report(msg) - logger.info('Feeding data chunk') - return msg - else: - return None + # Extract information on the type of object to build + pars = scan_pars.data + label = pars.label + source = pars.source + recipe = pars.get('recipe', {}) + + if source is not None: + source = source.lower() + + if source in PtyScanTypes: + ps_obj = PtyScanTypes[source] + logger.info('Scan %s will be prepared with the recipe "%s"' % (label, source)) + ps_instance = ps_obj(pars, recipe=recipe) + elif source.endswith('.ptyd') or source.endswith('.pty') or str(source) == 'file': + ps_instance = PtydScan(pars, source=source) + elif source == 'test': + ps_instance = MoonFlowerScan(pars) + elif source == 'sim': + from ..simulations import SimScan + logger.info('Scan %s will simulated' % label) + ps_instance = SimScan(pars, scanmodel) + elif source == 'empty' or source is None: + pars.recipe = None + logger.warning('Generating dummy PtyScan for scan `%s` - This label will source only zeros as data' % label) + ps_instance = PtyScan(pars) + else: + raise RuntimeError('Could not manage source "%s" for scan `%s`' % (str(source), label)) + + return ps_instance if __name__ == "__main__": u.verbose.set_level(3) diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py index f948c02c4..18ebb6f25 100644 --- a/ptypy/core/manager.py +++ b/ptypy/core/manager.py @@ -37,7 +37,7 @@ FType = np.float64 CType = np.complex128 -__all__ = ['DEFAULT','ModelManager'] +__all__ = ['DEFAULT', 'ModelManager', 'ScanModel'] DESCRIPTION = u.Param() @@ -75,6 +75,276 @@ tags="", # For now only used to declare an empty scan ) +NO_DATA_FLAG = 'No data' + +class ScanModel(object): + """ + Manage a single scan model (data, illumination, geometry, ...) + """ + + DEFAULT = DEFAULT + + def __init__(self, ptycho=None, specific_pars=None, generic_pars=None, label=None): + """ + Create ScanModel object. + + Parameters + ---------- + specific_pars : dict or Param + Input parameters specific to the given scan. 
+ + generic_pars : dict or Param + Input parameters (see :py:attr:`DEFAULT`) + If None uses defaults + """ + # Update parameter structure + p = u.Param(self.DEFAULT.copy()) + p.update(generic_pars, in_place_depth=4) + p.update(specific_pars, in_place_depth=4) + self.p = p + self.label = label + self.ptycho = ptycho + + # Create Associated PtyScan object + self.ptyscan = data.makePtyScan(self.p) + + # Initialize instance attributes + self.mask = None + self.diff = None + self.mask_views = [] + self.diff_views = [] + self.new_positions = None + self.new_diff_views = None + self.new_mask_views = None + + self.meta = None + self.geometries = [] + self.shape = None + self.psize = None + + self.data_available = True + + self.frames_per_call = 100000 + self.feed_format = 'dp' + + def new_data(self): + """ + Feed data from ptyscan object. + :return: + """ + + # Initialize if that has not been done yet + if not self.ptyscan.is_initialized: + self.ptyscan.initialize() + + # Get data + dp = self.ptyscan.auto(self.frames_per_call, self.feed_format) + + self.data_available = (dp != data.EOS) + logger.debug(u.verbose.report(dp)) + + if dp == data.WAIT or not self.data_available: + return None + + # Store metadata + self.meta = dp['common'] + + label = self.label + logger.info('Importing data from scan %s.' % label) + + # Prepare the scan geometry if not already done. + if not self.geometries: + # Check if meta and scan geometry agree + self.geometries = [] + geo = self.p.geometry + + # FIXME: User should be informed of the final geometry parameters. + for key in geometry.DEFAULT.keys(): + if geo.get(key) is None or not (geo.precedence == 'meta'): + mk = self.meta.get(key) + if mk is not None: # None existing key or None values in meta dict are treated alike + geo[key] = mk + + # The multispectral case will have multiple geometries + for ii, fac in enumerate(self.p.coherence.energies): + geoID = geometry.Geo._PREFIX + '%02d' % ii + label + g = geometry.Geo(self.ptycho, geoID, pars=geo) + # now we fix the sample pixel size, This will make the frame size adapt + g.p.resolution_is_fix = True + # save old energy value: + g.p.energy_orig = g.energy + # change energy + g.energy *= fac + # append the geometry + self.geometries.append(g) + + # Store frame shape + self.shape = np.array(self.meta.get('shape', self.geometries[0].shape)) + self.psize = self.geometries[0].psize + + sh = self.shape + + # Storage generation if not already existing + if self.diff is None: + # This scan is brand new so we create storages for it + self.diff = self.ptycho.diff.new_storage(shape=(1, sh[-2], sh[-1]), psize=self.psize, padonly=True, + layermap=None) + old_diff_views = [] + old_diff_layers = [] + else: + # ok storage exists already. Views most likely also. 
Let's do some analysis and deactivate the old views + old_diff_views = self.ptycho.diff.views_in_storage(self.diff, active=False) + old_diff_layers = [] + for v in old_diff_views: + old_diff_layers.append(v.layer) + + # Same for mask + if self.mask is None: + self.mask = self.ptycho.mask.new_storage(shape=(1, sh[-2], sh[-1]), psize=self.psize, padonly=True, + layermap=None) + old_mask_views = [] + old_mask_layers = [] + else: + old_mask_views = self.ptycho.mask.views_in_storage(self.mask, active=False) + old_mask_layers = [] + for v in old_mask_views: + old_mask_layers.append(v.layer) + + # Prepare for View generation + AR_diff_base = DEFAULT_ACCESSRULE.copy() + AR_diff_base.shape = self.shape + AR_diff_base.coord = 0.0 + AR_diff_base.psize = self.psize + AR_mask_base = AR_diff_base.copy() + AR_diff_base.storageID = self.diff.ID + AR_mask_base.storageID = self.mask.ID + + diff_views = [] + mask_views = [] + positions = [] + + # First pass: create or update views and reformat corresponding storage + for dct in dp['iterable']: + + index = dct['index'] + active = dct['data'] is not None + + pos = dct.get('position') + + if pos is None: + logger.warning('No position set to scan point %d of scan %s' % (index, label)) + + AR_diff = AR_diff_base + AR_mask = AR_mask_base + AR_diff.layer = index + AR_mask.layer = index + AR_diff.active = active + AR_mask.active = active + + # check here: is there already a view to this layer? Is it active? + try: + old_view = old_diff_views[old_diff_layers.index(index)] + old_active = old_view.active + old_view.active = active + + logger.debug( + 'Diff view with layer/index %s of scan %s exists. \nSetting view active state from %s to %s' % ( + index, label, old_active, active)) + except ValueError: + v = View(self.ptycho.diff, accessrule=AR_diff) + diff_views.append(v) + logger.debug( + 'Diff view with layer/index %s of scan %s does not exist. \nCreating view with ID %s and set active state to %s' % ( + index, label, v.ID, active)) + # append position also + positions.append(pos) + + try: + old_view = old_mask_views[old_mask_layers.index(index)] + old_view.active = active + except ValueError: + v = View(self.ptycho.mask, accessrule=AR_mask) + mask_views.append(v) + + # so now we should have the right views to this storages. Let them reformat() + # that will create the right sizes and the datalist access + self.diff.reformat() + self.mask.reformat() + + # Second pass: copy the data + for dct in dp['iterable']: + parallel.barrier() + if not dct['active']: + continue + diff_data = dct['data'] + idx = dct['index'] + + # FIXME: Find a more transparent way than this. + self.diff.data[self.diff.layermap.index(idx)][:] = diff_data + self.mask.data[self.mask.layermap.index(idx)][:] = dct.get('mask', np.ones_like(diff_data)) + + self.diff.nlayers = parallel.MPImax(self.diff.layermap) + 1 + self.mask.nlayers = parallel.MPImax(self.mask.layermap) + 1 + + self.new_positions = positions + self.new_diff_views = diff_views + self.new_mask_views = mask_views + self.diff_views += diff_views + self.mask_views += mask_views + + self._update_stats() + + def _update_stats(self): + """ + (Re)compute the statistics for the data stored in the scan. + These statistics are: + * Itotal: The integrated power per frame + * max/min/mean_frame: pixel-by-pixel maximum, minimum and + average among all frames. 
+ """ + mask_views = self.mask_views + diff_views = self.diff_views + + # Nothing to do if no view exist + if not self.diff: return + + # Reinitialize containers + Itotal = [] + max_frame = np.zeros(self.diff_views[0].shape) + min_frame = np.zeros_like(max_frame) + mean_frame = np.zeros_like(max_frame) + norm = np.zeros_like(max_frame) + + for maview, diview in zip(mask_views, diff_views): + if not diview.active: + continue + dv = diview.data + m = maview.data + v = m * dv + Itotal.append(np.sum(v)) + max_frame[max_frame < v] = v[max_frame < v] + min_frame[min_frame > v] = v[min_frame > v] + mean_frame += v + norm += m + + parallel.allreduce(mean_frame) + parallel.allreduce(norm) + parallel.allreduce(max_frame, parallel.MPI.MAX) + parallel.allreduce(max_frame, parallel.MPI.MIN) + mean_frame /= (norm + (norm == 0)) + + self.diff.norm = norm + self.diff.max_power = parallel.MPImax(Itotal) + self.diff.tot_power = parallel.MPIsum(Itotal) + self.diff.pbound_stub = self.diff.max_power / mean_frame.shape[-1]**2 + self.diff.mean = mean_frame + self.diff.max = max_frame + self.diff.min = min_frame + + info = {'label': self.label, 'max': self.diff.max_power, 'tot': self.diff.tot_power, 'mean': mean_frame.sum()} + logger.info( + '\n--- Scan %(label)s photon report ---\nTotal photons : %(tot).2e \nAverage photons : %(mean).2e\nMaximum photons : %(max).2e\n' % info + '-' * 29) + class ModelManager(object): """ @@ -122,32 +392,23 @@ def __init__(self, ptycho, pars=None, scans=None, **kwargs): # Initialize the input parameters p = u.Param(self.DEFAULT.copy()) - p.update(pars, in_place_depth = 4) + p.update(pars, in_place_depth=4) self.p = p - #print '############### HERE' - #print u.verbose.report(p) - self.ptycho = ptycho # abort if ptycho is None: + # FIXME: PT Is this the expected behavior? if self.ptycho is None: return - # Prepare the list of scan_labels (important because they are sorted) - # FIXME: BE I don't think this is the way to go. This is only needed for sharing - # For the user it might be better to mark the sharing behavior directly - self.scan_labels = [] - - # store scan specifics + # store scan-specific parameters self.scans_pars = scans if scans is not None else self.ptycho.p.get('scans', u.Param()) - # Scan dictionary - # This will store everything scan specific and will hold - # references to the storages of the respective scan - self.scans = u.Param() - # update self.scans from information already available - for label in self.scans_pars.keys(): - self.prepare_scan(label) + self.scans = {} + + # Create scan objects from information already available + for label, scan_pars in self.scans_pars.iteritems(): + self.scans[label] = ScanModel(specific_pars=scan_pars, generic_pars=self.p, label=label) # Sharing dictionary that stores sharing behavior self.sharing = {'probe_ids': {}, 'object_ids': {}} @@ -170,122 +431,9 @@ def _from_dict(cls, dct): inst.__dict__ = dct return inst - def prepare_scan(self, label=None): - """ - Prepare scan specific parameters and create a label if necessary - """ - if label is None: - label = 'Scan%05d' % self.label_idx - self.label_idx += 1 - - try: - # return the scan if already prepared - return self.scans[label] - - except KeyError: - # get standard parameters - # Create a dictionary specific for the scan. 
- scan = u.Param() - self.scans[label] = scan - scan.label = label - # make a copy of model dictionary - scan.pars = self.p.copy(depth=5) - # Look for a scan-specific entry in the input parameters - scan_specific_parameters = self.scans_pars.get(label, None) - scan.pars.update(scan_specific_parameters, in_place_depth=5) - - # prepare the tags - t = scan.pars.tags - if str(t) == t: - scan.pars.tags = [tag.strip().lower() for tag in t.split(',')] - # also create positions - scan.pos_theory = xy.from_pars(scan.pars.xy) - return scan - - def _update_stats(self, scan, mask_views=None, diff_views=None): - """ - (Re)compute the statistics for the data stored in a scan. - These statistics are: - * Itotal: The integrated power per frame - * max/min/mean_frame: pixel-by-pixel maximum, minimum and - average among all frames. - """ - if mask_views is None: - mask_views = scan.mask_views - if diff_views is None: - diff_views = scan.diff_views - # Reinitialize containers - Itotal = [] - #DPCX = [] - #DPCY = [] - max_frame = np.zeros(scan.diff_views[0].shape) - min_frame = np.zeros_like(max_frame) - mean_frame = np.zeros_like(max_frame) - norm = np.zeros_like(max_frame) - # Useful quantities - #sh0,sh1 = scan.geo.N - #x = np.arange(s1, dtype=float) - #y = np.arange(s0, dtype=float) - - for maview, diview in zip(mask_views, diff_views): - if not diview.active: - continue - dv = diview.data - m = maview.data - v = m * dv - #mv = dv.pod.ma_view.data # pods may not yet exist, since mask & data are not linked yet - #S0 = np.sum(mv*dv, axis=0) - #S1 = np.sum(mv*dv, axis=1) - #I0 = np.sum(S0) - Itotal.append(np.sum(v)) - #DPCX.append(np.sum(S0*x)/I0 - sh1/2.) - #DPCY.append(np.sum(S1*y)/I0 - sh0/2.) - max_frame[max_frame < v] = v[max_frame < v] - min_frame[min_frame > v] = v[min_frame > v] - mean_frame += v - norm += m - - parallel.allreduce(mean_frame) - parallel.allreduce(norm) - parallel.allreduce(max_frame, parallel.MPI.MAX) - parallel.allreduce(max_frame, parallel.MPI.MIN) - mean_frame /= (norm + (norm == 0)) - - scan.diff.norm = norm - scan.diff.max_power = parallel.MPImax(Itotal) - scan.diff.tot_power = parallel.MPIsum(Itotal) - scan.diff.pbound_stub = scan.diff.max_power / mean_frame.shape[-1]**2 - scan.diff.mean = mean_frame - scan.diff.max = max_frame - scan.diff.min = min_frame - - info = {'label': scan.label, 'max': scan.diff.max_power, 'tot': scan.diff.tot_power, 'mean': mean_frame.sum()} - logger.info( - '\n--- Scan %(label)s photon report ---\nTotal photons : %(tot).2e \nAverage photons : %(mean).2e\nMaximum photons : %(max).2e\n' % info + '-' * 29) - - def make_datasource(self, data_pars=None): - """ - This function will make a static datasource from - parameters in the self.scans dict. - For any additional file in data.filelist it will create a new entry in - self.scans with generic parameters given by the current model. - """ - """ - if data_pars is not None: - filelist = data_pars.get('filelist') - if filelist is not None: - for f in filelist: - scan = self.prepare_scan() - scan.pars.data_file = f - """ - # now there should be little suprises. every scan is listed in - # self.scans - for label, scan in self.scans.items(): - #if scan.pars.get('data_file') is None: - # scan.pars['data_file'] = self.ptycho.paths.get_data_file(label=label) - scan.pars['label'] = label - - return data.DataSource(self.scans) + @property + def data_available(self): + return any(s.data_available for s in self.scans.values()) def new_data(self): """ @@ -293,234 +441,23 @@ def new_data(self): accordingly. 
""" parallel.barrier() + # Nothing to do if there are no new data. - if not self.ptycho.datasource.data_available: return 'No Data' + if not self.data_available: + return 'No data' logger.info('Processing new data.') used_scans = [] - not_initialized = [] - - # For some funny reason the Generator constuct used to fail. - while True: - dp = self.ptycho.datasource.feed_data() - if dp is None: break - """ - A dp (data package) contains the following: - - common : dict or Param - Meta information common to all datapoints in the - data package. Variable names need to be consistent with those - in the rest of ptypy package. - (TODO further description) - - Important info: - ------------------------ - shape : (tuple or array) - expected frame shape - label : (string) - Script label. This label is matched to the parameter tree - a string signifying to which scan this package belongs - - - iterable : An iterable structure that yields for each iteration - a dict with the following fields: - - data : (np.2darray, float) - diffraction data - In MPI case, data can be None if distributed to other nodes - mask : (np.2darray, bool) - masked out areas in diffraction data array - index : (int) - diffraction datapoint index in scan - position : (tuple or array) - scan position - """ - meta = dp['common'] - label = meta['ptylabel'] - - # we expect a string for the label. - assert label == str(label) - - used_scans.append(label) - logger.info('Importing data from %s as scan %s.' % (meta['label'], label)) - - # prepare scan dictionary or dig up the already prepared one - scan = self.prepare_scan(label) - scan.meta = meta - - # empty buffer - scan.iterable = [] - - # prepare the scan geometry if not already done. - if scan.get('geometries') is None: - # ok now that we have meta we can check if the geometry fits - scan.geometries = [] - geo = scan.pars.geometry - for key in geometry.DEFAULT.keys(): - if geo.get(key) is None or not (geo.precedence=='meta'): #scan.pars.if_conflict_use_meta: - mk = scan.meta.get(key) - if mk is not None: # None existing key or None values in meta dict are treated alike - geo[key] = mk - - for ii, fac in enumerate(self.p.coherence.energies): - geoID = geometry.Geo._PREFIX + '%02d' % ii + label - g = geometry.Geo(self.ptycho, geoID, pars=geo) - # now we fix the sample pixel size, This will make the frame size adapt - g.p.resolution_is_fix = True - # save old energy value: - g.p.energy_orig = g.energy - # change energy - g.energy *= fac - # append the geometry - scan.geometries.append(g) - - # create a buffer - scan.iterable = [] - - scan.diff_views = [] - scan.mask_views = [] - - # Remember the order in which these scans were fed the manager - self.scan_labels.append(label) - - # Remember also that these new scans are probably not initialized yet - not_initialized.append(label) - - # Buffer incoming data and evaluate if we got Nones in data - for dct in dp['iterable']: - dct['active'] = dct['data'] is not None - scan.iterable.append(dct) - - # Ok data transmission is over for now. 
- # Lets see what scans have received data and create the views for those - for label in used_scans: - - # get scan Param - scan = self.scans[label] - # pick one of the geometries for calculating the frame shape - geo = scan.geometries[0] - sh = np.array(scan.meta.get('shape', geo.shape)) - - # Storage generation if not already existing - if scan.get('diff') is None: - # this scan is brand new so we create storages for it - scan.diff = self.ptycho.diff.new_storage(shape=(1, sh[-2], sh[-1]), psize=geo.psize, padonly=True, - layermap=None) - old_diff_views = [] - old_diff_layers = [] - else: - # ok storage exists already. Views most likely also. Let's do some analysis and deactivate the old views - old_diff_views = self.ptycho.diff.views_in_storage(scan.diff, active=False) - old_diff_layers = [] - for v in old_diff_views: - old_diff_layers.append(v.layer) - #v.active = False - - # same for mask - if scan.get('mask') is None: - scan.mask = self.ptycho.mask.new_storage(shape=(1, sh[-2], sh[-1]), psize=geo.psize, padonly=True, - layermap=None) - old_mask_views = [] - old_mask_layers = [] - else: - old_mask_views = self.ptycho.mask.views_in_storage(scan.mask, active=False) - old_mask_layers = [] - for v in old_mask_views: - old_mask_layers.append(v.layer) - #v.active = False - - # Prepare for View genereation - AR_diff_base = DEFAULT_ACCESSRULE.copy() - AR_diff_base.shape = geo.shape #None - AR_diff_base.coord = 0.0 - AR_diff_base.psize = geo.psize - AR_mask_base = AR_diff_base.copy() - AR_diff_base.storageID = scan.diff.ID - AR_mask_base.storageID = scan.mask.ID - - diff_views = [] - mask_views = [] - positions = [] - #positions_theory = xy.from_pars(scan.pars.xy) - - for dct in scan.iterable: - index = dct['index'] - active = dct['active'] - #tpos = positions_theory[index] - if scan.pars.geometry.precedence=='meta' and scan.pos_theory is not None: - pos = scan.pos_theory[index] - else: - pos = dct.get('position') #,positions_theory[index]) - - if pos is None: - logger.warning('No position set to scan poin %d of scan %s' %(index,label)) - - AR_diff = AR_diff_base #.copy() - AR_mask = AR_mask_base #.copy() - AR_diff.layer = index - AR_mask.layer = index - AR_diff.active = active - AR_mask.active = active - - # check here: is there already a view to this layer? Is it active? - try: - old_view = old_diff_views[old_diff_layers.index(index)] - old_active = old_view.active - old_view.active = active - # also set this for the attached pods' exit views - #for pod in old_view.pods.itervalues(): - # pod.ex_view.active = active - - logger.debug( - 'Diff view with layer/index %s of scan %s exists. \nSetting view active state from %s to %s' % ( - index, label, old_active, active)) - except ValueError: - v = View(self.ptycho.diff, accessrule=AR_diff) - diff_views.append(v) - logger.debug( - 'Diff view with layer/index %s of scan %s does not exist. \nCreating view with ID %s and set active state to %s' % ( - index, label, v.ID, active)) - # append position also - positions.append(pos) - - try: - old_view = old_mask_views[old_mask_layers.index(index)] - old_view.active = active - except ValueError: - v = View(self.ptycho.mask, accessrule=AR_mask) - mask_views.append(v) - # so now we should have the right views to this storages. 
Let them reformat() - # that will create the right sizes and the datalist access - scan.diff.reformat() - scan.mask.reformat() - #parallel.barrier() - #print 'this' - - for dct in scan.iterable: - parallel.barrier() - if not dct['active']: - continue - data = dct['data'] - idx = dct['index'] - #scan.diff.datalist[idx][:] = data #.astype(scan.diff.dtype) - #scan.mask.datalist[idx][:] = dct.get('mask', np.ones_like(data)) #.astype(scan.mask.dtype) - scan.diff.data[scan.diff.layermap.index(idx)][:] = data - scan.mask.data[scan.mask.layermap.index(idx)][:] = dct.get('mask', np.ones_like(data)) - #del dct['data'] - - #print 'hallo' - scan.diff.nlayers = parallel.MPImax(scan.diff.layermap) + 1 - scan.mask.nlayers = parallel.MPImax(scan.mask.layermap) + 1 - # empty iterable buffer - #scan.iterable = [] - scan.new_positions = positions - scan.new_diff_views = diff_views - scan.new_mask_views = mask_views - scan.diff_views += diff_views - scan.mask_views += mask_views - - self._update_stats(scan) + # Attempt to get new data + for label, scan in self.scans.iteritems(): + new_data = scan.new_data() + if new_data: + used_scans.append(label) + + if not used_scans: + return None + # Create PODs new_pods, new_probe_ids, new_object_ids = self._create_pods(used_scans) logger.info('Process %d created %d new PODs, %d new probes and %d new objects.' % ( @@ -537,15 +474,16 @@ def new_data(self): def _initialize_probe(self, probe_ids): """ - initializes the probe storages referred to by the probe_ids + Initialize the probe storages referred to by the probe_ids """ - logger.info('\n'+headerline('Probe initialization','l')) + logger.info('\n'+headerline('Probe initialization', 'l')) + + # Loop through probe ids for pid, labels in probe_ids.items(): - # pick scanmanagers from scan_label for illumination parameters - # for now, the scanmanager of the first label is chosen + # Pick first scan - this should not matter. scan = self.scans[labels[0]] - illu_pars = scan.pars.illumination + illu_pars = scan.p.illumination # pick storage from container s = self.ptycho.probe.S.get(pid) @@ -555,29 +493,16 @@ def _initialize_probe(self, probe_ids): logger.info('Initializing probe storage %s using scan %s' % (pid, scan.label)) # if photon count is None, assign a number from the stats. - phot = illu_pars.get('photons') + phot = illu_pars.get('photons') phot_max = scan.diff.max_power if phot is None: - logger.info('Found no photon count for probe in parameters.\nUsing photon count %.2e from photon report' %phot_max) + logger.info('Found no photon count for probe in parameters.\nUsing photon count %.2e from photon report' % phot_max) illu_pars['photons'] = phot_max elif np.abs(np.log10(phot)-np.log10(phot_max)) > 1: - logger.warn('Photon count from input parameters (%.2e) differs from statistics (%.2e) by more than a magnitude' %(phot,phot_max)) - - """ - # find out energy or wavelength. Maybe store that information - # in the storages in future too avoid this weird call. - geo = s.views[0].pod.geometry - - # pass info how far off center energy /wavelength of spectrum you are. 
- off = (geo.energy - geo.p.energy_orig)/ geo.p.energy_orig - - # make this a single call in future - pr = illumination.from_pars(s.shape[-2:], s.psize, geo.lam, off, illu_pars) - pr = illumination.create_modes(s.shape[-3], pr) - s.fill(pr.probe) - """ - illumination.init_storage(s,illu_pars) + logger.warn('Photon count from input parameters (%.2e) differs from statistics (%.2e) by more than a magnitude' % (phot, phot_max)) + + illumination.init_storage(s, illu_pars) s.reformat() # maybe not needed s.model_initialized = True @@ -586,13 +511,14 @@ def _initialize_object(self, object_ids): """ initializes the probe storages referred to by the object_ids """ - logger.info('\n'+headerline('Object initialization','l')) + logger.info('\n'+headerline('Object initialization', 'l')) + + # Loop through object IDs for oid, labels in object_ids.items(): - # pick scanmanagers from scan_label for illumination parameters - # for now, the scanmanager of the first label is chosen + # Pick first scan - this should not matter. scan = self.scans[labels[0]] - sample_pars = scan.pars.sample + sample_pars = scan.p.sample # pick storage from container s = self.ptycho.obj.S.get(oid) @@ -601,29 +527,16 @@ def _initialize_object(self, object_ids): else: logger.info('Initializing object storage %s using scan %s' % (oid, scan.label)) - sample.init_storage(s,sample_pars) - """ - if sample_pars.get('source') == 'diffraction': - logger.info('STXM initialization using diffraction data') - trans, dpc_row, dpc_col = u.stxm_analysis(s) - s.fill(trans * np.exp(1j * u.phase_from_dpc(dpc_row, dpc_col))) - else: - # find out energy or wavelength. Maybe store that information in the storages too in future - lam = s.views[0].pod.geometry.lam - - # make this a single call in future - obj = sample.from_pars(s.shape[-2:], lam, sample_pars) - obj = sample.create_modes(s.shape[-3], obj) - s.fill(obj.obj) - """ + sample.init_storage(s, sample_pars) s.reformat() # maybe not needed s.model_initialized = True - def _initialize_exit(self, pods): + @staticmethod + def _initialize_exit(pods): """ - initializes exit waves ousing the pods + initializes exit waves using the pods """ - logger.info('\n'+headerline('Creating exit waves','l')) + logger.info('\n'+headerline('Creating exit waves', 'l')) for pod in pods: if not pod.active: continue @@ -636,7 +549,7 @@ def _create_pods(self, new_scans): Return the list of new pods, probe and object ids (to allow for initialization). 
""" - logger.info('\n'+headerline('Creating PODS','l')) + logger.info('\n'+headerline('Creating PODS', 'l')) new_pods = [] new_probe_ids = {} new_object_ids = {} @@ -646,20 +559,17 @@ def _create_pods(self, new_scans): existing_objects = self.ptycho.obj.S.keys() #self.sharing_rules.object_ids.keys() logger.info('Found these probes : ' + ', '.join(existing_probes)) logger.info('Found these objects: ' + ', '.join(existing_objects)) - #exit_index = 0 + # Loop through scans for label in new_scans: scan = self.scans[label] - # Store probe and object weights in meta - #meta = {'probe_weight':scan.pars.probe_weight, 'object_weight':scan.pars.object_weight} - positions = scan.new_positions di_views = scan.new_diff_views ma_views = scan.new_mask_views # Compute sharing rules - share = scan.pars.sharing + share = scan.p.sharing alt_obj = share.object_share_with if share is not None else None alt_pr = share.probe_share_with if share is not None else None @@ -669,15 +579,13 @@ def _create_pods(self, new_scans): # Loop through diffraction patterns for i in range(len(di_views)): dv, mv = di_views.pop(0), ma_views.pop(0) - #print dv + index = dv.layer # object and probe position pos_pr = u.expect2(0.0) pos_obj = positions[i] if 'empty' not in scan.pars.tags else 0.0 - - t, object_id = self.sharing_rules(obj_label, index) probe_id, t = self.sharing_rules(pr_label, index) @@ -687,8 +595,8 @@ def _create_pods(self, new_scans): # make new IDs and keep them in record # sharing_rules is not aware of IDs with suffix - pdis = scan.pars.coherence.probe_dispersion - if pdis is None or str(pdis)=='achromatic': + pdis = scan.p.coherence.probe_dispersion + if pdis is None or str(pdis) == 'achromatic': gind = 0 else: gind = ii @@ -697,8 +605,8 @@ def _create_pods(self, new_scans): if probe_id_suf not in new_probe_ids.keys() and probe_id_suf not in existing_probes: new_probe_ids[probe_id_suf] = self.sharing_rules.probe_ids[probe_id] - odis = scan.pars.coherence.object_dispersion - if odis is None or str(odis)=='achromatic': + odis = scan.p.coherence.object_dispersion + if odis is None or str(odis) == 'achromatic': gind = 0 else: gind = ii @@ -708,8 +616,8 @@ def _create_pods(self, new_scans): new_object_ids[object_id_suf] = self.sharing_rules.object_ids[object_id] # Loop through modes - for pm in range(scan.pars.coherence.num_probe_modes): - for om in range(scan.pars.coherence.num_object_modes): + for pm in range(scan.p.coherence.num_probe_modes): + for om in range(scan.p.coherence.num_object_modes): # Make a unique layer index for exit view # The actual number does not matter due to the layermap access exit_index = index * 10000 + pm * 100 + om @@ -723,14 +631,14 @@ def _create_pods(self, new_scans): 'coord': pos_pr, 'storageID': probe_id_suf, 'layer': pm, - 'active' : True}) + 'active': True}) ov = View(container=self.ptycho.obj, accessrule={'shape': geometry.shape, 'psize': geometry.resolution, 'coord': pos_obj, 'storageID': object_id_suf, 'layer': om, - 'active' : True}) + 'active': True}) ev = View(container=self.ptycho.exit, accessrule={'shape': geometry.shape, 'psize': geometry.resolution, @@ -738,122 +646,11 @@ def _create_pods(self, new_scans): 'storageID': probe_id+object_id_suf[1:], 'layer': exit_index, 'active': dv.active}) - views = {'probe': pv, 'obj': ov, 'diff': dv, 'mask': mv, 'exit':ev} - #views = {'probe': pv, 'obj': ov, 'diff': dv, 'mask': mv} - pod = POD(ptycho=self.ptycho, ID=None, views=views, geometry=geometry) #, meta=meta) + views = {'probe': pv, 'obj': ov, 'diff': dv, 'mask': mv, 'exit': ev} 
+ pod = POD(ptycho=self.ptycho, ID=None, views=views, geometry=geometry) new_pods.append(pod) pod.probe_weight = share.probe_share_power if share is not None else 1. pod.object_weight = share.object_share_power if share is not None else 1. pod.is_empty = True if 'empty' in scan.pars.tags else False - #exit_index += 1 - - # delete buffer & meta (meta may be filled with a lot of stuff) - scan.iterable = [] - #scan.meta={} return new_pods, new_probe_ids, new_object_ids - - - def collect_diff_mask_meta(self, label=None, filename=None, save=False, dtype=None, **kwargs): - """ - *DEPRECATED* - attempt to save diffraction data - - Parameters - ---------- - - label : str - ptypy label of the scan to save - if None, tries to save ALL diffraction data - - filename : str - override the file path to write to - will change `data_filename` in `scan_info` dict - - all other kwargs are added to 'scan_info' key in the '.h5' file - """ - - if label is None: - scans = {} - for l in self.scans.keys(): - scans[l] = self.collect_diff_mask_meta(l, filename, save, dtype, **kwargs) - return scans - else: - dct = {} - # get the scan - scan = self.scans[label] - for kind in ['mask', 'diff']: - storage = scan[kind] - # fresh copy - new = [data.copy() if data is not None else None for data in storage.datalist] - Nframes = len(new) - if parallel.MPIenabled: - logger.info('Using MPI to gather arrays for storing %s' % kind) - for i in range(Nframes): - if parallel.master: - - # Root receives the data if it doesn't have it yet - if new[i] is None: - new[i] = parallel.receive() - logger.info('%s :: Frame %d/%d received at process %d' % ( - kind.upper(), i, Nframes, parallel.rank), extra={'allprocesses': True}) - - parallel.barrier() - - else: - if new[i] is not None: - # Send data to root. - parallel.send(new[i]) - #logger.info('Process %d - Send frame %d of %s' % (parallel.rank,i,kind), extra={'allprocesses':True}) - sender = parallel.rank - logger.info('%s :: Frame %d/%d send from process %d' % ( - kind.upper(), i, Nframes, parallel.rank), extra={'allprocesses': True}) - - parallel.barrier() - - parallel.barrier() - - # storing as arrays - if parallel.master: - key = 'data' if kind == 'diff' else kind - dct[key] = np.asarray(new) - - # save if you are master - if parallel.master: - - # get meta data - meta = self.scans[label]['meta'] - # update with geometric info - meta.update(scan.pars.geometry.copy()) - - # translate to scan_info and ditch variables not in data.DEFAULT_scan_info - from data import MT as LeTraducteur - - dct['scan_info'] = LeTraducteur.as_scan_info(self.scans[label]['meta']) - - # overwrite filename - if filename is not None: - dct['scan_info']['data_filename'] = filename - - filename = dct['scan_info'].get('data_filename') - - # add other kwargs to scan_info - dct['scan_info'].update(kwargs) - dct['scan_info']['shape'] = dct['data'].shape - - # switch data type for data if wanted (saves space) - if dtype is not None: - dct['data'] = dct['data'].astype(dtype) - - if save: - # cropping - from .. import io - - filename = u.clean_path(filename) - logger.info('Saving to ' + filename) - io.h5write(filename, dct) - logger.info('Saved') - return filename - - return dct - diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py index 771008fc1..803978b5f 100644 --- a/ptypy/core/ptycho.py +++ b/ptypy/core/ptycho.py @@ -293,13 +293,9 @@ def init_data(self, print_stats=True): """ Called on __init__ if ``level>=2``. 
- Creates a datasource and calls for :py:meth:`ModelManager.new_data()` + Call :py:meth:`ModelManager.new_data()` Prints statistics on the ptypy structure if ``print_stats=True`` """ - # Create the data source object, which give diffraction frames one - # at a time, supporting MPI sharing. - self.datasource = self.modelm.make_datasource() - # Load the data. This call creates automatically the scan managers, # which create the views and the PODs. self.modelm.new_data() @@ -309,7 +305,7 @@ def init_data(self, print_stats=True): if print_stats: self.print_stats() - # create plotting instance (maybe) + # TODO: create plotting instance here (maybe) def _init_engines(self): """ From 4e88a1b4fd7ca2f078a4dec01d1ceaf017bfdb94 Mon Sep 17 00:00:00 2001 From: Pierre Thibault Date: Wed, 7 Oct 2015 21:11:18 +0100 Subject: [PATCH 003/363] Bugfixes --- ptypy/core/data.py | 2 +- ptypy/core/manager.py | 26 +++++++++++++++++++------- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/ptypy/core/data.py b/ptypy/core/data.py index b7096dd86..d5d1e2377 100644 --- a/ptypy/core/data.py +++ b/ptypy/core/data.py @@ -1230,7 +1230,7 @@ def makePtyScan(scan_pars, scanmodel=None): """ # Extract information on the type of object to build pars = scan_pars.data - label = pars.label + # label = pars.label source = pars.source recipe = pars.get('recipe', {}) diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py index 18ebb6f25..c8b21477a 100644 --- a/ptypy/core/manager.py +++ b/ptypy/core/manager.py @@ -105,12 +105,23 @@ def __init__(self, ptycho=None, specific_pars=None, generic_pars=None, label=Non self.label = label self.ptycho = ptycho + # Manage stand-alone cases + if self.ptycho is None: + self.Cdiff = Container(ptycho=self, ID='Cdiff', data_type='real') + self.Cmask = Container(ptycho=self, ID='Cmask', data_type='bool') + self.CType = CType + self.FType = FType + else: + self.Cdiff = ptycho.diff + self.Cmask = ptycho.mask + # Create Associated PtyScan object self.ptyscan = data.makePtyScan(self.p) # Initialize instance attributes self.mask = None self.diff = None + self.positions = [] self.mask_views = [] self.diff_views = [] self.new_positions = None @@ -187,25 +198,25 @@ def new_data(self): # Storage generation if not already existing if self.diff is None: # This scan is brand new so we create storages for it - self.diff = self.ptycho.diff.new_storage(shape=(1, sh[-2], sh[-1]), psize=self.psize, padonly=True, + self.diff = self.Cdiff.new_storage(shape=(1, sh[-2], sh[-1]), psize=self.psize, padonly=True, layermap=None) old_diff_views = [] old_diff_layers = [] else: # ok storage exists already. Views most likely also. Let's do some analysis and deactivate the old views - old_diff_views = self.ptycho.diff.views_in_storage(self.diff, active=False) + old_diff_views = self.Cdiff.views_in_storage(self.diff, active=False) old_diff_layers = [] for v in old_diff_views: old_diff_layers.append(v.layer) # Same for mask if self.mask is None: - self.mask = self.ptycho.mask.new_storage(shape=(1, sh[-2], sh[-1]), psize=self.psize, padonly=True, + self.mask = self.Cmask.new_storage(shape=(1, sh[-2], sh[-1]), psize=self.psize, padonly=True, layermap=None) old_mask_views = [] old_mask_layers = [] else: - old_mask_views = self.ptycho.mask.views_in_storage(self.mask, active=False) + old_mask_views = self.Cmask.views_in_storage(self.mask, active=False) old_mask_layers = [] for v in old_mask_views: old_mask_layers.append(v.layer) @@ -251,7 +262,7 @@ def new_data(self): 'Diff view with layer/index %s of scan %s exists. 
\nSetting view active state from %s to %s' % ( index, label, old_active, active)) except ValueError: - v = View(self.ptycho.diff, accessrule=AR_diff) + v = View(self.Cdiff, accessrule=AR_diff) diff_views.append(v) logger.debug( 'Diff view with layer/index %s of scan %s does not exist. \nCreating view with ID %s and set active state to %s' % ( @@ -263,7 +274,7 @@ def new_data(self): old_view = old_mask_views[old_mask_layers.index(index)] old_view.active = active except ValueError: - v = View(self.ptycho.mask, accessrule=AR_mask) + v = View(self.Cmask, accessrule=AR_mask) mask_views.append(v) # so now we should have the right views to this storages. Let them reformat() @@ -274,7 +285,7 @@ def new_data(self): # Second pass: copy the data for dct in dp['iterable']: parallel.barrier() - if not dct['active']: + if dct['data'] is None: continue diff_data = dct['data'] idx = dct['index'] @@ -289,6 +300,7 @@ def new_data(self): self.new_positions = positions self.new_diff_views = diff_views self.new_mask_views = mask_views + self.positions += positions self.diff_views += diff_views self.mask_views += mask_views From 961310ed33bf7d0817992774b52c80d745794e1a Mon Sep 17 00:00:00 2001 From: Pierre Thibault Date: Thu, 8 Oct 2015 09:50:59 +0100 Subject: [PATCH 004/363] A bit of documentation and small bug fix in makePtyScan --- ptypy/core/data.py | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/ptypy/core/data.py b/ptypy/core/data.py index d5d1e2377..22c91cf09 100644 --- a/ptypy/core/data.py +++ b/ptypy/core/data.py @@ -1225,12 +1225,20 @@ def load(self, indices): def makePtyScan(scan_pars, scanmodel=None): """ - Factory for PtyScan object. Return a PtyScan instance (or subclass) according to the - input parameters (see ...). + Factory for PtyScan object. Return an instance of the appropriate PtyScan subclass based on the + input parameters. + + Parameters + ---------- + scan_pars: dict or Param + Input parameters according to :py:data:`.scan.data`. + + scanmodel: ScanModel object + FIXME: This seems to be needed for simulations but broken for now. 
""" + # Extract information on the type of object to build pars = scan_pars.data - # label = pars.label source = pars.source recipe = pars.get('recipe', {}) @@ -1239,7 +1247,7 @@ def makePtyScan(scan_pars, scanmodel=None): if source in PtyScanTypes: ps_obj = PtyScanTypes[source] - logger.info('Scan %s will be prepared with the recipe "%s"' % (label, source)) + logger.info('Scan will be prepared with the recipe "%s"' % source) ps_instance = ps_obj(pars, recipe=recipe) elif source.endswith('.ptyd') or source.endswith('.pty') or str(source) == 'file': ps_instance = PtydScan(pars, source=source) @@ -1247,14 +1255,14 @@ def makePtyScan(scan_pars, scanmodel=None): ps_instance = MoonFlowerScan(pars) elif source == 'sim': from ..simulations import SimScan - logger.info('Scan %s will simulated' % label) + logger.info('Scan will simulated') ps_instance = SimScan(pars, scanmodel) elif source == 'empty' or source is None: pars.recipe = None - logger.warning('Generating dummy PtyScan for scan `%s` - This label will source only zeros as data' % label) + logger.warning('Generating dummy PtyScan - This label will source only zeros as data') ps_instance = PtyScan(pars) else: - raise RuntimeError('Could not manage source "%s" for scan `%s`' % (str(source), label)) + raise RuntimeError('Could not manage source "%s"' % str(source)) return ps_instance From 41fdc5342663fbd9ec435c084bf4dfe6f051181f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Wed, 11 Jan 2017 14:41:05 +0100 Subject: [PATCH 005/363] Generalized Container, Storage, and View to 3d. Normal operations (as in the ptypyclasses.py tutorial) work, and the 2d case behaves like before --- ptypy/core/classes.py | 226 ++++++++++++++++++++++++++++-------------- ptypy/utils/misc.py | 10 +- 2 files changed, 161 insertions(+), 75 deletions(-) diff --git a/ptypy/core/classes.py b/ptypy/core/classes.py index 286d3c5b2..8e7a9c820 100644 --- a/ptypy/core/classes.py +++ b/ptypy/core/classes.py @@ -51,7 +51,7 @@ DEFAULT_PSIZE = 1. # Default shape -DEFAULT_SHAPE = (1, 1, 1) +DEFAULT_SHAPE = {2: (1, 1, 1), 3: (1, 1, 1, 1)} # Expected structure for Views initialization. DEFAULT_ACCESSRULE = u.Param( @@ -334,22 +334,28 @@ def __init__(self, container, ID=None, data=None, shape=(1, 1, 1), fill=0., """ super(Storage, self).__init__(container, ID) + #: Data dimensionality + self.ndim = container.ndim + #: Default fill value self.fill_value = fill if fill is not None else 0. # For documentation - #: Three dimensional array as data buffer + #: Three/four dimensional array as data buffer self.data = None if shape is None: - shape = DEFAULT_SHAPE + shape = DEFAULT_SHAPE[self.ndim] elif np.isscalar(shape): - shape = (1, int(shape), int(shape)) - elif len(shape) == 2: + shape = (1,) + (int(shape),) * self.ndim + elif len(shape) == self.ndim: shape = (1,) + tuple(shape) - elif len(shape) != 3: - raise ValueError( - '`shape` must be None or scalar or 2-tuple or 3-tuple of int') + elif len(shape) != self.ndim + 1: + if self.ndim == 2: + msg = 'For `data_dims`=2, `shape` must be None or scalar or 2-tuple or 3-tuple of int' + elif self.ndim == 3: + 'For `data_dims`=3, `shape` must be None or scalar or 3-tuple or 4-tuple of int' + raise ValueError(msg) # Set data buffer if data is None: @@ -360,11 +366,13 @@ def __init__(self, container, ID=None, data=None, shape=(1, 1, 1), fill=0., else: # Set initial buffer. 
Casting the type makes a copy data = np.asarray(data).astype(self.dtype) - if data.ndim < 2 or data.ndim > 3: - raise ValueError( - 'Initial buffer must be 2D or 3D, this one is %dD.' - % data.ndim) - elif data.ndim == 2: + if data.ndim < self.ndim or data.ndim > (self.ndim + 1): + if self.ndim == 2: + msg = 'For `data_dims` = 2, initial buffer must be 2D or 3D, this one is %dD.' + elif self.ndim == 3: + msg = 'For `data_dims` = 3, initial buffer must be 3D or 4D, this one is %dD.' + raise ValueError(msg % data.ndim) + elif data.ndim == self.ndim: self.data = data.reshape((1,) + data.shape) else: self.data = data @@ -379,7 +387,7 @@ def __init__(self, container, ID=None, data=None, shape=(1, 1, 1), fill=0., # Need to bootstrap the parameters. # We set the initial center to the middle of the array - self._center = u.expect2(self.shape[-2:]) // 2 + self._center = u.expectN(self.shape[-self.ndim:], self.ndim) // 2 # Set pixel size (in physical units) self.psize = psize if psize is not None else DEFAULT_PSIZE @@ -501,12 +509,11 @@ def fill(self, fill=None): self.data.fill(fill) elif type(fill) is np.ndarray: # Replace the buffer - if fill.ndim < 2 or fill.ndim > 3: - raise ValueError('Numpy ndarray fill must be 2D or 3D.') - elif fill.ndim == 2: - fill = np.resize(fill, (self.shape[0], - fill.shape[0], - fill.shape[1])) + if fill.ndim < self.ndim or fill.ndim > (self.ndim + 1): + raise ValueError( + 'For N-dimensional data, numpy ndarray fill must be ND or (N+1)D.') + elif fill.ndim == self.ndim: + fill = np.resize(fill, (self.shape[0],) + fill.shape) self.data = fill.astype(self.dtype) self.shape = self.data.shape @@ -540,7 +547,7 @@ def update_views(self, v=None): # v.shape can be None upon initialization - this means "full frame" if v.shape is None: - v.shape = u.expect2(self.shape[-2:]) + v.shape = u.expectN(self.shape[-self.ndim:], self.ndim) v.pcoord = v.shape / 2. 
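            # pcoord is the (fractional) pixel coordinate of the view centre
            # within the storage buffer; the physical coordinate below is
            # derived from it through the storage's origin and pixel size.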
v.coord = self._to_phys(v.pcoord) else: @@ -551,8 +558,8 @@ def update_views(self, v=None): v.dcoord = np.round(v.pcoord).astype(int) # These are the important attributes used when accessing the data - v.dlow = v.dcoord - v.shape/2 - v.dhigh = v.dcoord + (v.shape + 1)/2 + v.dlow = v.dcoord - v.shape / 2 + v.dhigh = v.dcoord + (v.shape + 1) / 2 # v.roi = np.array([pix - v.shape/2, pix + (v.shape + 1)/2]) v.sp = v.pcoord - v.dcoord @@ -597,6 +604,7 @@ def reformat(self, newID=None): # Loop through all active views to get individual boundaries rows = [] cols = [] + aisles = [] layers = [] for v in views: if not v.active: @@ -608,6 +616,8 @@ def reformat(self, newID=None): # cols += [v.roi[0, 1], v.roi[1, 1]] rows += [v.dlow[0], v.dhigh[0]] cols += [v.dlow[1], v.dhigh[1]] + if self.ndim == 3: + aisles += [v.dlow[2], v.dhigh[2]] # Gather a (unique) list of layers if v.layer not in layers: @@ -615,13 +625,22 @@ def reformat(self, newID=None): sh = self.data.shape - # Compute 2d misfit (distance between the buffer boundaries and the + # Compute Nd misfit (distance between the buffer boundaries and the # region required to fit all the views) - misfit = np.array([[-np.min(rows), np.max(rows) - sh[-2]], - [-np.min(cols), np.max(cols) - sh[-1]]]) - - logger.debug('%s[%s] :: misfit = [%s, %s]' - % (self.owner.ID, self.ID, misfit[0], misfit[1])) + if self.ndim == 2: + misfit = np.array([[-np.min(rows), np.max(rows) - sh[-2]], + [-np.min(cols), np.max(cols) - sh[-1]]]) + elif self.ndim == 3: + misfit = np.array([[-np.min(rows), np.max(rows) - sh[-3]], + [-np.min(cols), np.max(cols) - sh[-2]], + [-np.min(aisles), np.max(aisles) - sh[-1]]]) + + if self.ndim == 2: + logger.debug('%s[%s] :: misfit = [%s, %s]' + % (self.owner.ID, self.ID, misfit[0], misfit[1])) + elif self.ndim == 3: + logger.debug('%s[%s] :: misfit = [%s, %s, %s]' + % (self.owner.ID, self.ID, misfit[0], misfit[1], misfit[2])) posmisfit = (misfit > 0) negmisfit = (misfit < 0) @@ -630,10 +649,16 @@ def reformat(self, newID=None): (negmisfit.any() and not self.padonly)) if posmisfit.any() or negmisfit.any(): - logger.debug( - 'Storage %s of container %s has a misfit of [%s, %s] between ' - 'its data and its views' - % (str(self.ID), str(self.owner.ID), misfit[0], misfit[1])) + if self.ndim == 2: + logger.debug( + 'Storage %s of container %s has a misfit of [%s, %s] between ' + 'its data and its views' + % (str(self.ID), str(self.owner.ID), misfit[0], misfit[1])) + elif self.ndim == 3: + logger.debug( + 'Storage %s of container %s has a misfit of [%s, %s, %s] between ' + 'its data and its views' + % (str(self.ID), str(self.owner.ID), misfit[0], misfit[1], misfit[2])) if needtocrop_or_pad: if self.padonly: @@ -644,6 +669,8 @@ def reformat(self, newID=None): new_shape = (sh[0], sh[1] + misfit[0].sum(), sh[2] + misfit[1].sum()) + if self.ndim == 3: + new_shape += (sh[3] + misfit[2].sum(),) logger.debug('%s[%s] :: center: %s -> %s' % (self.owner.ID, self.ID, str(self.center), str(new_center))) @@ -680,7 +707,7 @@ def reformat(self, newID=None): d = new_data[self.layermap.index(i)] else: # A new layer - d = np.empty(new_shape[-2:], self.dtype) + d = np.empty(new_shape[-self.ndim:], self.dtype) d.fill(self.fill_value) relaid_data.append(d) new_data = np.array(relaid_data) @@ -751,7 +778,7 @@ def psize(self, v): """ Set the pixel size, and update all the internal variables. 
""" - self._psize = u.expect2(v) + self._psize = u.expectN(v, self.ndim) self._origin = - self._center * self._psize self.update() @@ -767,7 +794,7 @@ def origin(self, v): """ Set the origin and update all the internal variables. """ - self._origin = u.expect2(v) + self._origin = u.expectN(v, self.ndim) self._center = - self._origin / self._psize self.update() @@ -784,7 +811,7 @@ def center(self, v): """ Set the center and update all the internal variables. """ - self._center = u.expect2(v) + self._center = u.expectN(v, self.ndim) self._origin = - self._center * self._psize self.update() @@ -826,7 +853,7 @@ def zoom_to_psize(self, new_psize, **kwargs): new_psize : scalar or array_like new pixel size """ - new_psize = u.expect2(new_psize) + new_psize = u.expectN(new_psize, self.ndim) sh = np.asarray(self.shape[-2:]) # psize is quantized new_sh = np.round(self.psize / new_psize * sh) @@ -859,11 +886,16 @@ def grids(self): grids in the shape of internal buffer """ sh = self.data.shape - nm = np.indices(sh)[-2:] - flat = nm.reshape((2, self.data.size)) + nm = np.indices(sh)[-self.ndim:] + flat = nm.reshape((self.ndim, self.data.size)) c = self._to_phys(flat.T).T - c = c.reshape((2,) + sh) - return c[0], c[1] + c = c.reshape((self.ndim,) + sh) + if self.ndim == 2: + return c[0], c[1] + elif self.ndim == 3: + return c[0], c[1], c[2] + else: + raise ValueError('Dimensionality corrupted, not 2 or 3') def get_view_coverage(self): """ @@ -949,16 +981,26 @@ def formatted_report(self, table_format=None, offset=8, align='right', for key, column in fr.table: if str(key) == 'shape': dct[key] = tuple(self.data.shape) - info = '%d * %d * %d' % dct[key] + info = ('%d' + ' * %d' * self.ndim) % dct[key] elif str(key) == 'psize': dct[key] = tuple(self.psize) - info = '%.2e * %.2e' % tuple(dct[key]) - info = info.split('e', 1)[0] + info.split('e', 1)[1][3:] + info = ('%.2e' + ' * %.2e'*(self.ndim - 1)) % tuple(dct[key]) + if self.ndim == 2: + info = info.split('e', 1)[0] + info.split('e', 1)[1][3:] + elif self.ndim == 3: + info = info.split('e', 2)[0] + info.split('e', 2)[1][3:] + info.split('e', 2)[2][3:] elif str(key) == 'dimension': - dct[key] = (self.psize[0] * self.data.shape[-2], - self.psize[1] * self.data.shape[-1]) - info = '%.2e*%.2e' % tuple(dct[key]) - info = info.split('e', 1)[0] + info.split('e', 1)[1][3:] + if self.ndim == 2: + dct[key] = (self.psize[0] * self.data.shape[-2], + self.psize[1] * self.data.shape[-1]) + info = '%.2e*%.2e' % tuple(dct[key]) + info = info.split('e', 1)[0] + info.split('e', 1)[1][3:] + elif self.ndim == 3: + dct[key] = (self.psize[0] * self.data.shape[-3], + self.psize[1] * self.data.shape[-2], + self.psize[2] * self.data.shape[-1]) + info = '%.2e*%.2e*%.2e' % tuple(dct[key]) + info = info.split('e', 1)[0] + info.split('e', 2)[1][3:] + info.split('e', 2)[2][3:] elif str(key) == 'memory': dct[key] = float(self.data.nbytes) / 1e6 info = '%.1f' % dct[key] @@ -1001,9 +1043,17 @@ def __getitem__(self, v): # return shift(self.data[v.slayer, v.roi[0, 0]:v.roi[1, 0], # v.roi[0, 1]:v.roi[1, 1]], v.sp) if isinstance(v, View): - return shift(self.data[ - v.dlayer, v.dlow[0]:v.dhigh[0], v.dlow[1]:v.dhigh[1]], - v.sp) + if not self.ndim == v.ndim: + raise ValueError( + 'Storage and View instances have confliction data dimensions') + if self.ndim == 2: + return shift(self.data[ + v.dlayer, v.dlow[0]:v.dhigh[0], v.dlow[1]:v.dhigh[1]], + v.sp) + elif self.ndim == 3: + return shift(self.data[ + v.dlayer, v.dlow[0]:v.dhigh[0], v.dlow[1]:v.dhigh[1], v.dlow[2]:v.dhigh[2]], + v.sp) elif v 
in self.layermap: return self.data[self.layermap.index(v)] else: @@ -1032,8 +1082,16 @@ def __setitem__(self, v, newdata): # self.data[v.slayer, v.roi[0, 0]:v.roi[1, 0], # v.roi[0, 1]:v.roi[1, 1]] = shift(newdata, -v.sp) if isinstance(v, View): - self.data[v.dlayer, v.dlow[0]:v.dhigh[0], v.dlow[1]:v.dhigh[1]] = ( - shift(newdata, -v.sp)) + if not self.ndim == v.ndim: + raise ValueError( + 'Storage and View instances have confliction data dimensions') + if self.ndim == 2: + self.data[v.dlayer, v.dlow[0]:v.dhigh[0], + v.dlow[1]:v.dhigh[1]] = (shift(newdata, -v.sp)) + elif self.ndim == 3: + self.data[v.dlayer, v.dlow[0]:v.dhigh[0], + v.dlow[1]:v.dhigh[1], v.dlow[2]:v.dhigh[2] + ] = (shift(newdata, -v.sp)) elif v in self.layermap: self.data[self.layermap.index(v)] = newdata else: @@ -1112,11 +1170,11 @@ def __init__(self, container, ID=None, accessrule=None, **kwargs): psize : float or tuple of float Pixel size [meters]. Required for storage initialization, See :py:data:`DEFAULT_PSIZE` - + layer : int Index of the third dimension if applicable. (*default* is ``0``) - + active : bool Whether this view is active (*default* is ``True``) """ @@ -1129,6 +1187,9 @@ def __init__(self, container, ID=None, accessrule=None, **kwargs): """ Volatile dictionary for all :any:`POD`\ s that connect to this view """ + #: Data dimensionality + self.ndim = container.ndim + # A single pod lookup (weak reference). self._pod = None @@ -1144,8 +1205,8 @@ def __init__(self, container, ID=None, accessrule=None, **kwargs): to a :any:`Container`.""" # Numpy buffer arrays - self._arint = np.zeros((4, 2), dtype=np.int) - self._arfloat = np.zeros((4, 2), dtype=np.float) + self._arint = np.zeros((4, self.ndim), dtype=np.int) + self._arfloat = np.zeros((4, self.ndim), dtype=np.float) #: The "layer" i.e. first axis index in Storage data buffer self.dlayer = 0 @@ -1185,7 +1246,8 @@ def _set(self, accessrule, **kwargs): if s is None: s = self.owner.new_storage(ID=self.storageID, psize=rule.psize, - shape=self.shape) + shape=self.shape, + data_dims=len(self.shape) - 1) self.storage = s if (self.psize is not None @@ -1219,9 +1281,17 @@ def slice(self): # else: # slayer = self.storage.layermap.index(self.layer) - return (self.dlayer, - slice(self.dlow[0], self.dhigh[0]), - slice(self.dlow[1], self.dhigh[1])) + if self.ndim == 2: + return (self.dlayer, + slice(self.dlow[0], self.dhigh[0]), + slice(self.dlow[1], self.dhigh[1])) + elif self.ndim == 3: + return (self.dlayer, + slice(self.dlow[0], self.dhigh[0]), + slice(self.dlow[1], self.dhigh[1]), + slice(self.dlow[2], self.dhigh[2])) + else: + raise ValueError('Dimensionality corrupted, not 2 or 3') @property def pod(self): @@ -1250,19 +1320,19 @@ def data(self, v): @property def shape(self): """ - Two dimensional shape of View. + Two/three dimensional shape of View. """ return self._arint[0] if (self._arint[0] > 0).all() else None @shape.setter def shape(self, v): """ - Set two dimensional shape of View. + Set two/three dimensional shape of View. """ if v is None: - self._arint[0] = u.expect2(0) + self._arint[0] = u.expectN(0, self.ndim) else: - self._arint[0] = u.expect2(v) + self._arint[0] = u.expectN(v, self.ndim) @property def dlow(self): @@ -1319,9 +1389,9 @@ def psize(self, v): Set pixel size """ if v is None: - self._arfloat[0] = u.expect2(0.) 
+ self._arfloat[0] = u.expectN(0., self.ndim) else: - self._arfloat[0] = u.expect2(v) + self._arfloat[0] = u.expectN(v, self.ndim) @property def coord(self): @@ -1336,9 +1406,9 @@ def coord(self, v): Set the View's physical coordinate (meters) """ if v is None: - self._arfloat[1] = u.expect2(0.) + self._arfloat[1] = u.expectN(0., self.ndim) elif type(v) is not np.ndarray: - self._arfloat[1] = u.expect2(v) + self._arfloat[1] = u.expectN(v, self.ndim) else: self._arfloat[1] = v @@ -1357,9 +1427,9 @@ def sp(self, v): and data coordinate. """ if v is None: - self._arfloat[2] = u.expect2(0.) + self._arfloat[2] = u.expectN(0., self.ndim) elif type(v) is not np.ndarray: - self._arfloat[2] = u.expect2(v) + self._arfloat[2] = u.expectN(v, self.ndim) else: self._arfloat[2] = v @@ -1402,7 +1472,7 @@ class Container(Base): """ _PREFIX = CONTAINER_PREFIX - def __init__(self, ptycho=None, ID=None, data_type='complex', **kwargs): + def __init__(self, ptycho=None, ID=None, data_type='complex', data_dims=2, **kwargs): """ Parameters ---------- @@ -1415,7 +1485,10 @@ def __init__(self, ptycho=None, ID=None, data_type='complex', **kwargs): data_type : str or numpy.dtype data type - either a numpy.dtype object or 'complex' or 'real' (precision is taken from ptycho.FType or ptycho.CType) - + + data_dims : int + The dimensionality of the stored data, either 2 or 3. + """ # if ptycho is None: # ptycho = ptypy.currentPtycho @@ -1426,6 +1499,11 @@ def __init__(self, ptycho=None, ID=None, data_type='complex', **kwargs): # def _initialize(self, original=None, data_type='complex'): + #: Data dimensionality + self.ndim = data_dims + if self.ndim not in (2, 3): + raise ValueError('`data_dim` must be 2 or 3') + self.data_type = data_type # Prepare for copy diff --git a/ptypy/utils/misc.py b/ptypy/utils/misc.py index 4f54c7d0f..0be509c1a 100644 --- a/ptypy/utils/misc.py +++ b/ptypy/utils/misc.py @@ -12,7 +12,7 @@ from functools import wraps __all__ = ['str2int','str2range',\ - 'complex_overload','expect2','expect3',\ + 'complex_overload','expect2','expect3','expectN',\ 'keV2m','keV2nm','nm2keV', 'clean_path','unique_path'] def str2index(s): @@ -151,6 +151,14 @@ def expect3(a): else: b=np.array([a.flat[0],a.flat[1],a.flat[2]]) return b + +def expectN(a, N): + if N==2: + return expect2(a) + elif N==3: + return expect3(a) + else: + raise ValueError('N must be 2 or 3') def complex_overload(func): """\ From 5aa1c9681ded4cca8e9ce5fa01d72f27b14cd26b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Wed, 11 Jan 2017 15:03:09 +0100 Subject: [PATCH 006/363] tiny fix --- ptypy/core/classes.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ptypy/core/classes.py b/ptypy/core/classes.py index 8e7a9c820..e431b8c9f 100644 --- a/ptypy/core/classes.py +++ b/ptypy/core/classes.py @@ -1246,8 +1246,7 @@ def _set(self, accessrule, **kwargs): if s is None: s = self.owner.new_storage(ID=self.storageID, psize=rule.psize, - shape=self.shape, - data_dims=len(self.shape) - 1) + shape=self.shape) self.storage = s if (self.psize is not None From fb6d93dd65b77101db8934c6d202535cca04fe20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Fri, 13 Jan 2017 10:41:43 +0100 Subject: [PATCH 007/363] Small refactoring of Geo.__init__ to allow more general subclassing. 
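
This moves the parameter parsing out of __init__ into a separate
_initialize() method, so subclasses can replace the setup logic while
reusing the parameter merging in Geo.__init__. A minimal sketch of the
pattern this enables (MyGeo and its body are hypothetical, not part of
this patch):

    from ptypy.core.geometry import Geo

    class MyGeo(Geo):
        def _initialize(self, p):
            # Subclass-specific parsing and propagator setup, replacing
            # the base class logic; p is the merged Param tree.
            self.interact = False
            # custom setup derived from p goes here
            self.interact = True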
--- ptypy/core/geometry.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/ptypy/core/geometry.py b/ptypy/core/geometry.py index 096939c49..1058dce52 100644 --- a/ptypy/core/geometry.py +++ b/ptypy/core/geometry.py @@ -100,10 +100,6 @@ def __init__(self, owner=None, ID=None, pars=None, **kwargs): if the key exists in `Geo.DEFAULT`. """ super(Geo, self).__init__(owner, ID) - # if len(kwargs)>0: - # self._initialize(**kwargs) - - # def _initialize(self, pars=None, **kwargs): # Starting parameters p = u.Param(DEFAULT) @@ -117,6 +113,13 @@ def __init__(self, owner=None, ID=None, pars=None, **kwargs): p[k] = v self.p = p + self._initialize(p) + + def _initialize(self, p): + """ + Parse input parameters, fill missing parameters and set up a + propagator. + """ self.interact = False # Set distance From 1338b0a710e3b4bb0a39700847ced8f3d8ec6f3b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Fri, 13 Jan 2017 11:25:42 +0100 Subject: [PATCH 008/363] Geo now needs to have flexible defaults --- ptypy/core/geometry.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ptypy/core/geometry.py b/ptypy/core/geometry.py index 1058dce52..e6089efc2 100644 --- a/ptypy/core/geometry.py +++ b/ptypy/core/geometry.py @@ -83,7 +83,7 @@ class Geo(Base): _keV2m = 1.240597288e-09 _PREFIX = GEO_PREFIX - def __init__(self, owner=None, ID=None, pars=None, **kwargs): + def __init__(self, owner=None, ID=None, pars=None, default_override=None, **kwargs): """ Parameters ---------- @@ -102,6 +102,8 @@ def __init__(self, owner=None, ID=None, pars=None, **kwargs): super(Geo, self).__init__(owner, ID) # Starting parameters + if default_override is not None: + DEFAULT = default_override p = u.Param(DEFAULT) if pars is not None: p.update(pars) From 9f77b543d72a9e9164ce994ea7a3b17dbc9c3e12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Fri, 13 Jan 2017 13:45:19 +0100 Subject: [PATCH 009/363] tiny fix --- ptypy/core/geometry.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ptypy/core/geometry.py b/ptypy/core/geometry.py index e6089efc2..3b6605fb2 100644 --- a/ptypy/core/geometry.py +++ b/ptypy/core/geometry.py @@ -103,8 +103,9 @@ def __init__(self, owner=None, ID=None, pars=None, default_override=None, **kwar # Starting parameters if default_override is not None: - DEFAULT = default_override - p = u.Param(DEFAULT) + p = u.Param(default_override) + else: + p = u.Param(DEFAULT) if pars is not None: p.update(pars) for k, v in p.items(): From eb966a7ad23f35052e23cb6ce46c008bdc4cfd32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Tue, 24 Jan 2017 07:25:15 +0100 Subject: [PATCH 010/363] Added 3D bragg geometry class, complete with coordinate system conversions, extrusion of a tilted 2d probe, and propagation. 
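
A quick usage sketch (the values are illustrative only; the same
construction appears in the tutorial script added later in this
series):

    from ptypy.core.geometry_bragg import Geo_Bragg

    g = Geo_Bragg(psize=(13e-6, 13e-6, 0.01/4), shape=(128, 128, 9*4),
                  energy=8.5, distance=2.0, theta_bragg=22.32)
    # map a cartesian sample position [z y x] to the natural
    # (conjugate) coordinates [r1 r2 r3]
    r123 = g._r1r2r3([0.0, 0.5e-6, 0.0])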
--- ptypy/core/__init__.py | 1 + ptypy/core/geometry_bragg.py | 512 +++++++++++++++++++++++++++++++++++ 2 files changed, 513 insertions(+) create mode 100644 ptypy/core/geometry_bragg.py diff --git a/ptypy/core/__init__.py b/ptypy/core/__init__.py index e0a47adc4..db77bf48b 100644 --- a/ptypy/core/__init__.py +++ b/ptypy/core/__init__.py @@ -15,5 +15,6 @@ import illumination import sample import geometry +import geometry_bragg import save_load import data diff --git a/ptypy/core/geometry_bragg.py b/ptypy/core/geometry_bragg.py new file mode 100644 index 000000000..b5bbbabfc --- /dev/null +++ b/ptypy/core/geometry_bragg.py @@ -0,0 +1,512 @@ +""" +Geometry management and propagation for Bragg geometry. +""" + +from .. import utils as u +from ..utils.verbose import logger +from geometry import Geo as _Geo +from classes import Container, Storage, View +import numpy as np +from scipy.ndimage.interpolation import map_coordinates + +__all__ = ['DEFAULT', 'Geo_Bragg'] + +DEFAULT = u.Param( + # Incident photon energy (in keV) + energy=15.25, + # Wavelength (in meters) + lam=None, + # Distance from object to screen + distance=2.3, + # Pixel sizes (in meters) and rocking curve step (in degrees) + psize=(172e-6, 172e-6, .065), + # Bragg angle in degrees + theta_bragg=6.89, + # 3D sample pixel size (in meters) in the conjugate (natural) coordinate + # system + resolution=None, + # Number of detector pixels and rocking curve positions + shape=(128, 128, 31), +) + + +def _map_coordinates(input, coords, **kwargs): + if 'complex' in str(input.dtype): + if 'output' in kwargs.keys(): + raise NotImplementedError( + 'You''ll have to use map_coordinates directly') + r = map_coordinates(np.abs(input), coords, **kwargs) + t = map_coordinates(np.angle(input), coords, **kwargs) + return r * np.exp(1j * t) + else: + return map_coordinates(input, coords, **kwargs) + + +class Geo_Bragg(_Geo): + + DEFAULT = DEFAULT + + def __init__(self, owner=None, ID=None, pars=None, **kwargs): + super(Geo_Bragg, self).__init__( + owner, ID, pars, default_override=DEFAULT, **kwargs) + + def _initialize(self, p): + """ + Parse input parameters, fill missing parameters and set up a + propagator. + """ + self.interact = False + + # Set distance + if self.p.distance is None or self.p.distance == 0: + raise ValueError( + 'Distance (geometry.distance) must not be None or 0') + + # Set frame shape + if self.p.shape is None or (np.array(self.p.shape) == 0).any(): + raise ValueError( + 'Frame size (geometry.shape) must not be None or 0') + else: + self.p.shape = u.expect3(p.shape) + + # Set energy and wavelength + if p.energy is None: + if p.lam is None: + raise ValueError( + 'Wavelength (geometry.lam) and energy (geometry.energy)\n' + 'must not both be None') + else: + self.lam = p.lam # also sets energy through a property + else: + if p.lam is not None: + logger.debug('Energy and wavelength both specified. 
' + 'Energy takes precedence over wavelength') + + self.energy = p.energy # also sets lam through a property + + # Pixel size + self.p.psize_is_fix = p.psize is not None + self.p.resolution_is_fix = p.resolution is not None + + if not self.p.psize_is_fix and not self.p.resolution_is_fix: + raise ValueError( + 'Pixel size in sample plane (geometry.resolution) and ' + 'detector plane \n(geometry.psize) must not both be None') + + # Fill pixel sizes + if self.p.resolution_is_fix: + self.p.resolution = u.expect3(p.resolution) + else: + self.p.resolution = u.expect3(1.0) + + if self.p.psize_is_fix: + self.p.psize = u.expect3(p.psize) + else: + self.p.psize = u.expect3(1.0) + + # Update other values + self.update(False) + + # Attach propagator + self._propagator = self._get_propagator() + self.interact = True + + def update(self, update_propagator=True): + """ + Update things which need a little computation + """ + + # Update the internal pixel sizes: 4 cases + if not self.p.resolution_is_fix and not self.p.psize_is_fix: + raise ValueError( + 'Neither pixel size nor sample resolution specified.') + elif not self.p.resolution_is_fix and self.p.psize_is_fix: + dq1 = self.psize[0] * 2 * np.pi / self.distance / self.lam + dq2 = self.psize[1] * 2 * np.pi / self.distance / self.lam + dq3 = np.deg2rad(self.psize[2]) * 4 * np.pi / self.lam * self.sintheta + self.p.resolution[0] = 2 * np.pi / \ + (self.shape[0] * dq1 * self.costheta) + self.p.resolution[1] = 2 * np.pi / (self.shape[1] * dq2) + self.p.resolution[2] = 2 * np.pi / \ + (self.shape[2] * dq3 * self.costheta) + elif self.p.resolution_is_fix and not self.p.psize_is_fix: + dq1 = 2 * np.pi / \ + (self.shape[0] * self.resolution[0] * self.costheta) + dq2 = 2 * np.pi / (self.shape[1] * self.resolution[1]) + dq3 = 2 * np.pi / \ + (self.shape[2] * self.resolution[2] * self.costheta) + self.p.psize[0] = dq1 * self.distance * self.lam / (2 * np.pi) + self.p.psize[1] = dq2 * self.distance * self.lam / (2 * np.pi) + self.p.psize[2] = np.rad2deg( + dq3 * self.lam / (4 * np.pi * self.sintheta)) + else: + raise ValueError( + 'Both pixel size and sample resolution specified.') + + # Establish transforms between coordinate systems + # ...from {z y x} to {r1 r2 r3} + self.A_r1r2r3 = [[1, 0, -self.sintheta / self.costheta], + [0, 1, 0], + [0, 0, 1 / self.costheta]] + # ...from {r1 r2 r3} to {z y x} + self.A_zyx = [[1, 0, self.sintheta], + [0, 1, 0], + [0, 0, self.costheta]] + # ...from {qz qy qx} to {q1 q2 q3} + self.A_q1q2q3 = [[1 / self.costheta, 0, 0], + [0, 1, 0], + [self.sintheta / self.costheta, 0, 1]] + # ...from {q1 q2 q3} to {qz qy qx} + self.A_qzqyqx = [[self.costheta, 0, 0], + [0, 1, 0], + [-self.sintheta, 0, 1]] + + # Update the propagator too + if update_propagator: + self.propagator.update() + + @property + def theta_bragg(self): + return self.p.theta_bragg + + @theta_bragg.setter + def theta_bragg(self, v): + self.p.theta_bragg = v + if self.interact: + self.update() + + @property + def sintheta(self): + return np.sin(np.deg2rad(self.theta_bragg)) + + @property + def costheta(self): + return np.cos(np.deg2rad(self.theta_bragg)) + + @property + def tantheta(self): + return np.tan(np.deg2rad(self.theta_bragg)) + + @property + def resolution(self): + return self.p.resolution + + @resolution.setter + def resolution(self, v): + self.p.resolution[:] = u.expect3(v) + if self.interact: + self.update() + + @property + def psize(self): + return self.p.psize + + @psize.setter + def psize(self, v): + self.p.psize[:] = u.expect3(v) + if self.interact: + 
self.update()
+
+    @property
+    def shape(self):
+        return self.p.shape
+
+    @shape.setter
+    def shape(self, v):
+        self.p.shape[:] = u.expect3(v).astype(int)
+        if self.interact:
+            self.update()
+
+    def _get_propagator(self):
+        """
+        Construct and return the propagator for this geometry.
+        """
+        prop = BasicBragg3dPropagator(self)
+        return prop
+
+    def _r1r2r3(self, p):
+        """
+        Transforms a single point from [z y x] to [r1 r2 r3]
+        """
+        return np.dot(self.A_r1r2r3, p)
+
+    def _zyx(self, p):
+        """
+        Transforms a single point from [r1 r2 r3] to [z y x]
+        """
+        return np.dot(self.A_zyx, p)
+
+    def _q1q2q3(self, p):
+        """
+        Transforms a single point from [qz qy qx] to [q1 q2 q3]
+        """
+        return np.dot(self.A_q1q2q3, p)
+
+    def _qzqyqx(self, p):
+        """
+        Transforms a single point from [q1 q2 q3] to [qz qy qx]
+        """
+        return np.dot(self.A_qzqyqx, p)
+
+    def transformed_grid(self, grids, input_space='real', input_system='natural'):
+        """
+        Transforms a coordinate grid between the cartesian and natural
+        coordinate systems in real or reciprocal space.
+
+        Parameters
+        ----------
+        grids : 3-tuple of 3-dimensional arrays: (z, y, x),
+        (r1, r2, r3), (qz, qy, qx), or (q1, q2, q3),
+        or a 3-dimensional Storage instance.
+
+        input_space: `real` or `reciprocal`
+
+        input_system: `cartesian` or `natural`
+
+        """
+
+        if isinstance(grids, Storage):
+            grids = grids.grids()
+
+        # choose transformation operator: 4 cases
+        if input_space == 'real' and input_system == 'natural':
+            r1, r2, r3 = grids
+            z = r1 + self.sintheta * r3
+            y = r2
+            x = self.costheta * r3
+            return z, y, x
+        elif input_space == 'real' and input_system == 'cartesian':
+            z, y, x = grids
+            r1 = z - self.sintheta / self.costheta * x
+            r2 = y
+            r3 = 1 / self.costheta * x
+            return r1, r2, r3
+        elif input_space == 'reciprocal' and input_system == 'natural':
+            q1, q2, q3 = grids
+            qz = self.costheta * q1
+            qy = q2
+            qx = q3 - self.sintheta * q1
+            return qz, qy, qx
+        elif input_space == 'reciprocal' and input_system == 'cartesian':
+            qz, qy, qx = grids
+            q1 = 1 / self.costheta * qz
+            q2 = qy
+            q3 = qx + self.sintheta / self.costheta * qz
+            return q1, q2, q3
+        else:
+            raise ValueError('invalid options')
+
+    def coordinate_shift(self, input_storage, input_space='real',
+                         input_system='natural', keep_dims=True,
+                         layer=0):
+        """
+        Transforms a 3D storage between the cartesian and natural
+        coordinate systems in real or reciprocal space by simply rolling
+        the axes. It tries to do this symmetrically so that the center
+        is maintained.
+
+        Note that this transform can be done in more than one way, and
+        always involves the choice of a new grid. This method
+        (arbitrarily) chooses the grid which results from skewing along
+        the z/qx direction for real/reciprocal space.
+
+        The shifting is identical to doing a nearest neighbor
+        interpolation, and it would not be difficult to use other
+        interpolation orders by instead shifting an index array and
+        using scipy.ndimage.interpolation.map_coordinates(). But then
+        you have to decide how to interpolate complex numbers.
+
+        Parameters
+        ----------
+        input_storage : The storage to operate on
+
+        input_space: `real` or `reciprocal`
+
+        input_system: `cartesian` or `natural`
+
+        keep_dims : If True, maintain pixel size and number of pixels.
+        If False, keeps all the data of the input storage, which means
+        that the shape of the output storage will be larger than the
+        input.
+
+        """
+
+        C_ = Container(data_type=input_storage.dtype, data_dims=3)
+        S = input_storage
+
+        # Four cases.
In real and reciprocal space, these skewing + # operations are done along different axes. For each space, the + # direction of the transform is taken care of. + + if input_space == 'real': + # create a bottom-padded copy of the data array + shape = S.shape[1:] + pad = int(np.ceil(self.sintheta * + shape[-1] * S.psize[-1] / S.psize[0])) + d = np.pad(S.data[layer], pad_width=( + (0, pad), (0, 0), (0, 0)), mode='constant') + # walk along the r3/x axis and roll the r1/z axis. the + # array is padded at the bottom (high indices) so the + # actual shifts have to be positive. + for i in range(shape[-1]): + if input_system == 'cartesian': + # roll the z axis in the negative direction for more + # positive x + shift = int( + round((shape[-1] - i) * S.psize[-1] * self.sintheta / S.psize[0])) + elif input_system == 'natural': + # roll the r1 axis in the positive direction for more + # positive r3 + shift = int( + round(i * S.psize[-1] * self.sintheta / S.psize[0])) + d[:, :, i] = np.roll(d[:, :, i], shift, axis=0) + + # optionally crop the new array + if keep_dims: + d = d[pad / 2:shape[0] + pad / 2, :, :] + # construct a new Storage + if input_system == 'cartesian': + new_psize = S.psize * np.array([1, 1, 1 / self.costheta]) + elif input_system == 'natural': + new_psize = S.psize * np.array([1, 1, self.costheta]) + old_center = S.origin + S.psize * shape / 2 + S_out = C_.new_storage(ID='S0', psize=new_psize, padonly=False) + V = View(container=C_, storageID='S0', coord=old_center, + shape=d.shape, psize=new_psize) + S_out.reformat() + # should use the view here, but there is a bug (#74) + S_out.data[0] = d + + elif input_space == 'reciprocal': + # create a padded copy of the data array + shape = S.shape[1:] + pad = int(np.ceil(self.sintheta * shape[0])) + d = np.pad(S.data[layer], pad_width=( + (0, 0), (0, 0), (0, pad)), mode='constant') + # walk along the q1/qz axis and roll the q3/qx axis. the + # array is padded at the right (high indices) so the + # actual shifts have to be positive. + for i in range(shape[0]): + if input_system == 'cartesian': + # roll the qx axis in the positive direction for more + # positive qz + shift = int(round(i * self.sintheta)) + elif input_system == 'natural': + # roll the q3 axis in the positive direction for more + # negative q1 + shift = int(round((shape[0] - i) * self.sintheta)) + d[i, :, :] = np.roll(d[i, :, :], shift, axis=2) + # optionally crop the new array + if keep_dims: + d = d[:, :, pad / 2:shape[0] + pad / 2] + # construct a new Storage + if input_system == 'cartesian': + new_psize = S.psize * np.array([1 / self.costheta, 1, 1]) + elif input_system == 'natural': + new_psize = S.psize * np.array([self.costheta, 1, 1]) + old_center = S.origin + S.psize * shape / 2 + S_out = C_.new_storage(ID='S0', psize=new_psize, padonly=False) + V = View(container=C_, storageID='S0', coord=old_center, + shape=d.shape, psize=new_psize) + S_out.reformat() + # should use the view here, but there is a bug (#74) + S_out.data[0] = d + + return S_out + + def prepare_3d_probe(self, S_2d, auto_center=False, system='cartesian', layer=0): + """ + + Prepare a three-dimensional probe from a two-dimensional incident wavefront. + + Parameters + ---------- + S_2d : Two-dimensional storage holding a (typically complex) + wavefront. The absolute positions are not important, only the + pixel sizes. The first index is interpreted as the quasi- + vertical axis zi (coincident with z at theta=0), and the second + index as yi (coincident always with y and r2). 
+ + center : If true, the input wavefront is centered at the + intensity center of mass. + + system : `cartesian` or `natural`, the coordinate system of the + returned object. + + layer : which layer of the 2d probe to use + + """ + + if auto_center: + raise NotImplementedError + + # storage in natural space to fill with the probe + C = Container(data_type=S_2d.dtype, data_dims=3) + if system == 'natural': + View(C, storageID='S0000', psize=self.resolution, shape=self.shape) + S_3d = C.storages['S0000'] + elif system == 'cartesian': + View(C, storageID='S0000', psize=self.resolution * np.array([1, 1, self.costheta]), shape=self.shape) + S_3d = C.storages['S0000'] + + # center both storages (meaning that the central pixel is the + # physical origin) + S_3d.center = np.array(S_3d.shape[1:]) / 2 + S_2d.center = np.array(S_2d.shape[1:]) / 2 + + # find the physical coordinates (zi, yi) of each point in the 3d probe + if system == 'natural': + r1, r2, r3 = S_3d.grids() + r1, r2, r3 = r1[0], r2[0], r3[0] # layer 0 + z, y, x = self.transformed_grid((r1, r2, r3), input_space='real', input_system='natural') + zi = x * self.sintheta + z * (1/self.costheta - self.sintheta * self.tantheta) + yi = y + elif system == 'cartesian': + z, y, x = S_3d.grids() + z, y, x = z[0], y[0], x[0] + zi = x * self.sintheta + z * (1/self.costheta - self.sintheta * self.tantheta) + yi = y + + # find the corresponding indices into S.data[layer] + zi[:] = zi / S_2d.psize[0] + S_2d.center[0] + yi[:] = yi / S_2d.psize[0] + S_2d.center[0] + + # interpolate + S_3d.data[0][:] = map_coordinates(S_2d.data[layer], (zi, yi)) + + #import ipdb; ipdb.set_trace() + + return S_3d + + +class BasicBragg3dPropagator(object): + """ + Just a wrapper for the n-dimensional FFT, no other Bragg-specific + magic applied here (at the moment). + """ + + DEFAULT = DEFAULT + + def __init__(self, geo=None, ffttype='numpy'): + self.geo = geo + if ffttype == 'numpy': + self.fft = np.fft.fftn + self.ifft = np.fft.ifftn + elif ffttype == 'fftw': + import pyfftw + self.fft = pyfftw.interfaces.numpy_fft.fftn + self.ifft = pyfftw.interfaces.numpy_fft.ifftn + + def update(self): + """ + Update any internal buffers. + """ + return + + def fw(self, a): + return np.fft.fftshift(self.fft(a)) + + def bw(self, a): + return self.ifft(np.fft.ifftshift(a)) From c48c7e18205ab78efa6da1a0da569f56d0102484 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Fri, 27 Jan 2017 17:51:13 +0100 Subject: [PATCH 011/363] Added an example usage script for the Bragg classes --- tutorial/bragg3d_initial.py | 177 ++++++++++++++++++++++++++++++++++++ 1 file changed, 177 insertions(+) create mode 100644 tutorial/bragg3d_initial.py diff --git a/tutorial/bragg3d_initial.py b/tutorial/bragg3d_initial.py new file mode 100644 index 000000000..f843fb0df --- /dev/null +++ b/tutorial/bragg3d_initial.py @@ -0,0 +1,177 @@ +# This script is a demonstration of how the new container and geometry +# classes can be used to do 3d Bragg ptychography. Everything is done +# manually on the View and Geo_Bragg instances, no PtyScan objects or +# model managers or pods are involved yet. + +# I've taken the sample and geometry from the numerical experiment in +# Berenguer et al., PRB 2013. Unlike that example, the probe here is +# just a flat rectangle. + +# The imports. I've found it useful to visualize 3d data in mayavi, but +# here we just look at projections with matplotlib. 
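+# (If mayavi happens to be installed, any of the 3d arrays below can be
+# inspected interactively with something like
+#     from mayavi import mlab; mlab.contour3d(abs(S.data[0]))
+# -- an untested sketch, not needed for this script.)
+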
+import ptypy +import numpy as np +import matplotlib.pyplot as plt + +# Set up a 3D geometry and a scan +# ------------------------------- + +# The following geometry corresponds to an oversampling ratio of 1 along +# the rocking curve, which means that the numerical field of view +# tightly contains the exit wave. +g = ptypy.core.geometry_bragg.Geo_Bragg(psize=(13e-6, 13e-6, 0.01/4), shape=(128, 128, 9*4), energy=8.5, distance=2.0, theta_bragg=22.32) + +# Set up scan positions along y, perpendicular to the incoming beam and +# to the thin layer stripes. +Npos = 11 +positions = np.zeros((Npos,3)) +positions[:, 1] = np.arange(Npos) - Npos/2.0 +positions *= .5e-6 + +# Set up the object and its views +# ------------------------------- + +# Create a container for the object array, which will represent the +# object in the non-orthogonal coordinate system conjugate to the +# q-space measurement frame. +C = ptypy.core.Container(data_type=np.complex128, data_dims=3) + +# For each scan position in the orthogonal coordinate system, find the +# natural coordinates and create a View instance there. +views = [] +for pos in positions: + pos_ = g._r1r2r3(pos) + views.append(ptypy.core.View(C, storageID='S0000', psize=g.resolution, coord=pos_, shape=g.shape)) +S = C.storages['S0000'] +C.reformat() + +# Define the test sample based on the orthogonal position of each voxel. +# First, the cartesian grid is obtained from the geometry object, then +# this grid is used as a condition for the sample's magnitude. +zz, yy, xx = g.transformed_grid(S, input_space='real', input_system='natural') +S.fill(0.0) +S.data[(zz >= -90e-9) & (zz < 90e-9) & (yy >= 1e-6) & (yy < 2e-6) & (xx < 1e-6)] = 1 +S.data[(zz >= -90e-9) & (zz < 90e-9) & (yy >= -2e-6) & (yy < -1e-6)] = 1 + +# Set up the probe and calculate diffraction patterns +# --------------------------------------------------- + +# First set up a two-dimensional representation of the probe, with +# arbitrary pixel spacing. The probe here is defined as a 1.5 um by 3 um +# flat square, but this container will typically come from a 2d +# transmission ptycho scan of an easy test object. +Cprobe = ptypy.core.Container(data_dims=2, data_type='float') +Sprobe = Cprobe.new_storage(psize=10e-9, shape=500) +z, y = Sprobe.grids() +Sprobe.data[(z > -.75e-6) & (z < .75e-6) & (y > -1.5e-6) & (y < 1.5e-6)] = 1 + +# The Bragg geometry has a method to prepare a 3d Storage by extruding +# the 2d probe and interpolating to the right grid. The returned storage +# contains a single view compatible with the object views. +Sprobe_3d = g.prepare_3d_probe(Sprobe, system='natural') +probeView = Sprobe_3d.views[0] + +# Calculate diffraction patterns by using the geometry's propagator. +diff = [] +for v in views: + diff.append(np.abs(g.propagator.fw(v.data * probeView.data))**2) + +# Visualize a single field of view with probe and object +# ------------------------------------------------------ + +# A risk with 3d Bragg ptycho is that undersampling along theta leads to +# a situation where the actual exit wave is not contained within the +# field of view. This plot shows the situation for this combination of +# object, probe, and geometry. Note that the thin sample is what makes +# the current test experiment possible. + +# In order to visualize the field of view, we'll create a copy of the +# object storage and set its value equal to 1 where covered by the first +# view. 
+S_display = S.copy(owner=C) +S_display.fill(0.0) +S_display[S.views[0]] = 1 + +# Then, to see how the probe is contained by this field of view, we add +# the probe and the object itself to the above view. +S_display[S.views[0]] += probeView.data +S_display.data += S.data * 2 + +# To visualize how this looks in cartesian real space, make a shifted +# (nearest-neighbor interpolated) copy of the object Storage. +S_display_cart = g.coordinate_shift(S_display, input_system='natural', input_space='real', keep_dims=False) + +# Plot that +fig, ax = plt.subplots(nrows=1, ncols=2) +z, y, x = S_display_cart.grids() +ax[0].imshow(np.mean(np.abs(S_display_cart.data[0]), axis=1), extent=[x.min(), x.max(), z.min(), z.max()], interpolation='none', origin='lower') +plt.setp(ax[0], ylabel='z', xlabel='x', title='side view') +ax[1].imshow(np.mean(np.abs(S_display_cart.data[0]), axis=0), extent=[x.min(), x.max(), y.min(), y.max()], interpolation='none', origin='lower') +plt.setp(ax[1], ylabel='y', xlabel='x', title='top view') + +# Visualize the probe positions along the scan +# -------------------------------------------- + +# beam/sample overlap in non-orthogonal coordinates +import matplotlib.gridspec as gridspec +plt.figure() +gs = gridspec.GridSpec(Npos, 2, width_ratios=[3,1], wspace=.0) +ax, ax2 = [], [] +r1, r2, r3 = Sprobe_3d.grids() +for i in range(len(S.views)): + # overlap + ax.append(plt.subplot(gs[i, 0])) + ax[-1].imshow(np.mean(np.abs(views[i].data + probeView.data), axis=0).T, vmin=0, vmax=.07, extent=[r2.min(), r2.max(), r3.min(), r3.max()]) + plt.setp(ax[-1], xlabel='r2', ylabel='r3', xlim=[r2.min(), r2.max()], ylim=[r3.min(), r3.max()], yticks=[]) + # diffraction + ax2.append(plt.subplot(gs[i, 1])) + ax2[-1].imshow(diff[i][:,:,18]) + plt.setp(ax2[-1], ylabel='q1', xlabel='q2', xticks=[], yticks=[]) +plt.suptitle('Probe, sample, and slices of 3d diffraction peaks along the scan') +plt.draw() + +# Reconstruct the numerical data +# ------------------------------ + +# Reconstruction should be done considering the algorithms developed +# (for example using good regularization and considering noise models +# etc), but here I've just used a naive PIE generalization which seems +# to work OK for numerical noise-free data. + +# Keep a copy of the object storage, and fill the actual one with an +# initial guess (like zeros everywhere). +S_true = S.copy(owner=C) +S.fill(0.0) + +# Then run this very temporary and inefficient reconstruction loop. 
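+# (The object update inside the loop is essentially the ePIE rule,
+# O += conj(P) / max(|P|)**2 * (psi_corrected - psi), applied here with
+# a fixed, known probe.)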
+fig, ax = plt.subplots(ncols=3) +errors = [] +ferrors = [] +r1, r2, r3 = S.grids() +for i in range(100): + print i + ferrors_ = [] + for j in range(len(views)): + exit_ = views[j].data * probeView.data + prop = g.propagator.fw(exit_) + ferrors_.append(np.abs(prop)**2 - diff[j]) + prop[:] = np.sqrt(diff[j]) * np.exp(1j * np.angle(prop)) + exit = g.propagator.bw(prop) + views[j].data += 1.0 * np.conj(probeView.data) / (np.abs(probeView.data).max())**2 * (exit - exit_) + errors.append(np.abs(S.data - S_true.data).sum()) + ferrors.append(np.mean(ferrors_)) + + if not (i % 5): + ax[0].clear() + ax[0].plot(errors/errors[0]) + ax[0].plot(ferrors/ferrors[0]) + ax[1].clear() + S_cart = g.coordinate_shift(S, input_space='real', input_system='natural', keep_dims=False) + z, y, x = S_cart.grids() + ax[1].imshow(np.mean(np.abs(S_cart.data[0]), axis=0), extent=[x.min(), x.max(), y.min(), y.max()], interpolation='none', origin='lower') + plt.setp(ax[1], ylabel='y', xlabel='x', title='top view') + ax[2].clear() + ax[2].imshow(np.mean(np.abs(S_cart.data[0]), axis=1), extent=[x.min(), x.max(), z.min(), z.max()], interpolation='none', origin='lower') + plt.setp(ax[2], ylabel='z', xlabel='x', title='side view') + plt.draw() + plt.pause(.01) From fc7869ffb5548657e04ae85668fbfe67c357848f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Fri, 27 Jan 2017 18:00:33 +0100 Subject: [PATCH 012/363] just added a comment --- tutorial/bragg3d_initial.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tutorial/bragg3d_initial.py b/tutorial/bragg3d_initial.py index f843fb0df..f61399acd 100644 --- a/tutorial/bragg3d_initial.py +++ b/tutorial/bragg3d_initial.py @@ -21,6 +21,12 @@ # tightly contains the exit wave. g = ptypy.core.geometry_bragg.Geo_Bragg(psize=(13e-6, 13e-6, 0.01/4), shape=(128, 128, 9*4), energy=8.5, distance=2.0, theta_bragg=22.32) +# The Geo_Bragg object contains mostly the same things as Geo, but in +# three dimensions. The third element of the shape is the number of +# rocking curve positions, the third element of the psize denotes theta +# step in degrees. +print g + # Set up scan positions along y, perpendicular to the incoming beam and # to the thin layer stripes. Npos = 11 From 5ce338492eb875c3d64c5f45452e497d5227928f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Thu, 2 Feb 2017 12:12:26 +0100 Subject: [PATCH 013/363] Added various Bragg algorithms to the tutorial --- tutorial/bragg3d_initial.py | 181 ++++++++++++++++++++++++++++-------- 1 file changed, 142 insertions(+), 39 deletions(-) diff --git a/tutorial/bragg3d_initial.py b/tutorial/bragg3d_initial.py index f61399acd..5aad3485a 100644 --- a/tutorial/bragg3d_initial.py +++ b/tutorial/bragg3d_initial.py @@ -47,8 +47,8 @@ views = [] for pos in positions: pos_ = g._r1r2r3(pos) - views.append(ptypy.core.View(C, storageID='S0000', psize=g.resolution, coord=pos_, shape=g.shape)) -S = C.storages['S0000'] + views.append(ptypy.core.View(C, storageID='Sobj', psize=g.resolution, coord=pos_, shape=g.shape)) +S = C.storages['Sobj'] C.reformat() # Define the test sample based on the orthogonal position of each voxel. 
@@ -69,7 +69,10 @@ Cprobe = ptypy.core.Container(data_dims=2, data_type='float') Sprobe = Cprobe.new_storage(psize=10e-9, shape=500) z, y = Sprobe.grids() +# square probe Sprobe.data[(z > -.75e-6) & (z < .75e-6) & (y > -1.5e-6) & (y < 1.5e-6)] = 1 +# gaussian probe +Sprobe.data = np.exp(-z**2 / (2 * (.75e-6)**2) - y**2 / (2 * (1.0e-6)**2)) # The Bragg geometry has a method to prepare a 3d Storage by extruding # the 2d probe and interpolating to the right grid. The returned storage @@ -94,7 +97,7 @@ # In order to visualize the field of view, we'll create a copy of the # object storage and set its value equal to 1 where covered by the first # view. -S_display = S.copy(owner=C) +S_display = S.copy(owner=C, ID='Sdisplay') S_display.fill(0.0) S_display[S.views[0]] = 1 @@ -139,45 +142,145 @@ # Reconstruct the numerical data # ------------------------------ -# Reconstruction should be done considering the algorithms developed -# (for example using good regularization and considering noise models -# etc), but here I've just used a naive PIE generalization which seems -# to work OK for numerical noise-free data. +# Here I compare different algorithms and scaling options. +algorithm = 'DM' # Keep a copy of the object storage, and fill the actual one with an # initial guess (like zeros everywhere). -S_true = S.copy(owner=C) +S_true = S.copy(owner=C, ID='Strue') +# zero everything S.fill(0.0) +# unit magnitude, random phase: +# S.data[:] = 1.0 * np.exp(1j * (2 * np.random.rand(*S.data.shape) - 1) * np.pi) +# random magnitude, random phase +# S.data[:] = np.random.rand(*S.data.shape) * np.exp(1j * (2 * np.random.rand(*S.data.shape) - 1) * np.pi) -# Then run this very temporary and inefficient reconstruction loop. -fig, ax = plt.subplots(ncols=3) -errors = [] -ferrors = [] -r1, r2, r3 = S.grids() -for i in range(100): - print i - ferrors_ = [] +# Here's an implementation of the OS (preconditioned PIE) algorithm from +# Pateras' thesis. 
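+# (In short, the code below first builds the preconditioner
+# Lambda = alpha + sum_j |P_j|^2, then takes, for each view, a gradient
+# step on the modulus constraint scaled by beta / Lambda.)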
+if algorithm == 'OS': + alpha, beta = .1, 1.0 + fig, ax = plt.subplots(ncols=3) + errors = [] + criterion = [] + # first calculate the weighting factor Lambda, here called scaling = 1/Lambda + scaling = S.copy(owner=C, ID='Sscaling') + scaling.fill(alpha) + for v in views: + scaling[v] += np.abs(probeView.data)**2 + scaling.data[:] = 1 / scaling.data + # then iterate with the appropriate update rule + for i in range(100): + print i + criterion_ = 0.0 + obj_error_ = 0.0 + for j in range(len(views)): + prop = g.propagator.fw(views[j].data * probeView.data) + criterion_ += np.sum(np.sqrt(diff[j]) - np.abs(prop))**2 + prop_ = np.sqrt(diff[j]) * np.exp(1j * np.angle(prop)) + gradient = 2 * probeView.data * g.propagator.bw(prop - prop_) + views[j].data -= beta * gradient * scaling[views[j]] + errors.append(np.abs(S.data - S_true.data).sum()) + criterion.append(criterion_) + + if not (i % 5): + ax[0].clear() + ax[0].plot(errors/errors[0]) + #ax[0].plot(criterion/criterion[0]) + ax[1].clear() + S_cart = g.coordinate_shift(S, input_space='real', input_system='natural', keep_dims=False) + z, y, x = S_cart.grids() + ax[1].imshow(np.mean(np.abs(S_cart.data[0]), axis=0), extent=[x.min(), x.max(), y.min(), y.max()], interpolation='none', origin='lower') + plt.setp(ax[1], ylabel='y', xlabel='x', title='top view') + ax[2].clear() + ax[2].imshow(np.mean(np.abs(S_cart.data[0]), axis=1), extent=[x.min(), x.max(), z.min(), z.max()], interpolation='none', origin='lower') + plt.setp(ax[2], ylabel='z', xlabel='x', title='side view') + plt.draw() + plt.pause(.01) + + +# Here's a PIE/cPIE implementation +if algorithm == 'PIE': + beta = 1.0 + eps = 1e-3 + fig, ax = plt.subplots(ncols=3) + errors = [] + ferrors = [] + for i in range(100): + print i + ferrors_ = [] + for j in range(len(views)): + exit_ = views[j].data * probeView.data + prop = g.propagator.fw(exit_) + ferrors_.append(np.abs(prop)**2 - diff[j]) + prop[:] = np.sqrt(diff[j]) * np.exp(1j * np.angle(prop)) + exit = g.propagator.bw(prop) + # ePIE scaling (Maiden2009) + #views[j].data += beta * np.conj(probeView.data) / (np.abs(probeView.data).max())**2 * (exit - exit_) + # PIE and cPIE scaling (Rodenburg2004 and Godard2011b) + views[j].data += beta * np.abs(probeView.data) / np.abs(probeView.data).max() * np.conj(probeView.data) / (np.abs(probeView.data)**2 + eps) * (exit - exit_) + errors.append(np.abs(S.data - S_true.data).sum()) + ferrors.append(np.mean(ferrors_)) + + if not (i % 5): + ax[0].clear() + ax[0].plot(errors/errors[0]) + ax[0].plot(ferrors/ferrors[0]) + ax[1].clear() + S_cart = g.coordinate_shift(S, input_space='real', input_system='natural', keep_dims=False) + z, y, x = S_cart.grids() + ax[1].imshow(np.mean(np.abs(S_cart.data[0]), axis=0), extent=[x.min(), x.max(), y.min(), y.max()], interpolation='none', origin='lower') + plt.setp(ax[1], ylabel='y', xlabel='x', title='top view') + ax[2].clear() + ax[2].imshow(np.mean(np.abs(S_cart.data[0]), axis=1), extent=[x.min(), x.max(), z.min(), z.max()], interpolation='none', origin='lower') + plt.setp(ax[2], ylabel='z', xlabel='x', title='side view') + plt.draw() + plt.pause(.01) + +if algorithm == 'DM': + alpha = 1.0 + fig, ax = plt.subplots(ncols=3) + errors = [] + ferrors = [] + # create initial exit waves + exitwaves = [] + for j in range(len(views)): + exitwaves.append(views[j].data * probeView.data) + # we also need a constant normalization storage, which contains the + # denominator of the DM object update equation. 
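+    # (That update reads O = sum_j conj(P_j) * psi_j / (sum_j |P_j|^2 + eps),
+    # with a small eps added for numerical stability.)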
+ Snorm = S.copy(owner=C) + Snorm.fill(0.0) for j in range(len(views)): - exit_ = views[j].data * probeView.data - prop = g.propagator.fw(exit_) - ferrors_.append(np.abs(prop)**2 - diff[j]) - prop[:] = np.sqrt(diff[j]) * np.exp(1j * np.angle(prop)) - exit = g.propagator.bw(prop) - views[j].data += 1.0 * np.conj(probeView.data) / (np.abs(probeView.data).max())**2 * (exit - exit_) - errors.append(np.abs(S.data - S_true.data).sum()) - ferrors.append(np.mean(ferrors_)) - - if not (i % 5): - ax[0].clear() - ax[0].plot(errors/errors[0]) - ax[0].plot(ferrors/ferrors[0]) - ax[1].clear() - S_cart = g.coordinate_shift(S, input_space='real', input_system='natural', keep_dims=False) - z, y, x = S_cart.grids() - ax[1].imshow(np.mean(np.abs(S_cart.data[0]), axis=0), extent=[x.min(), x.max(), y.min(), y.max()], interpolation='none', origin='lower') - plt.setp(ax[1], ylabel='y', xlabel='x', title='top view') - ax[2].clear() - ax[2].imshow(np.mean(np.abs(S_cart.data[0]), axis=1), extent=[x.min(), x.max(), z.min(), z.max()], interpolation='none', origin='lower') - plt.setp(ax[2], ylabel='z', xlabel='x', title='side view') - plt.draw() - plt.pause(.01) + Snorm[views[j]] += np.abs(probeView.data)**2 + + # iterate + for i in range(100): + print i + ferrors_ = [] + # fourier update, updates all the exit waves + for j in range(len(views)): + # in DM, you propagate the following linear combination + im = g.propagator.fw((1 + alpha) * probeView.data * views[j].data - alpha * exitwaves[j]) + im = np.sqrt(diff[j]) * np.exp(1j * np.angle(im)) + exitwaves[j][:] += g.propagator.bw(im) - views[j].data * probeView.data + # object update, now skipping the iteration because the probe is constant + S.fill(0.0) + for j in range(len(views)): + views[j].data += np.conj(probeView.data) * exitwaves[j] + S.data[:] /= Snorm.data + 1e-10 + errors.append(np.abs(S.data - S_true.data).sum()) + ferrors.append(np.mean(ferrors_)) + + if not (i % 5): + ax[0].clear() + ax[0].plot(errors/errors[0]) + ax[0].plot(ferrors/ferrors[0]) + ax[1].clear() + S_cart = g.coordinate_shift(S, input_space='real', input_system='natural', keep_dims=False) + z, y, x = S_cart.grids() + ax[1].imshow(np.mean(np.abs(S_cart.data[0]), axis=0), extent=[x.min(), x.max(), y.min(), y.max()], interpolation='none', origin='lower') + plt.setp(ax[1], ylabel='y', xlabel='x', title='top view') + ax[2].clear() + ax[2].imshow(np.mean(np.abs(S_cart.data[0]), axis=1), extent=[x.min(), x.max(), z.min(), z.max()], interpolation='none', origin='lower') + plt.setp(ax[2], ylabel='z', xlabel='x', title='side view') + plt.draw() + plt.pause(.01) From 5329084aae3344d7e300c67eb9f965a8e62f8525 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Fri, 24 Feb 2017 13:33:37 +0100 Subject: [PATCH 014/363] Added dq1, dq2, dq3 convenience variables --- ptypy/core/geometry_bragg.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/ptypy/core/geometry_bragg.py b/ptypy/core/geometry_bragg.py index b5bbbabfc..80d08f2cd 100644 --- a/ptypy/core/geometry_bragg.py +++ b/ptypy/core/geometry_bragg.py @@ -143,6 +143,11 @@ def update(self, update_propagator=True): raise ValueError( 'Both pixel size and sample resolution specified.') + # These are useful to have on hand + self.dq1 = dq1 + self.dq2 = dq2 + self.dq3 = dq3 + # Establish transforms between coordinate systems # ...from {z y x} to {r1 r2 r3} self.A_r1r2r3 = [[1, 0, -self.sintheta / self.costheta], From e554ea128d589f6183fd79ae20fd508454fdcc93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: 
Fri, 24 Feb 2017 14:52:31 +0100 Subject: [PATCH 015/363] Fixed a bug in the reciprocal space coordinate change --- ptypy/core/geometry_bragg.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ptypy/core/geometry_bragg.py b/ptypy/core/geometry_bragg.py index 80d08f2cd..8b0183e28 100644 --- a/ptypy/core/geometry_bragg.py +++ b/ptypy/core/geometry_bragg.py @@ -402,7 +402,7 @@ def coordinate_shift(self, input_storage, input_space='real', # roll the q3 axis in the positive direction for more # negative q1 shift = int(round((shape[0] - i) * self.sintheta)) - d[i, :, :] = np.roll(d[i, :, :], shift, axis=2) + d[i, :, :] = np.roll(d[i, :, :], shift, axis=-1) # optionally crop the new array if keep_dims: d = d[:, :, pad / 2:shape[0] + pad / 2] From 807415a1d79b9fad83485a4633cd6ebb9fa132ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?= Date: Wed, 1 Mar 2017 15:28:43 +0100 Subject: [PATCH 016/363] Probe extrusion now also handles complex wavefronts --- ptypy/core/geometry_bragg.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ptypy/core/geometry_bragg.py b/ptypy/core/geometry_bragg.py index 8b0183e28..71a0b5484 100644 --- a/ptypy/core/geometry_bragg.py +++ b/ptypy/core/geometry_bragg.py @@ -479,7 +479,11 @@ def prepare_3d_probe(self, S_2d, auto_center=False, system='cartesian', layer=0) yi[:] = yi / S_2d.psize[0] + S_2d.center[0] # interpolate - S_3d.data[0][:] = map_coordinates(S_2d.data[layer], (zi, yi)) + if np.iscomplexobj(S_2d.data): + S_3d.data[0][:] = map_coordinates(np.abs(S_2d.data[layer]), (zi, yi)) + S_3d.data[0][:] *= np.exp(1j * map_coordinates(np.angle(S_2d.data[layer]), (zi, yi))) + else: + S_3d.data[0][:] = map_coordinates(S_2d.data[layer], (zi, yi)) #import ipdb; ipdb.set_trace() From 368b68e6a75a25ee429e472f692c40866cd3a41b Mon Sep 17 00:00:00 2001 From: Bjoern Enders Date: Mon, 27 Mar 2017 18:14:43 -0700 Subject: [PATCH 017/363] First attempt at rewriting the parameter config input. --- ptypy/utils/validator.py | 55 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/ptypy/utils/validator.py b/ptypy/utils/validator.py index f72535c28..9eb6669c5 100644 --- a/ptypy/utils/validator.py +++ b/ptypy/utils/validator.py @@ -19,6 +19,7 @@ _desc_list = list(csv.DictReader(file(_csvfile, 'r'))) del csv del pkg_resources +import ast if __name__=='__main__': from ptypy.utils.parameters import Param @@ -78,6 +79,60 @@ _evaltypes = ['int','float','tuple','list','complex'] _copytypes = ['str','file'] +class Parameter(object): + """ + """ + def __init__(self,parent='',separator='.'): + self.children = None + self.num_ID = 0 + self.name = '' + self.options ={} + + def get_root(self): + """ + Return root of parameter tree. + """ + if self.parent is None: + return self + else: + return self.parent.get_root() + + + def load_csv(self, fbuffer): + _desc_list = list(csv.DictReader(fbuffer)) + + def save_csv(self, fbuffer): + + raise NotImplementedError + + def load_json(self,fbuffer): + + raise NotImplementedError + + def save_json(self,fbuffer): + + raise NotImplementedError + + def load_conf_parser(self,fbuffer, separator='.'): + """ + Load Parameter defaults using Pythons ConfigParser + + Each parameter each parameter occupies its own section. 
+ Separator characters in sections names map to a tree-hierarchy + """ + parser = ConfigParser.SafeConfigParser() + parser.read(fbuffer) + for sec in parser.sections: + + pdesc = Parameter(parent = self) + self.children[name] = Parameter(parent = self) + # grow the options dictionary + self.options.update(section.options) + pass + + def save_conf_parser(self,fbuffer): + + raise NotImplementedError class PDesc(object): From f9a787e260c0d5f28b4e6100d051e77173be6ae1 Mon Sep 17 00:00:00 2001 From: Bjoern Enders Date: Wed, 29 Mar 2017 21:51:57 -0700 Subject: [PATCH 018/363] Parameter parsing to csv and txt matures --- ptypy/utils/validator.py | 316 +++++++++++++++++++++++++++++++++------ 1 file changed, 272 insertions(+), 44 deletions(-) diff --git a/ptypy/utils/validator.py b/ptypy/utils/validator.py index 9eb6669c5..4e8daa89d 100644 --- a/ptypy/utils/validator.py +++ b/ptypy/utils/validator.py @@ -14,6 +14,7 @@ """ import pkg_resources import csv +import weakref # Load all documentation on import _csvfile = pkg_resources.resource_filename('ptypy', 'resources/parameter_descriptions.csv') _desc_list = list(csv.DictReader(file(_csvfile, 'r'))) @@ -82,13 +83,100 @@ class Parameter(object): """ """ - def __init__(self,parent='',separator='.'): - self.children = None - self.num_ID = 0 - self.name = '' - self.options ={} + def __init__(self, parent=None, + name = 'any', + separator='.', + info=None): + + self.name = name + self.parent = parent + self.separator = separator + + self.required = [] + self.optional = [] + self.info = {} + self._parse_info(info) + + if self._is_child: + import weakref + self.descendants = weakref.WeakValueDictionary() + else: + self.descendants = {} + + self.children = {} + self.num_id = 0 + self.options = dict.fromkeys(self.required,'') + self._all_options = {} + + @property + def descendants_options(self): + return self._all_options.keys() + + @property + def _is_child(self): + """ + Type check + """ + return type(self.parent) is self.__class__ - def get_root(self): + def _parse_info(self,info=None): + if info is not None: + self.info.update(info) + + r = [] + o = [] + + for option,text in self.info.items(): + if ('required' in text or 'mandatory' in text): + r+=[option] + else: + o+=[option] + self.required = r + self.optional = o + + + def _new_child(self, name=None): + n = name if name is not None and str(name)==name else 'ch%02d' % len(self.descendants) + return self.__class__(parent = self, + separator = self.separator, + info = self.info, + name =n + ) + + def _name_descendants(self, separator =None): + """ + This function transforms the flat list of descendants + into tree hierarchy. Creates roots if paramater has a + dangling root. + """ + sep = separator if separator is not None else self.separator + + for name, desc in self.descendants.items(): + if sep not in name: + desc.name = name + self.children[name] = desc + else: + names = name.split(sep) + + nm = names[0] + + p = self.descendants.get(nm) + + if p is None: + # Found dangling parameter. Create a root + p = self._new_child(name = nm) + self._new_desc(nm, p) + self.children[nm] = p + + # transfer ownership + p.descendants[sep.join(names[1:])] = desc + desc.parent = p + + # recursion + for desc in self.children.values(): + desc._name_descendants() + + def _get_root(self): """ Return root of parameter tree. 
""" @@ -97,13 +185,101 @@ def get_root(self): else: return self.parent.get_root() + def _store_options(self,dct): + """ + Read and store options and check that the the minimum selections + of options is present. + """ + + if self.required is not None and type(self.required) is list: + missing = [r for r in self.required if r not in dct.keys()] + if missing: + raise ValueError('Missing required option(s) <%s> for parameter %s.' % (', '.join(missing),self.name)) + + self.options = dict.fromkeys(self.required) + self.options.update(dct) + + @property + def root(self): + self._get_root() + + + def _new_desc(self, name, desc, update_in_parent = True): + """ + Update the new entry to the root. + """ + self.descendants[name] = desc + + # add all options to parent class + self._all_options.update(desc.options) + + if update_in_parent: + if self._is_child: + # You are not the root + self.parent._new_desc(self.name+self.separator+name,desc) + else: + # You are the root. Do root things here. + pass + + def load_csv(self, fbuffer, **kwargs): + """ + Load from csv as a fielded array. Keyword arguments are passed + on to csv.DictReader + """ + from csv import DictReader + CD = DictReader(fbuffer, **kwargs) + + if 'level' in CD.fieldnames: + chain = [] + + # old style CSV, name + level sets the path + for num, dct in enumerate(list(CD)): + + # Get parameter name and level in the hierarchy + level = int(dct.pop('level')) + name = dct.pop('name') - def load_csv(self, fbuffer): - _desc_list = list(csv.DictReader(fbuffer)) + # translations + dct['help']= dct.pop('shortdoc') + dct['doc']= dct.pop('longdoc') + if dct.pop('static').lower()!='yes': continue + + desc = self._new_child(name) + desc._store_options(dct) + desc.num_id = num + + if level == 0: + chain = [name] + else: + chain = chain[:level]+[name] + + self._new_desc(self.separator.join(chain), desc) + else: + # new style csv, name and path are synonymous + for dct in list(CD): + name = dct['path'] + desc = self._new_child(name) + desc._store_options(dct) + self._new_desc(name, desc) - def save_csv(self, fbuffer): + self._name_descendants() - raise NotImplementedError + def save_csv(self, fbuffer, **kwargs): + """ + Save to fbuffer. Keyword arguments are passed + on to csv.DictWriter + """ + from csv import DictWriter + + fieldnames = self.required + self.optional + fieldnames += [k for k in self._all_options.keys() if k not in fieldnames] + + DW = DictWriter(fbuffer, ['path'] + fieldnames) + DW.writeheader() + for key in sorted(self.descendants.keys()): + dct = {'path':key} + dct.update(self.descendants[key].options) + DW.writerow(dct) def load_json(self,fbuffer): @@ -112,27 +288,99 @@ def load_json(self,fbuffer): def save_json(self,fbuffer): raise NotImplementedError - - def load_conf_parser(self,fbuffer, separator='.'): + + + def load_conf_parser(self,fbuffer, **kwargs): """ Load Parameter defaults using Pythons ConfigParser Each parameter each parameter occupies its own section. - Separator characters in sections names map to a tree-hierarchy + Separator characters in sections names map to a tree-hierarchy. 
+ + Keyword arguments are forwarded to `ConfigParser.RawConfigParser` """ - parser = ConfigParser.SafeConfigParser() - parser.read(fbuffer) - for sec in parser.sections: + from ConfigParser import RawConfigParser as Parser + parser = Parser(**kwargs) + parser.readfp(fbuffer) + parser = parser + for num, sec in enumerate(parser.sections()): + desc = self._new_child(name=sec) + desc._store_options(dict(parser.items(sec))) + self._new_desc(sec, desc) + + self._name_descendants() + return parser - pdesc = Parameter(parent = self) - self.children[name] = Parameter(parent = self) - # grow the options dictionary - self.options.update(section.options) - pass + def save_conf_parser(self,fbuffer, print_optional=True): + """ + Save Parameter defaults using Pythons ConfigParser - def save_conf_parser(self,fbuffer): - - raise NotImplementedError + Each parameter each parameter occupies its own section. + Separator characters in sections names map to a tree-hierarchy + """ + from ConfigParser import RawConfigParser as Parser + parser = Parser() + dct = self.descendants + for name in sorted(dct.keys()): + if dct[name] is None: + continue + else: + parser.add_section(name) + for k,v in self.descendants[name].options.items(): + if (v or print_optional) or (k in self.required): + parser.set(name, k, v) + + parser.write(fbuffer) + return parser + + def make_doc_rst(self,prst, use_root=True): + + Header= '.. _parameters:\n\n' + Header+= '************************\n' + Header+= 'Parameter tree structure\n' + Header+= '************************\n\n' + prst.write(Header) + + root = self.get_root() # if use_root else self + shortdoc = 'shortdoc' + longdoc = 'longdoc' + default = 'default' + lowlim ='lowlim' + uplim='uplim' + + start = self.get_root() + + for name,desc in root.descendants.iteritems(): + if name=='': + continue + if hasattr(desc,'children') and desc.parent is root: + prst.write('\n'+name+'\n') + prst.write('='*len(name)+'\n\n') + if hasattr(desc,'children') and desc.parent.parent is root: + prst.write('\n'+name+'\n') + prst.write('-'*len(name)+'\n\n') + + opt = desc.options + + prst.write('.. py:data:: '+name) + #prst.write('('+', '.join([t for t in opt['type']])+')') + prst.write('('+opt['type']+')') + prst.write('\n\n') + num = str(opt.get('ID')) + prst.write(' *('+num+')* '+opt[shortdoc]+'\n\n') + prst.write(' '+opt[longdoc].replace('\n','\n ')+'\n\n') + prst.write(' *default* = ``'+str(opt[default])) + if opt[lowlim] is not None and opt[uplim] is not None: + prst.write(' (>'+str(opt[lowlim])+', <'+str(opt[uplim])+')``\n') + elif opt[lowlim] is not None and opt[uplim] is None: + prst.write(' (>'+str(opt[lowlim])+')``\n') + elif opt[lowlim] is None and opt[uplim] is not None: + prst.write(' (<'+str(opt[uplim])+')``\n') + else: + prst.write('``\n') + + prst.write('\n') + prst.close() class PDesc(object): @@ -266,26 +514,6 @@ def set_default(self,default='', check=False): self.value = out return out - """ - @property - def value(self): - - if self.default is None: - out = None - # should be only strings now - elif self.default.lower()=='none': - out = None - elif self.default.lower()=='true': - out = True - elif self.default.lower()=='false': - out = False - elif self.is_evaluable: - out = eval(self.default) - else: - out = self.default - - return out - """ def check(self, pars, walk): """ From 8785390b9a1b049f091d83f524501f97fef236d7 Mon Sep 17 00:00:00 2001 From: Bjoern Enders Date: Thu, 30 Mar 2017 20:18:18 -0700 Subject: [PATCH 019/363] PDesc replacement almost ready. 
Modules can now add their own parameters into the tree.
---
 ptypy/__init__.py        |  15 ++
 ptypy/utils/validator.py | 398 ++++++++++++++++++++++++++++++---------
 2 files changed, 322 insertions(+), 91 deletions(-)

diff --git a/ptypy/__init__.py b/ptypy/__init__.py
index 0dd5b5f16..e5ae58ae5 100644
--- a/ptypy/__init__.py
+++ b/ptypy/__init__.py
@@ -48,7 +48,22 @@
 from utils import verbose
 #verbose.set_level(2)
 
+# Start a parameter tree
+from utils.validator import EvalParameter
+parameter = EvalParameter(name = 'root')
+
+# FIXME: replace with parameter additions on module load
+# Here we load the .csv base
+import pkg_resources
+_csvfile = open(pkg_resources.resource_filename('ptypy',
+                'resources/parameter_descriptions.csv'),'r')
+parameter.load_csv(_csvfile)
+del pkg_resources
+
+
 import utils
+
+
 # Import core modules
 import io
 import experiment
diff --git a/ptypy/utils/validator.py b/ptypy/utils/validator.py
index 4e8daa89d..9e060e2fc 100644
--- a/ptypy/utils/validator.py
+++ b/ptypy/utils/validator.py
@@ -12,15 +12,10 @@
 :copyright: Copyright 2014 by the PTYPY team, see AUTHORS.
 :license: GPLv2, see LICENSE for details.
 """
-import pkg_resources
-import csv
-import weakref
 
-# Load all documentation on import
-_csvfile = pkg_resources.resource_filename('ptypy', 'resources/parameter_descriptions.csv')
-_desc_list = list(csv.DictReader(file(_csvfile, 'r')))
-del csv
-del pkg_resources
+
+import ast
+import weakref
+from collections import OrderedDict
 
 if __name__=='__main__':
     from ptypy.utils.parameters import Param
@@ -41,8 +36,8 @@
 #! Inverse message codes
 CODE_LABEL = dict((v, k) for k, v in CODES.items())
 
+"""
 # Populate the dictionary of all entry points.
-from collections import OrderedDict
 
 # All ptypy parameters in an ordered dictionary as (entry_point, PDesc) pairs.
 parameter_descriptions = OrderedDict()
@@ -51,6 +46,7 @@
 # All parameter containers in a dictionary as (entry_point, PDesc) pairs.
 # Subset of :py:data:`parameter_descriptions`
 entry_points_dct = {}
+"""
 
 # Logging levels
 import logging
@@ -88,8 +84,19 @@ def __init__(self,
                  parent=None,
                  separator='.',
                  info=None):
+        #: Name of parameter
         self.name = name
+
+        #: Parent parameter (:py:class:`Parameter` type) if it has one.
         self.parent = parent
+
+        self.descendants = {}
+        """ Flat list of all sub-Parameters. These are weak references
+        if not root."""
+
+        #: Hierarchical tree of sub-Parameters.
+        self.children = {}
+
         self.separator = separator
 
         self.required = []
@@ -100,10 +107,8 @@ def __init__(self, parent=None,
         if self._is_child:
            import weakref
            self.descendants = weakref.WeakValueDictionary()
-        else:
-            self.descendants = {}
 
-        self.children = {}
+
         self.num_id = 0
         self.options = dict.fromkeys(self.required,'')
         self._all_options = {}
@@ -135,7 +140,7 @@ def _parse_info(self,info=None):
 
             self.optional = o
 
-    def _new_child(self, name=None):
+    def _new(self, name=None):
         n = name if name is not None and str(name)==name else 'ch%02d' % len(self.descendants)
         return self.__class__(parent = self,
                               separator = self.separator,
@@ -164,7 +169,7 @@ def _name_descendants(self, separator =None):
 
             if p is None:
                 # Found dangling parameter. Create a root
-                p = self._new_child(name = nm)
+                p = self._new(name = nm)
                 self._new_desc(nm, p)
                 self.children[nm] = p
@@ -183,7 +188,16 @@ def _get_root(self):
         if self.parent is None:
             return self
         else:
-            return self.parent.get_root()
+            return self.parent._get_root()
+
+    def _get_path(self):
+        """
+        Return root of parameter tree. 
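
Keeping `descendants` as a `weakref.WeakValueDictionary` on non-root nodes means the flat index never keeps a sub-parameter alive by itself; entries disappear once their target is garbage collected. A self-contained illustration of that behaviour (CPython's immediate refcounting assumed; `Node` is just a stand-in class):

    import weakref

    class Node(object):
        pass

    index = weakref.WeakValueDictionary()
    child = Node()
    index['io.home'] = child
    print(len(index))  # 1: a strong reference to child still exists
    del child
    print(len(index))  # 0: the index entry vanished with its target
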
+ """ + if self.parent is None: + return self + else: + return self.parent._get_root() def _store_options(self,dct): """ @@ -201,9 +215,15 @@ def _store_options(self,dct): @property def root(self): - self._get_root() + return self._get_root() + + @property + def path(self): + if self.parent is None: + return self.name + else: + return self.parent.path + self.separator + self.name - def _new_desc(self, name, desc, update_in_parent = True): """ Update the new entry to the root. @@ -244,7 +264,7 @@ def load_csv(self, fbuffer, **kwargs): dct['doc']= dct.pop('longdoc') if dct.pop('static').lower()!='yes': continue - desc = self._new_child(name) + desc = self._new(name) desc._store_options(dct) desc.num_id = num @@ -258,7 +278,7 @@ def load_csv(self, fbuffer, **kwargs): # new style csv, name and path are synonymous for dct in list(CD): name = dct['path'] - desc = self._new_child(name) + desc = self._new(name) desc._store_options(dct) self._new_desc(name, desc) @@ -304,7 +324,7 @@ def load_conf_parser(self,fbuffer, **kwargs): parser.readfp(fbuffer) parser = parser for num, sec in enumerate(parser.sections()): - desc = self._new_child(name=sec) + desc = self._new(name=sec) desc._store_options(dict(parser.items(sec))) self._new_desc(sec, desc) @@ -383,10 +403,275 @@ def make_doc_rst(self,prst, use_root=True): prst.close() +class EvalParameter(Parameter): + """ + Small class to store all attributes of a ptypy parameter + + """ + _typemap = {'int': 'int', + 'float': 'float', + 'complex': 'complex', + 'str': 'str', + 'bool': 'bool', + 'tuple': 'tuple', + 'list': 'list', + 'array': 'ndarray', + 'Param': 'Param', + 'None': 'NoneType', + 'file': 'str', + '': 'NoneType'} + + _evaltypes = ['int','float','tuple','list','complex'] + _copytypes = ['str','file'] + + DEFAULTS = OrderedDict( + default = 'Default value for parameter (required).', + help = 'A small docstring for command line parsing (required).', + type = 'Komma separated list of acceptable types.', + doc = 'A small docstring for command line parsing.', + uplim = 'Upper limit for scalar / integer values', + lowlim = 'Lower limit for scalar / integer values', + choices = 'If parameter is list of choices, these are listed here.', + userlevel = """User level, a higher level means a parameter that is + less likely to vary or harder to understand.""", + ) + + def __init__(self, *args,**kwargs): + + kwargs['info']=self.DEFAULTS.copy() + + super(EvalParameter, self).__init__(*args,**kwargs) + + @property + def type(self): + + types = self.options.get('type', None) + tm = self._typemap + if types is not None: + types = [tm[x.strip()] if x.strip() in tm else x.strip() for x in types.split(',')] + + return types + + @property + def default(self,default='', check=False): + """ + Returns default as a Python type + """ + default = str(self.options.get('default', '')) + + # this destroys empty strings + default = default if default else None + + if default is None: + out = None + # should be only strings now + elif default.lower()=='none': + out = None + elif default.lower()=='true': + out = True + elif default.lower()=='false': + out = False + elif self.is_evaluable: + out = ast.literal_eval(default) + else: + out = default + + return out + + + @property + def limits(self): + if self.type is None: + return None, None + + ll = self.options.get('lowlim', None) + ul = self.options.get('uplim', None) + if 'int' in self.type: + lowlim = int(ll) if ll else None + uplim = int(ul) if ul else None + else: + lowlim = float(ll) if ll else None + uplim = float(ul) if ul 
else None
+
+        return lowlim,uplim
+
+    @property
+    def choices(self):
+        """
+        If parameter is a list of choices, these are listed here.
+        """
+        # choices is an evaluable list
+        c = self.options.get('choices', '')
+        #print c, self.name
+        if str(c)=='':
+            c=None
+        else:
+            try:
+                c = ast.literal_eval(c.strip())
+            except SyntaxError('Evaluating `choices` %s for parameter %s failed' %(str(c),self.name)):
+                c = None
+
+        return c
+
+    @property
+    def help(self):
+        """
+        Short descriptive explanation of parameter
+        """
+        return self.options.get('help', '')
+
+    @property
+    def doc(self):
+        """
+        Longer documentation, may contain *sphinx* inline markup.
+        """
+        return self.options.get('doc', '')
+
+    @property
+    def userlevel(self):
+        """
+        User level, a higher level means a parameter that is less
+        likely to vary or harder to understand.
+        """
+        # User level (for gui stuff) is an int
+        ul = self.options.get('userlevel', 1)
+
+        return int(ul) if ul else None
+
+    @property
+    def is_evaluable(self):
+        for t in self.type:
+            if t in self._evaltypes:
+                return True
+        return False
+
+
+    def check(self, pars, walk):
+        """
+        Check that input parameter pars is consistent with parameter description.
+        If walk is True and pars is a Param object, checks are also conducted for all
+        sub-parameters.
+        """
+        ep = self.path
+        out = {}
+        val = {}
+
+        # 1. Data type
+        if self.type is None:
+            # Inconclusive
+            val['type'] = CODES.UNKNOWN
+            val['lowlim'] = CODES.UNKNOWN
+            val['uplim'] = CODES.UNKNOWN
+            return {ep : val}
+        else:
+            val['type'] = CODES.PASS if (type(pars).__name__ in self.type) else CODES.FAIL
+
+        # 2. limits
+        lowlim, uplim = self.limits
+
+        if lowlim is None:
+            val['lowlim'] = CODES.UNKNOWN
+        else:
+            val['lowlim'] = CODES.PASS if (pars >= lowlim) else CODES.FAIL
+        if uplim is None:
+            val['uplim'] = CODES.UNKNOWN
+        else:
+            val['uplim'] = CODES.PASS if (pars <= uplim) else CODES.FAIL
+
+        # 3. Extra work for parameter entries
+        if 'Param' in self.type:
+            # Check for missing entries
+            for k, v in self.children.items():
+                if k not in pars:
+                    val[k] = CODES.MISSING
+
+            # Check for excess entries
+            for k, v in pars.items():
+                if k not in self.children:
+                    val[k] = CODES.INVALID
+                elif walk:
+                    # Validate child
+                    out.update(self.children[k].check(v, walk))
+
+        out[ep] = val
+        return out
+
+    def make_default(self, depth=1):
+        """
+        Creates a default parameter structure, from the loaded parameter
+        descriptions in this module
+
+        Parameters
+        ----------
+        depth : int
+            The depth in the structure to which all sub nodes are expanded
+            All nodes beyond depth will be returned as empty dictionaries
+
+        Returns
+        -------
+        pars : dict
+            A parameter branch as nested dicts.
+
+        Examples
+        --------
+        >>> from ptypy import parameter
+        >>> print parameter.children['io'].make_default()
+        """
+        out = {}
+        if depth<=0:
+            return out
+        for name,child in self.children.iteritems():
+            if child.children and child.default is None:
+                out[name] = child.make_default(depth=depth-1)
+            else:
+                out[name] = child.default
+        return out
+
+    def validate(self,pars, walk=True, raisecodes=[CODES.FAIL, CODES.INVALID]):
+        """
+        Check that the parameter structure `pars` matches the documented
+        constraints for this node / parameter.
+
+        The function raises a RuntimeError if one of the codes in the list
+        `raisecodes` has been found. If `raisecodes` is empty, the function will
+        always return successfully but problems will be logged using logger. 
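
The `depth` rule in `make_default` is worth pinning down: nodes deeper than `depth` collapse into empty containers. A toy version of the recursion on plain dicts (not ptypy code, tree content invented):

    tree = {'io': {'home': './', 'autosave': {'interval': 10}}}

    def defaults(node, depth):
        # non-dict leaves are returned as-is; dicts beyond `depth` collapse
        if not isinstance(node, dict):
            return node
        if depth <= 0:
            return {}
        return dict((k, defaults(v, depth - 1)) for k, v in node.items())

    print(defaults(tree, 1))   # {'io': {}}
    print(defaults(tree, 3))   # fully expanded
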
+ + Parameters + ---------- + pars : Param + A parameter set to validate + + walk : bool + If ``True`` (*default*), navigate sub-parameters. + + raisecodes: list + List of codes that will raise a RuntimeError. + """ + from ptypy.utils.verbose import logger + + d = self.check(pars, walk=walk) + do_raise = False + for ep, v in d.items(): + for tocheck, outcome in v.items(): + logger.log(_logging_levels[CODE_LABEL[outcome]], '%-50s %-20s %7s' % (ep, tocheck, CODE_LABEL[outcome])) + do_raise |= (outcome in raisecodes) + if do_raise: + raise RuntimeError('Parameter validation failed.') + + def sanity_check(self, depth=10): + """ + Checks if default parameters from configuration are + self-constistent with limits and choices. + """ + self.validate(self.make_default(depth =depth)) + + class PDesc(object): """ Small class to store all attributes of a ptypy parameter + **Depracated** """ def __init__(self, description, parent=None): @@ -570,7 +855,7 @@ def make_doc(self): return '{self.entry_point}\n\n{self.shortdoc}\n{self.longdoc}'.format(self=self) - +""" ## maybe this should be a function in the end. # Create the root pdroot = PDesc(description={'name': '', 'type': 'Param', 'shortdoc': 'Parameter root'}, parent=None) @@ -606,77 +891,8 @@ def make_doc(self): #cleanup del level, name, entry_level, pd, entry_pt, entry_dcts +""" -def make_sub_default(entry_point, depth=1): - """ - Creates a default parameter structure, from the loaded parameter - descriptions in this module - - Parameters - ---------- - entry_point : str - The node in the default parameter file - - depth : int - The depth in the structure to which all sub nodes are expanded - All nodes beyond depth will be returned as empty :any:`Param` - structure. - - Returns - ------- - pars : Param - A parameter branch. - - Examples - -------- - >>> from ptypy.utils import validator - >>> print validator.make_sub_default('.io') - """ - pd = entry_points_dct[entry_point] - out = Param() - if depth<=0: - return out - for name,child in pd.children.iteritems(): - if child.children is not None and child.value is None: - out[name] = make_sub_default(child.entry_point, depth=depth-1) - else: - out[name] = child.value - return out - -def validate(pars, entry_point, walk=True, raisecodes=[CODES.FAIL, CODES.INVALID]): - """ - Check that the parameter structure `pars` matches the documented - constraints at the given entry_point. - - The function raises a RuntimeError if one of the code in the list - `raisecodes` has been found. If raisecode is empty, the function will - always return successfully but problems will be logged using logger. - - Parameters - ---------- - pars : Param - A parameter set to validate - - entry_point : str - The node in the parameter structure to match to. - - walk : bool - If ``True`` (*default*), navigate sub-parameters. - - raisecodes: list - List of codes that will raise a RuntimeError. 
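
What `check` hands back to `validate` is a flat mapping from entry points to per-test codes; `validate` only logs each outcome and raises if one of them is in `raisecodes`. A sketch of how such a result reads (the entry-point name is made up; `CODES` and `CODE_LABEL` are the module-level tables defined above):

    result = {'root.io.interval': {'type': CODES.PASS,
                                   'lowlim': CODES.PASS,
                                   'uplim': CODES.UNKNOWN}}
    for ep, checks in result.items():
        for what, outcome in checks.items():
            print('%-30s %-10s %s' % (ep, what, CODE_LABEL[outcome]))
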
-    """
-    from ptypy.utils.verbose import logger
-
-    pdesc = parameter_descriptions[entry_point]
-    d = pdesc.check(pars, walk=walk)
-    do_raise = False
-    for ep, v in d.items():
-        for tocheck, outcome in v.items():
-            logger.log(_logging_levels[CODE_LABEL[outcome]], '%-50s %-20s %7s' % (ep, tocheck, CODE_LABEL[outcome]))
-            do_raise |= (outcome in raisecode)
-    if do_raise:
-        raise RuntimeError('Parameter validation failed.')
-
 
 def create_default_template(filename=None,user_level=0,doc_level=2):
     """

From d6bcd554a3f0855d4a54872ae6932f58fc656a2f Mon Sep 17 00:00:00 2001
From: Bjoern Enders
Date: Wed, 14 Jun 2017 15:02:38 -0700
Subject: [PATCH 020/363] Some changes to validator. Manager on way towards
 multi-pass model

---
 ptypy/core/manager.py    | 123 +++++++++++
 ptypy/utils/misc.py      | 103 +++++++++-
 ptypy/utils/validator.py | 434 +++++++++++++++++++++++++--------------
 3 files changed, 504 insertions(+), 156 deletions(-)

diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index f3fd5dc56..265f2d827 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -22,6 +22,7 @@
 import xy
 import data
 
+from collections import OrderedDict
 from .. import utils as u
 from ..utils.verbose import logger, headerline, log
 from classes import *
@@ -89,6 +90,8 @@
 )
 
 
+
+
 class ModelManager(object):
     """
     Manages ptypy objects creation and update.
@@ -115,6 +118,14 @@ class ModelManager(object):
     and a short listing below
     """
     _PREFIX = MODEL_PREFIX
+
+    _BASE_MODEL = OrderedDict(
+        index = 0,
+        energy = 0.0,
+        pmode = 0,
+        x = 0.0,
+        y = 0.0,
+    )
 
     def __init__(self, ptycho, pars=None, scans=None, **kwargs):
         """
@@ -317,6 +328,118 @@ def make_datasource(self, data_pars=None):
 
         return data.DataSource(self.scans)
 
+    def new_configure(self,dp):
+        """
+        Configure the scan and its storages for an incoming data package.
+        """
+        parallel.barrier()
+
+        meta = dp['common']
+        label = meta['ptylabel']
+
+        # We expect a string for the label.
+        assert label == str(label)
+
+        logger.info('Importing data from %s as scan %s.'
+                    % (meta['label'], label))
+
+        # Prepare scan dictionary or dig up the already prepared one
+        scan = self.prepare_scan(label)
+        scan.meta = meta
+
+        sh = scan.meta['shape']
+        psize = scan.meta['psize']
+
+        # Storage generation if not already existing
+        if scan.get('diff') is None:
+            # This scan is brand new so we create storages for it
+            scan.diff = self.ptycho.diff.new_storage(
+                shape=(1, sh[-2], sh[-1]),
+                psize=psize,
+                padonly=True,
+                layermap=None)
+
+        # Same for mask
+        if scan.get('mask') is None:
+            scan.mask = self.ptycho.mask.new_storage(
+                shape=(1, sh[-2], sh[-1]),
+                psize=psize,
+                padonly=True,
+                layermap=None)
+
+    def new_data_package(self,scan,dp):
+        """
+        Receives and stores data chunk, emits records.
+        """
+        parallel.barrier()
+
+        label = scan.meta['ptylabel']
+
+        # Collect the views and positions built from this package
+        diff_views = []
+        mask_views = []
+        positions = []
+
+        # Buffer incoming data and evaluate if we got Nones in data
+        for dct in dp['iterable']:
+
+            active = dct['data'] is not None
+            index = dct['index']
+            sh = scan.meta['shape']
+            psize = scan.meta['psize']
+
+
+            dv = View(container=self.ptycho.diff,
+                      accessrule={'shape': sh,
+                                  'psize': psize,
+                                  'coord': 0.0,
+                                  'storageID': scan.diff.ID,
+                                  'layer': index,
+                                  'active': active})
+
+            diff_views.append(dv)
+
+            mv = View(container=self.ptycho.mask,
+                      accessrule={'shape': sh,
+                                  'psize': psize,
+                                  'coord': 0.0,
+                                  'storageID': scan.mask.ID,
+                                  'layer': index,
+                                  'active': active})
+
+            mask_views.append(mv)
+
+            pos = dct.get('position')
+            if pos is None:
+                logger.warning('No position set for scan point %d of scan %s'
+                               % (index, label))
+
+            positions.append(pos)
+
+        # Now we should have the right views to these storages. 
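
Each incoming frame is wired to its storage through a `View` access rule; the `layer` entry is what later lets `layermap.index(idx)` route raw data into the right slice. Schematically, with illustrative values (the storage name and numbers are made up):

    accessrule = {'shape': (256, 256),    # frame shape, from scan.meta['shape']
                  'psize': 172e-6,        # detector pixel size, from scan.meta['psize']
                  'coord': 0.0,           # placement inside the storage
                  'storageID': 'S0000',   # hypothetical storage name
                  'layer': 12,            # frame index in the diffraction stack
                  'active': True}         # False on ranks that do not own the frame
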
Let them
+        # reformat(), which creates the right sizes and the datalist access
+        scan.diff.reformat()
+        scan.mask.reformat()
+        # parallel.barrier()
+
+        for dct in dp['iterable']:
+            parallel.barrier()
+            if dct['data'] is None:
+                continue
+
+            data = dct['data']
+            idx = dct['index']
+
+            scan.diff.data[scan.diff.layermap.index(idx)][:] = data
+            scan.mask.data[scan.mask.layermap.index(idx)][:] = dct.get(
+                'mask', np.ones_like(data))
+            # del dct['data']
+
+        scan.diff.nlayers = parallel.MPImax(scan.diff.layermap) + 1
+        scan.mask.nlayers = parallel.MPImax(scan.mask.layermap) + 1
+
+        # Empty iterable buffer
+        # scan.iterable = []
+        scan.new_positions = positions
+        scan.new_diff_views = diff_views
+        scan.new_mask_views = mask_views
+        scan.diff_views += diff_views
+        scan.mask_views += mask_views
+
+        self._update_stats(scan)
+
     def new_data(self):
         """
         Get all new diffraction patterns and create all views and pods
diff --git a/ptypy/utils/misc.py b/ptypy/utils/misc.py
index 4f54c7d0f..69547f2e4 100644
--- a/ptypy/utils/misc.py
+++ b/ptypy/utils/misc.py
@@ -10,10 +10,111 @@
 import os
 import numpy as np
 from functools import wraps
+from collections import OrderedDict
+from collections import namedtuple
 
 __all__ = ['str2int','str2range',\
            'complex_overload','expect2','expect3',\
-           'keV2m','keV2nm','nm2keV', 'clean_path','unique_path']
+           'keV2m','keV2nm','nm2keV', 'clean_path','unique_path','Table']
+
+
+
+class Table(object):
+    """
+    Basic table implemented using numpy.recarray
+    Ideally subclassed to be used with faster or more
+    flexible databases.
+    """
+    def __init__(self,dct,name='pods'):
+        self._table_name = name
+        self._record_factory_from_dict(dct)
+
+    def _record_factory_from_dict(self,dct,suffix='_record'):
+        self._record_factory = namedtuple(self._table_name+suffix,dct.keys())
+        self._record_default = self._record_factory._make(dct.values())
+        self._record_dtype = [np.array(v).dtype for v in self._record_default]
+
+    def new_table(self, records = 0):
+        r = self._record_default
+        dtype = zip(r._fields,self._record_dtype)
+        self._table = np.array([tuple(self._record_default)] * records,dtype)
+
+    def new_fields(self,**kwargs):
+        """
+        Add fields (columns) to the table. This is probably slow.
+        """
+        # save old stuff
+        addition = OrderedDict(**kwargs)
+        t = self._table
+        records = self.pull_records()
+        new = self._record_factory._asdict(self._record_default)
+        new.update(addition)
+        self._record_factory_from_dict(new)
+        self.new_table()
+        a = tuple(addition.values())
+        self.add_records( [r + a for r in records] )
+
+
+    def pull_records(self,record_ids=None):
+        if record_ids is None:
+            return map(self._record_factory._make, self._table)
+        else:
+            return map(self._record_factory._make, self._table[record_ids])
+
+    def add_records(self,records):
+        """ Add records at the end of the table. """
+        start = len(T._table)
+        stop = len(records)+start
+        record_ids = range(start,stop)
+        self._table.resize((len(T._table)+len(records),))
+        self._table[start:stop]=records
+
+    def insert_records(self,records, record_ids):
+        """ Insert records and overwrite existing content. """
+        self._table[record_ids]=records
+
+    def select_func(self,func,fields=None):
+        """
+        Find all records where search function `func` evaluates True.
+        Arguments to the function are selected by `fields`.
+        The search function will always receive the record_id as first argument.
+        """
+        a = range(len(self._table))
+        if fields is None:
+            res = [n for n in a if func(n)]
+        else:
+            t = self._table[fields].T # pretty inefficient. 
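
A minimal usage sketch for `Table` (field names and values invented; it assumes the `T._table` typo in `add_records` is corrected to `self._table`, which a later commit below does):

    from collections import OrderedDict

    # the template record fixes field names, defaults and dtypes
    T = Table(OrderedDict([('index', 0), ('x', 0.0), ('y', 0.0)]), name='pods')
    T.new_table()                                   # start with zero records
    T.add_records([(0, 0.1, 0.2), (1, 0.3, 0.4)])   # append two rows
    print(T.select_match('index', 1))               # ids of rows with index == 1
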
Is like a dual transpose
+            res =[n for n,rec in zip(a,t) if func(n, *rec)]
+
+        return np.array(res)
+
+    def select_range(self,field,low,high):
+        """
+        Find all records whose values are in the range [`low`,`high`] for
+        the field entry `field`. The field values should be numerical.
+        """
+        t = self._table
+        record_ids = np.argwhere((t[field] >= low) & (t[field] <= high)).squeeze(-1)
+        return record_ids
+
+    def select_match(self,field,match):
+        """
+        Find all records whose value equals `match` for
+        the field entry `field`
+        """
+        t = self._table
+        record_ids = np.argwhere(t[field] == match).squeeze(-1)
+        return record_ids
+
+    def modify_add(self,record_ids,**kwargs):
+        """
+        Pull the selected records, replace fields with the values in
+        **kwargs and append the modified records at the end of the table.
+        """
+        old_records = self.pull_records(record_ids)
+        recs = [r._replace(**kwargs) for r in old_records]
+        self.add_records(recs)
+
 
 def str2index(s):
     """
diff --git a/ptypy/utils/validator.py b/ptypy/utils/validator.py
index 9e060e2fc..f1f8cd106 100644
--- a/ptypy/utils/validator.py
+++ b/ptypy/utils/validator.py
@@ -123,6 +123,7 @@ def _is_child(self):
         Type check
         """
         return type(self.parent) is self.__class__
+
 
     def _parse_info(self,info=None):
         if info is not None:
@@ -402,8 +403,189 @@ def make_doc_rst(self,prst, use_root=True):
         prst.write('\n')
         prst.close()
 
+class ArgParseParameter(Parameter):
+    DEFAULTS = OrderedDict(
+        default = 'Default value for parameter.',
+        help = 'A small docstring for command line parsing (required).',
+        choices = 'If parameter is list of choices, these are listed here.'
+    )
+    def __init__(self, *args,**kwargs):
+
+        info = self.DEFAULTS.copy()
+        ninfo = kwargs.get('info')
+        if ninfo is not None:
+            info.update(ninfo)
+
+        kwargs['info'] = info
+
+        super(ArgParseParameter, self).__init__(*args,**kwargs)
+
+    @property
+    def help(self):
+        """
+        Short descriptive explanation of parameter
+        """
+        return self.options.get('help', '')
+
+    @property
+    def default(self):
+        """
+        Returns default as a Python type
+        """
+        default = str(self.options.get('default', ''))
+
+        if not default:
+            return None
+        else:
+            return self.eval(default)
+
+
+    def eval(self,val):
+        """
+        A more verbose wrapper around `ast.literal_eval`
+        """
+        try:
+            return ast.literal_eval(val)
+        except ValueError as e:
+            msg = e.message+". Could not read %s for parameter %s" % (val,self.name)
+            raise ValueError(msg)
+
+    @property
+    def choices(self):
+        """
+        If parameter is a list of choices, these are listed here.
+        """
+        # choices is an evaluable list
+        c = self.options.get('choices', '')
+        #print c, self.name
+        if str(c)=='':
+            c=None
+        else:
+            try:
+                c = ast.literal_eval(c.strip())
+            except SyntaxError:
+                # evaluating the `choices` string failed; fall back to None
+                c = None
+
+        return c
+
+
+    def make_default(self, depth=1):
+        """
+        Creates a default parameter structure, from the loaded parameter
+        descriptions in this module
+
+        Parameters
+        ----------
+        depth : int
+            The depth in the structure to which all sub nodes are expanded
+            All nodes beyond depth will be returned as empty dictionaries
+
+        Returns
+        -------
+        pars : dict
+            A parameter branch as nested dicts. 
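
`ast.literal_eval` accepts Python literals only, which is exactly why it is safe to use on untrusted description files; anything involving names or calls fails. For instance:

    import ast

    print(ast.literal_eval('1.5e-3'))      # 0.0015
    print(ast.literal_eval('(256, 256)'))  # (256, 256)
    print(ast.literal_eval("['a', 'b']"))  # ['a', 'b']
    # ast.literal_eval('np.pi') raises ValueError, which the `eval`
    # wrapper above re-raises with the offending parameter name attached.
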
+ + Examples + -------- + >>> from ptypy import parameter + >>> print parameter.children['io'].make_default() + """ + out = {} + if depth<=0: + return out + for name,child in self.children.iteritems(): + if child.children and child.default is None: + out[name] = child.make_default(depth=depth-1) + else: + out[name] = child.default + return out + + def _get_type_argparse(self): + """ + Returns type or callable that the argparser uses for + reading in cmd line argements. + """ + return type(self.default) + + def add2argparser(self,parser=None, prefix='', + excludes=('scans','engines'), mode = 'add'): + + sep = self.separator + + pd = self + + argsep='-' + + if parser is None: + from argparse import ArgumentParser + description = """ + Parser for %s + Doc: %s + """ % (pd.name, pd.help) + parser = ArgumentParser(description=description) + + # overload the parser + if not hasattr(parser,'_aux_translator'): + parser._aux_translator={} + + + # get list of descendants and remove separator + ndesc = dict((k.replace(sep,argsep),v) for k,v in self.descendants.items()) + + + groups = {} + + for name, pd in ndesc.items(): + if pd.name in excludes: continue + if pd.children: + groups[name] = parser.add_argument_group(title=prefix+name, description=pd.help) + + for name, pd in ndesc.iteritems(): + + if pd.name in excludes: continue + up = argsep.join(name.split(argsep)[:-1]) + # recursive part + parse = groups.get(up,parser) + + """ + # this should be part of PDesc I guess. + typ = type(pd.default) + + for t in pd.type: + try: + typ= eval(t) + except BaseException: + continue + if typ is not None: + break + + if typ is None: + u.verbose.logger.debug('Failed evaluate type strings %s of parameter %s in python' % (str(pd.type),name)) + return parser + + if type(typ) is not type: + u.verbose.logger.debug('Type %s of parameter %s is not python type' % (str(typ),name)) + return parser + """ + typ = pd._get_type_argparse() + + if typ is bool: + flag = '--no-'+name if pd.value else '--'+name + action='store_false' if pd.value else 'store_true' + parse.add_argument(flag, dest=name, action=action, + help=pd.shortdoc ) + else: + d = pd.default + defstr = d.replace('%(','%%(') if str(d)==d else str(d) + parse.add_argument('--'+name, dest=name, type=typ, default = pd.default, choices=pd.choices, + help=pd.help +' (default=%s)' % defstr) + + parser._aux_translator[name] = pd + + return parser -class EvalParameter(Parameter): + +class EvalParameter(ArgParseParameter): """ Small class to store all attributes of a ptypy parameter @@ -428,7 +610,7 @@ class EvalParameter(Parameter): default = 'Default value for parameter (required).', help = 'A small docstring for command line parsing (required).', type = 'Komma separated list of acceptable types.', - doc = 'A small docstring for command line parsing.', + doc = 'A longer explanation for the online docs.', uplim = 'Upper limit for scalar / integer values', lowlim = 'Lower limit for scalar / integer values', choices = 'If parameter is list of choices, these are listed here.', @@ -443,17 +625,7 @@ def __init__(self, *args,**kwargs): super(EvalParameter, self).__init__(*args,**kwargs) @property - def type(self): - - types = self.options.get('type', None) - tm = self._typemap - if types is not None: - types = [tm[x.strip()] if x.strip() in tm else x.strip() for x in types.split(',')] - - return types - - @property - def default(self,default='', check=False): + def default(self): """ Returns default as a Python type """ @@ -476,8 +648,17 @@ def default(self,default='', 
check=False): else: out = default - return out + return out + @property + def type(self): + + types = self.options.get('type', None) + tm = self._typemap + if types is not None: + types = [tm[x.strip()] if x.strip() in tm else x.strip() for x in types.split(',')] + + return types @property def limits(self): @@ -495,31 +676,6 @@ def limits(self): return lowlim,uplim - @property - def choices(self): - """ - If parameter is a list of choices, these are listed here. - """ - # choices is an evaluable list - c = self.options.get('choices', '') - #print c, self.name - if str(c)=='': - c=None - else: - try: - c = ast.literal_eval(c.strip()) - except SyntaxError('Evaluating `choices` %s for parameter %s failed' %(str(c),self.name)): - c = None - - return c - - @property - def help(self): - """ - Short descriptive explanation of parameter - """ - return self.options.get('help', '') - @property def doc(self): """ @@ -596,37 +752,6 @@ def check(self, pars, walk): out[ep] = val return out - - def make_default(self, depth=1): - """ - Creates a default parameter structure, from the loaded parameter - descriptions in this module - - Parameters - ---------- - depth : int - The depth in the structure to which all sub nodes are expanded - All nodes beyond depth will be returned as empty dictionaries - - Returns - ------- - pars : dict - A parameter branch as nested dicts. - - Examples - -------- - >>> from ptypy import parameter - >>> print parameter.children['io'].make_default() - """ - out = {} - if depth<=0: - return out - for name,child in self.children.iteritems(): - if child.children and child.default is None: - out[name] = child.make_default(depth=depth-1) - else: - out[name] = child.default - return out def validate(self,pars, walk=True, raisecodes=[CODES.FAIL, CODES.INVALID]): """ @@ -892,7 +1017,91 @@ def make_doc(self): #cleanup del level, name, entry_level, pd, entry_pt, entry_dcts """ +def add2argparser(parser=None, entry_point='',root=None,\ + excludes=('scans','engines'), mode = 'add',group=None): + sep = '.' + + pd = parameter_descriptions[entry_point] + + has_children = hasattr(pd,'children') and pd.children is not None + + if root is None: + root = pd.entry_point if has_children else pd.parent.entry_point + + assert root in entry_points_dct.keys() + + if excludes is not None: + # remove leading '.' + lexcludes = [e.strip()[1:] for e in excludes if e.strip().startswith(sep) ] + loc_exclude = [e.split(sep)[0] for e in lexcludes if len(e.split())==1 ] + else: + excludes =['baguette'] #:) + + if pd.name in excludes: return + + if parser is None: + from argparse import ArgumentParser + description = """ + Parser for % %s + Doc: %s + """ % (pd.name, pd.shortdoc) + parser = ArgumentParser(description=description) + + # overload the parser + if not hasattr(parser,'_ptypy_translator'): + parser._ptypy_translator={} + + + # convert to parser variable + name = pd.entry_point.replace(root,'',1).partition(sep)[2].replace('.','-') + + if has_children: + entry = pd.entry_point + if name !='': + ngroup=parser.add_argument_group(title=name, description=None) + else: + ngroup=None + # recursive behavior here + new_excludes = [e[len(entry):] for e in excludes if e.startswith(entry)] + for key,child in pd.children.iteritems(): + _add2argparser(parser, entry_point=child.entry_point,root=root,\ + excludes=excludes,mode = 'add',group=ngroup) + + if name=='': return parser + + parse = parser if group is None else group + + # this should be part of PDesc I guess. 
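
The net effect of this argparse bridge is that tree paths become command-line flags, with the separator mapped to '-' and booleans that default to ``True`` getting an inverted '--no-' switch. A hand-written equivalent for two hypothetical parameters:

    from argparse import ArgumentParser

    parser = ArgumentParser()
    parser.add_argument('--io-autosave-interval', dest='io-autosave-interval',
                        type=int, default=10,
                        help='Autosave interval (default=10)')
    parser.add_argument('--no-io-autoplot', dest='io-autoplot',
                        action='store_false')  # default True, flag switches it off
    opts = parser.parse_args(['--io-autosave-interval', '5', '--no-io-autoplot'])
    print(vars(opts))  # {'io-autosave-interval': 5, 'io-autoplot': False}
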
+ typ = None + for t in pd.type: + try: + typ= eval(t) + except BaseException: + continue + if typ is not None: + break + if typ is None: + u.verbose.logger.debug('Failed evaluate type strings %s of parameter %s in python' % (str(pd.type),name)) + return parser + + if type(typ) is not type: + u.verbose.logger.debug('Type %s of parameter %s is not python type' % (str(typ),name)) + return parser + + elif typ is bool: + flag = '--no-'+name if pd.value else '--'+name + action='store_false' if pd.value else 'store_true' + parse.add_argument(flag, dest=name, action=action, + help=pd.shortdoc ) + else: + d = pd.default + defstr = d.replace('%(','%%(') if str(d)==d else str(d) + parse.add_argument('--'+name, dest=name, type=typ, default = pd.value, choices=pd.choices, + help=pd.shortdoc +' (default=%s)' % defstr) + parser._ptypy_translator[name] = pd + + return parser def create_default_template(filename=None,user_level=0,doc_level=2): """ @@ -975,92 +1184,7 @@ def _format_longdoc(doc): f.write('\n\nPtycho(p,level=5)\n') f.close() -def _add2argparser(parser=None, entry_point='',root=None,\ - excludes=('scans','engines'), mode = 'add',group=None): - - sep = '.' - - pd = parameter_descriptions[entry_point] - - has_children = hasattr(pd,'children') and pd.children is not None - - if root is None: - root = pd.entry_point if has_children else pd.parent.entry_point - - assert root in entry_points_dct.keys() - - if excludes is not None: - # remove leading '.' - lexcludes = [e.strip()[1:] for e in excludes if e.strip().startswith(sep) ] - loc_exclude = [e.split(sep)[0] for e in lexcludes if len(e.split())==1 ] - else: - excludes =['baguette'] #:) - - if pd.name in excludes: return - - if parser is None: - from argparse import ArgumentParser - description = """ - Parser for PDesc %s - Doc: %s - """ % (pd.name, pd.shortdoc) - parser = ArgumentParser(description=description) - - # overload the parser - if not hasattr(parser,'_ptypy_translator'): - parser._ptypy_translator={} - - - # convert to parser variable - name = pd.entry_point.replace(root,'',1).partition(sep)[2].replace('.','-') - - if has_children: - entry = pd.entry_point - if name !='': - ngroup=parser.add_argument_group(title=name, description=None) - else: - ngroup=None - # recursive behavior here - new_excludes = [e[len(entry):] for e in excludes if e.startswith(entry)] - for key,child in pd.children.iteritems(): - _add2argparser(parser, entry_point=child.entry_point,root=root,\ - excludes=excludes,mode = 'add',group=ngroup) - - if name=='': return parser - - parse = parser if group is None else group - - # this should be part of PDesc I guess. 
- typ = None - for t in pd.type: - try: - typ= eval(t) - except BaseException: - continue - if typ is not None: - break - if typ is None: - u.verbose.logger.debug('Failed evaluate type strings %s of parameter %s in python' % (str(pd.type),name)) - return parser - - if type(typ) is not type: - u.verbose.logger.debug('Type %s of parameter %s is not python type' % (str(typ),name)) - return parser - - elif typ is bool: - flag = '--no-'+name if pd.value else '--'+name - action='store_false' if pd.value else 'store_true' - parse.add_argument(flag, dest=name, action=action, - help=pd.shortdoc ) - else: - d = pd.default - defstr = d.replace('%(','%%(') if str(d)==d else str(d) - parse.add_argument('--'+name, dest=name, type=typ, default = pd.value, choices=pd.choices, - help=pd.shortdoc +' (default=%s)' % defstr) - parser._ptypy_translator[name] = pd - - return parser if __name__ =='__main__': from ptypy import utils as u From 43f4241220a265b031b9d735bb4f69055aaf1fb4 Mon Sep 17 00:00:00 2001 From: bjoernenders Date: Wed, 21 Jun 2017 10:31:25 +0200 Subject: [PATCH 021/363] First attempt to refactor model --- ptypy/utils/misc.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/ptypy/utils/misc.py b/ptypy/utils/misc.py index 69547f2e4..be5f0f530 100644 --- a/ptypy/utils/misc.py +++ b/ptypy/utils/misc.py @@ -63,12 +63,14 @@ def pull_records(self,record_ids=None): def add_records(self,records): """ Add records at the end of the table. """ - start = len(T._table) + start = len(self._table) stop = len(records)+start record_ids = range(start,stop) - self._table.resize((len(T._table)+len(records),)) + self._table.resize((len(self._table)+len(records),)) self._table[start:stop]=records + return record_ids + def insert_records(self,records, record_ids): """ Insert records and overwrite existing content. """ self._table[record_ids]=records From 91b1260c9a3646b8abb8f2006b19ebfc64e6f287 Mon Sep 17 00:00:00 2001 From: bjoernenders Date: Thu, 22 Jun 2017 11:31:14 +0200 Subject: [PATCH 022/363] Fixed small bug --- ptypy/core/manager.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py index 4c39f54ab..14dd56536 100644 --- a/ptypy/core/manager.py +++ b/ptypy/core/manager.py @@ -34,11 +34,8 @@ FType = np.float64 CType = np.complex128 -<<<<<<< HEAD -__all__ = ['DEFAULT', 'ModelManager'] -======= + __all__ = ['DEFAULT', 'ModelManager', 'ScanModel'] ->>>>>>> origin/new_scan_structure DESCRIPTION = u.Param() From c8fa0551b018d55d811848fdbbf183c4894b0c5b Mon Sep 17 00:00:00 2001 From: Pierre Thibault Date: Thu, 22 Jun 2017 11:04:58 +0100 Subject: [PATCH 023/363] Bug fix for new scan structure --- ptypy/core/data.py | 46 ----------------------------- ptypy/core/manager.py | 4 ++- ptypy/experiment/__init__.py | 56 ++++++++++++++++++++++++++++++++++-- 3 files changed, 57 insertions(+), 49 deletions(-) diff --git a/ptypy/core/data.py b/ptypy/core/data.py index b153bfd02..c29ab6e9e 100644 --- a/ptypy/core/data.py +++ b/ptypy/core/data.py @@ -21,7 +21,6 @@ from ptypy import utils as u from ptypy import io from ptypy import resources - from ptypy.experiment import PtyScanTypes from ptypy.core import geometry from ptypy.utils.verbose import logger, log, headerline from ..utils import parallel @@ -34,7 +33,6 @@ from .. import utils as u from .. import io from .. 
import resources - from ..experiment import PtyScanTypes from ..utils.verbose import logger, log, headerline from ..utils import parallel from ptypy import resources @@ -1428,50 +1426,6 @@ def load(self, indices): return raw, {}, {} - -def makePtyScan(scan_pars, scanmodel=None): - """ - Factory for PtyScan object. Return an instance of the appropriate PtyScan subclass based on the - input parameters. - - Parameters - ---------- - scan_pars: dict or Param - Input parameters according to :py:data:`.scan.data`. - - scanmodel: ScanModel object - FIXME: This seems to be needed for simulations but broken for now. - """ - - # Extract information on the type of object to build - pars = scan_pars.data - source = pars.source - recipe = pars.get('recipe', {}) - - if source is not None: - source = source.lower() - - if source in PtyScanTypes: - ps_obj = PtyScanTypes[source] - logger.info('Scan will be prepared with the recipe "%s"' % source) - ps_instance = ps_obj(pars, recipe=recipe) - elif source.endswith('.ptyd') or source.endswith('.pty') or str(source) == 'file': - ps_instance = PtydScan(pars, source=source) - elif source == 'test': - ps_instance = MoonFlowerScan(pars) - elif source == 'sim': - from ..simulations import SimScan - logger.info('Scan will simulated') - ps_instance = SimScan(pars, scanmodel) - elif source == 'empty' or source is None: - pars.recipe = None - logger.warning('Generating dummy PtyScan - This label will source only zeros as data') - ps_instance = PtyScan(pars) - else: - raise RuntimeError('Could not manage source "%s"' % str(source)) - - return ps_instance - if __name__ == "__main__": u.verbose.set_level(3) MS = MoonFlowerScan(num_frames=100) diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py index 14dd56536..1d6455a1c 100644 --- a/ptypy/core/manager.py +++ b/ptypy/core/manager.py @@ -112,6 +112,8 @@ def __init__(self, ptycho=None, specific_pars=None, generic_pars=None, label=Non Input parameters (see :py:attr:`DEFAULT`) If None uses defaults """ + from .. import experiment + # Update parameter structure p = u.Param(self.DEFAULT.copy()) p.update(generic_pars, in_place_depth=4) @@ -131,7 +133,7 @@ def __init__(self, ptycho=None, specific_pars=None, generic_pars=None, label=Non self.Cmask = ptycho.mask # Create Associated PtyScan object - self.ptyscan = data.makePtyScan(self.p) + self.ptyscan = experiment.makePtyScan(self.p.data) # Initialize instance attributes self.mask = None diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py index 4e9d47592..524e0113e 100644 --- a/ptypy/experiment/__init__.py +++ b/ptypy/experiment/__init__.py @@ -14,8 +14,6 @@ :copyright: Copyright 2014 by the PTYPY team, see AUTHORS. :license: GPLv2, see LICENSE for details. """ -#from data_structure import * - # Import instrument-specific modules #import cSAXS from I13_ffp import I13ScanFFP @@ -31,6 +29,13 @@ from UCL import UCLLaserScan from ALS_5321 import ALS5321Scan +if __name__ == "__main__": + from ptypy.utils.verbose import logger + from ptypy.core.data import PtydScan, MoonFlowerScan, PtyScan +else: + from ..utils.verbose import logger + from ..core.data import PtydScan, MoonFlowerScan, PtyScan + PtyScanTypes = dict( i13dls_ffp = I13ScanFFP, i13dls_nfp = I13ScanNFP, @@ -45,3 +50,50 @@ laser_ucl = UCLLaserScan, als5321 = ALS5321Scan, ) + +def makePtyScan(pars, scanmodel=None): + """ + Factory for PtyScan object. Return an instance of the appropriate PtyScan subclass based on the + input parameters. 
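
Schematically, the factory is meant to be driven like this (a sketch only: a real parameter set needs the full `.scan.data` defaults filled in before any of these classes will initialize cleanly):

    from ptypy import utils as u

    pars = u.Param()
    pars.source = 'test'       # -> MoonFlowerScan (simulated test data)
    ps = makePtyScan(pars)

    # other source values route elsewhere:
    #   'scan_0001.ptyd' / 'file' -> PtydScan, reading back a prepared file
    #   'sim'                     -> SimScan from ptypy.simulations
    #   'empty' / None            -> bare PtyScan serving zeros
    #   any key of PtyScanTypes   -> the matching beamline recipe class
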
+
+    Parameters
+    ----------
+    pars: dict or Param
+        Input parameters according to :py:data:`.scan.data`.
+
+    scanmodel: ScanModel object
+        FIXME: This seems to be needed for simulations but broken for now.
+    """
+
+    if __name__ == "__main__":
+        from ptypy.experiment import PtyScanTypes
+    else:
+        from ..experiment import PtyScanTypes
+
+    # Extract information on the type of object to build
+    source = pars.source
+    recipe = pars.get('recipe', {})
+
+    if source is not None:
+        source = source.lower()
+
+    if source in PtyScanTypes:
+        ps_obj = PtyScanTypes[source]
+        logger.info('Scan will be prepared with the recipe "%s"' % source)
+        ps_instance = ps_obj(pars, recipe=recipe)
+    elif source is not None and (source.endswith('.ptyd') or source.endswith('.pty') or str(source) == 'file'):
+        ps_instance = PtydScan(pars, source=source)
+    elif source == 'test':
+        ps_instance = MoonFlowerScan(pars)
+    elif source == 'sim':
+        from ..simulations import SimScan
+        logger.info('Scan will be simulated')
+        ps_instance = SimScan(pars, scanmodel)
+    elif source == 'empty' or source is None:
+        pars.recipe = None
+        logger.warning('Generating dummy PtyScan - This label will source only zeros as data')
+        ps_instance = PtyScan(pars)
+    else:
+        raise RuntimeError('Could not manage source "%s"' % str(source))
+
+    return ps_instance

From b8e8d7f0d95370c34d223cccae5001492f7823b1 Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Thu, 22 Jun 2017 11:17:29 +0100
Subject: [PATCH 024/363] Remove datasource from Ptycho object

---
 ptypy/core/ptycho.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index 69bdf531f..94cc2b1bc 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -175,9 +175,6 @@ def __init__(self, pars=None, level=2, **kwargs):
         self.mask = None
         self.modelm = None
 
-        # Data
-        self.datasource = None
-
         # Communication
         self.interactor = None
         self.plotter = None
@@ -805,10 +802,9 @@ def save_run(self, alt_file=None, kind='minimal', force_overwrite=True):
 
         if kind == 'fullflat':
             self.interactor.stop()
-            logger.info('Deleting references for interactor, '
-                        'datasource and engines.')
+            logger.info('Deleting references for interactor '
+                        'and engines.')
             del self.interactor
-            del self.datasource
             del self.paths
             try:
                 del self.engines

From caa63d2fdd9f4dc3b18625d224d9f0091c36aef5 Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Thu, 22 Jun 2017 11:36:04 +0100
Subject: [PATCH 025/363] Bug fix - scan.pars renamed to scan.p

---
 ptypy/core/manager.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 1d6455a1c..5092caa7f 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -158,7 +158,7 @@ def __init__(self, ptycho=None, specific_pars=None, generic_pars=None, label=Non
     def new_data(self):
         """
         Feed data from ptyscan object.
-        :return:
+        :return: None if no data is available, True otherwise.
         """
 
         # Initialize if that has not been done yet
@@ -323,6 +323,8 @@ def new_data(self):
 
         self._update_stats()
 
+        return True
+
     def _update_stats(self):
         """
         (Re)compute the statistics for the data stored in the scan.
@@ -688,16 +690,16 @@ def _initialize_object(self, object_ids):
 
             logger.info('Initializing object storage %s using scan %s.'
                         % (oid, scan.label))
 
-            sample_pars = scan.pars.sample
+            sample_pars = scan.p.sample
             if type(sample_pars) is u.Param:
                 # Deep copy
                 sample_pars = sample_pars.copy(depth=10)
 
                 # Quickfix spectral contribution. 
- if (scan.pars.coherence.object_dispersion + if (scan.p.coherence.object_dispersion not in [None, 'achromatic'] - and scan.pars.coherence.probe_dispersion + and scan.p.coherence.probe_dispersion in [None, 'achromatic']): logger.info( 'Applying spectral distribution input to object fill.') @@ -765,7 +767,7 @@ def _create_pods(self, new_scans): # Object and probe position pos_pr = u.expect2(0.0) - pos_obj = positions[i] if 'empty' not in scan.pars.tags else 0.0 + pos_obj = positions[i] if 'empty' not in scan.p.tags else 0.0 t, object_id = self.sharing_rules(obj_label, index) probe_id, t = self.sharing_rules(pr_label, index) From 971fc7769a5b04f03f3b0dc89e430295a3173ec4 Mon Sep 17 00:00:00 2001 From: Pierre Thibault Date: Thu, 22 Jun 2017 11:57:23 +0100 Subject: [PATCH 026/363] Bug fix: ptycho needs to be passed to ScanManager --- ptypy/core/manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py index 5092caa7f..c930ff197 100644 --- a/ptypy/core/manager.py +++ b/ptypy/core/manager.py @@ -450,7 +450,7 @@ def __init__(self, ptycho, pars=None, scans=None, **kwargs): # Create scan objects from information already available for label, scan_pars in self.scans_pars.iteritems(): - self.scans[label] = ScanModel(specific_pars=scan_pars, generic_pars=self.p, label=label) + self.scans[label] = ScanModel(ptycho=self.ptycho, specific_pars=scan_pars, generic_pars=self.p, label=label) # Sharing dictionary that stores sharing behavior self.sharing = {'probe_ids': {}, 'object_ids': {}} From b61924947be8e9a4711eb14eafaff5ebc18bf2f9 Mon Sep 17 00:00:00 2001 From: bjoernenders Date: Thu, 29 Jun 2017 10:09:36 +0200 Subject: [PATCH 027/363] small changes to validator, started parameter transformation in illumination.py --- ptypy/core/illumination.py | 170 +++++++++++++++++++++++++++++++++++++ ptypy/utils/validator.py | 38 ++++----- 2 files changed, 189 insertions(+), 19 deletions(-) diff --git a/ptypy/core/illumination.py b/ptypy/core/illumination.py index 70a7fca6b..d37053cd3 100644 --- a/ptypy/core/illumination.py +++ b/ptypy/core/illumination.py @@ -18,6 +18,176 @@ from .. import resources TEMPLATES = dict() +from io import StringIO + +StringIO( +u""" +[aperture] +help = Beam aperture parameters + +[aperture.central_stop] +help = size of central stop as a fraction of aperture.size +default = None +doc = If not None: places a central beam stop in aperture. The value given here is the fraction of the beam stop compared to `size` +lowlim = 0. +uplim = 1. +userlevel = 1 +type = float + +[aperture.diffuser] +help = Noise in the transparen part of the aperture +default = None +doc = Can be either: + - ``None`` : no noise + - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size) + - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod) +userlevel = 2 +type = tuple + +[aperture.edge] +help = Edge width of aperture (in pixels!) +default = 2.0 +userlevel = 2 + +[aperture.form] +help = One of None, 'rect' or 'circ' +default = circ +doc = One of: + - ``None`` : no aperture, this may be useful for nearfield + - ``'rect'`` : rectangular aperture + - ``'circ'`` : circular aperture +choices = None,'rect','circ' +userlevel = 2 +type = None + +[aperture.offset] +help = Offset between center of aperture and optical axes +default = 0. +doc = May also be a tuple (vertical,horizontal) for size in case of an asymmetric offset +userlevel = 2 +type = float, tuple + +[aperture.size] +lowlim = 0. 
+help = Aperture width or diameter
+default = None
+doc = May also be a tuple *(vertical,horizontal)* in case of an asymmetric aperture
+userlevel = 0
+type = float
+
+[diversity]
+help = Probe mode(s) diversity parameters
+doc = Can be ``None`` i.e. no diversity
+validity = 
+choices = 
+uplim = 
+userlevel = 
+type = Param
+
+[diversity.noise]
+lowlim = 
+help = Noise in the generated modes of the illumination
+default = None
+doc = Can be either:
+	 - ``None`` : no noise
+	 - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
+	 - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
+validity = 
+choices = 
+uplim = 
+userlevel = 1
+type = tuple
+
+[diversity.power]
+lowlim = 0.0
+help = Power of modes relative to main mode (zero-layer)
+default = 0.1
+uplim = 1.0
+userlevel = 1
+type = tuple, float
+
+[diversity.shift]
+help = Lateral shift of modes relative to main mode
+default = None
+doc = **[not implemented]**
+userlevel = 2
+type = float
+
+[model]
+help = Type of illumination model
+default = None
+doc = One of:
+	 - ``None`` : model initialization defaults to flat array filled with the specified number of photons
+	 - ``'recon'`` : load model from previous reconstruction, see `recon` Parameters
+	 - ``'stxm'`` : Estimate model from autocorrelation of mean diffraction data
+	 - ** : one of ptypy's internal image resource strings
+	 - *