python setup.py install --user
@@ -27,7 +27,7 @@
Beamlines
We aim to support as much beamlines as possible.
-To this end, PtyPy supports data from the follwing
+To this end, PtyPy supports data from the following
beamlines:
- I13, Diamond
diff --git a/doc/index.rst b/doc/index.rst
index 441daa799..a59d037ed 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -1,12 +1,12 @@
Welcome Ptychonaut!
===================
-|ptypy| [#pronounciation]_ is a
+|ptypy| [#Enders2016]_ is a
framework for scientific ptychography compiled by
P.Thibault and B. Enders and licensed under the GPLv2 license.
It comprises 7 years of experience in the field of ptychography condensed
-to a veratile python package. The package covers the whole path of
+to a versatile python package. The package covers the whole path of
ptychographic analysis after the actual experiment
- from data management to reconstruction to visualization.
@@ -40,8 +40,8 @@ Highlights
computer displays the reconstruction progress.
-* **Mixed-state** reconstructions of probe and object [Thi2013]_ for
- overcoming partial coherence or related phenomane.
+* **Mixed-state** reconstructions of probe and object [#Thi2013]_ for
+ overcoming partial coherence or related phenomena.
* **On-the-fly** reconstructions (while data is being acquired) using the
the :any:`PtyScan` class in the linking mode :ref:`linking mode`
@@ -68,9 +68,9 @@ Quicklinks
.. rubric:: Footnotes
-.. [#pronounciation] Pronounced *typy*, forget the *p*, as in psychology.
+.. [#Enders2016] B.Enders and P.Thibault, **Proc. R. Soc. A** 472, 20160640 (2016), `doi `_
-.. [Thi2013] P.Thibault and A.Menzel, **Nature** 494, 68 (2013), `doi `_
+.. [#Thi2013] P.Thibault and A.Menzel, **Nature** 494, 68 (2013), `doi `_
.. [#ml] P.Thibault and M.Guizar-Sicairos, **New J. of Phys.** 14, 6 (2012), `doi `_
From 8bba39caa76c07b0e220bc91f6188d2ad528393b Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Wed, 18 Oct 2017 21:47:37 +0100
Subject: [PATCH 140/363] Updates related to DEFAULTs
---
ptypy/core/data.py | 71 +++++++++++++++++++++++++++++++--------
ptypy/engines/__init__.py | 1 -
ptypy/engines/base.py | 2 +-
3 files changed, 58 insertions(+), 16 deletions(-)
diff --git a/ptypy/core/data.py b/ptypy/core/data.py
index 61ed7a606..2954a0d6f 100644
--- a/ptypy/core/data.py
+++ b/ptypy/core/data.py
@@ -28,7 +28,7 @@
from ptypy import resources
from ptypy.utils import parallel
from ptypy.utils.verbose import logger, log, headerline
- from ptypy.utils.descriptor import defaults_tree
+ from ptypy.utils.descriptor import defaults_tree, EvalDescriptor
else:
import geometry
import xy
@@ -37,7 +37,7 @@
from .. import resources
from ..utils import parallel
from ..utils.verbose import logger, log, headerline
- from ..utils.descriptor import defaults_tree
+ from ..utils.descriptor import defaults_tree, EvalDescriptor
PTYD = dict(
# frames, positions
@@ -59,6 +59,7 @@
__all__ = ['PtyScan', 'PTYD', 'PtydScan',
'MoonFlowerScan', 'makePtyScan']
+local_tree = EvalDescriptor('')
@defaults_tree.parse_doc('scan.data')
class PtyScan(object):
@@ -1251,7 +1252,7 @@ def __init__(self, pars=None, source=None, **kwargs):
dfile = pars['dfile']
# Check for conflict
- if str(u.unique_path(source)) == str(u.unique_path(dfile)):
+ if dfile and (str(u.unique_path(source)) == str(u.unique_path(dfile))):
logger.info('Source and Sink files are the same.')
dfile = os.path.splitext(dfile)
dfile = dfile[0] + '_n.' + dfile[1]
@@ -1297,14 +1298,14 @@ def __init__(self, pars=None, source=None, **kwargs):
# Update given parameters when they are None
if not manipulate:
- super(PtydScan, self).__init__(meta, **kwargs)
+ p.update(meta)
else:
- # Overwrite only those set to None
+ # Replace only None entries in p
for k, v in meta.items():
- if p.get(k) is None: # should be replace by 'unset'
+ if p.get(k) is None:
p[k] = v
- # Initialize parent class and fill self
- super(PtydScan, self).__init__(p, **kwargs)
+
+ super(PtydScan, self).__init__(p, **kwargs)
if source_frames is not None:
if self.num_frames is None:
@@ -1415,15 +1416,57 @@ def load(self, indices):
return (out.get(key, {}) for key in ['data', 'positions', 'weights'])
-
+@local_tree.parse_doc('moonflowerscan')
class MoonFlowerScan(PtyScan):
"""
Test PtyScan class producing a romantic ptychographic data set of a moon
illuminating flowers.
+
+ Override parent class default:
+
+ Defaults:
+
+ [shape]
+ type = int, tuple
+ default = 128
+ help = Shape of the region of interest cropped from the raw data.
+ doc = Cropping dimension of the diffraction frame
+ Can be None, (dimx, dimy), or dim. In the latter case shape will be (dim, dim).
+ userlevel = 1
+
+ [center]
+ type = tuple, str
+ default = 'fftshift'
+ help = Center (pixel) of the optical axes in raw data
+ doc = If ``None``, this parameter will be set by :py:data:`~.scan.data.auto_center` or elsewhere
+ userlevel = 1
+
+ [psize]
+ type = float, tuple
+ default = 0.000172
+ help = Detector pixel size
+ doc = Dimensions of the detector pixels (in meters)
+ userlevel = 0
+ lowlim = 0
+
+ [distance]
+ type = float
+ default = 7.19
+ help = Sample to detector distance
+ doc = In meters.
+ userlevel = 0
+ lowlim = 0
+
+ [energy]
+ type = float
+ default = 7.2
+ help = Photon energy of the incident radiation in keV
+ doc =
+ userlevel = 0
+ lowlim = 0
+
"""
- DEFAULT = PtyScan.DEFAULT.copy()
- DEFAULT.update(geometry.Geo.DEFAULT.copy())
RECIPE = u.Param(
# Position distance in fraction of illumination frame
density=0.2,
@@ -1435,9 +1478,9 @@ def __init__(self, pars=None, **kwargs):
"""
Parent pars are for the
"""
- p = geometry.Geo.DEFAULT.copy()
- if pars is not None:
- p.update(pars)
+
+ p = self.DEFAULT.copy(depth=99)
+ p.update(pars)
# Initialize parent class
super(MoonFlowerScan, self).__init__(p, **kwargs)
diff --git a/ptypy/engines/__init__.py b/ptypy/engines/__init__.py
index 6c5ee5832..546b815a8 100644
--- a/ptypy/engines/__init__.py
+++ b/ptypy/engines/__init__.py
@@ -25,7 +25,6 @@
# Supported engines defaults
DEFAULTS = u.Param(
- common=BaseEngine.DEFAULT,
Dummy=dummy.Dummy.DEFAULT,
DM_simple=DM_simple.DM_simple.DEFAULT,
DM=DM.DM.DEFAULT,
diff --git a/ptypy/engines/base.py b/ptypy/engines/base.py
index eb22d45d6..c15c71374 100644
--- a/ptypy/engines/base.py
+++ b/ptypy/engines/base.py
@@ -25,7 +25,7 @@
error=np.zeros((3,))
)
-@defaults_tree.parse_doc('engine.common')
+
class BaseEngine(object):
"""
Base reconstruction engine.
From 39ec3115b82bfb60c64091a68a0f443847ca03c0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 19 Oct 2017 12:16:53 +0200
Subject: [PATCH 141/363] Started migrating PtyScan subclasses
---
ptypy/core/data.py | 13 ++++--
ptypy/core/manager.py | 2 +-
ptypy/core/ptycho.py | 60 +++++++++++--------------
ptypy/engines/base.py | 1 +
ptypy/experiment/__init__.py | 86 ++++++++++++------------------------
5 files changed, 67 insertions(+), 95 deletions(-)
diff --git a/ptypy/core/data.py b/ptypy/core/data.py
index 2954a0d6f..09a795e2f 100644
--- a/ptypy/core/data.py
+++ b/ptypy/core/data.py
@@ -37,7 +37,7 @@
from .. import resources
from ..utils import parallel
from ..utils.verbose import logger, log, headerline
- from ..utils.descriptor import defaults_tree, EvalDescriptor
+ from ..utils.descriptor import defaults_tree
PTYD = dict(
# frames, positions
@@ -59,9 +59,7 @@
__all__ = ['PtyScan', 'PTYD', 'PtydScan',
'MoonFlowerScan', 'makePtyScan']
-local_tree = EvalDescriptor('')
-@defaults_tree.parse_doc('scan.data')
class PtyScan(object):
"""
PtyScan: A single ptychography scan, created on the fly or read from file.
@@ -1416,7 +1414,7 @@ def load(self, indices):
return (out.get(key, {}) for key in ['data', 'positions', 'weights'])
-@local_tree.parse_doc('moonflowerscan')
+@defaults_tree.parse_doc('scandata.MoonFlowerScan')
class MoonFlowerScan(PtyScan):
"""
Test PtyScan class producing a romantic ptychographic data set of a moon
@@ -1426,6 +1424,12 @@ class MoonFlowerScan(PtyScan):
Defaults:
+ [num_frames]
+ default = 100
+ type = int
+ help = Number of frames to simulate
+ doc =
+
[shape]
type = int, tuple
default = 128
@@ -1541,6 +1545,7 @@ def load(self, indices):
return raw, {}, {}
+
if __name__ == "__main__":
u.verbose.set_level(3)
MS = MoonFlowerScan(num_frames=100)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 2c7995736..b00067202 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -42,7 +42,7 @@
@defaults_tree.parse_doc('scanmodel.Full')
class ScanModel(object):
"""
- Manage a single scan model (data, illumination, geometry, ...)
+ Manage a single scan model (sharing, coherence, propagation, ...)
Defaults:
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index f7f696e59..03199515a 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -110,29 +110,6 @@ class Ptycho(Base):
help = Start an ipython kernel for debugging
doc = Start an ipython kernel for debugging.
- [scans]
- default = None
- type = Param
- help = Container for instances of scan parameters
- doc =
-
- [scans.*]
- default = @scan
- type = @scan
- help = Wildcard entry for list of scans to load. See :py:data:`scan`
-
- [engines]
- default = None
- type = Param
- help = Container for instances of engine parameters
- doc =
-
- [engines.*]
- type = @engine.*
- default = @engine.DM
- help = Wildcard entry for list of engines to run. See :py:data:`engine`
- doc = The value of engines.*.name is used to choose among the available engines.
-
[io]
default = None
type = Param
@@ -230,28 +207,45 @@ class Ptycho(Base):
doc = Switch to request the production of a movie from the dumped plots at the end of the
reconstruction.
- [scan]
+ [scans]
default = None
type = Param
help = Container for instances of scan parameters
doc =
- #[model]
- #default =
- #type = @scanmodel.Vanilla, @scanmodel.Full
- #help = Physical scan model
- #doc = The value of model.name is used to choose among the available models.
+ [scans.*]
+ default = @scan
+ type = @scan
+ help = Wildcard entry for list of scans to load. See :py:data:`scan`
- [engine]
+ [engines]
default = None
type = Param
- help = Default engine parameters
+ help = Container for instances of engine parameters
doc =
- [geometry]
+ [engines.*]
+ default = @engine.DM
+ type = @engine.*
+ help = Wildcard entry for list of engines to run. See :py:data:`engine`
+ doc = The value of engines.*.name is used to choose among the available engines.
+
+ [scan]
default = None
type = Param
- help = Defaults for available geometry descriptions
+ help = Template for scan.* instances
+ doc =
+
+ [scan.data]
+ default = @scandata.MoonFlowerScan
+ type = @scandata.*
+ help = Link to container for data preparation
+ doc =
+
+ [scan.model]
+ default = @scanmodel.Full
+ type = @scanmodel.*
+ help = Link to container for data interpretation model
doc =
"""
diff --git a/ptypy/engines/base.py b/ptypy/engines/base.py
index c15c71374..22587370c 100644
--- a/ptypy/engines/base.py
+++ b/ptypy/engines/base.py
@@ -26,6 +26,7 @@
)
+@defaults_tree.parse_doc('engine.common')
class BaseEngine(object):
"""
Base reconstruction engine.
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index 399bb3f84..cef75e746 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -18,19 +18,19 @@
# Import instrument-specific modules
#import cSAXS
-from I13_ffp import I13ScanFFP
-from I13_nfp import I13ScanNFP
-from DLS import DlsScan
-from I08 import I08Scan
-from savu import Savu
-from plugin import makeScanPlugin
-from ID16Anfp import ID16AScan
-from AMO_LCLS import AMOScan
-from DiProI_FERMI import DiProIFERMIScan
-from optiklabor import FliSpecScanMultexp
-from UCL import UCLLaserScan
-from nanomax import NanomaxStepscanNov2016, NanomaxStepscanMay2017, NanomaxFlyscanJune2017
-from ALS_5321 import ALS5321Scan
+# from I13_ffp import I13ScanFFP
+# from I13_nfp import I13ScanNFP
+# from DLS import DlsScan
+# from I08 import I08Scan
+# from savu import Savu
+# from plugin import makeScanPlugin
+# from ID16Anfp import ID16AScan
+# from AMO_LCLS import AMOScan
+# from DiProI_FERMI import DiProIFERMIScan
+# from optiklabor import FliSpecScanMultexp
+# from UCL import UCLLaserScan
+# from nanomax import NanomaxStepscanNov2016, NanomaxStepscanMay2017, NanomaxFlyscanJune2017
+# from ALS_5321 import ALS5321Scan
if __name__ == "__main__":
@@ -40,23 +40,16 @@
from ..utils.verbose import logger
from ..core.data import PtydScan, MoonFlowerScan, PtyScan
-PtyScanTypes = dict(
- i13dls_ffp = I13ScanFFP,
- i13dls_nfp = I13ScanNFP,
- dls = DlsScan,
- i08dls = I08Scan,
- savu = Savu,
- plugin = makeScanPlugin,
- id16a_nfp = ID16AScan,
- amo_lcls = AMOScan,
- diproi_fermi = DiProIFERMIScan,
- fli_spec_multexp = FliSpecScanMultexp,
- laser_ucl = UCLLaserScan,
- nanomaxstepscannov2016 = NanomaxStepscanNov2016,
- nanomaxstepscanmay2017 = NanomaxStepscanMay2017,
- nanomaxflyscanjune2017 = NanomaxFlyscanJune2017,
- als5321 = ALS5321Scan,
-)
+def all_subclasses(cls, names=False):
+ """
+ Helper function for finding all subclasses of a base class.
+ """
+ subs = cls.__subclasses__() + [g for s in cls.__subclasses__()
+ for g in all_subclasses(s)]
+ if names:
+ return [c.__name__ for c in subs]
+ else:
+ return subs
def makePtyScan(pars, scanmodel=None):
"""
@@ -72,35 +65,14 @@ def makePtyScan(pars, scanmodel=None):
FIXME: This seems to be needed for simulations but broken for now.
"""
- if __name__ == "__main__":
- from ptypy.experiment import PtyScanTypes
- else:
- from ..experiment import PtyScanTypes
-
# Extract information on the type of object to build
- source = pars.source
- recipe = pars.get('recipe', {})
-
- if source is not None:
- source = source.lower()
+ name = pars.name
- if source in PtyScanTypes:
- ps_obj = PtyScanTypes[source]
- logger.info('Scan will be prepared with the recipe "%s"' % source)
- ps_instance = ps_obj(pars, recipe=recipe)
- elif source.endswith('.ptyd') or source.endswith('.pty') or str(source) == 'file':
- ps_instance = PtydScan(pars, source=source)
- elif source == 'test':
- ps_instance = MoonFlowerScan(pars)
- elif source == 'sim':
- from ..simulations import SimScan
- logger.info('Scan will simulated')
- ps_instance = SimScan(pars, scanmodel)
- elif source == 'empty' or source is None:
- pars.recipe = None
- logger.warning('Generating dummy PtyScan - This label will source only zeros as data')
- ps_instance = PtyScan(pars)
+ if name in all_subclasses(PtyScan, names=True):
+ ps_class = eval(name)
+ logger.info('Scan will be prepared with the PtyScan subclass "%s"' % name)
+ ps_instance = ps_class(pars)
else:
- raise RuntimeError('Could not manage source "%s"' % str(source))
+ raise RuntimeError('Could not manage source "%s"' % str(name))
return ps_instance
From e9cd9d5d1133a74d2b42aaa47ec7885bc4814bc4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 19 Oct 2017 14:03:11 +0200
Subject: [PATCH 142/363] Migrated PtydScan
---
ptypy/core/data.py | 32 +++++++++++++++-----------------
ptypy/core/manager.py | 7 +++++++
2 files changed, 22 insertions(+), 17 deletions(-)
diff --git a/ptypy/core/data.py b/ptypy/core/data.py
index 09a795e2f..3705278f2 100644
--- a/ptypy/core/data.py
+++ b/ptypy/core/data.py
@@ -57,7 +57,7 @@
__all__ = ['PtyScan', 'PTYD', 'PtydScan',
- 'MoonFlowerScan', 'makePtyScan']
+ 'MoonFlowerScan']
class PtyScan(object):
@@ -173,16 +173,6 @@ class PtyScan(object):
help = Data preparation recipe container
doc = Will be deprecated soon.
- [source]
- type = file
- default = None
- help = Describes where to get the data from.
- doc = Accepted values are:
- - ``'file'``: data will be read from a .ptyd file.
- - any valid recipe name: data will be prepared using the recipe.
- - ``'sim'`` : data will be simulated according to parameters in simulation
- userlevel = 0
-
[label]
type = str
default = None
@@ -1217,22 +1207,30 @@ def _mpi_save_chunk(self, kind='link', chunk=None):
parallel.barrier()
+@defaults_tree.parse_doc('scandata.PtydScan')
class PtydScan(PtyScan):
"""
PtyScan provided by native "ptyd" file format.
+
+ Defaults:
+
+ [source]
+ default = 'scan.ptyd'
+ type = str
+ help = Input ptyd file
+ doc =
+
"""
- def __init__(self, pars=None, source=None, **kwargs):
+ def __init__(self, pars=None, **kwargs):
"""
PtyScan provided by native "ptyd" file format.
-
- :param source: Explicit source file. If not None or 'file',
- the data may get processed depending on user input
-
- :param pars: Input like PtyScan
"""
# Create parameter set
p = self.DEFAULT.copy(99)
+ p.update(pars)
+
+ source = p.source
if source is None or str(source) == 'file':
# This is the case of absolutely no additional work
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index b00067202..3c6d6d81c 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -443,6 +443,13 @@ def _update_stats(self):
'\n--- Scan %(label)s photon report ---\nTotal photons : %(tot).2e \nAverage photons : %(mean).2e\nMaximum photons : %(max).2e\n' % info + '-' * 29)
+@defaults_tree.parse_doc('scanmodel.Vanilla')
+class ScanModel2(object):
+ """
+ Dummy for testing, there must be more than one for validate to react
+ to invalid names.
+ """
+ pass
class ModelManager(object):
From 2d3c386b6946415b1d772d8536fed367aa788098 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 19 Oct 2017 15:25:53 +0200
Subject: [PATCH 143/363] EvalDescriptor.check() was overwriting its own error
codes, so didn't find invalid params
---
ptypy/core/data.py | 6 +++++
ptypy/core/manager.py | 6 +++++
ptypy/engines/DM.py | 6 +++++
ptypy/engines/DM_simple.py | 6 +++++
ptypy/engines/ML.py | 6 +++++
ptypy/engines/dummy.py | 6 +++++
ptypy/engines/ePIE.py | 6 +++++
ptypy/test/util_tests/descriptor_test.py | 14 ++++++----
ptypy/utils/descriptor.py | 34 ++++++++++--------------
9 files changed, 65 insertions(+), 25 deletions(-)
diff --git a/ptypy/core/data.py b/ptypy/core/data.py
index 3705278f2..2de94c547 100644
--- a/ptypy/core/data.py
+++ b/ptypy/core/data.py
@@ -1422,6 +1422,12 @@ class MoonFlowerScan(PtyScan):
Defaults:
+ [name]
+ default = MoonFlowerScan
+ type = str
+ help =
+ doc =
+
[num_frames]
default = 100
type = int
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 3c6d6d81c..4825bd97d 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -46,6 +46,12 @@ class ScanModel(object):
Defaults:
+ [name]
+ default = DM
+ type = str
+ help =
+ doc =
+
[tags]
default = ['dummy']
help = Comma seperated string tags describing the data input
diff --git a/ptypy/engines/DM.py b/ptypy/engines/DM.py
index 8aae4f588..b9770e637 100644
--- a/ptypy/engines/DM.py
+++ b/ptypy/engines/DM.py
@@ -26,6 +26,12 @@ class DM(BaseEngine):
Defaults:
+ [name]
+ default = DM
+ type = str
+ help =
+ doc =
+
[alpha]
default = 1
type = float
diff --git a/ptypy/engines/DM_simple.py b/ptypy/engines/DM_simple.py
index 6866cce19..7a603cdc2 100644
--- a/ptypy/engines/DM_simple.py
+++ b/ptypy/engines/DM_simple.py
@@ -26,6 +26,12 @@ class DM_simple(BaseEngine):
Defaults:
+ [name]
+ default = DM_simple
+ type = str
+ help =
+ doc =
+
[numiter]
default = 123
type = int
diff --git a/ptypy/engines/ML.py b/ptypy/engines/ML.py
index 3c60dcadc..8e2724cf7 100644
--- a/ptypy/engines/ML.py
+++ b/ptypy/engines/ML.py
@@ -29,6 +29,12 @@ class ML(BaseEngine):
Defaults:
+ [name]
+ default = ML
+ type = str
+ help =
+ doc =
+
[ML_type]
default = 'gaussian'
type = str
diff --git a/ptypy/engines/dummy.py b/ptypy/engines/dummy.py
index b767ed82b..ee24c32ad 100644
--- a/ptypy/engines/dummy.py
+++ b/ptypy/engines/dummy.py
@@ -27,6 +27,12 @@ class Dummy(BaseEngine):
Defaults:
+ [name]
+ default = Dummy
+ type = str
+ help =
+ doc =
+
[itertime]
default = .2
type = float
diff --git a/ptypy/engines/ePIE.py b/ptypy/engines/ePIE.py
index 11ec842ac..b69916f68 100644
--- a/ptypy/engines/ePIE.py
+++ b/ptypy/engines/ePIE.py
@@ -41,6 +41,12 @@ class EPIE(BaseEngine):
Defaults:
+ [name]
+ default = ePIE
+ type = str
+ help =
+ doc =
+
[alpha]
default = 1.
type = float
diff --git a/ptypy/test/util_tests/descriptor_test.py b/ptypy/test/util_tests/descriptor_test.py
index cff0e6b98..8e824b85c 100644
--- a/ptypy/test/util_tests/descriptor_test.py
+++ b/ptypy/test/util_tests/descriptor_test.py
@@ -212,7 +212,7 @@ def test_parse_doc_wildcards(self):
"""
root = EvalDescriptor('')
- @root.parse_doc('scans.*')
+ @root.parse_doc('scan.FakeScan')
class FakeScanClass(object):
"""
General info.
@@ -246,6 +246,11 @@ class FakePtychoClass(object):
default = {}
help = Engine container
+ [scans.*]
+ type = @scan.*
+ default = @scan.FakeScan
+ help =
+
[run]
type = str
default = run
@@ -255,7 +260,6 @@ class FakePtychoClass(object):
pass
assert FakeScanClass.DEFAULT == Param({'comment': None, 'energy': 11.4})
- assert FakePtychoClass.DEFAULT == Param({'run': 'run', 'scans': {}})
# a correct param tree
p = Param()
@@ -366,7 +370,7 @@ class FakePtychoClass(object):
help = Container for all engines
[engines.*]
- type = @engine.DM, @engine.ML
+ type = @engine.*
default = @engine.DM
help = Engine wildcard. Defaults to DM
"""
@@ -389,7 +393,7 @@ class FakePtychoClass(object):
p.engines.engine01 = Param()
p.engines.engine01.numiter = 10
out = root.check(p)
- assert out['engines.engine01']['symlink'] == CODES.INVALID
+ assert out['engines.*']['symlink'] == CODES.INVALID
# wrong name
p = Param()
@@ -398,7 +402,7 @@ class FakePtychoClass(object):
p.engines.engine01.name = 'ePIE'
p.engines.engine01.numiter = 10
out = root.check(p)
- assert out['engines.engine01']['symlink'] == CODES.INVALID
+ assert out['engines.*']['symlink'] == CODES.INVALID
if __name__ == "__main__":
diff --git a/ptypy/utils/descriptor.py b/ptypy/utils/descriptor.py
index 26a50fd4f..d2bfe5b25 100644
--- a/ptypy/utils/descriptor.py
+++ b/ptypy/utils/descriptor.py
@@ -843,46 +843,40 @@ def check(self, pars, depth=99):
"""
out = OrderedDict()
for res in self._walk(depth=depth, pars=pars):
+ path = res['d'].path
+ out[path] = {}
# Switch through all possible statuses
if res['status'] == 'ok':
# Check limits
d = res['d']
- val = {'type': CODES.PASS}
+ out[path]['type'] = CODES.PASS
if any([i in d._limtypes for i in d.type]):
lowlim, uplim = d.limits
- # if lowlim is None or pars[res['path']] is None:
- # val['lowlim'] = CODES.UNKNOWN
- # else:
- # val['lowlim'] = CODES.PASS if (pars[res['path']] >= lowlim) else CODES.FAIL
- # if uplim is None or pars[res['path']] is None:
- # val['uplim'] = CODES.UNKNOWN
- # else:
- # val['uplim'] = CODES.PASS if (pars[res['path']] <= uplim) else CODES.FAIL
if lowlim is None or pars[res['path']] is None:
- val['lowlim'] = CODES.PASS
+ out[path]['lowlim'] = CODES.PASS
else:
- val['lowlim'] = CODES.PASS if (pars[res['path']] >= lowlim) else CODES.FAIL
+ out[path]['lowlim'] = CODES.PASS if (pars[res['path']] >= lowlim) else CODES.FAIL
if uplim is None or pars[res['path']] is None:
- val['uplim'] = CODES.PASS
+ out[path]['uplim'] = CODES.PASS
else:
- val['uplim'] = CODES.PASS if (pars[res['path']] <= uplim) else CODES.FAIL
- out[res['path']] = val
+ out[path]['uplim'] = CODES.PASS if (pars[res['path']] <= uplim) else CODES.FAIL
elif res['status'] == 'wrongtype':
# Wrong type
- out[res['path']] = {'type': CODES.INVALID}
+ out[path]['type'] = CODES.INVALID
elif res['status'] == 'noname':
# Symlink name could not be found
- out[res['path']] = {'symlink': CODES.INVALID, 'name': CODES.MISSING}
+ out[path]['symlink'] = CODES.INVALID
+ out[path]['name'] = CODES.MISSING
elif res['status'] == 'nolink':
# Link was not resolved
- out[res['path']] = {'symlink': CODES.INVALID, 'name': CODES.UNKNOWN}
+ out[path]['symlink'] = CODES.INVALID
+ out[path]['name'] = CODES.UNKNOWN
elif res['status'] == 'nochild':
# Parameter entry without corresponding Descriptor
- out[res['path']] = {res['info']: CODES.INVALID}
+ out[path][res['info']] = CODES.INVALID
elif res['status'] == 'nopar':
# Missing parameter entry
- out[res['path']] = {res['info']: CODES.MISSING}
-
+ out[path][res['info']] = CODES.MISSING
return out
def validate(self, pars, raisecodes=(CODES.FAIL, CODES.INVALID)):
From 3e95d63013a34456226ca8335964d528e8ebe682 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 19 Oct 2017 15:57:49 +0200
Subject: [PATCH 144/363] We need to import the experiments module for PtyScan
options to be registered
---
ptypy/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ptypy/__init__.py b/ptypy/__init__.py
index a9bc2439e..ce8fb2f04 100644
--- a/ptypy/__init__.py
+++ b/ptypy/__init__.py
@@ -74,7 +74,7 @@
# Import core modules
import io
-#import experiment
+import experiment
import core
From a84a8634f7becca4a99247c94ab3b4f601de389e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 19 Oct 2017 16:17:26 +0200
Subject: [PATCH 145/363] Migrated the nanomax ptyscan classes as a first step
---
ptypy/experiment/__init__.py | 2 +-
ptypy/experiment/nanomax.py | 266 ++++++++++++++++++++++-------------
2 files changed, 166 insertions(+), 102 deletions(-)
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index cef75e746..3f813706e 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -29,7 +29,7 @@
# from DiProI_FERMI import DiProIFERMIScan
# from optiklabor import FliSpecScanMultexp
# from UCL import UCLLaserScan
-# from nanomax import NanomaxStepscanNov2016, NanomaxStepscanMay2017, NanomaxFlyscanJune2017
+from nanomax import NanomaxStepscanMay2017, NanomaxStepscanNov2016, NanomaxFlyscanJune2017
# from ALS_5321 import ALS5321Scan
diff --git a/ptypy/experiment/nanomax.py b/ptypy/experiment/nanomax.py
index a37a5acee..d20b73f60 100644
--- a/ptypy/experiment/nanomax.py
+++ b/ptypy/experiment/nanomax.py
@@ -7,6 +7,7 @@
import ptypy
from ptypy.core.data import PtyScan
import ptypy.utils as u
+from ptypy.utils.descriptor import defaults_tree
import h5py
import numpy as np
@@ -14,31 +15,72 @@
logger = u.verbose.logger
-# new recipe for this one
-RECIPE = u.Param()
-RECIPE.dataPath = None
-RECIPE.datafile = None
-RECIPE.maskfile = None
-RECIPE.pilatusPath = None
-RECIPE.pilatusPattern = None
-RECIPE.scannr = None
+class NanomaxBase(PtyScan):
+ """
+ Defaults:
+
+ [dataPath]
+ default = None
+ type = str
+ help = Path to folder containing the Sardana master file
+ doc =
+
+ [datafile]
+ default = None
+ type = str
+ help = Sardana master file
+ doc =
+
+ [maskfile]
+ default = None
+ type = str
+ help = Arbitrary mask file
+ doc = Hdf5 file containing an array called 'mask' at the root level.
+
+ [pilatusPath]
+ default = None
+ type = str
+ help = Path to folder containing detector image files
+ doc =
+
+ [pilatusPattern]
+ default = None
+ type = str
+ help = Format string for detector image files
+  doc = A format string with two integer fields: the first holds the scan number and the second holds the image number.
+
+ [scannr]
+ default = None
+ type = int
+ help = Scan number
+ doc =
+ """
+ pass
-class NanomaxStepscanNov2016(PtyScan):
+@defaults_tree.parse_doc('scandata.NanomaxStepscanNov2016')
+class NanomaxStepscanNov2016(NanomaxBase):
"""
Loads Nanomax step scan data in the format of week Nov/Dec 2016
+
+ Defaults:
+
+ [name]
+ default = NanomaxStepscanNov2016
+ type = str
+ help =
+ doc =
+
"""
def __init__(self, pars=None, **kwargs):
-
- p = PtyScan.DEFAULT.copy(depth=10)
- p.recipe = RECIPE.copy()
- p.update(pars, in_place_depth=10)
- super(NanomaxStepscanNov2016, self).__init__(p)
+ self.p = self.DEFAULT.copy(99)
+ self.p.update(pars)
+ super(NanomaxStepscanNov2016, self).__init__(self.p)
def load_positions(self):
- fileName = self.info.recipe.dataPath + self.info.recipe.datafile
- entry = 'entry%d' % self.info.recipe.scannr
+ fileName = self.p.dataPath + self.p.datafile
+ entry = 'entry%d' % self.p.scannr
with h5py.File(fileName, 'r') as hf:
x = np.array(hf.get(entry + '/measurement/samx'))
@@ -50,9 +92,9 @@ def load_positions(self):
def load(self, indices):
raw, weights, positions = {}, {}, {}
- scannr = self.info.recipe.scannr
- path = self.info.recipe.pilatusPath
- filepattern = self.info.recipe.pilatusPattern
+ scannr = self.p.scannr
+ path = self.p.pilatusPath
+ filepattern = self.p.pilatusPattern
if not (path[-1] == '/'):
path += '/'
@@ -74,19 +116,19 @@ def load_weight(self):
frame.
"""
- scannr = self.info.recipe.scannr
- path = self.info.recipe.pilatusPath
- pattern = self.info.recipe.pilatusPattern
+ scannr = self.p.scannr
+ path = self.p.pilatusPath
+ pattern = self.p.pilatusPattern
if not (path[-1] == '/'):
path += '/'
- if self.info.recipe.maskfile:
- with h5py.File(self.info.recipe.maskfile, 'r') as hf:
+ if self.p.maskfile:
+ with h5py.File(self.p.maskfile, 'r') as hf:
mask = np.array(hf.get('mask'))
logger.info("loaded mask, %u x %u, sum %u" %
(mask.shape + (np.sum(mask),)))
else:
- filename = self.info.recipe.dataPath + self.info.recipe.datafile
+ filename = self.p.dataPath + self.p.datafile
with h5py.File(path + pattern % (scannr, 0), 'r') as hf:
data = hf.get('entry_0000/measurement/Pilatus/data')
shape = np.asarray(data[0]).shape
@@ -95,46 +137,65 @@ def load_weight(self):
(mask.shape + (np.sum(mask),)))
return mask
-# new recipe for this one too
-RECIPE = u.Param()
-RECIPE.dataPath = None
-RECIPE.datafile = None
-RECIPE.maskfile = None
-RECIPE.pilatusPath = None
-RECIPE.pilatusPattern = None
-RECIPE.hdfPath = 'entry_0000/measurement/Pilatus/data'
-RECIPE.scannr = None
-RECIPE.xMotorFlipped = None
-RECIPE.yMotorFlipped = None
-RECIPE.xMotorAngle = 0.0
-
-
-class NanomaxStepscanMay2017(PtyScan):
+
+@defaults_tree.parse_doc('scandata.NanomaxStepscanMay2017')
+class NanomaxStepscanMay2017(NanomaxBase):
"""
Loads Nanomax step scan data in the format of May 2017.
+
+ Defaults:
+
+ [name]
+ default = NanomaxStepscanMay2017
+ type = str
+ help =
+ doc =
+
+ [hdfPath]
+ default = 'entry_0000/measurement/Pilatus/data'
+ type = str
+ help = Path to image array within detector hdf5 file
+ doc =
+
+ [xMotorFlipped]
+ default = False
+ type = bool
+ help = Flip detector x positions
+ doc =
+
+ [yMotorFlipped]
+ default = False
+ type = bool
+ help = Flip detector y positions
+ doc =
+
+ [xMotorAngle]
+ default = 0.0
+ type = float
+ help = Angle of the motor x axis relative to the lab x axis
+  doc = Use this if the stage is mounted at an angle around the y axis; the sign doesn't matter as a cos factor is applied.
+
"""
def __init__(self, pars=None, **kwargs):
-
- p = PtyScan.DEFAULT.copy(depth=10)
- p.recipe = RECIPE.copy()
- p.update(pars, in_place_depth=10)
- super(NanomaxStepscanMay2017, self).__init__(p)
+ self.p = self.DEFAULT.copy(99)
+ self.p.update(pars)
+ super(NanomaxStepscanMay2017, self).__init__(self.p)
def load_positions(self):
- fileName = self.info.recipe.dataPath + self.info.recipe.datafile
- entry = 'entry%d' % self.info.recipe.scannr
+ fileName = self.p.dataPath + self.p.datafile
+ entry = 'entry%d' % self.p.scannr
xFlipper, yFlipper = 1, 1
- if self.info.recipe.xMotorFlipped:
+ if self.p.xMotorFlipped:
xFlipper = -1
logger.warning("note: x motor is specified as flipped")
- if self.info.recipe.yMotorFlipped:
+ if self.p.yMotorFlipped:
yFlipper = -1
logger.warning("note: y motor is specified as flipped")
# if the x axis is tilted, take that into account.
- xCosFactor = np.cos(self.info.recipe.xMotorAngle / 180.0 * np.pi)
+ xCosFactor = np.cos(self.p.xMotorAngle / 180.0 * np.pi)
logger.info(
"x motor angle results in multiplication by %.2f" % xCosFactor)
@@ -149,16 +210,16 @@ def load_positions(self):
def load(self, indices):
raw, weights, positions = {}, {}, {}
- scannr = self.info.recipe.scannr
- path = self.info.recipe.pilatusPath
- filepattern = self.info.recipe.pilatusPattern
+ scannr = self.p.scannr
+ path = self.p.pilatusPath
+ filepattern = self.p.pilatusPattern
if not (path[-1] == '/'):
path += '/'
data = []
for im in range(self.info.positions_scan.shape[0]):
with h5py.File(path + filepattern % (scannr, im), 'r') as hf:
- dataset = hf.get(self.info.recipe.hdfPath)
+ dataset = hf.get(self.p.hdfPath)
data.append(np.array(dataset)[0])
# pick out the requested indices
@@ -173,23 +234,23 @@ def load_weight(self):
frame.
"""
- scannr = self.info.recipe.scannr
- path = self.info.recipe.pilatusPath
- pattern = self.info.recipe.pilatusPattern
+ scannr = self.p.scannr
+ path = self.p.pilatusPath
+ pattern = self.p.pilatusPattern
if not (path[-1] == '/'):
path += '/'
- filename = self.info.recipe.dataPath + self.info.recipe.datafile
+ filename = self.p.dataPath + self.p.datafile
with h5py.File(path + pattern % (scannr, 0), 'r') as hf:
- data = hf.get(self.info.recipe.hdfPath)
+ data = hf.get(self.p.hdfPath)
shape = np.asarray(data[0]).shape
mask = np.ones(shape)
mask[np.where(data[0] == -2)] = 0
logger.info("took account of the pilatus mask, %u x %u, sum %u" %
(mask.shape + (np.sum(mask),)))
- if self.info.recipe.maskfile:
- with h5py.File(self.info.recipe.maskfile, 'r') as hf:
+ if self.p.maskfile:
+ with h5py.File(self.p.maskfile, 'r') as hf:
mask2 = np.array(hf.get('mask'))
logger.info("loaded additional mask, %u x %u, sum %u" %
(mask2.shape + (np.sum(mask2),)))
@@ -199,36 +260,39 @@ def load_weight(self):
return mask
-# new recipe for this one too
-RECIPE = u.Param()
-RECIPE.dataPath = None
-RECIPE.datafile = None
-RECIPE.maskfile = None
-RECIPE.detFilePath = None
-RECIPE.detFilePattern = None
-RECIPE.detNormalizationFilePattern = None
-RECIPE.detNormalizationIndices = None
-RECIPE.hdfPath = 'entry_0000/measurement/Pilatus/data'
-RECIPE.scannr = None
-RECIPE.xMotorFlipped = None
-RECIPE.yMotorFlipped = None
-RECIPE.xMotorAngle = 0.0
-
-
-class NanomaxFlyscanJune2017(PtyScan):
+
+@defaults_tree.parse_doc('scandata.NanomaxFlyscanJune2017')
+class NanomaxFlyscanJune2017(NanomaxStepscanMay2017):
"""
Loads Nanomax fly scan data in the format of June 2017.
+
+ Defaults:
+
+ [name]
+ default = NanomaxFlyscanJune2017
+ type = str
+ help =
+
+ [detNormalizationFilePattern]
+ default = None
+ type = str
+ help = Format string for detector file containing data over which to normalize
+
+ [detNormalizationIndices]
+ default = None
+ type = str
+ help = Indices over which to normalize
+
"""
def __init__(self, pars=None, **kwargs):
- p = PtyScan.DEFAULT.copy(depth=10)
- p.recipe = RECIPE.copy()
- p.update(pars, in_place_depth=10)
- super(NanomaxFlyscanJune2017, self).__init__(p)
+ self.p = self.DEFAULT.copy(99)
+ self.p.update(pars)
+ super(NanomaxFlyscanJune2017, self).__init__(self.p)
def load_positions(self):
- fileName = self.info.recipe.dataPath + self.info.recipe.datafile
- entry = 'entry%d' % self.info.recipe.scannr
+ fileName = self.p.dataPath + self.p.datafile
+ entry = 'entry%d' % self.p.scannr
x, y = None, None
with h5py.File(fileName, 'r') as hf:
@@ -250,15 +314,15 @@ def load_positions(self):
raise Exception('Something''s wrong with the positions')
y = np.repeat(yall, Nx)
- if self.info.recipe.xMotorFlipped:
+ if self.p.xMotorFlipped:
x *= -1
logger.warning("note: x motor is specified as flipped")
- if self.info.recipe.yMotorFlipped:
+ if self.p.yMotorFlipped:
y *= -1
logger.warning("note: y motor is specified as flipped")
# if the x axis is tilted, take that into account.
- xCosFactor = np.cos(self.info.recipe.xMotorAngle / 180.0 * np.pi)
+ xCosFactor = np.cos(self.p.xMotorAngle / 180.0 * np.pi)
x *= xCosFactor
logger.info(
"x motor angle results in multiplication by %.2f" % xCosFactor)
@@ -269,11 +333,11 @@ def load_positions(self):
def load(self, indices):
raw, weights, positions = {}, {}, {}
- scannr = self.info.recipe.scannr
- path = self.info.recipe.detFilePath
- pattern = self.info.recipe.detFilePattern
- normfile = self.info.recipe.detNormalizationFilePattern
- normind = self.info.recipe.detNormalizationIndices
+ scannr = self.p.scannr
+ path = self.p.detFilePath
+ pattern = self.p.detFilePattern
+ normfile = self.p.detNormalizationFilePattern
+ normind = self.p.detNormalizationIndices
# read the entire dataset
done = False
@@ -283,7 +347,7 @@ def load(self, indices):
try:
with h5py.File(path + pattern % (scannr, line), 'r') as hf:
logger.info('loading data: ' + pattern % (scannr, line))
- dataset = hf.get(self.info.recipe.hdfPath)
+ dataset = hf.get(self.p.hdfPath)
linedata = np.array(dataset)
if normfile:
dtype = linedata.dtype
@@ -292,7 +356,7 @@ def load(self, indices):
logger.info('loading normalization data: ' +
normfile % (scannr, line))
dataset = hf.get(
- self.info.recipe.detNormalizationHdfPath)
+ self.p.detNormalizationHdfPath)
normdata = np.array(dataset)
if not normind:
shape = linedata[0].shape
@@ -326,23 +390,23 @@ def load_weight(self):
frame.
"""
- scannr = self.info.recipe.scannr
- path = self.info.recipe.detFilePath
- pattern = self.info.recipe.detFilePattern
+ scannr = self.p.scannr
+ path = self.p.detFilePath
+ pattern = self.p.detFilePattern
if not (path[-1] == '/'):
path += '/'
- filename = self.info.recipe.dataPath + self.info.recipe.datafile
+ filename = self.p.dataPath + self.p.datafile
with h5py.File(path + pattern % (scannr, 0), 'r') as hf:
- data = hf.get(self.info.recipe.hdfPath)
+ data = hf.get(self.p.hdfPath)
shape = np.asarray(data[0]).shape
mask = np.ones(shape)
mask[np.where(data[0] == -2)] = 0
logger.info("took account of the pilatus mask, %u x %u, sum %u" %
(mask.shape + (np.sum(mask),)))
- if self.info.recipe.maskfile:
- with h5py.File(self.info.recipe.maskfile, 'r') as hf:
+ if self.p.maskfile:
+ with h5py.File(self.p.maskfile, 'r') as hf:
mask2 = np.array(hf.get('mask'))
logger.info("loaded additional mask, %u x %u, sum %u" %
(mask2.shape + (np.sum(mask2),)))
From 84fcd586bcbb7465d448f7a3b0ae8b524460c593 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 19 Oct 2017 17:02:27 +0200
Subject: [PATCH 146/363] Properly migrated MoonFlowerScan
---
ptypy/core/data.py | 64 ++++++++-----------------------
templates/minimal_prep_and_run.py | 8 ++--
2 files changed, 19 insertions(+), 53 deletions(-)
diff --git a/ptypy/core/data.py b/ptypy/core/data.py
index 2de94c547..d4f5010cd 100644
--- a/ptypy/core/data.py
+++ b/ptypy/core/data.py
@@ -167,12 +167,6 @@ class PtyScan(object):
positions available
userlevel = 1
- [recipe]
- type = Param
- default =
- help = Data preparation recipe container
- doc = Will be deprecated soon.
-
[label]
type = str
default = None
@@ -1442,46 +1436,23 @@ class MoonFlowerScan(PtyScan):
Can be None, (dimx, dimy), or dim. In the latter case shape will be (dim, dim).
userlevel = 1
- [center]
- type = tuple, str
- default = 'fftshift'
- help = Center (pixel) of the optical axes in raw data
- doc = If ``None``, this parameter will be set by :py:data:`~.scan.data.auto_center` or elsewhere
- userlevel = 1
-
- [psize]
- type = float, tuple
- default = 0.000172
- help = Detector pixel size
- doc = Dimensions of the detector pixels (in meters)
- userlevel = 0
- lowlim = 0
+ [density]
+ default = 0.2
+ type = float
+ help = Position distance in fraction of illumination frame
- [distance]
+ [photons]
+ default = 1e8
type = float
- default = 7.19
- help = Sample to detector distance
- doc = In meters.
- userlevel = 0
- lowlim = 0
+ help = Total number of photons for Poisson noise
- [energy]
+ [psf]
+ default = 0.
type = float
- default = 7.2
- help = Photon energy of the incident radiation in keV
- doc =
- userlevel = 0
- lowlim = 0
+ help = Point spread function of the detector
"""
- RECIPE = u.Param(
- # Position distance in fraction of illumination frame
- density=0.2,
- photons=1e8,
- psf=0.
- )
-
def __init__(self, pars=None, **kwargs):
"""
Parent pars are for the
@@ -1498,13 +1469,9 @@ def __init__(self, pars=None, **kwargs):
geo_pars = u.Param({k: self.info[k] for k in keys})
geo = geometry.Geo(pars=geo_pars)
- # Recipe specific things
- r = self.RECIPE.copy()
- r.update(self.info.recipe)
-
# Derive scan pattern
pos = u.Param()
- pos.spacing = geo.resolution * geo.shape * r.density
+ pos.spacing = geo.resolution * geo.shape * p.density
pos.steps = np.int(np.round(np.sqrt(self.num_frames))) + 1
pos.extent = pos.steps * pos.spacing
pos.model = 'round'
@@ -1521,10 +1488,11 @@ def __init__(self, pars=None, **kwargs):
# Get probe
moon = resources.moon_pr(self.geo.shape)
- moon /= np.sqrt(u.abs2(moon).sum() / r.photons)
+ moon /= np.sqrt(u.abs2(moon).sum() / p.photons)
self.pr = moon
self.load_common_in_parallel = True
- self.r = r
+
+ self.p = p
def load_positions(self):
return self.pos
@@ -1542,8 +1510,8 @@ def load(self, indices):
self.pr * self.obj[p[k][0]:p[k][0] + s[0],
p[k][1]:p[k][1] + s[1]]))
- if self.r.psf > 0.:
- intensity_j = u.gf(intensity_j, self.r.psf)
+ if self.p.psf > 0.:
+ intensity_j = u.gf(intensity_j, self.p.psf)
raw[k] = np.random.poisson(intensity_j).astype(np.int32)
diff --git a/templates/minimal_prep_and_run.py b/templates/minimal_prep_and_run.py
index d4227b55d..747b5805a 100644
--- a/templates/minimal_prep_and_run.py
+++ b/templates/minimal_prep_and_run.py
@@ -25,14 +25,12 @@
p.scans.MF.data.num_frames = 100
p.scans.MF.data.save = None
-## special recipe paramters for this scan ##
-p.scans.MF.data.recipe = u.Param()
# position distance in fraction of illumination frame
-p.scans.MF.data.recipe.density = 0.2
+p.scans.MF.data.density = 0.2
# total number of photon in empty beam
-p.scans.MF.data.recipe.photons = 1e8
+p.scans.MF.data.photons = 1e8
# Gaussian FWHM of possible detector blurring
-p.scans.MF.data.recipe.psf = 0.
+p.scans.MF.data.psf = 0.
# attach a reconstrucion engine
p.engines = u.Param()
From 1cda9081852a447f54b7755bab76503be39baa7b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 19 Oct 2017 18:00:06 +0200
Subject: [PATCH 147/363] No defaults for scan.data or engines.*, plus a typo
---
ptypy/core/manager.py | 2 +-
ptypy/core/ptycho.py | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 4825bd97d..b45c59d54 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -47,7 +47,7 @@ class ScanModel(object):
Defaults:
[name]
- default = DM
+ default = Full
type = str
help =
doc =
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index 03199515a..1bfc6bde7 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -225,7 +225,7 @@ class Ptycho(Base):
doc =
[engines.*]
- default = @engine.DM
+ default =
type = @engine.*
help = Wildcard entry for list of engines to run. See :py:data:`engine`
doc = The value of engines.*.name is used to choose among the available engines.
@@ -237,7 +237,7 @@ class Ptycho(Base):
doc =
[scan.data]
- default = @scandata.MoonFlowerScan
+ default =
type = @scandata.*
help = Link to container for data preparation
doc =
From c8f76b906771d821dc89091e9c242e0a8a317893 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Fri, 20 Oct 2017 08:34:02 +0200
Subject: [PATCH 148/363] Adapted PtydScan - needs more work
---
ptypy/core/data.py | 6 +++++
templates/minimal_load_and_run.py | 42 +++----------------------------
2 files changed, 9 insertions(+), 39 deletions(-)
diff --git a/ptypy/core/data.py b/ptypy/core/data.py
index d4f5010cd..6fccfd785 100644
--- a/ptypy/core/data.py
+++ b/ptypy/core/data.py
@@ -1208,6 +1208,12 @@ class PtydScan(PtyScan):
Defaults:
+ [name]
+ default = PtydScan
+ type = str
+ help =
+ doc =
+
[source]
default = 'scan.ptyd'
type = str
diff --git a/templates/minimal_load_and_run.py b/templates/minimal_load_and_run.py
index eac8977bf..09f69ac73 100644
--- a/templates/minimal_load_and_run.py
+++ b/templates/minimal_load_and_run.py
@@ -11,49 +11,13 @@
p.verbose_level = 3
p.io = u.Param()
p.io.home = "/tmp/ptypy/"
-p.autosave = None
p.scans = u.Param()
p.scans.MF = u.Param()
p.scans.MF.data= u.Param()
-p.scans.MF.data.source = 'sample.ptyd'#'file'
-p.scans.MF.data.dfile = None#'sample.ptyd'
-
-p.engine = u.Param()
-
-## Common defaults for all engines
-p.engine.common = u.Param()
-# Total number of iterations
-p.engine.common.numiter = 100
-# Number of iterations to be executed in one go
-p.engine.common.numiter_contiguous = 1
-# Fraction of valid probe area (circular) in probe frame
-p.engine.common.probe_support = None
-# Number of iterations before probe update starts
-p.engine.common.probe_update_start = 2
-# Clip object amplitude into this intrervall
-p.engine.common.clip_object = None # [0,1]
-
-## DM default parameters
-p.engine.DM = u.Param()
-p.engine.DM.name = "DM"
-# HIO parameter
-p.engine.DM.alpha = 1
-# Probe fraction kept from iteration to iteration
-p.engine.DM.probe_inertia = 0.01
-# Object fraction kept from iteration to iteration
-p.engine.DM.object_inertia = 0.1
-# If False: update object before probe
-p.engine.DM.update_object_first = True
-# Gaussian smoothing (FWHM, pixel units) of object
-p.engine.DM.obj_smooth_std = 10
-# Loop the overlap constraint until probe changes lesser than this fraction
-p.engine.DM.overlap_converge_factor = 0.5
-# Maximum iterations to be spent inoverlap constraint
-p.engine.DM.overlap_max_iterations = 100
-# If rms of model vs diffraction data is smaller than this fraction,
-# Fourier constraint is considered fullfilled
-p.engine.DM.fourier_relax_factor = 0.05
+p.scans.MF.data.name = 'PtydScan'
+p.scans.MF.data.source = '/tmp/ptypy/sample.ptyd'#'file'
+p.scans.MF.data.dfile = 'out.ptyd'
p.engines = u.Param()
p.engines.engine00 = u.Param()
From 8794aef33f42a252edc39fd099314828f3b60412 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Fri, 20 Oct 2017 10:52:02 +0200
Subject: [PATCH 149/363] Adapted ALS5321Scan - tested
---
ptypy/experiment/ALS_5321.py | 116 ++++++++++++++++++++++++-----------
ptypy/experiment/__init__.py | 2 +-
2 files changed, 80 insertions(+), 38 deletions(-)
diff --git a/ptypy/experiment/ALS_5321.py b/ptypy/experiment/ALS_5321.py
index af65c51c6..587318e9a 100644
--- a/ptypy/experiment/ALS_5321.py
+++ b/ptypy/experiment/ALS_5321.py
@@ -7,75 +7,117 @@
import ptypy
from ptypy.core.data import PtyScan
import ptypy.utils as u
+from ptypy.utils.descriptor import defaults_tree
import h5py
import numpy as np
import time
logger = u.verbose.logger
-# Default recipe tree.
-RECIPE = u.Param()
-RECIPE.dataPath = None
-RECIPE.datafile = None
-RECIPE.maskfile = None
-RECIPE.energy = 0.820
-RECIPE.CXI_PATHS = u.Param()
-RECIPE.CXI_PATHS.CXI_DATA_PATH = 'entry_1/data_1/data'
-RECIPE.CXI_PATHS.CXI_MASK_PATH = 'mask'
-RECIPE.CXI_PATHS.CXI_POS_PATH = 'entry_1/data_1/translation'
-RECIPE.CXI_PATHS.CXI_DISTANCE = 'entry_1/instrument_1/detector_1/distance'
-RECIPE.CXI_PATHS.CXI_PSIZES = [
- 'entry_1/instrument_1/detector_1/x_pixel_size',
- 'entry_1/instrument_1/detector_1/y_pixel_size'
-]
-
-
+@defaults_tree.parse_doc('scandata.ALS5321Scan')
class ALS5321Scan(PtyScan):
"""
Basic class to load 5.3.2.1 data offline.
+
+ Defaults:
+
+ [name]
+ default = ALS5321Scan
+ type = str
+ help =
+
+ [dataPath]
+ default = None
+ type = str
+ help = Path to folder containing the data
+
+    [datafile]
+ default = None
+ type = str
+ help = CXI data file
+
+ [maskfile]
+ default = None
+ type = str
+ help = Optional mask file
+ doc = Should contain an array called 'mask' at the root level.
+
+ [energy]
+ default = 0.820
+ type = float
+ help = Beam energy
+
+ [CXI_PATHS]
+ default = None
+ type = Param
+ help = Container for CXI path options
+
+ [CXI_PATHS.CXI_DATA_PATH]
+ default = 'entry_1/data_1/data'
+ type = str
+ help = Data path within the CXI file
+
+ [CXI_PATHS.CXI_MASK_PATH]
+ default = 'mask'
+ type = str
+ help = Mask path within the CXI file
+
+ [CXI_PATHS.CXI_POS_PATH]
+ default = 'entry_1/data_1/translation'
+ type = str
+ help = Positions path within the CXI file
+
+ [CXI_PATHS.CXI_DISTANCE]
+ default = 'entry_1/instrument_1/detector_1/distance'
+ type = str
+ help = Distance path within the CXI file
+
+ [CXI_PATHS.CXI_PSIZES]
+ default = ['entry_1/instrument_1/detector_1/x_pixel_size', 'entry_1/instrument_1/detector_1/y_pixel_size']
+ type = list
+    help = Pixel size paths within the CXI file
+
"""
def __init__(self, pars=None, **kwargs):
- # Get the default parameter tree and add a recipe branch.
- p = PtyScan.DEFAULT.copy(depth=10)
- p.recipe = RECIPE.copy()
- p.update(pars, in_place_depth=10)
+ # Get the default parameter tree.
+ p = self.DEFAULT.copy(99)
+ p.update(pars)
+
+ # Call the base class constructor with the new updated parameters.
+ # This constructor only reads params, it doesn't modify them.
+ super(ALS5321Scan, self).__init__(p, **kwargs)
# Extract geometrical information
- filename = p.recipe.dataPath + p.recipe.datafile
+ filename = p.dataPath + p.datafile
with h5py.File(filename, 'r') as hf:
- p.energy = RECIPE.energy
- p.distance = hf.get(RECIPE.CXI_PATHS.CXI_DISTANCE).value
- p.psize = [hf.get(RECIPE.CXI_PATHS.CXI_PSIZES[0]).value,
- hf.get(RECIPE.CXI_PATHS.CXI_PSIZES[1]).value]
-
- # Call the base class constructor with the new updated parameters.
- # This constructor only reads params, it doesn't modify them. We've
- # already put the kwargs in p, so we don't need to pass them here.
- super(ALS5321Scan, self).__init__(p)
+ self.info.energy = p.energy
+ self.info.distance = hf.get(p.CXI_PATHS.CXI_DISTANCE).value
+ self.info.psize = [hf.get(p.CXI_PATHS.CXI_PSIZES[0]).value,
+ hf.get(p.CXI_PATHS.CXI_PSIZES[1]).value]
def load_positions(self):
- filename = self.info.recipe.dataPath + self.info.recipe.datafile
+ filename = self.info.dataPath + self.info.datafile
# first get the total number of positions
with h5py.File(filename, 'r') as hf:
- data = hf.get(self.info.recipe.CXI_PATHS.CXI_POS_PATH)
+ data = hf.get(self.info.CXI_PATHS.CXI_POS_PATH)
positions = np.asarray(data)[:,:2]
positions = np.fliplr(positions)
return positions
def load(self, indices):
raw, weights, positions = {}, {}, {}
- filename = self.info.recipe.dataPath + self.info.recipe.datafile
+ filename = self.info.dataPath + self.info.datafile
with h5py.File(filename) as hf:
- data = hf.get(self.info.recipe.CXI_PATHS.CXI_DATA_PATH)
+ data = hf.get(self.info.CXI_PATHS.CXI_DATA_PATH)
for i in indices:
raw[i] = np.asarray(data[i])
return raw, positions, weights
def load_weight(self):
- filename = self.info.recipe.dataPath + self.info.recipe.maskfile
+ filename = self.info.dataPath + self.info.maskfile
with h5py.File(filename) as hf:
- mask = hf.get(self.info.recipe.CXI_PATHS.CXI_MASK_PATH)
+ mask = hf.get(self.info.CXI_PATHS.CXI_MASK_PATH)
mask = np.asarray(mask)
return mask
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index 3f813706e..7bf5f6d32 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -30,7 +30,7 @@
# from optiklabor import FliSpecScanMultexp
# from UCL import UCLLaserScan
from nanomax import NanomaxStepscanMay2017, NanomaxStepscanNov2016, NanomaxFlyscanJune2017
-# from ALS_5321 import ALS5321Scan
+from ALS_5321 import ALS5321Scan
if __name__ == "__main__":
From 75f58b856402f7aeb04c4f31d919a56924b784d9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Fri, 20 Oct 2017 11:28:15 +0200
Subject: [PATCH 150/363] Adapted UCLLaserScan - not tested
---
ptypy/experiment/UCL.py | 369 ++++++++++++++++++++++-------------
ptypy/experiment/__init__.py | 2 +-
2 files changed, 233 insertions(+), 138 deletions(-)
diff --git a/ptypy/experiment/UCL.py b/ptypy/experiment/UCL.py
index c87e4a2a0..be1c90288 100644
--- a/ptypy/experiment/UCL.py
+++ b/ptypy/experiment/UCL.py
@@ -16,90 +16,185 @@
from ..core.paths import Paths
#from ..core import DEFAULT_io as IO_par
from ..core import Ptycho
+from ..utils.descriptor import defaults_tree
IO_par = Ptycho.DEFAULT['io']
logger = u.verbose.logger
-# Recipe defaults
-RECIPE = u.Param()
-# Experiment identifier
-RECIPE.experimentID = None
-# Scan number
-RECIPE.scan_number = None
-RECIPE.dark_number = None
-RECIPE.flat_number = None
-RECIPE.energy = None
-RECIPE.lam = None
-# Distance from object to screen
-RECIPE.z = None
-# Name of the detector as specified in the nexus file
-RECIPE.detector_name = None
-# Motor names to determine the sample translation
-RECIPE.motors = ['t1_sx', 't1_sy']
-# Motor conversion factor to meters
-RECIPE.motors_multiplier = 1e-3
-RECIPE.base_path = './'
-RECIPE.data_file_path = '%(base_path)s' + 'raw/%(scan_number)06d'
-RECIPE.dark_file_path = '%(base_path)s' + 'raw/%(dark_number)06d'
-RECIPE.flat_file_path = '%(base_path)s' + 'raw/%(flat_number)06d'
-RECIPE.mask_file = None # '%(base_path)s' + 'processing/mask.h5'
-# Use flat as Empty Probe (EP) for probe sharing;
-# needs to be set to True in the recipe of the scan that will act as EP
-RECIPE.use_EP = False
-# Apply hot pixel correction
-RECIPE.remove_hot_pixels = u.Param(
- # Initiate by setting to True;
- # DEFAULT parameters will be used if not specified otherwise
- apply=False,
- # Size of the window on which the median filter will be applied
- # around every data point
- size=3,
- # Tolerance multiplied with the standard deviation of the data array
- # subtracted by the blurred array (difference array)
- # yields the threshold for cutoff.
- tolerance=10,
- # If True, edges of the array are ignored, which speeds up the code
- ignore_edges=False,
-)
-
-# Apply Richardson Lucy deconvolution
-RECIPE.rl_deconvolution = u.Param(
- # Initiate by setting to True;
- # DEFAULT parameters will be used if not specified otherwise
- apply=False,
- # Number of iterations
- numiter=5,
- # Provide MTF from file; no loading procedure present for now,
- # loading through recon script required
- dfile=None,
- # Create fake psf as a sum of gaussians if no MTF provided
- gaussians=u.Param(
- # DEFAULT list of gaussians for Richardson Lucy deconvolution
- g1=u.Param(
- # Standard deviation in x direction
- std_x=1.0,
- # Standard deviation in y direction
- std_y=1.0,
- # Offset / shift in x direction
- off_x=0.,
- # Offset / shift in y direction
- off_y=0.,
- )
- ),
-)
-
-# Generic defaults
-UCLDEFAULT = PtyScan.DEFAULT.copy()
-UCLDEFAULT.recipe = RECIPE
-UCLDEFAULT.auto_center = False
-UCLDEFAULT.orientation = (False, False, False)
-
+@defaults_tree.parse_doc('scandata.UCLLaserScan')
class UCLLaserScan(PtyScan):
"""
Laser imaging setup (UCL) data preparation class.
+
+ Defaults:
+
+ [name]
+ default = UCLLaserScan
+ type = str
+ help =
+
+ [auto_center]
+ default = False
+ type = bool
+ help =
+
+ [orientation]
+ default = (False, False, False)
+ type = tuple
+ help =
+
+ [scan_number]
+ default = None
+ type = int
+ help = Scan number
+
+ [dark_number]
+ default = None
+ type = int
+ help =
+
+ [flat_number]
+ default = None
+ type = int
+ help =
+
+ [energy]
+ default = None
+ type = float
+ help =
+
+ [lam]
+ default = None
+ type = float
+ help =
+
+ [z]
+ default = None
+ type = float
+ help = Distance from object to screen
+
+ [detector_name]
+ default = None
+ type = str
+ help = Name of the detector as specified in the nexus file
+
+ [motors]
+ default = ['t1_sx', 't1_sy']
+ type = list
+ help = Motor names to determine the sample translation
+
+ [motors_multiplier]
+ default = 1e-3
+ type = float
+ help = Motor conversion factor to meters
+
+ [base_path]
+ default = './'
+ type = str
+ help =
+
+ [data_file_path]
+ default = '%(base_path)s' + 'raw/%(scan_number)06d'
+ type = str
+ help =
+
+ [dark_file_path]
+ default = '%(base_path)s' + 'raw/%(dark_number)06d'
+ type = str
+ help =
+
+ [flat_file_path]
+ default = '%(base_path)s' + 'raw/%(flat_number)06d'
+ type = str
+ help =
+
+ [mask_file]
+ default = None
+ type = str
+ help =
+
+ [use_EP]
+ default = False
+ type = bool
+    help = Use flat as Empty Probe (EP) for probe sharing; needs to be set to True for the scan that will act as EP
+
+ [remove_hot_pixels]
+ default =
+ type = Param
+ help = Apply hot pixel correction
+
+ [remove_hot_pixels.apply]
+ default = False
+ type = bool
+ help = Initiate by setting to True
+
+ [remove_hot_pixels.size]
+ default = 3
+ type = int
+ help = Size of the window on which the median filter will be applied around every data point
+
+ [remove_hot_pixels.tolerance]
+ default = 10
+ type = int
+ help = Tolerance multiplied with the standard deviation of the data array subtracted by the blurred array (difference array) yields the threshold for cutoff.
+
+ [remove_hot_pixels.ignore_edges]
+ default = False
+ type = bool
+ help = If True, edges of the array are ignored, which speeds up the code
+
+ [rl_deconvolution]
+ default =
+ type = Param
+ help = Apply Richardson Lucy deconvolution
+
+ [rl_deconvolution.apply]
+ default = False
+ type = bool
+ help = Initiate by setting to True
+
+ [rl_deconvolution.numiter]
+ default = 5
+ type = int
+ help = Number of iterations
+
+ [rl_deconvolution.dfile]
+ default = None
+ type = str
+ help = Provide MTF from file; no loading procedure present for now, loading through recon script required
+
+ [rl_deconvolution.gaussians]
+ default =
+ type = Param
+ help = Create fake psf as a sum of gaussians if no MTF provided
+
+ [rl_deconvolution.gaussians.g1]
+ default =
+ type = Param
+ help = list of gaussians for Richardson Lucy deconvolution
+
+ [rl_deconvolution.gaussians.g1.std_x]
+ default = 1.0
+ type = float
+ help = Standard deviation in x direction
+
+ [rl_deconvolution.gaussians.g1.std_y]
+ default = 1.0
+ type = float
+ help = Standard deviation in y direction
+
+ [rl_deconvolution.gaussians.g1.off_x]
+ default = 0.0
+ type = float
+ help = Offset / shift in x direction
+
+ [rl_deconvolution.gaussians.g1.off_y]
+ default = 0.0
+ type = float
+ help = Offset / shift in y direction
+
"""
- DEFAULT = UCLDEFAULT
def __init__(self, pars=None, **kwargs):
"""
@@ -110,14 +205,14 @@ def __init__(self, pars=None, **kwargs):
:param kwargs: key-value pair
- additional parameters.
"""
- recipe_default = RECIPE.copy()
- recipe_default.update(pars.recipe, in_place_depth=1)
- pars.recipe.update(recipe_default)
+ p = self.DEFAULT.copy(99)
+ p.update(pars)
+ pars = p
super(UCLLaserScan, self).__init__(pars, **kwargs)
# Try to extract base_path to access data files
- if self.info.recipe.base_path is None:
+ if self.info.base_path is None:
d = os.getcwd()
base_path = None
while True:
@@ -130,32 +225,32 @@ def __init__(self, pars=None, **kwargs):
if base_path is None:
raise RuntimeError('Could not guess base_path.')
else:
- self.info.recipe.base_path = base_path
+ self.info.base_path = base_path
# Construct path names
- self.data_path = self.info.recipe.data_file_path % self.info.recipe
+ self.data_path = self.info.data_file_path % self.info
log(3, 'Will read data from directory %s' % self.data_path)
- if self.info.recipe.dark_number is None:
+ if self.info.dark_number is None:
self.dark_file = None
log(3, 'No data for dark')
else:
- self.dark_path = self.info.recipe.dark_file_path % self.info.recipe
+ self.dark_path = self.info.dark_file_path % self.info
log(3, 'Will read dark from directory %s' % self.dark_path)
- if self.info.recipe.flat_number is None:
+ if self.info.flat_number is None:
self.flat_file = None
log(3, 'No data for flat')
else:
- self.flat_path = self.info.recipe.flat_file_path % self.info.recipe
+ self.flat_path = self.info.flat_file_path % self.info
log(3, 'Will read flat from file %s' % self.flat_path)
# Load data information
self.instrument = io.h5read(self.data_path + '/%06d_%04d.nxs'
- % (self.info.recipe.scan_number, 1),
+ % (self.info.scan_number, 1),
'entry.instrument')['instrument']
# Extract detector name if not set or wrong
- if (self.info.recipe.detector_name is None
- or self.info.recipe.detector_name
+ if (self.info.detector_name is None
+ or self.info.detector_name
not in self.instrument.keys()):
detector_name = None
for k in self.instrument.keys():
@@ -167,14 +262,14 @@ def __init__(self, pars=None, **kwargs):
raise RuntimeError(
'Not possible to extract detector name. '
'Please specify in recipe instead.')
- elif (self.info.recipe.detector_name is not None
- and detector_name is not self.info.recipe.detector_name):
+ elif (self.info.detector_name is not None
+ and detector_name is not self.info.detector_name):
u.log(2, 'Detector name changed from %s to %s.'
- % (self.info.recipe.detector_name, detector_name))
+ % (self.info.detector_name, detector_name))
else:
- detector_name = self.info.recipe.detector_name
+ detector_name = self.info.detector_name
- self.info.recipe.detector_name = detector_name
+ self.info.detector_name = detector_name
# Set up dimensions for cropping
try:
@@ -191,7 +286,7 @@ def __init__(self, pars=None, **kwargs):
# If center unset, extract offset from raw data
elif center == 'unset':
raw_shape = self.instrument[
- self.info.recipe.detector_name]['data'].shape
+ self.info.detector_name]['data'].shape
offset_x = raw_shape[-1] // 2
offset_y = raw_shape[-2] // 2
else:
@@ -202,13 +297,13 @@ def __init__(self, pars=None, **kwargs):
xdim = (offset_x - pars.shape // 2, offset_x + pars.shape // 2)
ydim = (offset_y - pars.shape // 2, offset_y + pars.shape // 2)
- self.info.recipe.array_dim = [xdim, ydim]
+ self.info.array_dim = [xdim, ydim]
# Create the ptyd file name if not specified
if self.info.dfile is None:
home = Paths(IO_par).home
self.info.dfile = ('%s/prepdata/data_%d.ptyd'
- % (home, self.info.recipe.scan_number))
+ % (home, self.info.scan_number))
log(3, 'Save file is %s' % self.info.dfile)
log(4, u.verbose.report(self.info))
@@ -224,9 +319,9 @@ def load_weight(self):
"""
# FIXME: do something better here. (detector-dependent)
# Load mask as weight
- if self.info.recipe.mask_file is not None:
+ if self.info.mask_file is not None:
return io.h5read(
- self.info.recipe.mask_file, 'mask')['mask'].astype(float)
+ self.info.mask_file, 'mask')['mask'].astype(float)
def load_positions(self):
"""
@@ -237,8 +332,8 @@ def load_positions(self):
"""
# Load positions from file if possible.
motor_positions = io.h5read(
- self.info.recipe.base_path + '/raw/%06d/%06d_metadata.h5'
- % (self.info.recipe.scan_number, self.info.recipe.scan_number),
+ self.info.base_path + '/raw/%06d/%06d_metadata.h5'
+ % (self.info.scan_number, self.info.scan_number),
'positions')['positions']
# If no positions are found at all, raise error.
@@ -246,7 +341,7 @@ def load_positions(self):
raise RuntimeError('Could not find motors.')
# Apply motor conversion factor and create transposed array.
- mmult = u.expect2(self.info.recipe.motors_multiplier)
+ mmult = u.expect2(self.info.motors_multiplier)
positions = motor_positions * mmult[0]
return positions
@@ -261,14 +356,14 @@ def load_common(self):
common = u.Param()
# Load dark.
- if self.info.recipe.dark_number is not None:
+ if self.info.dark_number is not None:
dark = [io.h5read(self.dark_path + '/%06d_%04d.nxs'
- % (self.info.recipe.dark_number, j),
+ % (self.info.dark_number, j),
'entry.instrument.detector.data')['data'][0][
- self.info.recipe.array_dim[1][0]:
- self.info.recipe.array_dim[1][1],
- self.info.recipe.array_dim[0][0]:
- self.info.recipe.array_dim[0][1]].astype(np.float32)
+ self.info.array_dim[1][0]:
+ self.info.array_dim[1][1],
+ self.info.array_dim[0][0]:
+ self.info.array_dim[0][1]].astype(np.float32)
for j in np.arange(1, len(os.listdir(self.dark_path)))]
dark = np.array(dark).mean(0)
@@ -276,14 +371,14 @@ def load_common(self):
log(3, 'Dark loaded successfully.')
# Load flat.
- if self.info.recipe.flat_number is not None:
+ if self.info.flat_number is not None:
flat = [io.h5read(self.flat_path + '/%06d_%04d.nxs'
- % (self.info.recipe.flat_number, j),
+ % (self.info.flat_number, j),
'entry.instrument.detector.data')['data'][0][
- self.info.recipe.array_dim[1][0]:
- self.info.recipe.array_dim[1][1],
- self.info.recipe.array_dim[0][0]:
- self.info.recipe.array_dim[0][1]].astype(np.float32)
+ self.info.array_dim[1][0]:
+ self.info.array_dim[1][1],
+ self.info.array_dim[0][0]:
+ self.info.array_dim[0][1]].astype(np.float32)
for j in np.arange(1, len(os.listdir(self.flat_path)))]
flat = np.array(flat).mean(0)
@@ -309,12 +404,12 @@ def load(self, indices):
for j in np.arange(1, len(indices) + 1):
data = io.h5read(self.data_path + '/%06d_%04d.nxs'
- % (self.info.recipe.scan_number, j),
+ % (self.info.scan_number, j),
'entry.instrument.detector.data')['data'][0][
- self.info.recipe.array_dim[1][0]:
- self.info.recipe.array_dim[1][1],
- self.info.recipe.array_dim[0][0]:
- self.info.recipe.array_dim[0][1]].astype(np.float32)
+ self.info.array_dim[1][0]:
+ self.info.array_dim[1][1],
+ self.info.array_dim[0][0]:
+ self.info.array_dim[0][1]].astype(np.float32)
raw[j - 1] = data
log(3, 'Data loaded successfully.')
@@ -345,43 +440,43 @@ def correct(self, raw, weights, common):
- dict: contains modified weights.
"""
# Apply hot pixel removal
- if self.info.recipe.remove_hot_pixels.apply:
+ if self.info.remove_hot_pixels.apply:
u.log(3, 'Applying hot pixel removal...')
for j in raw:
raw[j] = u.remove_hot_pixels(
raw[j],
- self.info.recipe.remove_hot_pixels.size,
- self.info.recipe.remove_hot_pixels.tolerance,
- self.info.recipe.remove_hot_pixels.ignore_edges)[0]
+ self.info.remove_hot_pixels.size,
+ self.info.remove_hot_pixels.tolerance,
+ self.info.remove_hot_pixels.ignore_edges)[0]
- if self.info.recipe.flat_number is not None:
+ if self.info.flat_number is not None:
common.dark = u.remove_hot_pixels(
common.dark,
- self.info.recipe.remove_hot_pixels.size,
- self.info.recipe.remove_hot_pixels.tolerance,
- self.info.recipe.remove_hot_pixels.ignore_edges)[0]
+ self.info.remove_hot_pixels.size,
+ self.info.remove_hot_pixels.tolerance,
+ self.info.remove_hot_pixels.ignore_edges)[0]
- if self.info.recipe.flat_number is not None:
+ if self.info.flat_number is not None:
common.flat = u.remove_hot_pixels(
common.flat,
- self.info.recipe.remove_hot_pixels.size,
- self.info.recipe.remove_hot_pixels.tolerance,
- self.info.recipe.remove_hot_pixels.ignore_edges)[0]
+ self.info.remove_hot_pixels.size,
+ self.info.remove_hot_pixels.tolerance,
+ self.info.remove_hot_pixels.ignore_edges)[0]
u.log(3, 'Hot pixel removal completed.')
# Apply deconvolution
- if self.info.recipe.rl_deconvolution.apply:
+ if self.info.rl_deconvolution.apply:
u.log(3, 'Applying deconvolution...')
# Use mtf from a file if provided in recon script
- if self.info.recipe.rl_deconvolution.dfile is not None:
+ if self.info.rl_deconvolution.dfile is not None:
mtf = self.info.rl_deconvolution.dfile
# Create fake psf as a sum of gaussians from parameters
else:
gau_sum = 0
for k in (
- self.info.recipe.rl_deconvolution.gaussians.iteritems()):
+ self.info.rl_deconvolution.gaussians.iteritems()):
gau_sum += u.gaussian2D(raw[0].shape[0],
k[1].std_x,
k[1].std_y,
@@ -395,18 +490,18 @@ def correct(self, raw, weights, common):
raw[j] = u.rl_deconvolution(
raw[j],
mtf,
- self.info.recipe.rl_deconvolution.numiter)
+ self.info.rl_deconvolution.numiter)
u.log(3, 'Deconvolution completed.')
# Apply flat and dark, only dark, or no correction
- if (self.info.recipe.flat_number is not None
- and self.info.recipe.dark_number is not None):
+ if (self.info.flat_number is not None
+ and self.info.dark_number is not None):
for j in raw:
raw[j] = (raw[j] - common.dark) / (common.flat - common.dark)
raw[j][raw[j] < 0] = 0
data = raw
- elif self.info.recipe.dark_number is not None:
+ elif self.info.dark_number is not None:
for j in raw:
raw[j] = raw[j] - common.dark
raw[j][raw[j] < 0] = 0
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index 7bf5f6d32..c8deb55a7 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -28,7 +28,7 @@
# from AMO_LCLS import AMOScan
# from DiProI_FERMI import DiProIFERMIScan
# from optiklabor import FliSpecScanMultexp
-# from UCL import UCLLaserScan
+from UCL import UCLLaserScan
from nanomax import NanomaxStepscanMay2017, NanomaxStepscanNov2016, NanomaxFlyscanJune2017
from ALS_5321 import ALS5321Scan
From fc0e0fa9ab882be824fe65c0bbd24fcb3a36d6de Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Fri, 20 Oct 2017 13:03:16 +0200
Subject: [PATCH 151/363] Adapted FliSpecScanMultexp - not tested
---
ptypy/experiment/__init__.py | 2 +-
ptypy/experiment/optiklabor.py | 118 ++++++++++++++++++++++++++-------
2 files changed, 94 insertions(+), 26 deletions(-)
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index c8deb55a7..9bbf56b38 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -27,7 +27,7 @@
# from ID16Anfp import ID16AScan
# from AMO_LCLS import AMOScan
# from DiProI_FERMI import DiProIFERMIScan
-# from optiklabor import FliSpecScanMultexp
+from optiklabor import FliSpecScanMultexp
from UCL import UCLLaserScan
from nanomax import NanomaxStepscanMay2017, NanomaxStepscanNov2016, NanomaxFlyscanJune2017
from ALS_5321 import ALS5321Scan
diff --git a/ptypy/experiment/optiklabor.py b/ptypy/experiment/optiklabor.py
index 14f56156a..8e82c74f9 100644
--- a/ptypy/experiment/optiklabor.py
+++ b/ptypy/experiment/optiklabor.py
@@ -14,43 +14,111 @@
from .. import utils as u
#from pyE17 import io as io17
from ..core.data import PtyScan
+from ..utils.descriptor import defaults_tree
logger = u.verbose.logger
-DEFAULT = u.Param()
-DEFAULT.base_path = '/data/CDI/opticslab_sxdm_2013/'
-DEFAULT.scan_number = 74 #35 # scan number
-DEFAULT.dark_number = 72
-#DEFAULT.scan_label = 'S%05d' % p.scan_number
-DEFAULT.exp_string='exp_time'
-DEFAULT.hdr_thresholds = [500,50000]
-DEFAULT.lam = 650e-9
-
-DEFAULT.energy = 1.2398e-9 /DEFAULT.lam
-DEFAULT.z = 0.158 # Distance from object to screen
-DEFAULT.psize_det = 24e-6 # Camera pixel size
-DEFAULT.center = 'auto'
-DEFAULT.orientation = (True,True,False)
-# IO
-DEFAULT.base_path = '/data/CDI/opticslab_sxdm_2013/'
-#DEFAULT.base_path = './'
-DEFAULT.scan_dir = 'ccdfli/S00000-00999/'
-#DEFAULT.scan_path = DEFAULT.base_path + 'raw/'
-#DEFAULT.log_file_pattern = '%(base_path)s' + '/spec/dat-files/spec_started_2014_07_28_2158.dat' # log file
-DEFAULT.log_file_pattern = '%(base_path)s' + 'spec/dat-files/spec_started_2013_11_21_1659.dat' # log file
-DEFAULT.data_dir_pattern = '%(base_path)s'+'%(scan_dir)s'+ 'S%(scan_number)05d/'
-DEFAULT.dark_dir_pattern = '%(base_path)s'+'%(scan_dir)s'+ 'S%(dark_number)05d/'
-
pp = u.Param()
pp.filename = './foo.ptyd'
pp.roi =None
pp.num_frames = 50
pp.save = 'extlink'
+
+@defaults_tree.parse_doc('scandata.FliSpecScanMultexp')
class FliSpecScanMultexp(PtyScan):
+ """
+ Defaults:
+
+ [name]
+ default = FliSpecScanMultexp
+ type = str
+ help =
+
+ [base_path]
+ default = '/data/CDI/opticslab_sxdm_2013/'
+ type = str
+ help =
+
+ [scan_number]
+ default = 74
+ type = int
+ help =
+
+ [dark_number]
+ default = 72
+ type = int
+ help =
+
+ [exp_string]
+ default = 'exp_time'
+ type = str
+ help =
+
+ [hdr_thresholds]
+ default = [500,50000]
+ type = list
+ help =
+
+ [lam]
+ default = 650e-9
+ type = float
+ help =
+
+ [energy]
+ default = None
+ type = float
+ help =
+
+ [z]
+ default = 0.158
+ type = float
+ help =
+
+ [psize_det]
+ default = 24e-6
+ type = float
+ help =
+
+ [center]
+ default = 'auto'
+ type = str
+ help =
+
+ [orientation]
+ default = (True,True,False)
+ type = tuple
+ help =
+
+ [base_path]
+ default = '/data/CDI/opticslab_sxdm_2013/'
+ type = str
+ help =
+
+ [scan_dir]
+ default = 'ccdfli/S00000-00999/'
+ type = str
+ help =
+
+ [log_file_pattern]
+ default = '%(base_path)sspec/dat-files/spec_started_2013_11_21_1659.dat'
+ type = str
+ help =
+
+ [data_dir_pattern]
+ default = '%(base_path)s%(scan_dir)sS%(scan_number)05d/'
+ type = str
+ help =
+
+ [dark_dir_pattern]
+ default = '%(base_path)s%(scan_dir)sS%(dark_number)05d/'
+      type = str
+ help =
+
+ """
def __init__(self,pars=None,**kwargs):
- p= DEFAULT.copy()
+ p = self.DEFAULT.copy()
if pars is not None:
p.update(pars)
#self.p = pars
From f88239af42fe9a58a5c94612c83dfbe38a001276 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 23 Oct 2017 08:20:42 +0200
Subject: [PATCH 152/363] Adapted DiProIFERMIScan - not tested
---
ptypy/experiment/DiProI_FERMI.py | 171 ++++++++++++++++++++++---------
ptypy/experiment/__init__.py | 2 +-
2 files changed, 121 insertions(+), 52 deletions(-)
diff --git a/ptypy/experiment/DiProI_FERMI.py b/ptypy/experiment/DiProI_FERMI.py
index e791eb333..2de54fd9d 100644
--- a/ptypy/experiment/DiProI_FERMI.py
+++ b/ptypy/experiment/DiProI_FERMI.py
@@ -13,6 +13,7 @@
import numpy as np
import os
from .. import utils as u
+from ..utils.descriptor import defaults_tree
from .. import io
from ..core.data import PtyScan
from ..core.paths import Paths
@@ -29,67 +30,135 @@
FLAT_PATHS = u.Param()
FLAT_PATHS.key = "flat"
-# DiProI recipe default parameters
-RECIPE = u.Param()
-RECIPE.base_path = None
-RECIPE.scan_name = None # this has to be a string (e.g. 'Cycle001')
-RECIPE.run_ID = None # this has to be a string (e.g. 'Scan018')
-RECIPE.dark_name = None # this has to be a string (e.g. 'Dark')
-RECIPE.dark_value = 200. # Used if dark_number is None
-RECIPE.detector_flat_file = None
-RECIPE.h5_file_pattern = '%(base_path)s/imported/%(run_ID)s/%(scan_name)s/rawdata/'
-RECIPE.dark_h5_file_pattern = '%(base_path)s/imported/%(run_ID)s/%(dark_name)s/rawdata/'
-RECIPE.date = None
-RECIPE.motors = ['sample_x', 'sample_y'] # check orientation
-RECIPE.energy = None
-RECIPE.lam = None
-RECIPE.z = None
-RECIPE.motors_multiplier = 1e-3 # DiProI-specific
-RECIPE.mask_file = None # Mask file name
-RECIPE.use_refined_positions = False
-RECIPE.refined_positions_pattern = '%(base_path)s/imported/%(run_ID)s/%(scan_name)s/'
-RECIPE.flat_division = False # Switch for flat division
-RECIPE.dark_subtraction = False # Switch for dark subtraction
-
-# Default generic parameter set from
-DiProI_FERMIDEFAULT = PtyScan.DEFAULT.copy()
-DiProI_FERMIDEFAULT.recipe = RECIPE
-DiProI_FERMIDEFAULT.auto_center = False
-
+@defaults_tree.parse_doc('scandata.DiProIFERMIScan')
class DiProIFERMIScan(PtyScan):
- DEFAULT = DiProI_FERMIDEFAULT
+ """
+ DiProI (FERMI) data preparation class.
+
+ Defaults:
+
+ [name]
+ default = DiProIFERMIScan
+ type = str
+ help =
+
+ [base_path]
+ default = None
+ type = str
+ help =
+
+ [scan_name]
+ default = None
+ type = str
+ help = has to be a string (e.g. 'Cycle001')
+
+ [run_ID]
+ default = None
+ type = str
+ help = has to be a string (e.g. 'Scan018')
+
+ [dark_name]
+ default = None
+ type = str
+ help = has to be a string (e.g. 'Dark')
+
+ [dark_value]
+ default = 200.0
+ type = float
+ help = Used if dark_number is None (?!)
+
+ [detector_flat_file]
+ default = None
+ type = str
+ help =
+
+ [h5_file_pattern]
+ default = '%(base_path)s/imported/%(run_ID)s/%(scan_name)s/rawdata/'
+ type = str
+ help =
+
+ [dark_h5_file_pattern]
+ default = '%(base_path)s/imported/%(run_ID)s/%(dark_name)s/rawdata/'
+ type = str
+ help =
+
+ [date]
+ default = None
+ type = str
+ help =
+
+ [motors]
+ default = ['sample_x', 'sample_y']
+ type = list
+ help = check orientation
+
+ [motors_multiplier]
+ default = 1e-3
+ type = float
+ help = DiProI-specific
+
+ [mask_file]
+ default = None
+ type = str
+ help = Mask file name
+
+ [use_refined_positions]
+ default = False
+ type = bool
+ help =
+
+ [refined_positions_pattern]
+ default = '%(base_path)s/imported/%(run_ID)s/%(scan_name)s/'
+ type = str
+ help =
+
+ [flat_division]
+ default = False
+ type = bool
+ help = Switch for flat division
+
+ [dark_subtraction]
+ default = False
+ type = bool
+ help = Switch for dark subtraction
+
+ [auto_center]
+ default = False
+ type = bool
+ help = Overrides PtyScan default
+
+ """
def __init__(self, pars=None, **kwargs):
"""
DiProI (FERMI) data preparation class.
"""
- # Initialize parent class. All updated parameters are now in self.info
- recipe_default = RECIPE.copy()
- recipe_default.update(pars.recipe, in_place_depth=1)
- pars.recipe.update(recipe_default)
- super(DiProIFERMIScan, self).__init__(pars, **kwargs)
+ p = self.DEFAULT.copy(99)
+ p.update(pars)
+
+ # Initialize parent class. All updated parameters are now in self.info
+ super(DiProIFERMIScan, self).__init__(p, **kwargs)
# Check whether base_path exists
- if self.info.recipe.base_path is None:
+ if self.info.base_path is None:
raise RuntimeError('Base path missing.')
# Construct the file names
self.h5_filename_list = sorted([i for i in os.listdir(
- self.info.recipe.h5_file_pattern % self.info.recipe)
+ self.info.h5_file_pattern % self.info)
if not i.startswith('.')])
# Path to data files
- self.data_path = (self.info.recipe.h5_file_pattern %
- self.info.recipe)
+ self.data_path = (self.info.h5_file_pattern % self.info)
u.log(3, 'Will read data from h5 files in {data_path}'.format(
data_path=self.data_path))
# Path to data files
- self.dark_path = (self.info.recipe.dark_h5_file_pattern %
- self.info.recipe)
+ self.dark_path = (self.info.dark_h5_file_pattern %
+ self.info)
u.log(3, 'Will read dark from h5 files in {dark_path}'.format(
dark_path=self.dark_path))
@@ -105,21 +174,21 @@ def load_weight(self):
"""
# FIXME: do something better here. (detector-dependent)
# Load mask as weight
- if self.info.recipe.mask_file is not None:
- return io.h5read(self.info.recipe.mask_file, 'mask')['mask'].astype(
+ if self.info.mask_file is not None:
+ return io.h5read(self.info.mask_file, 'mask')['mask'].astype(
np.float32)
def load_positions(self):
"""
Load the positions and return as an (N, 2) array.
"""
- mmult = u.expect2(self.info.recipe.motors_multiplier)
+ mmult = u.expect2(self.info.motors_multiplier)
# Load positions
- if self.info.recipe.use_refined_positions:
+ if self.info.use_refined_positions:
# From prepared .h5 file
- positions = io.h5read(self.info.recipe.refined_positions_pattern %
- self.info.recipe + '/Fermi_reconstruction.h5',
+ positions = io.h5read(self.info.refined_positions_pattern %
+ self.info + '/Fermi_reconstruction.h5',
'data.probe_positions')['probe_positions']
positions = [(positions[0, i], positions[1, i])
@@ -147,14 +216,14 @@ def load_common(self):
common = u.Param()
key = H5_PATHS.frame_pattern
- if self.info.recipe.dark_name is not None:
+ if self.info.dark_name is not None:
dark = [io.h5read(self.dark_path + i, key)[key].astype(np.float32)
for i in os.listdir(self.dark_path) if i.startswith('Dark')]
else:
- dark = self.info.recipe.dark_value
+ dark = self.info.dark_value
- if self.info.recipe.detector_flat_file is not None:
- flat = io.h5read(self.info.recipe.detector_flat_file,
+ if self.info.detector_flat_file is not None:
+ flat = io.h5read(self.info.detector_flat_file,
FLAT_PATHS.key)[FLAT_PATHS.key]
else:
flat = 1.
@@ -192,12 +261,12 @@ def correct(self, raw, weights, common):
:return:
"""
# Apply flat and dark, only dark, or no correction
- if self.info.recipe.flat_division and self.info.recipe.dark_subtraction:
+ if self.info.flat_division and self.info.dark_subtraction:
for j in raw:
raw[j] = (raw[j] - common.dark) / (common.flat - common.dark)
raw[j][raw[j] < 0] = 0
data = raw
- elif self.info.recipe.dark_subtraction:
+ elif self.info.dark_subtraction:
for j in raw:
raw[j] = raw[j] - common.dark
raw[j][raw[j] < 0] = 0
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index 9bbf56b38..2e1c8ff19 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -26,7 +26,7 @@
# from plugin import makeScanPlugin
# from ID16Anfp import ID16AScan
# from AMO_LCLS import AMOScan
-# from DiProI_FERMI import DiProIFERMIScan
+from DiProI_FERMI import DiProIFERMIScan
from optiklabor import FliSpecScanMultexp
from UCL import UCLLaserScan
from nanomax import NanomaxStepscanMay2017, NanomaxStepscanNov2016, NanomaxFlyscanJune2017
From bea0f3bc98b12209ae2f28e4eba5e59bcb9f9414 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 23 Oct 2017 08:39:45 +0200
Subject: [PATCH 153/363] Adapted AMOScan - not tested
---
ptypy/experiment/AMO_LCLS.py | 155 ++++++++++++++++++++++++-----------
ptypy/experiment/__init__.py | 2 +-
2 files changed, 108 insertions(+), 49 deletions(-)
diff --git a/ptypy/experiment/AMO_LCLS.py b/ptypy/experiment/AMO_LCLS.py
index 558ba94e3..a10912d0a 100644
--- a/ptypy/experiment/AMO_LCLS.py
+++ b/ptypy/experiment/AMO_LCLS.py
@@ -19,6 +19,7 @@
from ..core.paths import Paths
#from ..core import DEFAULT_io as IO_par
from ..core import Ptycho
+from ..utils.descriptor import defaults_tree
IO_par = Ptycho.DEFAULT['io']
logger = u.verbose.logger
@@ -27,49 +28,107 @@
H5_PATHS = u.Param()
H5_PATHS.frame_pattern = 'data/photons'
-# Recipe defaults
-RECIPE = u.Param()
-RECIPE.experimentID = None # Experiment identifier
-RECIPE.scan_number = None # scan number
-RECIPE.dark_number = None
-RECIPE.flat_number = None
-RECIPE.energy = None
-RECIPE.lam = None # 1.2398e-9 / RECIPE.energy
-RECIPE.z = None # Distance from object to screen
-RECIPE.detector_name = None # Name of the detector as specified in the nexus file
-RECIPE.motors = ['t1_sx', 't1_sy'] # Motor names to determine the sample translation
-RECIPE.motors_multiplier = 1e-3 #for AMO #1e-6 #for CXI # Motor conversion factor to meters
-RECIPE.base_path = './'
-RECIPE.data_file_pattern = '%(base_path)s' + 'input/r%(scan_number)04d.h5'
-RECIPE.dark_file_pattern = '%(base_path)s' + 'input/r%(dark_number)04d.h5'
-RECIPE.flat_file_pattern = '%(base_path)s' + 'input/r%(flat_number)04d.h5'
-RECIPE.mask_file = None # '%(base_path)s' + 'processing/mask.h5'
-RECIPE.averaging_number = 1 # Number of frames to be averaged
-
-
-# Generic defaults
-AMODEFAULT = core.data.PtyScan.DEFAULT.copy()
-AMODEFAULT.recipe = RECIPE
-AMODEFAULT.auto_center = False
-AMODEFAULT.orientation = (False, False, False)
-
+@defaults_tree.parse_doc('scandata.AMOScan')
class AMOScan(core.data.PtyScan):
- DEFAULT = AMODEFAULT
+ """
+ Defaults:
+
+ [name]
+ default = AMOScan
+ type = str
+ help =
+
+ [experimentID]
+ default = None
+ type = str
+ help = Experiment identifier
+
+ [scan_number]
+ default = None
+ type = int
+ help =
+
+ [dark_number]
+ default = None
+ type = int
+ help =
+
+ [flat_number]
+ default = None
+ type = int
+ help =
+
+ [detector_name]
+ default = None
+ type = str
+ help = Name of the detector as specified in the nexus file
+
+ [motors]
+ default = ['t1_sx', 't1_sy']
+ type = list
+ help = Motor names to determine the sample translation
+
+ [motors_multiplier]
+ default = 1e-3
+ type = float
+ help = Motor conversion factor to meters
+ doc = 1e-3 AMO, 1e-6 for CXI
+
+ [base_path]
+ default = './'
+ type = str
+ help =
+
+ [data_file_pattern]
+ default = '%(base_path)sinput/r%(scan_number)04d.h5'
+ type = str
+ help =
+
+ [dark_file_pattern]
+ default = '%(base_path)sinput/r%(dark_number)04d.h5'
+ type = str
+ help =
+
+ [flat_file_pattern]
+ default = '%(base_path)sinput/r%(flat_number)04d.h5'
+ type = str
+ help =
+
+ [mask_file]
+ default = None
+ type = str
+ help =
+
+ [averaging_number]
+ default = 1
+ type = int
+ help = Number of frames to be averaged
+
+ [auto_center]
+ default = False
+ type = bool
+ help = Overrides PtyScan default
+
+ [orientation]
+ default = (False, False, False)
+ type = tuple
+ help = Overrides PtyScan default
+
+ """
def __init__(self, pars=None, **kwargs):
"""
AMO (Atomic Molecular and Optical Science, LCLS) data preparation class.
"""
# Initialise parent class
- RDEFAULT = RECIPE.copy()
- RDEFAULT.update(pars.recipe)
- pars.recipe.update(RDEFAULT)
+ p = self.DEFAULT.copy(99)
+ p.update(pars)
- super(AMOScan, self).__init__(pars, **kwargs)
+ super(AMOScan, self).__init__(p, **kwargs)
# Try to extract base_path to access data files
- if self.info.recipe.base_path is None:
+ if self.info.base_path is None:
d = os.getcwd()
base_path = None
while True:
@@ -82,32 +141,32 @@ def __init__(self, pars=None, **kwargs):
if base_path is None:
raise RuntimeError('Could not guess base_path.')
else:
- self.info.recipe.base_path = base_path
+ self.info.base_path = base_path
# Default scan label
# if self.info.label is None:
# self.info.label = 'S%5d' % rinfo.scan_number
# Construct file names
- self.data_file = self.info.recipe.data_file_pattern % self.info.recipe
+ self.data_file = self.info.data_file_pattern % self.info
log(3, 'Will read data from file %s' % self.data_file)
- if self.info.recipe.dark_number is None:
+ if self.info.dark_number is None:
self.dark_file = None
log(3, 'No data for dark')
else:
- self.dark_file = self.info.recipe.dark_file_pattern % self.info.recipe
+ self.dark_file = self.info.dark_file_pattern % self.info
log(3, 'Will read dark from file %s' % self.dark_file)
- if self.info.recipe.flat_number is None:
+ if self.info.flat_number is None:
self.flat_file = None
log(3, 'No data for flat')
else:
- self.flat_file = self.info.recipe.flat_file_pattern % self.info.recipe
+ self.flat_file = self.info.flat_file_pattern % self.info
log(3, 'Will read flat from file %s' % self.flat_file)
# Create the ptyd file name if not specified
if self.info.dfile is None:
- home = Paths(io_par).home
- self.info.dfile = '%s/prepdata/data_%05d.ptyd' % (home, self.info.recipe.scan_number)
+ home = Paths(IO_par).home
+ self.info.dfile = '%s/prepdata/data_%05d.ptyd' % (home, self.info.scan_number)
log(3, 'Save file is %s' % self.info.dfile)
log(4, u.verbose.report(self.info))
@@ -120,8 +179,8 @@ def load_common(self):
# FIXME: do something better here. (detector-dependent)
# Load mask
# common.weight2d = None
- if self.info.recipe.mask_file is not None:
- common.weight2d = io.h5read(self.info.recipe.mask_file, 'mask')['mask'].astype(float)
+ if self.info.mask_file is not None:
+ common.weight2d = io.h5read(self.info.mask_file, 'mask')['mask'].astype(float)
return common
@@ -131,12 +190,12 @@ def load_positions(self):
"""
# Load positions from file if possible.
- mmult = u.expect2(self.info.recipe.motors_multiplier)
+ mmult = u.expect2(self.info.motors_multiplier)
x = mmult[0] * io.h5read(self.data_file, 'data.posx')['posx']
y = mmult[1] * io.h5read(self.data_file, 'data.posy')['posy']
pos_list = []
- for i in range(0,len(x),self.info.recipe.averaging_number):
+ for i in range(0,len(x),self.info.averaging_number):
pos_list.append([y[i],x[i]])
positions = np.array(pos_list)
@@ -171,14 +230,14 @@ def load(self, indices):
i = 0
h = 0
- key = H5_PATHS.frame_pattern % self.info.recipe
+ key = H5_PATHS.frame_pattern % self.info
for j in indices:
mean = []
- while h < (i+self.info.recipe.averaging_number):
- mean.append(io.h5read(self.data_file, H5_PATHS.frame_pattern % self.info.recipe, slice=h)[key].astype(np.float32))
+ while h < (i+self.info.averaging_number):
+ mean.append(io.h5read(self.data_file, H5_PATHS.frame_pattern % self.info, slice=h)[key].astype(np.float32))
h+=1
raw[j] = np.array(mean).mean(0).T
- i+=self.info.recipe.averaging_number
+ i+=self.info.averaging_number
log(3, 'Data loaded successfully.')
return raw, pos, weights
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index 2e1c8ff19..5638366e5 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -25,7 +25,7 @@
# from savu import Savu
# from plugin import makeScanPlugin
# from ID16Anfp import ID16AScan
-# from AMO_LCLS import AMOScan
+from AMO_LCLS import AMOScan
from DiProI_FERMI import DiProIFERMIScan
from optiklabor import FliSpecScanMultexp
from UCL import UCLLaserScan
From 052994443df9f0e1402012e3fb4ca6e7206f40cf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 23 Oct 2017 08:54:49 +0200
Subject: [PATCH 154/363] Adapted ID16AScan - not tested
---
ptypy/experiment/ID16Anfp.py | 312 +++++++++++++++++------------------
ptypy/experiment/__init__.py | 2 +-
2 files changed, 153 insertions(+), 161 deletions(-)
diff --git a/ptypy/experiment/ID16Anfp.py b/ptypy/experiment/ID16Anfp.py
index 489bef1f8..f1201273e 100644
--- a/ptypy/experiment/ID16Anfp.py
+++ b/ptypy/experiment/ID16Anfp.py
@@ -13,6 +13,7 @@
from .. import utils as u
from .. import io
from ..utils import parallel
+from ..utils.descriptor import defaults_tree
from ..core.data import PtyScan
from ..utils.verbose import log
from ..core.paths import Paths
@@ -40,48 +41,121 @@
H5_PATHS.frames = '{entry}/ptycho/data'
H5_PATHS.motors = '{entry}/ptycho/motors'
-# Recipe defaults
-RECIPE = u.Param()
-RECIPE.experimentID = None # Experiment identifier - will be read from h5
-RECIPE.energy = None # Energy in keV - will be read from h5
-RECIPE.lam = None # 1.2398e-9 / RECIPE.energy
-RECIPE.z = None # Distance from object to screen
-RECIPE.motors = ['spy', 'spz'] # 'Motor names to determine the sample translation'
-RECIPE.motors_multiplier = 1e-6 # 'Motor conversion factor to meters'
-RECIPE.base_path = None # Base path to read and write data - can be guessed.
-RECIPE.sample_name = None # Sample name - will be read from h5
-RECIPE.scan_label = None # Scan label - will be read from h5
-RECIPE.flat_label = None # Flat label - equal to scan_label by default
-RECIPE.dark_label = None # Dark label - equal to scan_label by default
-RECIPE.mask_file = None # Mask file name
-RECIPE.use_h5 = False # Load data from prepared h5 file
-RECIPE.flat_division = False # Switch for flat division
-RECIPE.dark_subtraction = False # Switch for dark subtraction
-
-# These are home-made wrapped data
-RECIPE.data_file_pattern = '{[base_path]}/{[sample_name]}/{[scan_label]}_data.h5'
-RECIPE.flat_file_pattern = '{[base_path]}/{[sample_name]}/{[flat_label]}_flat.h5'
-RECIPE.dark_file_pattern = '{[base_path]}/{[sample_name]}/{[dark_label]}_dark.h5'
-
-# The h and v are inverted here - that's on purpose!
-RECIPE.distortion_h_file = '/data/id16a/inhouse1/instrument/img1/optique_peter_distortion/detector_distortion2d_v.edf'
-RECIPE.distortion_v_file = '/data/id16a/inhouse1/instrument/img1/optique_peter_distortion/detector_distortion2d_h.edf'
-RECIPE.whitefield_file = '/data/id16a/inhouse1/instrument/whitefield/white.edf'
-
-# Generic defaults
-ID16A_DEFAULT = PtyScan.DEFAULT.copy()
-ID16A_DEFAULT.recipe = RECIPE
-ID16A_DEFAULT.auto_center = False
-# Orientation of Frelon frame - only LR flip
-ID16A_DEFAULT.orientation = (False, True, False)
-
+@defaults_tree.parse_doc('scandata.ID16AScan')
class ID16AScan(PtyScan):
"""
Subclass of PtyScan for ID16A beamline (specifically for near-field
ptychography).
+
+ Defaults:
+
+ [name]
+ default = 'ID16AScan'
+ type = str
+ help =
+
+ [experimentID]
+ default = None
+ type = str
+ help = Experiment identifier - will be read from h5
+
+ [motors]
+ default = ['spy', 'spz']
+ type = list
+ help = Motor names to determine the sample translation
+
+ [motors_multiplier]
+ default = 1e-6
+ type = float
+ help = Motor conversion factor to meters
+
+ [base_path]
+ default = None
+ type = str
+ help = Base path to read and write data - can be guessed
+
+ [sample_name]
+ default = None
+ type = str
+ help = Sample name - will be read from h5
+
+ [scan_label]
+ default = None
+ type = int
+ help = Scan label - will be read from h5
+
+ [flat_label]
+ default = None
+ type = int
+ help = Flat label - equal to scan_label by default
+
+ [dark_label]
+ default = None
+ type = int
+ help = Dark label - equal to scan_label by default
+
+ [mask_file]
+ default = None
+ type = str
+ help = Mask file name
+
+ [use_h5]
+ default = False
+ type = bool
+ help = Load data from prepared h5 file
+
+ [flat_division]
+ default = False
+ type = bool
+ help = Switch for flat division
+
+ [dark_subtraction]
+ default = False
+ type = bool
+ help = Switch for dark subtraction
+
+ [data_file_pattern]
+ default = '{[base_path]}/{[sample_name]}/{[scan_label]}_data.h5'
+ type = str
+ help =
+
+ [flat_file_pattern]
+ default = '{[base_path]}/{[sample_name]}/{[flat_label]}_flat.h5'
+ type = str
+ help =
+
+ [dark_file_pattern]
+ default = '{[base_path]}/{[sample_name]}/{[dark_label]}_dark.h5'
+ type = str
+ help =
+
+ [distortion_h_file]
+ default = '/data/id16a/inhouse1/instrument/img1/optique_peter_distortion/detector_distortion2d_v.edf'
+ type = str
+ help = The h and v are inverted here - that's on purpose!
+
+ [distortion_v_file]
+ default = '/data/id16a/inhouse1/instrument/img1/optique_peter_distortion/detector_distortion2d_h.edf'
+ type = str
+ help = The h and v are inverted here - that's on purpose!
+
+ [whitefield_file]
+ default = '/data/id16a/inhouse1/instrument/whitefield/white.edf'
+ type = str
+ help =
+
+ [auto_center]
+ default = False
+ type = bool
+ help = Overrides the PtyScan default
+
+ [orientation]
+ default = (False, True, False)
+ type = tuple
+ help = Frelon frame - only LR flip
+
"""
- DEFAULT = ID16A_DEFAULT
def __init__(self, pars=None, **kwargs):
"""
@@ -90,59 +164,15 @@ def __init__(self, pars=None, **kwargs):
:param pars: preparation parameters
:param kwargs: Additive parameters
"""
+
+ p = self.DEFAULT.copy(99)
+ p.update(pars)
+
# Initialise parent class
- recipe_default = RECIPE.copy()
- recipe_default.update(pars.recipe, in_place_depth=1)
- pars.recipe.update(recipe_default)
-
- super(ID16AScan, self).__init__(pars, **kwargs)
-
- # Apply beamline-specific generic defaults
- #pars = PREP_DEFAULT.copy().update(pars)
- #pars.update(**kwargs)
-
- # Apply beamline parameters ("recipe")
- #rinfo = DEFAULT.copy()
- #rinfo.update(pars.recipe)
-
- # Initialise parent class with input parameters
- #super(self.__class__, self).__init__(pars)
-
- # Store recipe parameters in self.info
- #self.info.recipe = rinfo
-
- # Default scan label
- #if self.info.label is not None:
- # assert (self.info.label == rinfo.scan_label), (
- # 'Incompatible scan labels')
- #self.info.label = rinfo.scan_label
- #logger.info('Scan label: %s' % rinfo.scan_label)
-
- # Default flat and dark labels.
- #if rinfo.flat_label is None:
- # rinfo.flat_label = rinfo.scan_label
- #if rinfo.dark_label is None:
- # rinfo.dark_label = rinfo.scan_label
-
- # Attempt to extract base_path if missing
- #if rinfo.base_path is None:
- # d = os.getcwd()
- # base_path = None
- # while True:
- # if 'id16a' in os.listdir(d):
- # base_path = os.path.join(d, 'id16a')
- # break
- # d, rest = os.path.split(d)
- # if not rest:
- # break
- # if base_path is None:
- # raise RuntimeError('Could not guess base_path.')
- # else:
- # rinfo.base_path = base_path
- # logger.info('Base path: %s' % base_path)
+ super(ID16AScan, self).__init__(p, **kwargs)
# Try to extract base_path to access data files
- if self.info.recipe.base_path is None:
+ if self.info.base_path is None:
d = os.getcwd()
base_path = None
while True:
@@ -155,51 +185,13 @@ def __init__(self, pars=None, **kwargs):
if base_path is None:
raise RuntimeError('Could not guess base_path.')
else:
- self.info.recipe.base_path = base_path
-
- # Data file names
- #rinfo.data_file = rinfo.data_file_pattern.format(rinfo)
- #rinfo.dark_file = rinfo.dark_file_pattern.format(rinfo)
- #rinfo.flat_file = rinfo.flat_file_pattern.format(rinfo)
-
- # Read metadata
- #h = io.h5read(rinfo.data_file)
- #entry = h.keys()[0]
- #rinfo.entry = entry
-
- # Energy
- #k = H5_PATHS.energy.format(rinfo)
- #energy = float(io.h5read(rinfo.data_file, k)[k])
- #if self.info.energy is not None:
- # assert (self.info.energy == energy), (
- # "Energy (%f keV) is read from file - please don't attempt to "
- # "overwrite it" % energy)
-
- # Attempt to extract experiment ID
- #if rinfo.experimentID is None:
- # # We use the path structure for this
- # experimentID = os.path.split(os.path.split(
- # rinfo.base_path[:-1])[0])[1]
- # logger.info('experiment ID: %s' % experimentID)
- # rinfo.experimentID = experimentID
-
- # Effective pixel size
-
- # Data file names
- #rinfo.data_file = rinfo.data_file_pattern.format(rinfo)
- #rinfo.dark_file = rinfo.dark_file_pattern.format(rinfo)
- #rinfo.flat_file = rinfo.flat_file_pattern.format(rinfo)
-
- #self.rinfo = rinfo
- #self.info.recipe = rinfo
-
- #logger.info(u.verbose.report(self.info))
+ self.info.base_path = base_path
# Create the ptyd file name if not specified
if self.info.dfile is None:
home = Paths(IO_par).home
self.info.dfile = '%s/prepdata/data_%d.ptyd' % (
- home, self.info.recipe.scan_label)
+ home, self.info.scan_label)
log(3, 'Save file is %s' % self.info.dfile)
log(4, u.verbose.report(self.info))
@@ -211,8 +203,8 @@ def load_weight(self):
"""
# FIXME: do something better here. (detector-dependent)
# Load mask as weight
- if self.info.recipe.mask_file is not None:
- return io.h5read(self.info.recipe.mask_file, 'mask')['mask'].astype(
+ if self.info.mask_file is not None:
+ return io.h5read(self.info.mask_file, 'mask')['mask'].astype(
np.float32)
def load_positions(self):
@@ -220,12 +212,12 @@ def load_positions(self):
Load the positions and return as an (N, 2) array.
"""
positions = []
- mmult = u.expect2(self.info.recipe.motors_multiplier)
+ mmult = u.expect2(self.info.motors_multiplier)
# Load positions
- if self.info.recipe.use_h5:
+ if self.info.use_h5:
# From prepared .h5 file
- data = io.h5read(self.info.recipe.base_path + '/raw/data.h5')
+ data = io.h5read(self.info.base_path + '/raw/data.h5')
for i in np.arange(1, len(data) + 1, 1):
positions.append((data['data_%04d' % i]['positions'][0, 0],
data['data_%04d' % i]['positions'][0, 1]))
@@ -233,19 +225,19 @@ def load_positions(self):
# From .edf files
pos_files = []
# Count available images given through scan_label
- for i in os.listdir(self.info.recipe.base_path +
- self.info.recipe.scan_label):
- if i.startswith(self.info.recipe.scan_label):
+ for i in os.listdir(self.info.base_path +
+ self.info.scan_label):
+ if i.startswith(self.info.scan_label):
pos_files.append(i)
for i in np.arange(1, len(pos_files) + 1, 1):
- data, meta = io.edfread(self.info.recipe.base_path +
- self.info.recipe.scan_label + '/' +
- self.info.recipe.scan_label +
+ data, meta = io.edfread(self.info.base_path +
+ self.info.scan_label + '/' +
+ self.info.scan_label +
'_%04d.edf' % i)
- positions.append((meta['motor'][self.info.recipe.motors[0]],
- meta['motor'][self.info.recipe.motors[1]]))
+ positions.append((meta['motor'][self.info.motors[0]],
+ meta['motor'][self.info.motors[1]]))
return np.array(positions) * mmult[0]
@@ -287,23 +279,23 @@ def load_common(self):
#return common._to_dict()
# Load dark
- if self.info.recipe.use_h5:
+ if self.info.use_h5:
# From prepared .h5 file
- dark = io.h5read(self.info.recipe.base_path + '/raw/dark.h5')
+ dark = io.h5read(self.info.base_path + '/raw/dark.h5')
common.dark = dark['dark_avg']['avgdata'].astype(np.float32)
else:
# From .edf files
dark_files = []
# Count available dark given through scan_label
- for i in os.listdir(self.info.recipe.base_path +
- self.info.recipe.scan_label):
+ for i in os.listdir(self.info.base_path +
+ self.info.scan_label):
if i.startswith('dark'):
dark_files.append(i)
dark = []
for i in np.arange(1, len(dark_files) + 1, 1):
- data, meta = io.edfread(self.info.recipe.base_path +
- self.info.recipe.scan_label + '/' +
+ data, meta = io.edfread(self.info.base_path +
+ self.info.scan_label + '/' +
'dark_%04d.edf' % i)
dark.append(data.astype(np.float32))
@@ -312,23 +304,23 @@ def load_common(self):
log(3, 'Dark loaded successfully.')
# Load flat
- if self.info.recipe.use_h5:
+ if self.info.use_h5:
# From prepared .h5 file
- flat = io.h5read(self.info.recipe.base_path + '/raw/ref.h5')
+ flat = io.h5read(self.info.base_path + '/raw/ref.h5')
common.flat = flat['ref_avg']['avgdata'].astype(np.float32)
else:
# From .edf files
flat_files = []
# Count available dark given through scan_label
- for i in os.listdir(self.info.recipe.base_path +
- self.info.recipe.scan_label):
+ for i in os.listdir(self.info.base_path +
+ self.info.scan_label):
if i.startswith('flat'):
flat_files.append(i)
flat = []
for i in np.arange(1, len(flat_files) + 1, 1):
- data, meta = io.edfread(self.info.recipe.base_path +
- self.info.recipe.scan_label + '/' +
+ data, meta = io.edfread(self.info.base_path +
+ self.info.scan_label + '/' +
'ref_%04d.edf' % i)
flat.append(data.astype(np.float32))
@@ -371,9 +363,9 @@ def load(self, indices):
# self.rinfo, slice=j)[key].astype(np.float32)
# Load data
- if self.info.recipe.use_h5:
+ if self.info.use_h5:
# From prepared .h5 file
- data = io.h5read(self.info.recipe.base_path + '/raw/data.h5')
+ data = io.h5read(self.info.base_path + '/raw/data.h5')
for j in indices:
i = j + 1
raw[j] = data['data_%04d' % i]['data'].astype(np.float32)
@@ -381,9 +373,9 @@ def load(self, indices):
# From .edf files
for j in indices:
i = j + 1
- data, meta = io.edfread(self.info.recipe.base_path +
- self.info.recipe.scan_label + '/' +
- self.info.recipe.scan_label +
+ data, meta = io.edfread(self.info.base_path +
+ self.info.scan_label + '/' +
+ self.info.scan_label +
'_%04d.edf' % i)
raw[j] = data.astype(np.float32)
@@ -417,12 +409,12 @@ def correct(self, raw, weights, common):
#data = raw_wl_ml_ud
# Apply flat and dark, only dark, or no correction
- if self.info.recipe.flat_division and self.info.recipe.dark_subtraction:
+ if self.info.flat_division and self.info.dark_subtraction:
for j in raw:
raw[j] = (raw[j] - common.dark) / (common.flat - common.dark)
raw[j][raw[j] < 0] = 0
data = raw
- elif self.info.recipe.dark_subtraction:
+ elif self.info.dark_subtraction:
for j in raw:
raw[j] = raw[j] - common.dark
raw[j][raw[j] < 0] = 0
@@ -543,7 +535,7 @@ def undistort(frame, delta):
#
# def __init__(self, pars=None):
# super(ID16Scan, self).__init__(pars)
-# r = self.info.recipe
+# r = self.info
# # filename analysis
# body, ext = os.path.splitext(
# os.path.expanduser(r.base_path + r.first_frame))
@@ -564,7 +556,7 @@ def undistort(frame, delta):
# return self.frame_format % (index + 1)
#
# def _load_dark(self):
-# r = self.info.recipe
+# r = self.info
# print('Loading the dark files...')
# darklist = []
# for ff in sorted(glob.glob(r.base_path + 'dark*.edf')):
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index 5638366e5..b5342dfe4 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -24,7 +24,7 @@
# from I08 import I08Scan
# from savu import Savu
# from plugin import makeScanPlugin
-# from ID16Anfp import ID16AScan
+from ID16Anfp import ID16AScan
from AMO_LCLS import AMOScan
from DiProI_FERMI import DiProIFERMIScan
from optiklabor import FliSpecScanMultexp
From 8999c788a747e780b733018968a399f39cfba159 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 23 Oct 2017 09:03:56 +0200
Subject: [PATCH 155/363] Not touching the helper makeScanPlugin
---
ptypy/experiment/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index b5342dfe4..270e8f2ef 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -23,7 +23,7 @@
# from DLS import DlsScan
# from I08 import I08Scan
# from savu import Savu
-# from plugin import makeScanPlugin
+from plugin import makeScanPlugin
from ID16Anfp import ID16AScan
from AMO_LCLS import AMOScan
from DiProI_FERMI import DiProIFERMIScan
From 7b0f323f799318b9be592be3966736c97cd424ae Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 23 Oct 2017 09:12:43 +0200
Subject: [PATCH 156/363] Adapted Savu scan class - not tested
---
ptypy/experiment/__init__.py | 2 +-
ptypy/experiment/savu.py | 51 +++++++++++++++++++++++-------------
2 files changed, 34 insertions(+), 19 deletions(-)
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index 270e8f2ef..b0b4d6632 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -22,7 +22,7 @@
# from I13_nfp import I13ScanNFP
# from DLS import DlsScan
# from I08 import I08Scan
-# from savu import Savu
+from savu import Savu
from plugin import makeScanPlugin
from ID16Anfp import ID16AScan
from AMO_LCLS import AMOScan
diff --git a/ptypy/experiment/savu.py b/ptypy/experiment/savu.py
index 235bc9173..73a3b7637 100644
--- a/ptypy/experiment/savu.py
+++ b/ptypy/experiment/savu.py
@@ -15,45 +15,60 @@
from ..utils import parallel
from ..core.data import PtyScan
from ..utils.verbose import log
+from ..utils.descriptor import defaults_tree
from ..core.paths import Paths
-#from ..core import DEFAULT_io as IO_par
from ..core import Ptycho
-IO_par = Ptycho.DEFAULT['io']
import h5py as h5
logger = u.verbose.logger
-# Recipe defaults
-SAVU = PtyScan.DEFAULT.copy()
-SAVU.mask = None
-SAVU.data = None
-SAVU.positions = None
+@defaults_tree.parse_doc('scandata.Savu')
+class Savu(PtyScan):
+ """
+ Defaults:
+ [name]
+ default = 'Savu'
+ type = str
+ help =
-class Savu(PtyScan):
- DEFAULT = SAVU
+ [mask]
+ default = None
+ type = array
+ help =
+
+ [data]
+ default = None
+ type = array
+ help =
+
+ [positions]
+ default = None
+ type = array
+ help =
+
+ """
def __init__(self, pars=None, **kwargs):
"""
savu data preparation class.
"""
# Initialise parent class
- recipe_default = SAVU.copy()
- recipe_default.update(pars.recipe, in_place_depth=99)
- pars.recipe.update(recipe_default)
- super(Savu, self).__init__(pars, **kwargs)
+ p = self.DEFAULT.copy(99)
+ p.update(pars)
+ super(Savu, self).__init__(p, **kwargs)
log(4, u.verbose.report(self.info))
def load_weight(self):
if self.info.mask is not None:
- return self.info.recipe.mask.astype(float)
+ return self.info.mask.astype(float)
else:
log(2,'The mask was a None')
def load_positions(self):
- if self.info.recipe.positions is not None:
- return self.info.recipe.positions
+ if self.info.positions is not None:
+ return self.info.positions
else:
log(2,'The positions were None')
@@ -67,9 +82,9 @@ def load(self, indices):
raw = {}
pos = {}
weights = {}
- if self.info.recipe.data is not None:
+ if self.info.data is not None:
for j in indices:
- raw[j] = self.info.recipe.data[j]
+ raw[j] = self.info.data[j]
else:
log(2,'The data had None')
return raw, pos, weights
From 7d68b9f416dfecba782f90d7fd9b02137bb62640 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 23 Oct 2017 09:25:30 +0200
Subject: [PATCH 157/363] Adapted I08Scan - not tested
---
ptypy/experiment/I08.py | 156 +++++++++++++++++++++++------------
ptypy/experiment/__init__.py | 2 +-
2 files changed, 106 insertions(+), 52 deletions(-)
diff --git a/ptypy/experiment/I08.py b/ptypy/experiment/I08.py
index 0a1d783f1..8843cbb59 100644
--- a/ptypy/experiment/I08.py
+++ b/ptypy/experiment/I08.py
@@ -24,6 +24,7 @@
#from ptypy.core import DEFAULT_io as IO_par
from ptypy.core import Ptycho
IO_par = Ptycho.DEFAULT['io']
+ from ptypy.utils.descriptor import defaults_tree
else:
from .. import utils as u
from .. import io
@@ -33,6 +34,7 @@
#from ..core import DEFAULT_io as IO_par
from ..core import Ptycho
IO_par = Ptycho.DEFAULT['io']
+ from ..utils.descriptor import defaults_tree
# Parameters for the nexus file saved by GDA
@@ -45,37 +47,91 @@
STXM_PATHS.motors = 'entry1/Counter1/'
STXM_PATHS.energy = 'entry1/Counter1/'
-# I08 recipe default parameters
-RECIPE = u.Param()
-RECIPE.base_path = None
-RECIPE.scan_number = None
-RECIPE.scan_number_stxm = None
-RECIPE.dark_number = None
-RECIPE.dark_number_stxm = None
-RECIPE.dark_value = 200. # Used if dark_number is None
-RECIPE.detector_flat_file = None
-RECIPE.nxs_file_pattern = '%(base_path)s/nexus/i08-%(scan_number)s.nxs'
-RECIPE.dark_nxs_file_pattern = '%(base_path)s/nexus/i08-%(dark_number)s.nxs'
-RECIPE.date = None
-RECIPE.stxm_file_pattern = '%(base_path)s/%(date)s/discard/Sample_Image_%(date)s_%(scan_number_stxm)s.hdf5'
-
-
-RECIPE.motors = ['sample_y','sample_x'] # same orientation as I13 for now
-RECIPE.energy = None
-RECIPE.lam=None
-RECIPE.z = None
-RECIPE.motors_multiplier = 1e-6
-
-# Default generic parameter set from
-I08DEFAULT = ptypy.core.data.PtyScan.DEFAULT.copy()
-
-I08DEFAULT.recipe = RECIPE
-
-I08DEFAULT.auto_center = False
-
+@defaults_tree.parse_doc('scandata.I08Scan')
class I08Scan(ptypy.core.data.PtyScan):
- DEFAULT = I08DEFAULT
+ """
+
+ I08 (Diamond Light Source) data preparation class.
+
+ Defaults:
+
+ [name]
+ default = 'I08Scan'
+ type = str
+ help =
+
+ [base_path]
+ default = None
+ type = str
+ help =
+
+ [scan_number]
+ default = None
+ type = int
+ help =
+
+ [scan_number_stxm]
+ default = None
+ type = int
+ help =
+
+ [dark_number]
+ default = None
+ type = int
+ help =
+
+ [dark_number_stxm]
+ default = None
+ type = int
+ help =
+
+ [dark_value]
+ default = 200.0
+ type = float
+ help = Used if dark_number is None
+
+ [detector_flat_file]
+ default = None
+ type = str
+ help =
+
+ [nxs_file_pattern]
+ default = '%(base_path)s/nexus/i08-%(scan_number)s.nxs'
+ type = str
+ help =
+
+ [dark_nxs_file_pattern]
+ default = '%(base_path)s/nexus/i08-%(dark_number)s.nxs'
+ type = str
+ help =
+
+ [data]
+ default = None
+ type = str
+ help =
+
+ [stxm_file_pattern]
+ default = '%(base_path)s/%(date)s/discard/Sample_Image_%(date)s_%(scan_number_stxm)s.hdf5'
+ type = str
+ help =
+
+ [motors]
+ default = ['sample_y','sample_x']
+ type = list
+ help = Same orientation as I13 for now
+
+ [motors_multiplier]
+ default = 1e-6
+ type = float
+ help = Conversion factor to meters
+
+ [auto_center]
+ default = False
+ type = bool
+ help = Overrides PtyScan default
+
+ """
def __init__(self, pars=None, **kwargs):
"""
@@ -83,14 +139,12 @@ def __init__(self, pars=None, **kwargs):
"""
# Initialize parent class. All updated parameters are now in
# self.info
- RDEFAULT = RECIPE.copy()
- RDEFAULT.update(pars.recipe)
- pars.recipe.update(RDEFAULT)
-
- super(I08Scan, self).__init__(pars, **kwargs)
+ p = self.DEFAULT.copy(99)
+ p.update(pars)
+ super(I08Scan, self).__init__(p, **kwargs)
# Try to extract base_path to access data files
- if self.info.recipe.base_path is None:
+ if self.info.base_path is None:
d = os.getcwd()
base_path = None
while True:
@@ -103,28 +157,28 @@ def __init__(self, pars=None, **kwargs):
if base_path is None:
raise RuntimeError('Could not guess base_path.')
else:
- self.info.recipe.base_path = base_path
+ self.info.base_path = base_path
# Sanity check: for now we need a date to identify the SXTM file
- if self.info.recipe.date is None:
- raise RuntimeError('recipe.date has to be specified to find the STXM file name.')
+ if self.info.date is None:
+ raise RuntimeError('date has to be specified to find the STXM file name.')
else:
try:
- time.strptime(self.info.recipe.date, '%Y-%m-%d')
+ time.strptime(self.info.date, '%Y-%m-%d')
except ValueError:
print('The date should be in format "YYYY-MM-DD"')
raise
# Construct the file names
- self.nxs_filename = self.info.recipe.nxs_file_pattern % self.info.recipe
- self.stxm_filename = self.info.recipe.stxm_file_pattern % self.info.recipe
+ self.nxs_filename = self.info.nxs_file_pattern % self.info
+ self.stxm_filename = self.info.stxm_file_pattern % self.info
log(3, 'Will read from nxs file %s' % self.nxs_filename)
log(3, 'Will read from STXM file %s' % self.stxm_filename)
# Create the ptyd file name if not specified
if self.info.dfile is None:
home = Paths(IO_par).home
- self.info.dfile = '%s/prepdata/data_%d.ptyd' % (home, self.info.recipe.scan_number)
+ self.info.dfile = '%s/prepdata/data_%d.ptyd' % (home, self.info.scan_number)
log(3, 'Save file is %s' % self.info.dfile)
def load_common(self):
@@ -136,8 +190,8 @@ def load_common(self):
"""
common = u.Param()
key = NXS_PATHS.frame_pattern
- if self.info.recipe.dark_number is not None:
- self.dark_nxs_filename = self.info.recipe.dark_nxs_file_pattern % self.info.recipe
+ if self.info.dark_number is not None:
+ self.dark_nxs_filename = self.info.dark_nxs_file_pattern % self.info
#dark = io.h5read(self.dark_nxs_filename,key)[key][0,0,:,:]# this was a problem with the dark collection. a 2x2 grid was collected.
dark = io.h5read(self.dark_nxs_filename, key)[key]
if dark.ndim == 4:
@@ -145,10 +199,10 @@ def load_common(self):
if dark.ndim == 3:
dark = np.median(dark, axis=0)
else:
- dark = self.info.recipe.dark_value
+ dark = self.info.dark_value
- if self.info.recipe.detector_flat_file is not None:
- flat = io.h5read(self.info.recipe.detector_flat_file,FLAT_PATHS.key)[FLAT_PATHS.key]
+ if self.info.detector_flat_file is not None:
+ flat = io.h5read(self.info.detector_flat_file,FLAT_PATHS.key)[FLAT_PATHS.key]
else:
flat = 1.
@@ -164,10 +218,10 @@ def load_positions(self):
"""
Load the positions and return as an (N,2) array
"""
- base_path = self.info.recipe.base_path
- mmult = u.expect2(self.info.recipe.motors_multiplier)
- keyx = STXM_PATHS.motors+str(self.info.recipe.motors[0])
- keyy=STXM_PATHS.motors+str(self.info.recipe.motors[1])
+ base_path = self.info.base_path
+ mmult = u.expect2(self.info.motors_multiplier)
+ keyx = STXM_PATHS.motors+str(self.info.motors[0])
+ keyy=STXM_PATHS.motors+str(self.info.motors[1])
print "file name is:%s" % self.stxm_filename
x1 = io.h5read(self.stxm_filename,keyx)
y1 = io.h5read(self.stxm_filename,keyy)
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index b0b4d6632..91338bb2d 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -21,7 +21,7 @@
# from I13_ffp import I13ScanFFP
# from I13_nfp import I13ScanNFP
# from DLS import DlsScan
-# from I08 import I08Scan
+from I08 import I08Scan
from savu import Savu
from plugin import makeScanPlugin
from ID16Anfp import ID16AScan
From efa96e21a088e094d3d24437f156e700c133941a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 23 Oct 2017 09:38:28 +0200
Subject: [PATCH 158/363] Adapted DlsScan - not tested
---
ptypy/experiment/DLS.py | 209 +++++++++++++++++++++++++----------
ptypy/experiment/__init__.py | 2 +-
2 files changed, 153 insertions(+), 58 deletions(-)
diff --git a/ptypy/experiment/DLS.py b/ptypy/experiment/DLS.py
index 0baffc30b..67808dde6 100644
--- a/ptypy/experiment/DLS.py
+++ b/ptypy/experiment/DLS.py
@@ -13,6 +13,7 @@
from .. import utils as u
from .. import io
from ..utils import parallel
+from ..utils.descriptor import defaults_tree
from ..core.data import PtyScan
from ..utils.verbose import log
from ..core.paths import Paths
@@ -38,63 +39,157 @@
NEXUS_PATHS.label = 'entry1/entry_identifier'
NEXUS_PATHS.experiment = 'entry1/experiment_identifier'
-# Recipe defaults
-RECIPE = u.Param()
-RECIPE.is_swmr = False
-RECIPE.israster = 0
-RECIPE.experimentID = None # Experiment identifier
-RECIPE.scan_number = None # scan number
-RECIPE.dark_number = None
-RECIPE.flat_number = None
-RECIPE.energy = None
-RECIPE.lam = None # 1.2398e-9 / RECIPE.energy
-RECIPE.z = None # Distance from object to screen
-RECIPE.detector_name = 'merlin_sw_hdf' # Name of the detector as specified in the nexus file
-RECIPE.motors = ['t1_sx', 't1_sy'] # Motor names to determine the sample translation
-# RECIPE.motors_multiplier = 1e-6 # Motor conversion factor to meters
-RECIPE.motors_multiplier = [1e-6,-1e-6] # Motor conversion factor to meters
-RECIPE.base_path = './'
-RECIPE.data_file_pattern = '%(base_path)s' + 'raw/%(scan_number)05d.nxs'
-RECIPE.dark_file_pattern = '%(base_path)s' + 'raw/%(dark_number)05d.nxs'
-RECIPE.flat_file_pattern = '%(base_path)s' + 'raw/%(flat_number)05d.nxs'
-RECIPE.mask_file = None # '%(base_path)s' + 'processing/mask.h5'
-RECIPE.NFP_correct_positions = False # Position corrections for NFP beamtime Oct 2014
-RECIPE.use_EP = False # Use flat as Empty Probe (EP) for probe sharing; needs to be set to True in the recipe of the scan that will act as EP'
-RECIPE.remove_hot_pixels = u.Param( # Apply hot pixel correction
- apply = False, # Initiate by setting to True; DEFAULT parameters will be used if not specified otherwise
- size = 3, # Size of the window on which the median filter will be applied around every data point
- tolerance = 10, # Tolerance multiplied with the standard deviation of the data array subtracted by the blurred array
- # (difference array) yields the threshold for cutoff.
- ignore_edges = False, # If True, edges of the array are ignored, which speeds up the code
-)
-
-# Generic defaults
-I13DEFAULT = PtyScan.DEFAULT.copy()
-I13DEFAULT.recipe = RECIPE
-I13DEFAULT.auto_center = False
-I13DEFAULT.orientation = (False, False, False)
-
+@defaults_tree.parse_doc('scandata.DlsScan')
class DlsScan(PtyScan):
- DEFAULT = I13DEFAULT
+ """
+ DLS (Diamond Light Source) data preparation class.
+
+ Defaults:
+
+ [name]
+ default = 'DlsScan'
+ type = str
+ help =
+
+ [is_swmr]
+ default = False
+ type = bool
+ help =
+
+ [israster]
+ default = 0
+ type = int
+ help =
+
+ [experimentID]
+ default = None
+ type = str
+ help = Experiment identifier
+
+ [scan_number]
+ default = None
+ type = int
+ help = Scan number
+
+ [dark_number]
+ default = None
+ type = int
+ help =
+
+ [flat_number]
+ default = None
+ type = int
+ help =
+
+ [detector_name]
+ default = 'merlin_sw_hdf'
+ type = str
+ help = Name of the detector
+ doc = As specified in the nexus file.
+
+ [motors]
+ default = ['t1_sx', 't1_sy']
+ type = list
+ help = Motor names to determine the sample translation
+
+ [motors_multiplier]
+ default = [1e-6,-1e-6]
+ type = list
+ help = Motor conversion factor to meters
+
+ [base_path]
+ default = './'
+ type = str
+ help =
+
+ [data_file_pattern]
+ default = '%(base_path)sraw/%(scan_number)05d.nxs'
+ type = str
+ help =
+
+ [dark_file_pattern]
+ default = '%(base_path)sraw/%(dark_number)05d.nxs'
+ type = str
+ help =
+
+ [flat_file_pattern]
+ default = '%(base_path)sraw/%(flat_number)05d.nxs'
+ type = str
+ help =
+
+ [mask_file]
+ default = None
+ type = str
+ help =
+
+ [NFP_correct_positions]
+ default = False
+ type = bool
+ help = Position corrections for NFP beamtime Oct 2014
+
+ [use_EP]
+ default = False
+ type = bool
+ help = Use flat as Empty Probe (EP) for probe sharing
+ doc = Needs to be set to True in the parameters of the scan that will act as EP.
+
+ [remove_hot_pixels]
+ default =
+ type = Param
+ help = Apply hot pixel correction
+
+ [remove_hot_pixels.apply]
+ default = False
+ type = bool
+ help =
+
+ [remove_hot_pixels.size]
+ default = 3
+ type = int
+ help = Size of the window
+ doc = The median filter will be applied around every data point.
+
+ [remove_hot_pixels.tolerance]
+ default = 10
+ type = int
+ help =
+ doc = Tolerance multiplied with the standard deviation of the data array subtracted by the blurred array (difference array) yields the threshold for cutoff.
+
+ [remove_hot_pixels.ignore_edges]
+ default = False
+ type = bool
+ help = Ignore edges of the array
+ doc = Enabling speeds up the code.
+
+ [auto_center]
+ default = False
+ type = bool
+ help = Overrides PtyScan default
+
+ [orientation]
+ default = (False, False, False)
+ type = tuple
+ help = Overrides PtyScan default
+
+ """
def __init__(self, pars=None, **kwargs):
"""
I13 (Diamond Light Source) data preparation class.
"""
# Initialise parent class
- recipe_default = RECIPE.copy()
- recipe_default.update(pars.recipe, in_place_depth=5)
- pars.recipe.update(recipe_default)
+ p = self.DEFAULT.copy(99)
+ p.update(pars)
- super(DlsScan, self).__init__(pars, **kwargs)
- self.data_file = self.info.recipe.data_file_pattern % self.info.recipe
+ super(DlsScan, self).__init__(p, **kwargs)
+ self.data_file = self.info.data_file_pattern % self.info
# Create the ptyd file name if not specified
if self.info.dfile is None:
home = Paths(IO_par).home
- self.info.dfile = '%s/prepdata/data_%d.ptyd' % (home, self.info.recipe.scan_number)
+ self.info.dfile = '%s/prepdata/data_%d.ptyd' % (home, self.info.scan_number)
log(3, 'Save file is %s' % self.info.dfile)
log(4, u.verbose.report(self.info))
@@ -104,26 +199,26 @@ def load_weight(self):
"""
# FIXME: do something better here. (detector-dependent)
# Load mask as weight
- if self.info.recipe.mask_file is not None:
- return io.h5read(self.info.recipe.mask_file % self.info.recipe, 'mask')['mask'].astype(float)
+ if self.info.mask_file is not None:
+ return io.h5read(self.info.mask_file % self.info, 'mask')['mask'].astype(float)
def load_positions(self):
"""
Load the positions and return as an (N,2) array
"""
# Load positions from file if possible.
- if self.info.recipe.is_swmr:
- instrument = h5.File(self.data_file, 'r', libver='latest', swmr=True)[NEXUS_PATHS.instrument % self.info.recipe]
+ if self.info.is_swmr:
+ instrument = h5.File(self.data_file, 'r', libver='latest', swmr=True)[NEXUS_PATHS.instrument % self.info]
else:
- instrument = h5.File(self.data_file, 'r')[NEXUS_PATHS.instrument % self.info.recipe]
- if self.info.recipe.israster:
+ instrument = h5.File(self.data_file, 'r')[NEXUS_PATHS.instrument % self.info]
+ if self.info.israster:
self.position_shape = instrument[0].shape
motor_positions = []
i=0
- mmult = u.expect2(self.info.recipe.motors_multiplier)
+ mmult = u.expect2(self.info.motors_multiplier)
for k in NEXUS_PATHS.motors:
- if not self.info.recipe.israster:
+ if not self.info.israster:
motor_positions.append(instrument[k]*mmult[i])
else:
motor_positions.append((instrument[k]*mmult[i]).ravel())
@@ -143,14 +238,14 @@ def check(self, frames, start):
- the number of frames available from a starting point `start`
- bool if the end of scan was reached (None if this routine doesn't know)
"""
- if not self.info.recipe.is_swmr:
+ if not self.info.is_swmr:
npos = self.num_frames
frames_accessible = min((frames, npos - start))
stop = self.frames_accessible + start
return frames_accessible, (stop >= npos)
else:
f = h5.File(self.data_file, 'r', libver='latest', swmr=True)
- dset= f[NEXUS_PATHS.live_key_pattern % self.info.recipe]
+ dset= f[NEXUS_PATHS.live_key_pattern % self.info]
dset.id.refresh()
num_avail = len(dset)-start
frames_accessible = min((frames, num_avail))
@@ -169,10 +264,10 @@ def load(self, indices):
raw = {}
pos = {}
weights = {}
- key = NEXUS_PATHS.frame_pattern % self.info.recipe
- if not self.info.recipe.israster:
+ key = NEXUS_PATHS.frame_pattern % self.info
+ if not self.info.israster:
for j in indices:
- if not self.info.recipe.is_swmr:
+ if not self.info.is_swmr:
# print "frame number "+str(j)
data = io.h5read(self.data_file, key, slice=j)[key].astype(np.float32)
raw[j] = data
@@ -185,7 +280,7 @@ def load(self, indices):
raw[j] = dset[j]
dset.file.close()
else:
- if not self.info.recipe.is_swmr:
+ if not self.info.is_swmr:
data = h5.File(self.data_file)[key]
sh = data.shape
for j in indices:
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index 91338bb2d..f4b9cb5a1 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -20,7 +20,7 @@
# from I13_ffp import I13ScanFFP
# from I13_nfp import I13ScanNFP
-# from DLS import DlsScan
+from DLS import DlsScan
from I08 import I08Scan
from savu import Savu
from plugin import makeScanPlugin
From b0085235626e35b47c29925b17ecc69b9e8ef4f9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 23 Oct 2017 09:58:40 +0200
Subject: [PATCH 159/363] Adapted I13ScanNFP and I13ScanFFP - not tested
---
ptypy/experiment/I13_ffp.py | 174 ++++++++++-----
ptypy/experiment/I13_nfp.py | 418 +++++++++++++++++++++--------------
ptypy/experiment/__init__.py | 5 +-
3 files changed, 373 insertions(+), 224 deletions(-)
diff --git a/ptypy/experiment/I13_ffp.py b/ptypy/experiment/I13_ffp.py
index 043e32f66..4ce697e9d 100644
--- a/ptypy/experiment/I13_ffp.py
+++ b/ptypy/experiment/I13_ffp.py
@@ -10,6 +10,7 @@
import numpy as np
import os
from .. import utils as u
+from ..utils.descriptor import defaults_tree
from .. import io
from ..core.data import PtyScan
from ..core.paths import Paths
@@ -27,43 +28,96 @@
NEXUS_PATHS.label = 'entry1/entry_identifier'
NEXUS_PATHS.experiment = 'entry1/experiment_identifier'
-# Recipe defaults
-RECIPE = u.Param()
-# Experiment identifier
-RECIPE.experimentID = None
-# Scan number
-RECIPE.scan_number = None
-RECIPE.dark_number = None
-RECIPE.flat_number = None
-RECIPE.energy = None
-RECIPE.lam = None
-# Distance from object to screen
-RECIPE.z = None
-# Name of the detector as specified in the nexus file
-RECIPE.detector_name = None
-# Motor names to determine the sample translation
-RECIPE.motors = ['t1_sx', 't1_sy']
-RECIPE.theta = 'entry1/before_scan/t1_theta/t1_theta'
-# Motor conversion factor to meters
-RECIPE.motors_multiplier = 1e-6
-RECIPE.base_path = './'
-RECIPE.data_file_pattern = '%(base_path)s' + 'raw/%(scan_number)05d.nxs'
-RECIPE.dark_file_pattern = '%(base_path)s' + 'raw/%(dark_number)05d.nxs'
-RECIPE.flat_file_pattern = '%(base_path)s' + 'raw/%(flat_number)05d.nxs'
-RECIPE.mask_file = None
-
-# Generic defaults
-I13DEFAULT = PtyScan.DEFAULT.copy()
-I13DEFAULT.recipe = RECIPE
-I13DEFAULT.auto_center = False
-I13DEFAULT.orientation = (False, False, False)
-
+@defaults_tree.parse_doc('scandata.I13ScanFFP')
class I13ScanFFP(PtyScan):
"""
I13 (Diamond Light Source) data preparation class for FFP.
+
+ Defaults:
+
+ [name]
+ default = 'I13ScanFFP'
+ type = str
+ help =
+
+ [experimentID]
+ default = None
+ type = str
+ help = Experiment identifier
+
+ [scan_number]
+ default = None
+ type = int
+ help = Scan number
+
+ [dark_number]
+ default = None
+ type = int
+ help =
+
+ [flat_number]
+ default = None
+ type = int
+ help =
+
+ [detector_name]
+ default = None
+ type = str
+ help = Name of the detector
+ doc = As specified in the nexus file.
+
+ [motors]
+ default = ['t1_sx', 't1_sy']
+ type = list
+ help = Motor names to determine the sample translation
+
+ [motors_multiplier]
+ default = 1e-6
+ type = float
+ help = Motor conversion factor to meters
+
+ [base_path]
+ default = './'
+ type = str
+ help =
+
+ [data_file_pattern]
+ default = '%(base_path)sraw/%(scan_number)05d.nxs'
+ type = str
+ help =
+
+ [dark_file_pattern]
+ default = '%(base_path)sraw/%(dark_number)05d.nxs'
+ type = str
+ help =
+
+ [flat_file_pattern]
+ default = '%(base_path)sraw/%(flat_number)05d.nxs'
+ type = str
+ help =
+
+ [mask_file]
+ default = None
+ type = str
+ help =
+
+ [theta]
+ default = 0.0
+ type = float
+ help = Angle of rotation
+
+ [auto_center]
+ default = False
+ type = bool
+ help = Overrides PtyScan default
+
+ [orientation]
+ default = (False, False, False)
+ type = tuple
+ help = Overrides PtyScan default
+
"""
- DEFAULT = I13DEFAULT
def __init__(self, pars=None, **kwargs):
"""
@@ -74,14 +128,12 @@ def __init__(self, pars=None, **kwargs):
:param kwargs: key-value pair
- additional parameters.
"""
- recipe_default = RECIPE.copy()
- recipe_default.update(pars.recipe, in_place_depth=1)
- pars.recipe.update(recipe_default)
-
- super(I13ScanFFP, self).__init__(pars, **kwargs)
+ p = self.DEFAULT.copy(99)
+ p.update(pars)
+ super(I13ScanFFP, self).__init__(p, **kwargs)
# Try to extract base_path to access data files
- if self.info.recipe.base_path is None:
+ if self.info.base_path is None:
d = os.getcwd()
base_path = None
while True:
@@ -94,10 +146,10 @@ def __init__(self, pars=None, **kwargs):
if base_path is None:
raise RuntimeError('Could not guess base_path.')
else:
- self.info.recipe.base_path = base_path
+ self.info.base_path = base_path
# Construct file names
- self.data_file = self.info.recipe.data_file_pattern % self.info.recipe
+ self.data_file = self.info.data_file_pattern % self.info
u.log(3, 'Will read data from file %s' % self.data_file)
# Load data information
@@ -105,8 +157,8 @@ def __init__(self, pars=None, **kwargs):
NEXUS_PATHS.instrument]
# Extract detector name if not set or wrong
- if (self.info.recipe.detector_name is None
- or self.info.recipe.detector_name
+ if (self.info.detector_name is None
+ or self.info.detector_name
not in self.instrument.keys()):
detector_name = None
for k in self.instrument.keys():
@@ -118,35 +170,35 @@ def __init__(self, pars=None, **kwargs):
raise RuntimeError(
'Not possible to extract detector name. '
'Please specify in recipe instead.')
- elif (self.info.recipe.detector_name is not None
+ elif (self.info.detector_name is not None
and detector_name
- is not self.info.recipe.detector_name):
+ is not self.info.detector_name):
u.log(2, 'Detector name changed from %s to %s.'
- % (self.info.recipe.detector_name, detector_name))
+ % (self.info.detector_name, detector_name))
else:
- detector_name = self.info.recipe.detector_name
+ detector_name = self.info.detector_name
- self.info.recipe.detector_name = detector_name
+ self.info.detector_name = detector_name
# Attempt to extract experiment ID
- if self.info.recipe.experimentID is None:
+ if self.info.experimentID is None:
try:
experiment_id = io.h5read(
self.data_file, NEXUS_PATHS.experiment)[
NEXUS_PATHS.experiment][0]
except (AttributeError, KeyError):
experiment_id = os.path.split(
- self.info.recipe.base_path[:-1])[1]
+ self.info.base_path[:-1])[1]
u.logger.debug(
'Could not find experiment ID from nexus file %s. '
'Using %s instead.' % (self.data_file, experiment_id))
- self.info.recipe.experimentID = experiment_id
+ self.info.experimentID = experiment_id
# Create the ptyd file name if not specified
if self.info.dfile is None:
home = Paths(IO_par).home
self.info.dfile = ('%s/prepdata/data_%d.ptyd'
- % (home, self.info.recipe.scan_number))
+ % (home, self.info.scan_number))
u.log(3, 'Save file is %s' % self.info.dfile)
u.log(4, u.verbose.report(self.info))
@@ -165,9 +217,9 @@ def load_weight(self):
"""
# FIXME: do something better here. (detector-dependent)
# Load mask as weight
- if self.info.recipe.mask_file is not None:
+ if self.info.mask_file is not None:
return io.h5read(
- self.info.recipe.mask_file, 'mask')['mask'].astype(float)
+ self.info.mask_file, 'mask')['mask'].astype(float)
def load_positions(self):
"""
@@ -183,21 +235,21 @@ def load_positions(self):
break
# Apply motor conversion factor and create transposed position array
- if len(self.info.recipe.motors) == 3:
- self.theta = io.h5read(self.data_file, self.info.recipe.theta)[
- self.info.recipe.theta]
+ if len(self.info.motors) == 3:
+ self.theta = io.h5read(self.data_file, self.info.theta)[
+ self.info.theta]
# Convert from degree to radians
self.theta *= np.pi / 180.
- mmult = u.expect3(self.info.recipe.motors_multiplier)
+ mmult = u.expect3(self.info.motors_multiplier)
pos_list = [mmult[i] * np.array(motor_positions[motor_name])
- for i, motor_name in enumerate(self.info.recipe.motors)]
+ for i, motor_name in enumerate(self.info.motors)]
positions = 1. * np.array([np.cos(self.theta) * pos_list[0] -
np.sin(self.theta) * pos_list[2],
pos_list[1]]).T
else:
- mmult = u.expect2(self.info.recipe.motors_multiplier)
+ mmult = u.expect2(self.info.motors_multiplier)
pos_list = [mmult[i] * np.array(motor_positions[motor_name])
- for i, motor_name in enumerate(self.info.recipe.motors)]
+ for i, motor_name in enumerate(self.info.motors)]
positions = 1. * np.array(pos_list).T
return positions
@@ -245,7 +297,7 @@ def load(self, indices):
pos = {}
weights = {}
raw = {j: self.instrument[
- self.info.recipe.detector_name]['data'][j].astype(np.float32)
+ self.info.detector_name]['data'][j].astype(np.float32)
for j in indices}
u.log(3, 'Data loaded successfully.')
diff --git a/ptypy/experiment/I13_nfp.py b/ptypy/experiment/I13_nfp.py
index f8ff78bae..b77942d5a 100644
--- a/ptypy/experiment/I13_nfp.py
+++ b/ptypy/experiment/I13_nfp.py
@@ -10,6 +10,7 @@
import numpy as np
import os
from .. import utils as u
+from ..utils.descriptor import defaults_tree
from .. import io
from ..core.data import PtyScan
from ..core.paths import Paths
@@ -27,92 +28,190 @@
NEXUS_PATHS.label = 'entry1/entry_identifier'
NEXUS_PATHS.experiment = 'entry1/experiment_identifier'
-# Recipe defaults
-RECIPE = u.Param()
-# Experiment identifier
-RECIPE.experimentID = None
-# Scan number
-RECIPE.scan_number = None
-RECIPE.dark_number = None
-RECIPE.flat_number = None
-RECIPE.energy = None
-RECIPE.lam = None
-# Distance from object to screen
-RECIPE.z = None
-# Name of the detector as specified in the nexus file
-RECIPE.detector_name = None
-# Motor names to determine the sample translation
-RECIPE.motors = ['t1_sx', 't1_sy']
-# Motor conversion factor to meters
-RECIPE.motors_multiplier = 1e-6
-RECIPE.base_path = './'
-RECIPE.data_file_pattern = '%(base_path)s' + 'raw/%(scan_number)05d.nxs'
-RECIPE.dark_file_pattern = '%(base_path)s' + 'raw/%(dark_number)05d.nxs'
-RECIPE.flat_file_pattern = '%(base_path)s' + 'raw/%(flat_number)05d.nxs'
-RECIPE.mask_file = None
-# Position corrections for NFP beamtime Oct 2014
-RECIPE.correct_positions_Oct14 = False
-# Use flat as Empty Probe (EP) for probe sharing;
-# needs to be set to True in the recipe of the scan that will act as EP
-RECIPE.use_EP = False
-# Maximum number of scan points to be loaded from origin
-RECIPE.max_scan_points = 100000
-# Angle of rotation (as used in NFP beamtime Jul 2015)
-RECIPE.theta = 0
-# Apply hot pixel correction
-RECIPE.remove_hot_pixels = u.Param(
- # Initiate by setting to True;
- # DEFAULT parameters will be used if not specified otherwise
- apply=False,
- # Size of the window on which the median filter will be applied
- # around every data point
- size=3,
- # Tolerance multiplied with the standard deviation of the data array
- # subtracted by the blurred array (difference array)
- # yields the threshold for cutoff.
- tolerance=3,
- # If True, edges of the array are ignored, which speeds up the code
- ignore_edges=False,
-)
-
-# Apply Richardson Lucy deconvolution
-RECIPE.rl_deconvolution = u.Param(
- # Initiate by setting to True;
- # DEFAULT parameters will be used if not specified otherwise
- apply=False,
- # Number of iterations
- numiter=5,
- # Provide MTF from file; no loading procedure present for now,
- # loading through recon script required
- dfile=None,
- # Create fake psf as a sum of gaussians if no MTF provided
- gaussians=u.Param(
- # DEFAULT list of gaussians for Richardson Lucy deconvolution
- g1=u.Param(
- # Standard deviation in x direction
- std_x=1.0,
- # Standard deviation in y direction
- std_y=1.0,
- # Offset / shift in x direction
- off_x=0.,
- # Offset / shift in y direction
- off_y=0.,
- )
- ),
-)
-
-# Generic defaults
-I13DEFAULT = PtyScan.DEFAULT.copy()
-I13DEFAULT.recipe = RECIPE
-I13DEFAULT.auto_center = False
-I13DEFAULT.orientation = (False, False, False)
-
+@defaults_tree.parse_doc('scandata.I13ScanNFP')
class I13ScanNFP(PtyScan):
"""
I13 (Diamond Light Source) data preparation class for NFP.
+
+ Defaults:
+
+ [name]
+ default = 'I13ScanNFP'
+ type = str
+ help =
+
+ [experimentID]
+ default = None
+ type = str
+ help = Experiment identifier
+
+ [scan_number]
+ default = None
+ type = int
+ help = Scan number
+
+ [dark_number]
+ default = None
+ type = int
+ help =
+
+ [flat_number]
+ default = None
+ type = int
+ help =
+
+ [detector_name]
+ default = None
+ type = str
+ help = Name of the detector
+ doc = As specified in the nexus file.
+
+ [motors]
+ default = ['t1_sx', 't1_sy']
+ type = list
+ help = Motor names to determine the sample translation
+
+ [motors_multiplier]
+ default = 1e-6
+ type = float
+ help = Motor conversion factor to meters
+
+ [base_path]
+ default = './'
+ type = str
+ help =
+
+ [data_file_pattern]
+ default = '%(base_path)sraw/%(scan_number)05d.nxs'
+ type = str
+ help =
+
+ [dark_file_pattern]
+ default = '%(base_path)sraw/%(dark_number)05d.nxs'
+ type = str
+ help =
+
+ [flat_file_pattern]
+ default = '%(base_path)sraw/%(flat_number)05d.nxs'
+ type = str
+ help =
+
+ [mask_file]
+ default = None
+ type = str
+ help =
+
+ [correct_positions_Oct14]
+ default = False
+ type = bool
+ help =
+
+ [use_EP]
+ default = False
+ type = bool
+ help = Use flat as Empty Probe (EP) for probe sharing
+ doc = Needs to be set to True in the recipe of the scan that will act as EP.
+
+ [max_scan_points]
+ default = 100000
+ type = int
+ help = Maximum number of scan points to be loaded from origin
+
+ [theta]
+ default = 0.0
+ type = float
+ help = Angle of rotation (as used in NFP beamtime Jul 2015)
+
+ [remove_hot_pixels]
+ default =
+ type = Param
+ help = Apply hot pixel correction
+
+ [remove_hot_pixels.apply]
+ default = False
+ type = bool
+ help =
+
+ [remove_hot_pixels.size]
+ default = 3
+ type = int
+ help = Size of the window
+ doc = The median filter will be applied around every data point.
+
+ [remove_hot_pixels.tolerance]
+ default = 3
+ type = int
+ help =
+ doc = Tolerance multiplied with the standard deviation of the data array subtracted by the blurred array (difference array) yields the threshold for cutoff.
+
+ [remove_hot_pixels.ignore_edges]
+ default = False
+ type = bool
+ help = Ignore edges of the array
+ doc = Enabling speeds up the code.
+
+ [auto_center]
+ default = False
+ type = bool
+ help = Overrides PtyScan default
+
+ [orientation]
+ default = (False, False, False)
+ type = tuple
+ help = Overrides PtyScan default
+
+ [rl_deconvolution]
+ default =
+ type = Param
+ help = Apply Richardson Lucy deconvolution
+
+ [rl_deconvolution.apply]
+ default = False
+ type = bool
+ help = Initiate by setting to True
+
+ [rl_deconvolution.numiter]
+ default = 5
+ type = int
+ help = Number of iterations
+
+ [rl_deconvolution.dfile]
+ default = None
+ type = str
+ help = Provide MTF from file; no loading procedure present for now, loading through recon script required
+
+ [rl_deconvolution.gaussians]
+ default =
+ type = Param
+ help = Create fake psf as a sum of gaussians if no MTF provided
+
+ [rl_deconvolution.gaussians.g1]
+ default =
+ type = Param
+ help = list of gaussians for Richardson Lucy deconvolution
+
+ [rl_deconvolution.gaussians.g1.std_x]
+ default = 1.0
+ type = float
+ help = Standard deviation in x direction
+
+ [rl_deconvolution.gaussians.g1.std_y]
+ default = 1.0
+ type = float
+ help = Standard deviation in y direction
+
+ [rl_deconvolution.gaussians.g1.off_x]
+ default = 0.0
+ type = float
+ help = Offset / shift in x direction
+
+ [rl_deconvolution.gaussians.g1.off_y]
+ default = 0.0
+ type = float
+ help = Offset / shift in y direction
+
"""
- DEFAULT = I13DEFAULT
def __init__(self, pars=None, **kwargs):
"""
@@ -123,14 +222,13 @@ def __init__(self, pars=None, **kwargs):
:param kwargs: key-value pair
- additional parameters.
"""
- recipe_default = RECIPE.copy()
- recipe_default.update(pars.recipe, in_place_depth=1)
- pars.recipe.update(recipe_default)
-
+
+ p = self.DEFAULT.copy(99)
+ p.update(pars)
super(I13ScanNFP, self).__init__(pars, **kwargs)
# Try to extract base_path to access data files
- if self.info.recipe.base_path is None:
+ if self.info.base_path is None:
d = os.getcwd()
base_path = None
while True:
@@ -143,26 +241,26 @@ def __init__(self, pars=None, **kwargs):
if base_path is None:
raise RuntimeError('Could not guess base_path.')
else:
- self.info.recipe.base_path = base_path
+ self.info.base_path = base_path
# Construct file names
- self.data_file = self.info.recipe.data_file_pattern % self.info.recipe
+ self.data_file = self.info.data_file_pattern % self.info
u.log(3, 'Will read data from file %s' % self.data_file)
- if self.info.recipe.dark_number is None:
+ if self.info.dark_number is None:
self.dark_file = None
u.log(3, 'No data for dark')
else:
- self.dark_file = (self.info.recipe.dark_file_pattern
- % self.info.recipe)
+ self.dark_file = (self.info.dark_file_pattern
+ % self.info)
u.log(3, 'Will read dark from file %s' % self.dark_file)
- if self.info.recipe.flat_number is None:
+ if self.info.flat_number is None:
self.flat_file = None
u.log(3, 'No data for flat')
else:
- self.flat_file = (self.info.recipe.flat_file_pattern
- % self.info.recipe)
+ self.flat_file = (self.info.flat_file_pattern
+ % self.info)
u.log(3, 'Will read flat from file %s' % self.flat_file)
# Load data information
@@ -170,8 +268,8 @@ def __init__(self, pars=None, **kwargs):
NEXUS_PATHS.instrument]
# Extract detector name if not set or wrong
- if (self.info.recipe.detector_name is None
- or self.info.recipe.detector_name
+ if (self.info.detector_name is None
+ or self.info.detector_name
not in self.instrument.keys()):
detector_name = None
for k in self.instrument.keys():
@@ -183,32 +281,32 @@ def __init__(self, pars=None, **kwargs):
raise RuntimeError(
'Not possible to extract detector name. '
'Please specify in recipe instead.')
- elif (self.info.recipe.detector_name is not None
+ elif (self.info.detector_name is not None
and detector_name
- is not self.info.recipe.detector_name):
+ is not self.info.detector_name):
u.log(2, 'Detector name changed from %s to %s.'
- % (self.info.recipe.detector_name, detector_name))
+ % (self.info.detector_name, detector_name))
else:
- detector_name = self.info.recipe.detector_name
+ detector_name = self.info.detector_name
- self.info.recipe.detector_name = detector_name
+ self.info.detector_name = detector_name
# Set up dimensions for cropping
try:
# Switch for attributes which are set to None
# Will be removed once None attributes are removed
- center = pars.center
+ center = p.center
except AttributeError:
center = 'unset'
# Check if dimension tuple is provided
if type(center) == tuple:
- offset_x = pars.center[0]
- offset_y = pars.center[1]
+ offset_x = p.center[0]
+ offset_y = p.center[1]
# If center unset, extract offset from raw data
elif center == 'unset':
raw_shape = self.instrument[
- self.info.recipe.detector_name]['data'].shape
+ self.info.detector_name]['data'].shape
offset_x = raw_shape[-1] // 2
offset_y = raw_shape[-2] // 2
else:
@@ -216,30 +314,30 @@ def __init__(self, pars=None, **kwargs):
'Center provided is not of type tuple or set to "unset". '
'Please correct input parameters.')
- xdim = (offset_x - pars.shape // 2, offset_x + pars.shape // 2)
- ydim = (offset_y - pars.shape // 2, offset_y + pars.shape // 2)
+ xdim = (offset_x - p.shape // 2, offset_x + p.shape // 2)
+ ydim = (offset_y - p.shape // 2, offset_y + p.shape // 2)
- self.info.recipe.array_dim = [xdim, ydim]
+ self.info.array_dim = [xdim, ydim]
# Attempt to extract experiment ID
- if self.info.recipe.experimentID is None:
+ if self.info.experimentID is None:
try:
experiment_id = io.h5read(
self.data_file, NEXUS_PATHS.experiment)[
NEXUS_PATHS.experiment][0]
except (AttributeError, KeyError):
experiment_id = os.path.split(
- self.info.recipe.base_path[:-1])[1]
+ self.info.base_path[:-1])[1]
u.logger.debug(
'Could not find experiment ID from nexus file %s. '
'Using %s instead.' % (self.data_file, experiment_id))
- self.info.recipe.experimentID = experiment_id
+ self.info.experimentID = experiment_id
# Create the ptyd file name if not specified
if self.info.dfile is None:
home = Paths(IO_par).home
self.info.dfile = ('%s/prepdata/data_%d.ptyd'
- % (home, self.info.recipe.scan_number))
+ % (home, self.info.scan_number))
u.log(3, 'Save file is %s' % self.info.dfile)
u.log(4, u.verbose.report(self.info))
@@ -255,9 +353,9 @@ def load_weight(self):
"""
# FIXME: do something better here. (detector-dependent)
# Load mask as weight
- if self.info.recipe.mask_file is not None:
+ if self.info.mask_file is not None:
return io.h5read(
- self.info.recipe.mask_file, 'mask')['mask'].astype(float)
+ self.info.mask_file, 'mask')['mask'].astype(float)
def load_positions(self):
"""
@@ -275,7 +373,7 @@ def load_positions(self):
# If Empty Probe sharing is enabled, assign pseudo center position to
# scan and skip the rest of the function. If no positions are found at
# all, raise error.
- if motor_positions is None and self.info.recipe.use_EP:
+ if motor_positions is None and self.info.use_EP:
positions = 1. * np.array([[0., 0.]])
return positions
elif motor_positions is None:
@@ -283,17 +381,17 @@ def load_positions(self):
% str(NEXUS_PATHS.motors))
# Apply motor conversion factor and create transposed position array
- mmult = u.expect2(self.info.recipe.motors_multiplier)
+ mmult = u.expect2(self.info.motors_multiplier)
pos_list = [mmult[i] * np.array(motor_positions[motor_name])[
- :self.info.recipe.max_scan_points]
- for i, motor_name in enumerate(self.info.recipe.motors)]
+ :self.info.max_scan_points]
+ for i, motor_name in enumerate(self.info.motors)]
positions = 1. * np.array(pos_list).T
# Correct positions for angle of rotation if necessary
- positions[:, 1] *= np.cos(np.pi * self.info.recipe.theta / 180.)
+ positions[:, 1] *= np.cos(np.pi * self.info.theta / 180.)
# Position corrections for NFP beamtime Oct 2014.
- if self.info.recipe.correct_positions_Oct14:
+ if self.info.correct_positions_Oct14:
r = np.array([[0.99987485, 0.01582042], [-0.01582042, 0.99987485]])
p0 = positions.mean(axis=0)
positions = np.dot(r, (positions - p0).T).T + p0
@@ -311,36 +409,36 @@ def load_common(self):
common = u.Param()
# Load dark.
- if self.info.recipe.dark_number is not None:
- key = NEXUS_PATHS.frame_pattern % self.info.recipe
+ if self.info.dark_number is not None:
+ key = NEXUS_PATHS.frame_pattern % self.info
dark_indices = range(len(
io.h5read(self.dark_file, NEXUS_PATHS.frame_pattern
- % self.info.recipe)[key]))
+ % self.info)[key]))
dark = [io.h5read(self.dark_file, NEXUS_PATHS.frame_pattern
- % self.info.recipe, slice=j)[key][
- self.info.recipe.array_dim[1][0]:
- self.info.recipe.array_dim[1][1],
- self.info.recipe.array_dim[0][0]:
- self.info.recipe.array_dim[0][1]].astype(np.float32)
+ % self.info, slice=j)[key][
+ self.info.array_dim[1][0]:
+ self.info.array_dim[1][1],
+ self.info.array_dim[0][0]:
+ self.info.array_dim[0][1]].astype(np.float32)
for j in dark_indices]
common.dark = np.array(dark).mean(0)
u.log(3, 'Dark loaded successfully.')
# Load flat.
- if self.info.recipe.flat_number is not None:
- key = NEXUS_PATHS.frame_pattern % self.info.recipe
+ if self.info.flat_number is not None:
+ key = NEXUS_PATHS.frame_pattern % self.info
flat_indices = range(len(
io.h5read(self.flat_file, NEXUS_PATHS.frame_pattern
- % self.info.recipe)[key]))
+ % self.info)[key]))
flat = [io.h5read(self.flat_file, NEXUS_PATHS.frame_pattern
- % self.info.recipe, slice=j)[key][
- self.info.recipe.array_dim[1][0]:
- self.info.recipe.array_dim[1][1],
- self.info.recipe.array_dim[0][0]:
- self.info.recipe.array_dim[0][1]].astype(np.float32)
+ % self.info, slice=j)[key][
+ self.info.array_dim[1][0]:
+ self.info.array_dim[1][1],
+ self.info.array_dim[0][0]:
+ self.info.array_dim[0][1]].astype(np.float32)
for j in flat_indices]
common.flat = np.array(flat).mean(0)
@@ -379,11 +477,11 @@ def load(self, indices):
"""
pos = {}
weights = {}
- raw = {j: self.instrument[self.info.recipe.detector_name]['data'][j][
- self.info.recipe.array_dim[1][0]:
- self.info.recipe.array_dim[1][1],
- self.info.recipe.array_dim[0][0]:
- self.info.recipe.array_dim[0][1]].astype(np.float32)
+ raw = {j: self.instrument[self.info.detector_name]['data'][j][
+ self.info.array_dim[1][0]:
+ self.info.array_dim[1][1],
+ self.info.array_dim[0][0]:
+ self.info.array_dim[0][1]].astype(np.float32)
for j in indices}
u.log(3, 'Data loaded successfully.')
@@ -415,43 +513,43 @@ def correct(self, raw, weights, common):
- dict: contains modified weights.
"""
# Apply hot pixel removal
- if self.info.recipe.remove_hot_pixels.apply:
+ if self.info.remove_hot_pixels.apply:
u.log(3, 'Applying hot pixel removal...')
for j in raw:
raw[j] = u.remove_hot_pixels(
raw[j],
- self.info.recipe.remove_hot_pixels.size,
- self.info.recipe.remove_hot_pixels.tolerance,
- self.info.recipe.remove_hot_pixels.ignore_edges)[0]
+ self.info.remove_hot_pixels.size,
+ self.info.remove_hot_pixels.tolerance,
+ self.info.remove_hot_pixels.ignore_edges)[0]
- if self.info.recipe.flat_number is not None:
+ if self.info.flat_number is not None:
common.dark = u.remove_hot_pixels(
common.dark,
- self.info.recipe.remove_hot_pixels.size,
- self.info.recipe.remove_hot_pixels.tolerance,
- self.info.recipe.remove_hot_pixels.ignore_edges)[0]
+ self.info.remove_hot_pixels.size,
+ self.info.remove_hot_pixels.tolerance,
+ self.info.remove_hot_pixels.ignore_edges)[0]
- if self.info.recipe.flat_number is not None:
+ if self.info.flat_number is not None:
common.flat = u.remove_hot_pixels(
common.flat,
- self.info.recipe.remove_hot_pixels.size,
- self.info.recipe.remove_hot_pixels.tolerance,
- self.info.recipe.remove_hot_pixels.ignore_edges)[0]
+ self.info.remove_hot_pixels.size,
+ self.info.remove_hot_pixels.tolerance,
+ self.info.remove_hot_pixels.ignore_edges)[0]
u.log(3, 'Hot pixel removal completed.')
# Apply deconvolution
- if self.info.recipe.rl_deconvolution.apply:
+ if self.info.rl_deconvolution.apply:
u.log(3, 'Applying deconvolution...')
# Use mtf from a file if provided in recon script
- if self.info.recipe.rl_deconvolution.dfile is not None:
+ if self.info.rl_deconvolution.dfile is not None:
mtf = self.info.rl_deconvolution.dfile
# Create fake psf as a sum of gaussians from parameters
else:
gau_sum = 0
for k in (
- self.info.recipe.rl_deconvolution.gaussians.iteritems()):
+ self.info.rl_deconvolution.gaussians.iteritems()):
gau_sum += u.gaussian2D(raw[0].shape[0],
k[1].std_x,
k[1].std_y,
@@ -465,18 +563,18 @@ def correct(self, raw, weights, common):
raw[j] = u.rl_deconvolution(
raw[j],
mtf,
- self.info.recipe.rl_deconvolution.numiter)
+ self.info.rl_deconvolution.numiter)
u.log(3, 'Deconvolution completed.')
# Apply flat and dark, only dark, or no correction
- if (self.info.recipe.flat_number is not None
- and self.info.recipe.dark_number is not None):
+ if (self.info.flat_number is not None
+ and self.info.dark_number is not None):
for j in raw:
raw[j] = (raw[j] - common.dark) / (common.flat - common.dark)
raw[j][raw[j] < 0] = 0
data = raw
- elif self.info.recipe.dark_number is not None:
+ elif self.info.dark_number is not None:
for j in raw:
raw[j] = raw[j] - common.dark
raw[j][raw[j] < 0] = 0
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index f4b9cb5a1..4c3fcff16 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -17,9 +17,8 @@
"""
# Import instrument-specific modules
#import cSAXS
-
-# from I13_ffp import I13ScanFFP
-# from I13_nfp import I13ScanNFP
+from I13_ffp import I13ScanFFP
+from I13_nfp import I13ScanNFP
from DLS import DlsScan
from I08 import I08Scan
from savu import Savu
From 7fccfbbebb58c96d2fd6834ea17566171b6750d7 Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Mon, 23 Oct 2017 14:54:59 +0100
Subject: [PATCH 160/363] Added a to_string method in descriptor.
---
doc/conf.py | 10 ++++++++++
ptypy/utils/descriptor.py | 9 +++++++++
templates/make_sample_ptyd.py | 16 ++++++++--------
3 files changed, 27 insertions(+), 8 deletions(-)
diff --git a/doc/conf.py b/doc/conf.py
index 8589e0676..9482c4c30 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -42,6 +42,16 @@
]
+def truncate_docstring(app, what, name, obj, options, lines):
+ """
+ Remove the Default parameter entries.
+ """
+
+ if any(l.strip().startswith('Defaults:') for l in lines):
+ while True:
+ if lines.pop(-1).strip().startswith('Defaults:'):
+ break
+
def remove_mod_docstring(app, what, name, obj, options, lines):
from ptypy import utils as u
import numpy as np
diff --git a/ptypy/utils/descriptor.py b/ptypy/utils/descriptor.py
index d2bfe5b25..ded47ac28 100644
--- a/ptypy/utils/descriptor.py
+++ b/ptypy/utils/descriptor.py
@@ -411,6 +411,15 @@ def save_conf_parser(self, fbuffer, print_optional=True):
parser.write(fbuffer)
return parser
+ def to_string(self):
+ """
+ Return the full content of descriptor as a string in configparser format.
+ """
+ import StringIO
+ s = StringIO.StringIO()
+ self.save_conf_parser(s)
+ return s.getvalue().strip()
+
def __str__(self):
"""
Pretty-print the Parameter options in ConfigParser format.
diff --git a/templates/make_sample_ptyd.py b/templates/make_sample_ptyd.py
index 82125a23f..eb8893fec 100644
--- a/templates/make_sample_ptyd.py
+++ b/templates/make_sample_ptyd.py
@@ -6,27 +6,27 @@
import time
import ptypy
from ptypy import utils as u
-
+from ptypy import experiment
# for verbose output
u.verbose.set_level(3)
# create data parameter branch
data = u.Param()
+data.source = 'test'
data.dfile = 'sample.ptyd'
-data.shape = 128
data.num_frames = 100
data.save = 'append'
-data.label=None
-data.psize=None
-data.energy=None
-data.center=None
-data.distance = None
+data.label = None
data.auto_center = None
data.rebin = None
data.orientation = None
# create PtyScan instance
-MF = ptypy.core.data.MoonFlowerScan(data)
+# The following call is equivalent to
+# MF = ptypy.core.data.MoonFlowerScan(data)
+# It uses data.source to find the proper PtyScan subclass
+MF = experiment.makePtyScan(data)
+
MF.initialize()
for i in range(2):
# autoprocess data
From 974f0f3a6e14f87bd2f224aaea2a39f8d2c1b175 Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Mon, 23 Oct 2017 15:07:33 +0100
Subject: [PATCH 161/363] Updated Subclassing PtyScan doc
---
doc/conf.py | 51 +++-------
doc/parameters2rst.py | 2 +-
doc/script2rst.py | 16 ++-
tutorial/subclassptyscan.py | 196 +++++++++++++++++++++++-------------
4 files changed, 153 insertions(+), 112 deletions(-)
diff --git a/doc/conf.py b/doc/conf.py
index 9482c4c30..0ab3f63fd 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -21,11 +21,11 @@
#sys.path.insert(0, os.path.abspath('.'))
# generate paramters.rst and other rst
+import subprocess
+subprocess.call(['python', 'script2rst.py']) # We need this to have a clean sys.argv
execfile('parameters2rst.py')
execfile('tmp2rst.py')
execfile('version.py')
-import subprocess
-subprocess.call(['python', 'script2rst.py']) # We need this to have a clean sys.argv
# -- General configuration ------------------------------------------------
@@ -46,79 +46,58 @@ def truncate_docstring(app, what, name, obj, options, lines):
"""
Remove the Default parameter entries.
"""
-
+ if not hasattr(obj, 'DEFAULT'):
+ return
if any(l.strip().startswith('Defaults:') for l in lines):
while True:
if lines.pop(-1).strip().startswith('Defaults:'):
break
+
def remove_mod_docstring(app, what, name, obj, options, lines):
from ptypy import utils as u
- import numpy as np
u.verbose.report.headernewline='\n\n'
searchstr = ':py:data:'
- def get_refs(dct,pd,depth=2, indent = ''):
- if depth<0:
+ def get_refs(dct, pd, depth=2, indent=''):
+ if depth < 0:
return
for k, value in dct.iteritems():
ref = ', see :py:data:`~%s`' % pd.children[k].entry_point if pd.children.has_key(k) else ''
- if hasattr(value,'items'):
+ if hasattr(value, 'items'):
v = str(value.__class__.__name__)
- elif str(value)==value:
- v='"%s"' % value
+ elif str(value) == value:
+ v = '"%s"' % value
else:
- v=str(value)
+ v = str(value)
- lines.append(indent+'* *' +k+'* = ``'+v+'``' +ref)#+'\n')
+ lines.append(indent + '* *' + k + '* = ``' + v + '``' + ref)
- if hasattr(value,'items'):
- #lines.append('\n\n')
+ if hasattr(value, 'items'):
lines.append("")
- get_refs(value,pd.children[k],depth=depth-1, indent = indent+' ')
+ get_refs(value, pd.children[k], depth=depth-1, indent=indent+' ')
lines.append("")
- #lines.append('\n\n')
- #if name.find('DEFAULT')>=0:
if isinstance(obj, u.Param) or isinstance(obj, dict):
- keys = obj.keys()
pd = None
- """
- # auto_matching
- for entry,pdesc in u.validator.entry_points_Param.iteritems():
- chkeys = ':'.join([k.split('.')[-1] for k in pdesc.children.keys()])
- #print chkeys
- #print keys
- matches = [key in chkeys for key in keys]
- #print matches
- print np.mean(matches)
- if np.mean(matches)>0.8:
- print 'Param match'
- e=entry
- print e
- pd = pdesc
- break
- """
for l in lines:
start = l.find(searchstr)
if start > -1:
newstr = l[start:]
newstr = newstr.split('`')[1]
newstr = newstr.replace('~', '')
- #print newstr, what, name, options
pd = u.descriptor.defaults_tree.get(newstr)
break
if pd is not None:
- #lines.append('Match with :py:data:`.%s` \n\n' %pd.entry_point)
get_refs(obj, pd, depth=2, indent='')
- #print lines
def setup(app):
app.connect('autodoc-process-docstring', remove_mod_docstring)
+ app.connect('autodoc-process-docstring', truncate_docstring)
napoleon_use_ivar = True
diff --git a/doc/parameters2rst.py b/doc/parameters2rst.py
index c64bac882..35832af57 100644
--- a/doc/parameters2rst.py
+++ b/doc/parameters2rst.py
@@ -44,7 +44,7 @@
prst.write(' '+desc.doc.replace('\n', '\n ')+'\n\n')
if desc.is_symlink:
- prst.write(' *default* = '+':py:data:`'+default.path+'`\n')
+ prst.write(' *default* = '+':py:data:`'+desc.path+'`\n')
else:
prst.write(' *default* = ``'+repr(default))
if lowlim is not None and uplim is not None:
diff --git a/doc/script2rst.py b/doc/script2rst.py
index 352679301..c49083b10 100644
--- a/doc/script2rst.py
+++ b/doc/script2rst.py
@@ -126,12 +126,17 @@ def check_for_fig(wline):
frst.write(' ' + line2)
continue
+ decorator = False
indent = False
for key in indent_keys:
if line.startswith(key):
indent = True
break
-
+
+ if line.startswith('@'):
+ indent = True
+ decorator = True
+
if indent:
frst.write('\n::\n\n >>> '+line)
func = line
@@ -139,9 +144,12 @@ def check_for_fig(wline):
while True:
line2 = fpy.readline()
if line2.strip() and not line2.startswith(' '):
- frst.write('\n')
- fpy.seek(pt)
- break
+ if decorator:
+ decorator = False
+ else:
+ frst.write('\n')
+ fpy.seek(pt)
+ break
func += line2
frst.write(' >>> '+line2)
pt = fpy.tell()
diff --git a/tutorial/subclassptyscan.py b/tutorial/subclassptyscan.py
index 81847f19c..dca8f2fe2 100644
--- a/tutorial/subclassptyscan.py
+++ b/tutorial/subclassptyscan.py
@@ -5,12 +5,9 @@
# if section :ref:`store` was completed
# Again, the imports first.
-import matplotlib as mpl
import numpy as np
-import ptypy
+from ptypy.core.data import PtyScan
from ptypy import utils as u
-plt = mpl.pyplot
-import sys
# For this tutorial we assume that the data and meta information is
# in this path:
@@ -19,64 +16,64 @@
# Furthermore, we assume that a file about the experimental geometry is
# located at
geofilepath = save_path + 'geometry.txt'
-print geofilepath
+print(geofilepath)
# and has contents of the following form
-print ''.join([line for line in open(geofilepath, 'r')])
+print(''.join([line for line in open(geofilepath, 'r')]))
# The scanning positions are in
positionpath = save_path + 'positions.txt'
-print positionpath
+print(positionpath)
# with a list of positions for vertical and horizontal movement and the
# image frame from the "camera"
-print ''.join([line for line in open(positionpath, 'r')][:6])+'....'
+print(''.join([line for line in open(positionpath, 'r')][:6])+'....')
# Writing a subclass
# ------------------
-# A subclass of :any:`PtyScan` takes the same input parameter
-# tree as PtyScan itself, i.e :py:data:`.scan.data`. As the subclass
-# will most certainly require additional parameters, there has to be
-# a flexible additional container. For PtyScan, that is the
-# :py:data:`.scan.data.recipe` parameter. A subclass must extract all
-# additional parameters from this source and, in script, you fill
-# the recipe with the appropriate items.
-
-# In this case we can assume that the only parameter of the recipe
-# is the base path ``/tmp/ptypy/sim/``\ . Hence we write
-RECIPE = u.Param()
-RECIPE.base_path = '/tmp/ptypy/sim/'
-
-# Now we import the default generic parameter set from
-from ptypy.core.data import PtyScan
-DEFAULT = PtyScan.DEFAULT.copy()
-
-# This would be the perfect point to change any default value.
-# For sure we need to set the recipe parameter
-DEFAULT.recipe = RECIPE
-
-# A default data file location may be handy too and we allow saving of
-# data in a single file. And since we know it is simulated data we do not
-# have to find the optical axes in the diffraction pattern with
-# the help of auto_center
-DEFAULT.dfile = '/tmp/ptypy/sim/npy.ptyd'
-DEFAULT.auto_center = False
-
-# Our defaults are now
-print u.verbose.report(DEFAULT, noheader=True)
-
# The simplest subclass of PtyScan would look like this
class NumpyScan(PtyScan):
- # We overwrite the DEFAULT with the new DEFAULT.
- DEFAULT = DEFAULT
+ """
+ A PtyScan subclass to extract data from a numpy array.
+ """
def __init__(self, pars=None, **kwargs):
# In init we need to call the parent.
super(NumpyScan, self).__init__(pars, **kwargs)
# Of course this class does nothing special beyond PtyScan.
+# As it is, the class also cannot be used as a real PtyScan instance
+# because its defaults are not properly managed. For this, Ptypy provides a
+# powerful self-documenting tool called a "descriptor" which can be applied
+# to any new class using a decorator. The tree of all valid ptypy parameters
+# is located at :any:`ptypy.utils.descriptor.defaults_tree`. To manage the default
+# parameters of our subclass and document its existence, we would need to write
+from ptypy.utils.descriptor import defaults_tree
+
+@defaults_tree.parse_doc('scandata.numpyscan')
+class NumpyScan(PtyScan):
+ """
+ A PtyScan subclass to extract data from a numpy array.
+ """
+
+ def __init__(self, pars=None, **kwargs):
+ # In init we need to call the parent.
+ super(NumpyScan, self).__init__(pars, **kwargs)
+
+# The decorator extracts information from the docstring of the subclass and
+# parent classes about the expected input parameters. Currently the docstring
+# of `NumpyScan` does not contain anything special, thus the only parameters
+# registered are those of the parent class, `PtyScan`:
+print(defaults_tree['scandata.numpyscan'].to_string())
-# An additional step of initialisation would be to retrieve
+# As you can see, there are already many parameters documented in `PtyScan`'s
+# class. For each parameter, most important are the *type*, *default* value and
+# *help* string. The decorator does more than collect this information: it also
+# generates from it a class variable called `DEFAULT`, which stores all defaults:
+print(u.verbose.report(NumpyScan.DEFAULT, noheader=True))
+
+# Now we are ready to add functionality to our subclass.
+# A first step of initialisation would be to retrieve
# the geometric information that we stored in ``geofilepath`` and update
# the input parameters with it.
@@ -90,21 +87,35 @@ def extract_geo(base_path):
return out
# We test it.
-print extract_geo(save_path)
+print(extract_geo(save_path))
# That seems to work. We can integrate this parser into
# the initialisation as we assume that this small access can be
# done by all MPI nodes without data access problems. Hence,
# our subclass becomes
+@defaults_tree.parse_doc('scandata.numpyscan')
class NumpyScan(PtyScan):
- # We overwrite the DEFAULT with the new DEFAULT.
- DEFAULT = DEFAULT
+ """
+ A PtyScan subclass to extract data from a numpy array.
+
+ Defaults:
+
+ [name]
+ type = str
+ default = numpyscan
+ help =
+
+ [base_path]
+ type = str
+ default = './'
+ help = Base path to extract data files from.
+ """
def __init__(self, pars=None, **kwargs):
- p = DEFAULT.copy(depth=2)
+ p = self.DEFAULT.copy(depth=2)
p.update(pars)
- with open(p.recipe.base_path+'geometry.txt') as f:
+ with open(p.base_path+'geometry.txt') as f:
for line in f:
key, value = line.strip().split()
# we only replace Nones or missing keys
@@ -113,11 +124,20 @@ def __init__(self, pars=None, **kwargs):
super(NumpyScan, self).__init__(p, **kwargs)
+# We now need a new input parameter called `base_path`, so we documented it
+# in the docstring after the section header "Defaults:".
+print(defaults_tree['scandata.numpyscan.base_path'])
+
+# As you can see, the first step in `__init__` is to build a default
+# parameter structure to ensure that all input parameters are available.
+# The next line updates this structure to overwrite the entries specified by
+# the user.
+
# Good! Next, we need to implement how the class finds out about
# the positions in the scan. The method
# :py:meth:`~ptypy.core.data.PtyScan.load_positions` can be used
# for this purpose.
-print PtyScan.load_positions.__doc__
+print(PtyScan.load_positions.__doc__)
# The parser for the positions file would look like this.
def extract_pos(base_path):
@@ -132,34 +152,48 @@ def extract_pos(base_path):
# And the test:
files, pos = extract_pos(save_path)
-print files[:2]
-print pos[:2]
+print(files[:2])
+print(pos[:2])
+@defaults_tree.parse_doc('scandata.numpyscan')
class NumpyScan(PtyScan):
- # We overwrite the DEFAULT with the new DEFAULT.
- DEFAULT = DEFAULT
+ """
+ A PtyScan subclass to extract data from a numpy array.
- def __init__(self,pars=None, **kwargs):
- p = DEFAULT.copy(depth=2)
+ Defaults:
+
+ [name]
+ type = str
+ default = numpyscan
+ help =
+
+ [base_path]
+ type = str
+ default = /tmp/ptypy/sim/
+ help = Base path to extract data files from.
+ """
+
+ def __init__(self, pars=None, **kwargs):
+ p = self.DEFAULT.copy(depth=2)
p.update(pars)
- with open(p.recipe.base_path+'geometry.txt') as f:
+ with open(p.base_path+'geometry.txt') as f:
for line in f:
key, value = line.strip().split()
# we only replace Nones or missing keys
if p.get(key) is None:
- p[key]=eval(value)
+ p[key] = eval(value)
super(NumpyScan, self).__init__(p, **kwargs)
- # all input data is now in self.info
def load_positions(self):
# the base path is now stored in
- base_path = self.info.recipe.base_path
+ base_path = self.info.base_path
+ pos = []
with open(base_path+'positions.txt') as f:
for line in f:
fname, y, x = line.strip().split()
- pos.append((eval(y),eval(x)))
+ pos.append((eval(y), eval(x)))
files.append(fname)
return np.asarray(pos)
@@ -177,39 +211,59 @@ def load_positions(self):
# and positions, as we have already adapted ``self.load_positions``
# and there were no bad pixels in the (linear) detector
-# The final subclass looks like this.
+# The final subclass looks like this. We overwrite two defaults from
+# `PtyScan`:
+@defaults_tree.parse_doc('scandata.numpyscan')
class NumpyScan(PtyScan):
- # We overwrite the DEFAULT with the new DEFAULT.
- DEFAULT = DEFAULT
+ """
+ A PtyScan subclass to extract data from a numpy array.
+
+ Defaults:
+
+ [name]
+ type = str
+ default = numpyscan
+ help =
+
+ [base_path]
+ type = str
+ default = /tmp/ptypy/sim/
+ help = Base path to extract data files from.
- def __init__(self,pars=None, **kwargs):
- p = DEFAULT.copy(depth=2)
+ [auto_center]
+ default = False
+
+ [dfile]
+ default = /tmp/ptypy/sim/npy.ptyd
+ """
+
+ def __init__(self, pars=None, **kwargs):
+ p = self.DEFAULT.copy(depth=2)
p.update(pars)
- with open(p.recipe.base_path+'geometry.txt') as f:
+ with open(p.base_path+'geometry.txt') as f:
for line in f:
key, value = line.strip().split()
# we only replace Nones or missing keys
if p.get(key) is None:
- p[key]=eval(value)
+ p[key] = eval(value)
super(NumpyScan, self).__init__(p, **kwargs)
- # all input data is now in self.info
def load_positions(self):
# the base path is now stored in
- pos=[]
- base_path = self.info.recipe.base_path
+ base_path = self.info.base_path
+ pos = []
with open(base_path+'positions.txt') as f:
for line in f:
fname, y, x = line.strip().split()
- pos.append((eval(y),eval(x)))
+ pos.append((eval(y), eval(x)))
files.append(fname)
return np.asarray(pos)
def load(self, indices):
raw = {}
- bp = self.info.recipe.base_path
+ bp = self.info.base_path
for ii in indices:
raw[ii] = np.load(bp+'ccd/diffraction_%04d.npy' % ii)
return raw, {}, {}
From 9771ee25221cd11776dced06898d4d7fe4275594 Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Tue, 24 Oct 2017 11:37:34 +0100
Subject: [PATCH 162/363] Bug fixes, new tests, old tests running again
---
ptypy/experiment/optiklabor.py | 2 +-
ptypy/test/util_tests/descriptor_test.py | 70 +++++++++++++++++++++++-
ptypy/utils/descriptor.py | 4 +-
3 files changed, 70 insertions(+), 6 deletions(-)
diff --git a/ptypy/experiment/optiklabor.py b/ptypy/experiment/optiklabor.py
index 8e82c74f9..24564cf2d 100644
--- a/ptypy/experiment/optiklabor.py
+++ b/ptypy/experiment/optiklabor.py
@@ -112,7 +112,7 @@ class FliSpecScanMultexp(PtyScan):
[dark_dir_pattern]
default = '%(base_path)s%(scan_dir)sS%(dark_number)05d/'
- type =
+ type = str
help =
"""
diff --git a/ptypy/test/util_tests/descriptor_test.py b/ptypy/test/util_tests/descriptor_test.py
index 8e824b85c..ed33e311a 100644
--- a/ptypy/test/util_tests/descriptor_test.py
+++ b/ptypy/test/util_tests/descriptor_test.py
@@ -3,12 +3,76 @@
"""
import unittest
-from ptypy.utils.descriptor import EvalDescriptor, CODES
+from ptypy.utils.descriptor import EvalDescriptor, CODES, defaults_tree
from ptypy.utils import Param
+class SanityCheck(unittest.TestCase):
+
+ def test_sanity(self):
+ defaults_tree.sanity_check()
+
+
class EvalDescriptorTest(unittest.TestCase):
+ def test_basic_functions(self):
+ """
+ Test EvalDescriptor behaviour
+ """
+
+ # Parameter declaration through formatted string
+ x = EvalDescriptor('')
+ x.from_string("""
+ [param1]
+ default = 0
+ type = int
+ help = A parameter
+ uplim = 5
+ lowlim = 0""")
+
+ assert x['param1'].default == 0
+ assert x['param1'].limits == (0, 5)
+ assert x['param1'].type == ['int']
+
+ # ConfigParser allows overwriting some properties
+ x = EvalDescriptor('')
+ x.from_string("""
+ [param1]
+ default = 0
+ type = int
+ help = A parameter
+
+ [param2]
+ default = a
+ type = str
+ help = Another parameter
+
+ [param1]
+ uplim = 5
+ lowlim = 0""")
+
+ assert x['param1'].limits == (0, 5)
+
+ # Implicit branch creation
+ x = EvalDescriptor('')
+ x.from_string("""
+ [category1.subcategory1.param1]
+ default = 0
+ type = int
+ help = A parameter""")
+
+ assert [k for k,v in x.descendants] == ['category1', 'category1.subcategory1', 'category1.subcategory1.param1']
+
+ assert x['category1'].implicit == True
+
+ x.from_string("""
+ [category1]
+ default =
+ type = Param
+ help = The first category""")
+
+ assert x['category1'].implicit == False
+
def test_parse_doc_basic(self):
"""
Test basic behaviour of the EvalDescriptor decorator.
@@ -393,7 +457,7 @@ class FakePtychoClass(object):
p.engines.engine01 = Param()
p.engines.engine01.numiter = 10
out = root.check(p)
- assert out['engines.*']['symlink'] == CODES.INVALID
+ assert out['engines.engine01']['symlink'] == CODES.INVALID
# wrong name
p = Param()
@@ -402,7 +466,7 @@ class FakePtychoClass(object):
p.engines.engine01.name = 'ePIE'
p.engines.engine01.numiter = 10
out = root.check(p)
- assert out['engines.*']['symlink'] == CODES.INVALID
+ assert out['engines.engine01']['symlink'] == CODES.INVALID
if __name__ == "__main__":
diff --git a/ptypy/utils/descriptor.py b/ptypy/utils/descriptor.py
index ded47ac28..2581f9bc3 100644
--- a/ptypy/utils/descriptor.py
+++ b/ptypy/utils/descriptor.py
@@ -179,7 +179,6 @@ def _store_options(self, dct):
if missing:
raise ValueError('Missing required option(s) <%s> for parameter %s.' % (', '.join(missing), self.name))
- self.options = dict.fromkeys(self.required)
self.options.update(dct)
def _find(self, name):
@@ -391,6 +390,7 @@ def from_string(self, s, **kwargs):
Keyword arguments are forwarded to `ConfigParser.RawConfigParser`
"""
from StringIO import StringIO
+ s = textwrap.dedent(s)
return self.load_conf_parser(StringIO(s), **kwargs)
def save_conf_parser(self, fbuffer, print_optional=True):
@@ -852,7 +852,7 @@ def check(self, pars, depth=99):
"""
out = OrderedDict()
for res in self._walk(depth=depth, pars=pars):
- path = res['d'].path
+ path = res['path']
out[path] = {}
# Switch through all possible statuses
if res['status'] == 'ok':
From acee2f19dcf7f5841d5305435863a4c2c784a30b Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Tue, 24 Oct 2017 11:50:09 +0100
Subject: [PATCH 163/363] Removed entries that would needlessly override help
and type fields of PtyScan defaults.
---
ptypy/experiment/ALS_5321.py | 2 --
ptypy/experiment/AMO_LCLS.py | 6 ------
ptypy/experiment/DLS.py | 6 ------
ptypy/experiment/DiProI_FERMI.py | 2 --
ptypy/experiment/I08.py | 3 ---
ptypy/experiment/I13_ffp.py | 6 ------
ptypy/experiment/I13_nfp.py | 6 ------
ptypy/experiment/ID16Anfp.py | 7 -------
ptypy/experiment/UCL.py | 8 +-------
ptypy/experiment/optiklabor.py | 6 ------
10 files changed, 1 insertion(+), 51 deletions(-)
diff --git a/ptypy/experiment/ALS_5321.py b/ptypy/experiment/ALS_5321.py
index 587318e9a..5bfb3e0b2 100644
--- a/ptypy/experiment/ALS_5321.py
+++ b/ptypy/experiment/ALS_5321.py
@@ -44,8 +44,6 @@ class ALS5321Scan(PtyScan):
[energy]
default = 0.820
- type = float
- help = Beam energy
[CXI_PATHS]
default = None
diff --git a/ptypy/experiment/AMO_LCLS.py b/ptypy/experiment/AMO_LCLS.py
index a10912d0a..69ad6a261 100644
--- a/ptypy/experiment/AMO_LCLS.py
+++ b/ptypy/experiment/AMO_LCLS.py
@@ -41,8 +41,6 @@ class AMOScan(core.data.PtyScan):
[experimentID]
default = None
- type = str
- help = Experiment identifier
[scan_number]
default = None
@@ -107,13 +105,9 @@ class AMOScan(core.data.PtyScan):
[auto_center]
default = False
- type = bool
- help = Overrides PtyScan default
[orientation]
default = (False, False, False)
- type = tuple
- help = Overrides PtyScan default
"""
diff --git a/ptypy/experiment/DLS.py b/ptypy/experiment/DLS.py
index 67808dde6..175e73287 100644
--- a/ptypy/experiment/DLS.py
+++ b/ptypy/experiment/DLS.py
@@ -64,8 +64,6 @@ class DlsScan(PtyScan):
[experimentID]
default = None
- type = str
- help = Experiment identifier
[scan_number]
default = None
@@ -164,13 +162,9 @@ class DlsScan(PtyScan):
[auto_center]
default = False
- type = bool
- help = Overrides PtyScan default
[orientation]
default = (False, False, False)
- type = tuple
- help = Overrides PtyScan default
"""
diff --git a/ptypy/experiment/DiProI_FERMI.py b/ptypy/experiment/DiProI_FERMI.py
index 2de54fd9d..dcff2c0aa 100644
--- a/ptypy/experiment/DiProI_FERMI.py
+++ b/ptypy/experiment/DiProI_FERMI.py
@@ -125,8 +125,6 @@ class DiProIFERMIScan(PtyScan):
[auto_center]
default = False
- type = bool
- help = Overrides PtyScan default
"""
diff --git a/ptypy/experiment/I08.py b/ptypy/experiment/I08.py
index 8843cbb59..34416ebdd 100644
--- a/ptypy/experiment/I08.py
+++ b/ptypy/experiment/I08.py
@@ -128,9 +128,6 @@ class I08Scan(ptypy.core.data.PtyScan):
[auto_center]
default = False
- type = bool
- help = Overrides PtyScan default
-
"""
def __init__(self, pars=None, **kwargs):
diff --git a/ptypy/experiment/I13_ffp.py b/ptypy/experiment/I13_ffp.py
index 4ce697e9d..56162c83f 100644
--- a/ptypy/experiment/I13_ffp.py
+++ b/ptypy/experiment/I13_ffp.py
@@ -43,8 +43,6 @@ class I13ScanFFP(PtyScan):
[experimentID]
default = None
- type = str
- help = Experiment identifier
[scan_number]
default = None
@@ -109,13 +107,9 @@ class I13ScanFFP(PtyScan):
[auto_center]
default = False
- type = bool
- help = Overrides PtyScan default
[orientation]
default = (False, False, False)
- type = tuple
- help = Overrides PtyScan default
"""
diff --git a/ptypy/experiment/I13_nfp.py b/ptypy/experiment/I13_nfp.py
index b77942d5a..e56c323a9 100644
--- a/ptypy/experiment/I13_nfp.py
+++ b/ptypy/experiment/I13_nfp.py
@@ -43,8 +43,6 @@ class I13ScanNFP(PtyScan):
[experimentID]
default = None
- type = str
- help = Experiment identifier
[scan_number]
default = None
@@ -153,13 +151,9 @@ class I13ScanNFP(PtyScan):
[auto_center]
default = False
- type = bool
- help = Overrides PtyScan default
[orientation]
default = (False, False, False)
- type = tuple
- help = Overrides PtyScan default
[rl_deconvolution]
default =
diff --git a/ptypy/experiment/ID16Anfp.py b/ptypy/experiment/ID16Anfp.py
index f1201273e..ad94f44de 100644
--- a/ptypy/experiment/ID16Anfp.py
+++ b/ptypy/experiment/ID16Anfp.py
@@ -57,8 +57,6 @@ class ID16AScan(PtyScan):
[experimentID]
default = None
- type = str
- help = Experiment identifier - will be read from h5
[motors]
default = ['spy', 'spz']
@@ -147,14 +145,9 @@ class ID16AScan(PtyScan):
[auto_center]
default = False
- type = bool
- help = Overrides the PtyScan default
[orientation]
default = (False, True, False)
- type = tuple
- help = Frelon frame - only LR flip
-
"""
def __init__(self, pars=None, **kwargs):
diff --git a/ptypy/experiment/UCL.py b/ptypy/experiment/UCL.py
index be1c90288..b55f5f3ff 100644
--- a/ptypy/experiment/UCL.py
+++ b/ptypy/experiment/UCL.py
@@ -36,13 +36,9 @@ class UCLLaserScan(PtyScan):
[auto_center]
default = False
- type = bool
- help =
[orientation]
default = (False, False, False)
- type = tuple
- help =
[scan_number]
default = None
@@ -61,13 +57,11 @@ class UCLLaserScan(PtyScan):
[energy]
default = None
- type = float
- help =
[lam]
default = None
type = float
- help =
+ help =
[z]
default = None
diff --git a/ptypy/experiment/optiklabor.py b/ptypy/experiment/optiklabor.py
index 24564cf2d..5a26b3d1b 100644
--- a/ptypy/experiment/optiklabor.py
+++ b/ptypy/experiment/optiklabor.py
@@ -67,8 +67,6 @@ class FliSpecScanMultexp(PtyScan):
[energy]
default = None
- type = float
- help =
[z]
default = 0.158
@@ -82,13 +80,9 @@ class FliSpecScanMultexp(PtyScan):
[center]
default = 'auto'
- type = str
- help =
[orientation]
default = (True,True,False)
- type = tuple
- help =
[base_path]
default = '/data/CDI/opticslab_sxdm_2013/'
From cc2ea3645b0a3d43a09273223d492f2352141b80 Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Wed, 25 Oct 2017 14:39:53 +0100
Subject: [PATCH 164/363] Unrelated improvement on ML
---
ptypy/engines/ML.py | 80 +++++++++++++++++++++++++--------------------
1 file changed, 44 insertions(+), 36 deletions(-)
diff --git a/ptypy/engines/ML.py b/ptypy/engines/ML.py
index 8e2724cf7..0d4132cc4 100644
--- a/ptypy/engines/ML.py
+++ b/ptypy/engines/ML.py
@@ -69,7 +69,13 @@ class ML(BaseEngine):
default = 0.0
type = float
help = Smoothing preconditioner
- doc = If 0, not used, if > 0 gaussian filter if < 0 Hann window.
+ doc = Sigma for gaussian filter (turned off if 0.)
+
+ [smooth_gradient_decay]
+ default = 0.
+ type = float
+ help = Decay rate for smoothing preconditioner
+ doc = Sigma for gaussian filter will reduce exponentially at this rate
[scale_precond]
default = False
@@ -187,17 +193,19 @@ def engine_iterate(self, num=1):
new_pr_grad.fill(0.)
# Smoothing preconditioner
- # !!! Lets make this consistent with
- # the smoothing already done in DM
- # if self.smooth_gradient:
- # for name, s in new_ob_grad.storages.iteritems():
- # s.data[:] = self.smooth_gradient(s.data)
+ if self.smooth_gradient:
+ self.smooth_gradient.sigma *= (1. - self.p.smooth_gradient_decay)
+ for name, s in new_ob_grad.storages.iteritems():
+ s.data[:] = self.smooth_gradient(s.data)
# probe/object rescaling
if self.p.scale_precond:
- scale_p_o = (self.p.scale_probe_object * Cnorm2(new_ob_grad)
- / Cnorm2(new_pr_grad))
- #/ (Cnorm2(new_pr_grad)) * len(self.ob.views))
+ cn2_new_pr_grad = Cnorm2(new_pr_grad)
+ if cn2_new_pr_grad > 1e-5:
+ scale_p_o = (self.p.scale_probe_object * Cnorm2(new_ob_grad)
+ / Cnorm2(new_pr_grad))
+ else:
+ scale_p_o = self.p.scale_probe_object
if self.scale_p_o is None:
self.scale_p_o = scale_p_o
else:
@@ -235,7 +243,13 @@ def engine_iterate(self, num=1):
"""
# 3. Next conjugate
self.ob_h *= bt / self.tmin
- self.ob_h -= self.ob_grad
+
+ # Smoothing preconditioner
+ if self.smooth_gradient:
+ for name, s in self.ob_h.storages.iteritems():
+ s.data[:] -= self.smooth_gradient(self.ob_grad.storages[name].data)
+ else:
+ self.ob_h -= self.ob_grad
self.pr_h *= bt / self.tmin
self.pr_grad *= self.scale_p_o
self.pr_h -= self.pr_grad
@@ -616,29 +630,22 @@ def __init__(self, sigma):
self.sigma = sigma
def __call__(self, x):
- y = np.empty_like(x)
- sh = x.shape
- xf = x.reshape((-1,) + sh[-2:])
- yf = y.reshape((-1,) + sh[-2:])
- for i in range(len(xf)):
- yf[i] = gaussian_filter(xf[i], self.sigma)
- return y
-
- from scipy.signal import correlate2d
-
- class HannFilt:
- def __call__(self, x):
- y = np.empty_like(x)
- sh = x.shape
- xf = x.reshape((-1,) + sh[-2:])
- yf = y.reshape((-1,) + sh[-2:])
- for i in range(len(xf)):
- yf[i] = correlate2d(xf[i],
- np.array([[.0625, .125, .0625],
- [.125, .25, .125],
- [.0625, .125, .0625]]),
- mode='same')
- return y
+ return u.c_gf(x, [0, self.sigma, self.sigma])
+
+ # from scipy.signal import correlate2d
+ # class HannFilt:
+ # def __call__(self, x):
+ # y = np.empty_like(x)
+ # sh = x.shape
+ # xf = x.reshape((-1,) + sh[-2:])
+ # yf = y.reshape((-1,) + sh[-2:])
+ # for i in range(len(xf)):
+ # yf[i] = correlate2d(xf[i],
+ # np.array([[.0625, .125, .0625],
+ # [.125, .25, .125],
+ # [.0625, .125, .0625]]),
+ # mode='same')
+ # return y
if amplitude > 0.:
logger.debug(
@@ -646,6 +653,7 @@ def __call__(self, x):
return GaussFilt(amplitude)
elif amplitude < 0.:
- logger.debug(
- 'Using a smooth gradient filter (Hann window - only for ML)')
- return HannFilt()
+ raise RuntimeError('Hann filter not implemented (negative smoothing amplitude not supported)')
+ # logger.debug(
+ # 'Using a smooth gradient filter (Hann window - only for ML)')
+ # return HannFilt()
From 6b9a9e2a430bcf09b4d1fcf92df0a30a12cdfe76 Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Tue, 24 Oct 2017 14:04:00 +0100
Subject: [PATCH 165/363] First step in ScanModel overhaul
---
ptypy/core/manager.py | 32 +++++++++++++-------------------
ptypy/core/ptycho.py | 10 +++++++---
2 files changed, 20 insertions(+), 22 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index b45c59d54..c8e27a193 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -493,7 +493,7 @@ class ModelManager(object):
y = 0.0,
)
- def __init__(self, ptycho, pars=None, scans=None, **kwargs):
+ def __init__(self, ptycho, pars):
"""
Parameters
@@ -503,16 +503,11 @@ def __init__(self, ptycho, pars=None, scans=None, **kwargs):
pars : dict or Param
Input parameters (see :py:attr:`DEFAULT`)
- If None uses defaults
-
- scans : dict or Param
- Scan-specific parameters, Values should be dict Param that
- follow the structure of `pars`.
- If None, tries in ptycho.p.scans else becomes empty dict
"""
# Initialize the input parameters
- p = u.Param(self.DEFAULT.copy())
- p.update(pars, in_place_depth=4)
+ # REDESIGN: ModelManager will soon disappear. Here we fix things temporarily.
+ # this means: no DEFAULT for this class.
+ p = pars.copy(depth=99)
self.p = p
self.ptycho = ptycho
@@ -522,23 +517,22 @@ def __init__(self, ptycho, pars=None, scans=None, **kwargs):
if self.ptycho is None:
return
- # store scan-specific parameters
- self.scans_pars = scans if scans is not None else self.ptycho.p.get('scans', u.Param())
-
- self.scans = {}
+ self.scans = OrderedDict()
# Create scan objects from information already available
- for label, scan_pars in self.scans_pars.iteritems():
- self.scans[label] = ScanModel(ptycho=self.ptycho, specific_pars=scan_pars, generic_pars=self.p, label=label)
+ for label, scan_pars in self.p.iteritems():
+ self.scans[label] = ScanModel(ptycho=self.ptycho, pars=scan_pars, label=label)
# Sharing dictionary that stores sharing behavior
self.sharing = {'probe_ids': {}, 'object_ids': {}}
+ # REDESIGN: this will be replaced
# Initialize sharing rules for POD creations
- self.sharing_rules = model.parse_model(p.sharing, self.sharing)
-
- # This start is a little arbitrary
- self.label_idx = len(self.scans)
+ sharing_pars = u.Param({'model_type': 'basic',
+ 'scan_per_probe': 1,
+ 'scan_per_object': 1,
+ 'npts': None})
+ self.sharing_rules = model.parse_model(sharing_pars, self.sharing)
def _to_dict(self):
# Delete the model class. We do not really need to store it.
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index 1bfc6bde7..74f028574 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -10,6 +10,7 @@
import numpy as np
import time
import paths
+from collections import OrderedDict
import os
import sys
@@ -36,7 +37,7 @@ class Ptycho(Base):
If MPI is enabled, this class acts both as a manager (rank = 0) and
a worker (any rank), and most information exists on all processes.
- In principle the only part that is divided between processes is the
+ In its original design, the only part that is divided between processes is the
diffraction data.
By default Ptycho is instantiated once per process, but it can also
@@ -357,9 +358,12 @@ def _configure(self):
if not hasattr(self, 'runtime'):
self.runtime = u.Param() # DEFAULT_runtime.copy()
+ if not hasattr(self, 'scans'):
+ # Create a scans entry if it does not already exist
+ self.scans = OrderedDict()
+
if not hasattr(self, 'engines'):
# Create an engines entry if it does not already exist
- from collections import OrderedDict
self.engines = OrderedDict()
# Generate all the paths
@@ -441,7 +445,7 @@ def init_structures(self):
###################################
# Initialize the model manager
- self.modelm = ModelManager(self, self.p.scan)
+ self.modelm = ModelManager(self, self.p.scans)
def init_data(self, print_stats=True):
"""
From 335afd5be2c265ef2f186e14d9bd8f7662f2a114 Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Tue, 24 Oct 2017 14:38:02 +0100
Subject: [PATCH 166/363] More changes in ScanModel overhaul.
---
ptypy/core/illumination.py | 2 +-
ptypy/core/manager.py | 78 ++++++++++++++++++++-----------
ptypy/core/sample.py | 2 +-
templates/minimal_prep_and_run.py | 2 +-
4 files changed, 53 insertions(+), 31 deletions(-)
diff --git a/ptypy/core/illumination.py b/ptypy/core/illumination.py
index f70a6b727..d5aeb5c2d 100644
--- a/ptypy/core/illumination.py
+++ b/ptypy/core/illumination.py
@@ -21,7 +21,7 @@
TEMPLATES = dict()
from io import StringIO
-@defaults_tree.parse_doc('scan.illumination')
+@defaults_tree.parse_doc('scanmodel.Full.illumination')
class DummyClass(object):
"""
Defaults:
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index c8e27a193..876281ba2 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -175,9 +175,19 @@ class ScanModel(object):
doc = Either "farfield" or "nearfield"
userlevel = 1
+ [illumination]
+ type = Param
+ default =
+ help = Container for probe initialization model
+
+ [sample]
+ type = Param
+ default =
+ help = Container for sample initialization model
+
"""
- def __init__(self, ptycho=None, specific_pars=None, generic_pars=None, label=None):
+ def __init__(self, ptycho=None, pars=None, label=None):
"""
Create ScanModel object.
@@ -195,12 +205,13 @@ def __init__(self, ptycho=None, specific_pars=None, generic_pars=None, label=Non
# Update parameter structure
# Load default parameter structure
p = self.DEFAULT.copy(99)
- p.update(generic_pars, in_place_depth=4)
- p.update(specific_pars, in_place_depth=4)
+ p.update(pars, in_place_depth=4)
self.p = p
self.label = label
self.ptycho = ptycho
+ print(p)
+
# Manage stand-alone cases
if self.ptycho is None:
self.Cdiff = Container(ptycho=self, ID='Cdiff', data_type='real')
@@ -256,31 +267,7 @@ def new_data(self):
# Prepare the scan geometry if not already done.
if not self.geometries:
- self.geometries = []
-
- # extract necessary info from the received data package
- get_keys = ['distance', 'center', 'energy', 'psize', 'shape']
- geo_pars = u.Param({key: dp['common'][key] for key in get_keys})
-
- # add propagation info from this scan model
- geo_pars.propagation = self.p.propagation
-
- # The multispectral case will have multiple geometries
- for ii, fac in enumerate(self.p.coherence.energies):
- geoID = geometry.Geo._PREFIX + '%02d' % ii + label
- g = geometry.Geo(self.ptycho, geoID, pars=geo_pars)
- # now we fix the sample pixel size, This will make the frame size adapt
- g.p.resolution_is_fix = True
- # save old energy value:
- g.p.energy_orig = g.energy
- # change energy
- g.energy *= fac
- # append the geometry
- self.geometries.append(g)
-
- # Store frame shape
- self.shape = np.array(dp['common'].get('shape', self.geometries[0].shape))
- self.psize = self.geometries[0].psize
+ self._initialize_geo(dp['common'])
sh = self.shape
@@ -397,6 +384,41 @@ def new_data(self):
return True
+ def _initialize_geo(self, common):
+ """
+ Initialize the geometry/geometries based on input data package
+ Parameters
+ ----------
+ common: dict
+ metadata part of the data package passed into new_data.
+
+ """
+ # Extract necessary info from the received data package
+ get_keys = ['distance', 'center', 'energy', 'psize', 'shape']
+ geo_pars = u.Param({key: common[key] for key in get_keys})
+
+ # Add propagation info from this scan model
+ geo_pars.propagation = self.p.propagation
+
+ # The multispectral case will have multiple geometries
+ for ii, fac in enumerate(self.p.coherence.energies):
+ geoID = geometry.Geo._PREFIX + '%02d' % ii + self.label
+ g = geometry.Geo(self.ptycho, geoID, pars=geo_pars)
+ # now we fix the sample pixel size. This will make the frame size adapt
+ g.p.resolution_is_fix = True
+ # save old energy value:
+ g.p.energy_orig = g.energy
+ # change energy
+ g.energy *= fac
+ # append the geometry
+ self.geometries.append(g)
+
+ # Store frame shape
+ self.shape = np.array(common.get('shape', self.geometries[0].shape))
+ self.psize = self.geometries[0].psize
+
+ return
+
def _update_stats(self):
"""
(Re)compute the statistics for the data stored in the scan.
diff --git a/ptypy/core/sample.py b/ptypy/core/sample.py
index 1ff8f7a26..8a72bd6e7 100644
--- a/ptypy/core/sample.py
+++ b/ptypy/core/sample.py
@@ -25,7 +25,7 @@
TEMPLATES = dict()
-@defaults_tree.parse_doc('scan.sample')
+@defaults_tree.parse_doc('scanmodel.Full.sample')
class DummyClass(object):
"""
Defaults:
diff --git a/templates/minimal_prep_and_run.py b/templates/minimal_prep_and_run.py
index 747b5805a..07ba95d97 100644
--- a/templates/minimal_prep_and_run.py
+++ b/templates/minimal_prep_and_run.py
@@ -20,7 +20,7 @@
p.scans = u.Param()
p.scans.MF = u.Param()
p.scans.MF.data= u.Param()
-p.scans.MF.data.source = 'test'
+p.scans.MF.data.name = 'MoonFlowerScan'
p.scans.MF.data.shape = 128
p.scans.MF.data.num_frames = 100
p.scans.MF.data.save = None
From 3367b19c333efdddc551669b0a6a0d286150dd55 Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Wed, 25 Oct 2017 09:46:35 +0100
Subject: [PATCH 167/363] Renamed confusing views_in_storage argument
---
ptypy/core/classes.py | 6 +++---
ptypy/core/manager.py | 6 +++---
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/ptypy/core/classes.py b/ptypy/core/classes.py
index 8bd14bd09..59fd504e4 100644
--- a/ptypy/core/classes.py
+++ b/ptypy/core/classes.py
@@ -1543,7 +1543,7 @@ def nbytes(self):
sz += s.data.nbytes
return sz
- def views_in_storage(self, s, active=True):
+ def views_in_storage(self, s, active_only=True):
"""
Return a list of views on :any:`Storage` `s`.
@@ -1551,10 +1551,10 @@ def views_in_storage(self, s, active=True):
----------
s : Storage
The storage to look for.
- active : True or False
+ active_only : True or False
If True (default), return only active views.
"""
- if active:
+ if active_only:
return [v for v in self.original.V.values()
if v.active and (v.storageID == s.ID)]
else:
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 876281ba2..de40d1faf 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -279,8 +279,8 @@ def new_data(self):
old_diff_views = []
old_diff_layers = []
else:
- # ok storage exists already. Views most likely also. Let's do some analysis and deactivate the old views
- old_diff_views = self.Cdiff.views_in_storage(self.diff, active=False)
+ # ok storage exists already. Views most likely also. We store them so we can update their status later.
+ old_diff_views = self.Cdiff.views_in_storage(self.diff, active_only=False)
old_diff_layers = []
for v in old_diff_views:
old_diff_layers.append(v.layer)
@@ -292,7 +292,7 @@ def new_data(self):
old_mask_views = []
old_mask_layers = []
else:
- old_mask_views = self.Cmask.views_in_storage(self.mask, active=False)
+ old_mask_views = self.Cmask.views_in_storage(self.mask, active_only=False)
old_mask_layers = []
for v in old_mask_views:
old_mask_layers.append(v.layer)
From 84362f0be12ab6ba0ec01cac1f724e1abb3e501e Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Wed, 25 Oct 2017 15:12:39 +0100
Subject: [PATCH 168/363] Migrated illumination and sample parameters in
ScanModel
---
ptypy/core/illumination.py | 6 +-
ptypy/core/manager.py | 314 +++++++++++++++++++++++++++++++++++++
ptypy/core/sample.py | 8 +-
3 files changed, 321 insertions(+), 7 deletions(-)
diff --git a/ptypy/core/illumination.py b/ptypy/core/illumination.py
index d5aeb5c2d..89b628101 100644
--- a/ptypy/core/illumination.py
+++ b/ptypy/core/illumination.py
@@ -16,12 +16,12 @@
from ..core import geometry
from ..utils.verbose import logger
from .. import resources
-from ..utils.descriptor import defaults_tree
+from ..utils.descriptor import EvalDescriptor
TEMPLATES = dict()
-from io import StringIO
-@defaults_tree.parse_doc('scanmodel.Full.illumination')
+local_tree = EvalDescriptor('')
+@local_tree.parse_doc('illumination')
class DummyClass(object):
"""
Defaults:
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index de40d1faf..6174b5252 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -180,11 +180,325 @@ class ScanModel(object):
default =
help = Container for probe initialization model
+ [illumination.aperture]
+ type = Param
+ default =
+ help = Beam aperture parameters
+
+ [illumination.aperture.rotate]
+ type = float
+ default = 0.
+ help = Rotate aperture by this value
+ doc =
+
+ [illumination.aperture.central_stop]
+ help = size of central stop as a fraction of aperture.size
+ default = None
+ doc = If not None: places a central beam stop in aperture. The value given here is the fraction of the beam stop compared to `size`
+ lowlim = 0.
+ uplim = 1.
+ userlevel = 1
+ type = float
+
+ [illumination.aperture.diffuser]
+ help = Noise in the transparent part of the aperture
+ default = None
+ doc = Can be either:
+ - ``None`` : no noise
+ - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
+ - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
+ userlevel = 2
+ type = tuple
+
+ [illumination.aperture.edge]
+ help = Edge width of aperture (in pixels!)
+ type = float
+ default = 2.0
+ userlevel = 2
+
+ [illumination.aperture.form]
+ default = circ
+ type = None, str
+ help = One of None, 'rect' or 'circ'
+ doc = One of:
+ - ``None`` : no aperture, this may be useful for nearfield
+ - ``'rect'`` : rectangular aperture
+ - ``'circ'`` : circular aperture
+ choices = None,'rect','circ'
+ userlevel = 2
+
+ [illumination.aperture.offset]
+ default = 0.
+ type = float, tuple
+ help = Offset between center of aperture and optical axes
+ doc = May also be a tuple (vertical,horizontal) for size in case of an asymmetric offset
+ userlevel = 2
+
+ [illumination.aperture.size]
+ default = None
+ type = float
+ help = Aperture width or diameter
+ doc = May also be a tuple *(vertical,horizontal)* in case of an asymmetric aperture
+ lowlim = 0.
+ userlevel = 0
+
+ [illumination.diversity]
+ default = None
+ type = Param, None
+ help = Probe mode(s) diversity parameters
+ doc = Can be ``None`` i.e. no diversity
+ userlevel = 1
+
+ [illumination.diversity.noise]
+ default = None
+ type = tuple
+ help = Noise in the generated modes of the illumination
+ doc = Can be either:
+ - ``None`` : no noise
+ - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
+ - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
+ userlevel = 1
+
+ [illumination.diversity.power]
+ default = 0.1
+ type = tuple, float
+ help = Power of modes relative to main mode (zero-layer)
+ uplim = 1.0
+ lowlim = 0.0
+ userlevel = 1
+
+ [illumination.diversity.shift]
+ default = None
+ type = float
+ help = Lateral shift of modes relative to main mode
+ doc = **[not implemented]**
+ userlevel = 2
+
+ [illumination.model]
+ default = None
+ type = str
+ help = Type of illumination model
+ doc = One of:
+ - ``None`` : model initialization defaults to flat array filled with the specified number of photons
+ - ``'recon'`` : load model from previous reconstruction, see `recon` Parameters
+ - ``'stxm'`` : Estimate model from autocorrelation of mean diffraction data
+ - ** : one of ptypys internal image resource strings
+ - ** : one of the templates in illumination module
+
+ In script, you may pass a numpy.ndarray here directly as the model. It is considered as incoming wavefront and will be propagated according to `propagation` with an optional `aperture` applied before.
+ userlevel = 0
+
+ [illumination.photons]
+ type = int, None
+ default = None
+ help = Number of photons in the incident illumination
+ doc = A value specified here will take precedence over calculated statistics from the loaded data.
+ lowlim = 0
+ userlevel = 2
+
+ [illumination.propagation]
+ type = Param
+ default =
+ help = Parameters for propagation after aperture plane
+ doc = Propagation to focus takes precedence over parallel propagation if `focussed` is not ``None``
+
+ [illumination.propagation.antialiasing]
+ default = 1
+ type = float
+ help = Antialiasing factor
+ doc = Antialiasing factor used when generating the probe. (numbers larger than 2 or 3 are memory hungry)
+ **[Untested]**
+ userlevel = 2
+
+ [illumination.propagation.focussed]
+ default = None
+ type = None, float
+ lowlim =
+ help = Propagation distance from aperture to focus
+ doc = If ``None`` or ``0`` : No focus propagation
+ userlevel = 0
+
+ [illumination.propagation.parallel]
+ default = None
+ type = None, float
+ help = Parallel propagation distance
+ doc = If ``None`` or ``0`` : No parallel propagation
+ userlevel = 0
+
+ [illumination.propagation.spot_size]
+ default = None
+ type = None, float
+ help = Focal spot diameter
+ doc = If not ``None``, this parameter is used to generate the appropriate aperture size instead of :py:data:`size`
+ lowlim = 0
+ userlevel = 1
+
+ [illumination.recon]
+ default =
+ type = Param
+ help = Parameters to load from previous reconstruction
+
+ [illumination.recon.label]
+ default = None
+ type = None, str
+ help = Scan label of diffraction that is to be used for probe estimate
+ doc = If ``None``, own scan label is used
+ userlevel = 1
+
+ [illumination.recon.rfile]
+ default = \*.ptyr
+ type = str
+ help = Path to a ``.ptyr`` compatible file
+ userlevel = 0
+
[sample]
type = Param
default =
help = Container for sample initialization model
+ [sample.model]
+ default = None
+ help = Type of initial object model
+ doc = One of:
+ - ``None`` : model initialization defaults to flat array filled `fill`
+ - ``'recon'`` : load model from STXM analysis of diffraction data
+ - ``'stxm'`` : Estimate model from autocorrelation of mean diffraction data
+ - ** : one of ptypys internal model resource strings
+ - ** : one of the templates in sample module
+ In script, you may pass a numpy.array here directly as the model. This array will be
+ processed according to `process` in order to *simulate* a sample from e.g. a thickness
+ profile.
+ type = str
+ userlevel = 0
+
+ [sample.fill]
+ default = 1
+ help = Default fill value
+ doc =
+ type = float, complex
+ userlevel =
+
+ [sample.recon]
+ default =
+ help = Parameters to load from previous reconstruction
+ doc =
+ type = Param
+ userlevel =
+
+ [sample.recon.rfile]
+ default = \*.ptyr
+ help = Path to a ``.ptyr`` compatible file
+ doc =
+ type = file
+ userlevel = 0
+
+ [sample.stxm]
+ default =
+ help = STXM analysis parameters
+ doc =
+ type = Param
+ userlevel = 1
+
+ [sample.stxm.label]
+ default = None
+ help = Scan label of diffraction that is to be used for probe estimate
+ doc = If ``None``, own scan label is used
+ type = str
+ userlevel = 1
+
+ [sample.process]
+ default = None
+ help = Model processing parameters
+ doc = Can be ``None``, i.e. no processing
+ type = Param
+ userlevel =
+
+ [sample.process.offset]
+ default = (0,0)
+ help = Offset between center of object array and scan pattern
+ doc =
+ type = tuple
+ userlevel = 2
+ lowlim = 0
+
+ [sample.process.zoom]
+ default = None
+ help = Zoom value for object simulation.
+ doc = If ``None``, leave the array untouched. Otherwise the modeled or loaded image will be
+ resized using :py:func:`zoom`.
+ type = tuple
+ userlevel = 2
+ lowlim = 0
+
+ [sample.process.formula]
+ default = None
+ help = Chemical formula
+ doc = A formula compatible with a cxro database query, e.g. ``'Au'`` or ``'NaCl'`` or ``'H2O'``
+ type = str
+ userlevel = 2
+
+ [sample.process.density]
+ default = 1
+ help = Density in [g/ccm]
+ doc = Only used if `formula` is not None
+ type = float
+ userlevel = 2
+
+ [sample.process.thickness]
+ default = 1.00E-06
+ help = Maximum thickness of sample
+ doc = If ``None``, the absolute values of loaded source array will be used
+ type = float
+ userlevel = 2
+
+ [sample.process.ref_index]
+ default = 0.5+0.j
+ help = Assigned refractive index
+ doc = If ``None``, treat source array as projection of refractive index. If a refractive index
+ is provided the array's absolute value will be used to scale the refractive index.
+ type = complex
+ userlevel = 2
+ lowlim = 0
+
+ [sample.process.smoothing]
+ default = 2
+ help = Smoothing scale
+ doc = Smooth the projection with gaussian kernel of width given by `smoothing_mfs`
+ type = int
+ userlevel = 2
+ lowlim = 0
+
+ [sample.diversity]
+ default =
+ help = Probe mode(s) diversity parameters
+ doc = Can be ``None`` i.e. no diversity
+ type = Param
+ userlevel =
+
+ [sample.diversity.noise]
+ default = None
+ help = Noise in the generated modes of the illumination
+ doc = Can be either:
+ - ``None`` : no noise
+ - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
+ - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
+ type = tuple
+ userlevel = 1
+
+ [sample.diversity.power]
+ default = 0.1
+ help = Power of modes relative to main mode (zero-layer)
+ doc =
+ type = tuple, float
+ userlevel = 1
+
+ [sample.diversity.shift]
+ default = None
+ help = Lateral shift of modes relative to main mode
+ doc = **[not implemented]**
+ type = float
+ userlevel = 2
+
"""
def __init__(self, ptycho=None, pars=None, label=None):
diff --git a/ptypy/core/sample.py b/ptypy/core/sample.py
index 8a72bd6e7..abf43b453 100644
--- a/ptypy/core/sample.py
+++ b/ptypy/core/sample.py
@@ -10,22 +10,22 @@
:license: GPLv2, see LICENSE for details.
"""
import numpy as np
-# import os
-# from matplotlib import pyplot as plt
if __name__ == '__main__':
from ptypy import utils as u
from ptypy import resources
+ from ptypy.utils.descriptor import EvalDescriptor
else:
from .. import utils as u
from .. import resources
- from ..utils.descriptor import defaults_tree
+ from ..utils.descriptor import EvalDescriptor
logger = u.verbose.logger
TEMPLATES = dict()
-@defaults_tree.parse_doc('scanmodel.Full.sample')
+local_tree = EvalDescriptor('')
+@local_tree.parse_doc('sample')
class DummyClass(object):
"""
Defaults:
From 4bc122e99a9166b67d2c5479c606c19678ed8921 Mon Sep 17 00:00:00 2001
From: Pierre Thibault
Date: Wed, 25 Oct 2017 15:13:36 +0100
Subject: [PATCH 169/363] Added a working ML template
---
templates/minimal_prep_and_run_ML.py | 52 ++++++++++++++++++++++++++++
1 file changed, 52 insertions(+)
create mode 100644 templates/minimal_prep_and_run_ML.py
diff --git a/templates/minimal_prep_and_run_ML.py b/templates/minimal_prep_and_run_ML.py
new file mode 100644
index 000000000..a4d683019
--- /dev/null
+++ b/templates/minimal_prep_and_run_ML.py
@@ -0,0 +1,52 @@
+"""
+This script is a test for ptychographic reconstruction in the absence
+of actual data. It uses the test Scan class
+`ptypy.core.data.MoonFlowerScan` to provide "data".
+"""
+#import ptypy
+from ptypy.core import Ptycho
+from ptypy import utils as u
+p = u.Param()
+
+# for verbose output
+p.verbose_level = 4
+
+# set home path
+p.io = u.Param()
+p.io.home = "/tmp/ptypy/"
+p.io.autosave = None
+#p.io.autoplot = u.Param()
+#p.io.autoplot.dump = True
+#p.io.autoplot = False
+
+# max 100 frames (128x128px) of diffraction data
+p.scans = u.Param()
+p.scans.MF = u.Param()
+p.scans.MF.data= u.Param()
+p.scans.MF.data.name = 'MoonFlowerScan'
+p.scans.MF.data.shape = 128
+p.scans.MF.data.num_frames = 100
+p.scans.MF.data.save = None
+
+# position distance in fraction of illumination frame
+p.scans.MF.data.density = 0.2
+# total number of photon in empty beam
+p.scans.MF.data.photons = 1e8
+# Gaussian FWHM of possible detector blurring
+p.scans.MF.data.psf = 0.
+
+# attach a reconstruction engine
+p.engines = u.Param()
+p.engines.engine00 = u.Param()
+p.engines.engine00.name = 'ML'
+p.engines.engine00.reg_del2 = True # Whether to use a Gaussian prior (smoothing) regularizer
+p.engines.engine00.reg_del2_amplitude = 1. # Amplitude of the Gaussian prior if used
+p.engines.engine00.scale_precond = True
+#p.engines.engine00.scale_probe_object = 1.
+p.engines.engine00.smooth_gradient = 20.
+p.engines.engine00.smooth_gradient_decay = 1/50.
+p.engines.engine00.floating_intensities = False
+p.engines.engine00.numiter = 300
+
+# prepare and run
+P = Ptycho(p,level=5)
From d0b65f30b6b2d4be312a85d50aa567486496525f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 30 Oct 2017 18:17:58 +0100
Subject: [PATCH 170/363] Gutted ModelManager and moved most of it into
ScanModel
---
ptypy/core/manager.py | 587 +++++++++++++++++++-----------------------
1 file changed, 269 insertions(+), 318 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 6174b5252..db5d06b8d 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -501,6 +501,8 @@ class ScanModel(object):
"""
+ _PREFIX = MODEL_PREFIX
+
def __init__(self, ptycho=None, pars=None, label=None):
"""
Create ScanModel object.
@@ -557,6 +559,17 @@ def __init__(self, ptycho=None, pars=None, label=None):
self.frames_per_call = 100000
+ # Sharing dictionary that stores sharing behavior
+ self.sharing = {'probe_ids': {}, 'object_ids': {}}
+
+ # REDESIGN: this will be replaced
+ # Initialize sharing rules for POD creations
+ sharing_pars = u.Param({'model_type': 'basic',
+ 'scan_per_probe': 1,
+ 'scan_per_object': 1,
+ 'npts': None})
+ self.sharing_rules = model.parse_model(sharing_pars, self.sharing)
+
def new_data(self):
"""
Feed data from ptyscan object.
@@ -696,8 +709,168 @@ def new_data(self):
self._update_stats()
+ new_pods, new_probe_ids, new_object_ids = self._create_pods()
+ logger.info('Process %d created %d new PODs, %d new probes and %d new objects.' % (
+ parallel.rank, len(new_pods), len(new_probe_ids), len(new_object_ids)), extra={'allprocesses': True})
+
+ # Adjust storages
+ self.ptycho.probe.reformat(True)
+ self.ptycho.obj.reformat(True)
+ self.ptycho.exit.reformat()
+
+ self._initialize_probe(new_probe_ids)
+ self._initialize_object(new_object_ids)
+ self._initialize_exit(new_pods)
+
return True
+ def _create_pods(self):
+ """
+ Create all pods associated with the scan labels in 'scans'.
+
+ Return the list of new pods, probe and object ids (to allow for
+ initialization).
+ """
+ logger.info('\n' + headerline('Creating PODS', 'l'))
+ new_pods = []
+ new_probe_ids = {}
+ new_object_ids = {}
+
+ label = self.label
+
+ # Get a list of probe and object that already exist
+ existing_probes = self.ptycho.probe.storages.keys()
+ # SC: delete? self.sharing_rules.probe_ids.keys()
+ existing_objects = self.ptycho.obj.storages.keys()
+ # SC: delete? self.sharing_rules.object_ids.keys()
+ logger.info('Found these probes : ' + ', '.join(existing_probes))
+ logger.info('Found these objects: ' + ', '.join(existing_objects))
+
+ scan = self
+
+ positions = scan.new_positions
+ di_views = scan.new_diff_views
+ ma_views = scan.new_mask_views
+
+ # Compute sharing rules
+ share = scan.p.sharing
+ alt_obj = share.object_share_with if share is not None else None
+ alt_pr = share.probe_share_with if share is not None else None
+
+ obj_label = label if alt_obj is None else alt_obj
+ pr_label = label if alt_pr is None else alt_pr
+
+ # Loop through diffraction patterns
+ for i in range(len(di_views)):
+ dv, mv = di_views.pop(0), ma_views.pop(0)
+
+ index = dv.layer
+
+ # Object and probe position
+ pos_pr = u.expect2(0.0)
+ pos_obj = positions[i] if 'empty' not in scan.p.tags else 0.0
+
+ t, object_id = self.sharing_rules(obj_label, index)
+ probe_id, t = self.sharing_rules(pr_label, index)
+
+ # For multiwavelength reconstructions: loop here over
+ # geometries, and modify probe_id and object_id.
+ for ii, geometry in enumerate(scan.geometries):
+ # Make new IDs and keep them in record
+ # sharing_rules is not aware of IDs with suffix
+
+ pdis = scan.p.coherence.probe_dispersion
+
+ if pdis is None or str(pdis) == 'achromatic':
+ gind = 0
+ else:
+ gind = ii
+
+ probe_id_suf = probe_id + 'G%02d' % gind
+ if (probe_id_suf not in new_probe_ids.keys()
+ and probe_id_suf not in existing_probes):
+ new_probe_ids[probe_id_suf] = (
+ self.sharing_rules.probe_ids[probe_id])
+
+ odis = scan.p.coherence.object_dispersion
+
+ if odis is None or str(odis) == 'achromatic':
+ gind = 0
+ else:
+ gind = ii
+
+ object_id_suf = object_id + 'G%02d' % gind
+ if (object_id_suf not in new_object_ids.keys()
+ and object_id_suf not in existing_objects):
+ new_object_ids[object_id_suf] = (
+ self.sharing_rules.object_ids[object_id])
+
+ # Loop through modes
+ for pm in range(scan.p.coherence.num_probe_modes):
+ for om in range(scan.p.coherence.num_object_modes):
+ # Make a unique layer index for exit view
+ # The actual number does not matter due to the
+ # layermap access
+ exit_index = index * 10000 + pm * 100 + om
+
+ # Create views
+ # Please note that mostly references are passed,
+ # i.e. the views do mostly not own the accessrule
+ # contents
+ pv = View(container=self.ptycho.probe,
+ accessrule={'shape': geometry.shape,
+ 'psize': geometry.resolution,
+ 'coord': pos_pr,
+ 'storageID': probe_id_suf,
+ 'layer': pm,
+ 'active': True})
+
+ ov = View(container=self.ptycho.obj,
+ accessrule={'shape': geometry.shape,
+ 'psize': geometry.resolution,
+ 'coord': pos_obj,
+ 'storageID': object_id_suf,
+ 'layer': om,
+ 'active': True})
+
+ ev = View(container=self.ptycho.exit,
+ accessrule={'shape': geometry.shape,
+ 'psize': geometry.resolution,
+ 'coord': pos_pr,
+ 'storageID': (probe_id +
+ object_id[1:] +
+ 'G%02d' % ii),
+ 'layer': exit_index,
+ 'active': dv.active})
+
+ views = {'probe': pv,
+ 'obj': ov,
+ 'diff': dv,
+ 'mask': mv,
+ 'exit': ev}
+
+ pod = POD(ptycho=self.ptycho,
+ ID=None,
+ views=views,
+ geometry=geometry) # , meta=meta)
+
+ new_pods.append(pod)
+
+ # If Empty Probe sharing is enabled,
+ # adjust POD accordingly.
+ if share is not None:
+ pod.probe_weight = share.probe_share_power
+ pod.object_weight = share.object_share_power
+ if share.EP_sharing:
+ pod.is_empty = True
+ else:
+ pod.is_empty = False
+ else:
+ pod.probe_weight = 1
+ pod.object_weight = 1
+
+ return new_pods, new_probe_ids, new_object_ids
+
def _initialize_geo(self, common):
"""
Initialize the geometry/geometries based on input data package
@@ -733,6 +906,95 @@ def _initialize_geo(self, common):
return
+ def _initialize_probe(self, probe_ids):
+ """
+ Initialize the probe storages referred to by the probe_ids
+ """
+ logger.info('\n'+headerline('Probe initialization', 'l'))
+
+ # Loop through probe ids
+ for pid, labels in probe_ids.items():
+
+ illu_pars = self.p.illumination
+
+ # pick storage from container
+ s = self.ptycho.probe.S.get(pid)
+
+ if s is None:
+ continue
+ else:
+ logger.info('Initializing probe storage %s using scan %s.'
+ % (pid, self.label))
+
+
+ # if photon count is None, assign a number from the stats.
+ phot = illu_pars.get('photons')
+ phot_max = self.diff.max_power
+
+ if phot is None:
+ logger.info('Found no photon count for probe in parameters.\nUsing photon count %.2e from photon report' % phot_max)
+ illu_pars['photons'] = phot_max
+ elif np.abs(np.log10(phot)-np.log10(phot_max)) > 1:
+ logger.warn('Photon count from input parameters (%.2e) differs from statistics (%.2e) by more than a magnitude' % (phot, phot_max))
+
+ illumination.init_storage(s, illu_pars)
+
+ s.reformat() # Maybe not needed
+ s.model_initialized = True
+
+ def _initialize_object(self, object_ids):
+ """
+ Initializes the probe storages referred to by the object_ids.
+ """
+
+ logger.info('\n'+headerline('Object initialization', 'l'))
+
+ # Loop through object IDs
+ for oid, labels in object_ids.items():
+
+ sample_pars = self.p.sample
+
+ # pick storage from container
+ s = self.ptycho.obj.S.get(oid)
+
+ if s is None or s.model_initialized:
+ continue
+ else:
+ logger.info('Initializing object storage %s using scan %s.'
+ % (oid, self.label))
+
+ sample_pars = self.p.sample
+
+ if type(sample_pars) is u.Param:
+ # Deep copy
+ sample_pars = sample_pars.copy(depth=10)
+
+ # Quickfix spectral contribution.
+ if (self.p.coherence.object_dispersion
+ not in [None, 'achromatic']
+ and self.p.coherence.probe_dispersion
+ in [None, 'achromatic']):
+ logger.info(
+ 'Applying spectral distribution input to object fill.')
+ sample_pars['fill'] *= s.views[0].pod.geometry.p.spectral
+
+
+ sample.init_storage(s, sample_pars)
+ s.reformat() # maybe not needed
+
+ s.model_initialized = True
+
+ @staticmethod
+ def _initialize_exit(pods):
+ """
+ Initializes exit waves using the pods.
+ """
+ logger.info('\n' + headerline('Creating exit waves', 'l'))
+ for pod in pods:
+ if not pod.active:
+ continue
+ pod.exit = pod.probe * pod.object
+
def _update_stats(self):
"""
(Re)compute the statistics for the data stored in the scan.
@@ -796,38 +1058,10 @@ class ScanModel2(object):
class ModelManager(object):
"""
- Manages ptypy objects creation and update.
-
- The main task of ModelManager is to follow the rules for a given
- reconstruction model and create:
-
- - the probe, object, exit, diff and mask containers
- - the views
- - the PODs
-
- A ptychographic problem is defined by the combination of one or
- multiple scans. ModelManager uses encapsulate
- scan-specific elements in .scans und .scans_pars
-
- Note
- ----
- This class is densely connected to :any:`Ptycho` the separation
- in two classes is more history than reason and these classes may get
- merged in future releases
+ Thin wrapper class which now just interfaces Ptycho with ScanModel.
+ This should probably all be done directly in Ptycho and would take
+ like 8 lines of code.
"""
- DEFAULT = ScanModel.DEFAULT
- """ Default scan parameters. See :py:data:`.scan`
- and a short listing below """
-
- _PREFIX = MODEL_PREFIX
-
- _BASE_MODEL = OrderedDict(
- index = 0,
- energy = 0.0,
- pmode = 0,
- x = 0.0,
- y = 0.0,
- )
def __init__(self, ptycho, pars):
"""
@@ -838,38 +1072,16 @@ def __init__(self, ptycho, pars):
The parent Ptycho object
pars : dict or Param
- Input parameters (see :py:attr:`DEFAULT`)
+ The .scans tree of the :any:`Ptycho` parameters.
"""
- # Initialize the input parameters
- # REDESIGN: ModelManager will soon disappear. Here we fix things temporarily.
- # this means: no DEFAULT for this class.
- p = pars.copy(depth=99)
- self.p = p
-
+ assert ptycho is not None
self.ptycho = ptycho
- # abort if ptycho is None:
- # FIXME: PT Is this the expected behavior?
- if self.ptycho is None:
- return
-
+ # Create scan model objects
self.scans = OrderedDict()
-
- # Create scan objects from information already available
- for label, scan_pars in self.p.iteritems():
+ for label, scan_pars in pars.iteritems():
self.scans[label] = ScanModel(ptycho=self.ptycho, pars=scan_pars, label=label)
- # Sharing dictionary that stores sharing behavior
- self.sharing = {'probe_ids': {}, 'object_ids': {}}
-
- # REDESIGN: this will be replaced
- # Initialize sharing rules for POD creations
- sharing_pars = u.Param({'model_type': 'basic',
- 'scan_per_probe': 1,
- 'scan_per_object': 1,
- 'npts': None})
- self.sharing_rules = model.parse_model(sharing_pars, self.sharing)
-
def _to_dict(self):
# Delete the model class. We do not really need to store it.
del self.sharing_rules
@@ -899,268 +1111,7 @@ def new_data(self):
return 'No data'
logger.info('Processing new data.')
- used_scans = []
# Attempt to get new data
for label, scan in self.scans.iteritems():
new_data = scan.new_data()
- if new_data:
- used_scans.append(label)
-
- if not used_scans:
- return None
-
- # Create PODs
- new_pods, new_probe_ids, new_object_ids = self._create_pods(used_scans)
- logger.info('Process %d created %d new PODs, %d new probes and %d new objects.' % (
- parallel.rank, len(new_pods), len(new_probe_ids), len(new_object_ids)), extra={'allprocesses': True})
-
- # Adjust storages
- self.ptycho.probe.reformat(True)
- self.ptycho.obj.reformat(True)
- self.ptycho.exit.reformat()
-
- self._initialize_probe(new_probe_ids)
- self._initialize_object(new_object_ids)
- self._initialize_exit(new_pods)
-
- def _initialize_probe(self, probe_ids):
- """
- Initialize the probe storages referred to by the probe_ids
- """
- logger.info('\n'+headerline('Probe initialization', 'l'))
-
- # Loop through probe ids
- for pid, labels in probe_ids.items():
-
- # Pick first scan - this should not matter.
- scan = self.scans[labels[0]]
- illu_pars = scan.p.illumination
-
- # pick storage from container
- s = self.ptycho.probe.S.get(pid)
-
- if s is None:
- continue
- else:
- logger.info('Initializing probe storage %s using scan %s.'
- % (pid, scan.label))
-
-
- # if photon count is None, assign a number from the stats.
- phot = illu_pars.get('photons')
- phot_max = scan.diff.max_power
-
- if phot is None:
- logger.info('Found no photon count for probe in parameters.\nUsing photon count %.2e from photon report' % phot_max)
- illu_pars['photons'] = phot_max
- elif np.abs(np.log10(phot)-np.log10(phot_max)) > 1:
- logger.warn('Photon count from input parameters (%.2e) differs from statistics (%.2e) by more than a magnitude' % (phot, phot_max))
-
- illumination.init_storage(s, illu_pars)
-
- s.reformat() # Maybe not needed
- s.model_initialized = True
-
- def _initialize_object(self, object_ids):
- """
- Initializes the probe storages referred to by the object_ids.
- """
-
- logger.info('\n'+headerline('Object initialization', 'l'))
-
- # Loop through object IDs
- for oid, labels in object_ids.items():
-
- # Pick first scan - this should not matter.
- scan = self.scans[labels[0]]
- sample_pars = scan.p.sample
-
- # pick storage from container
- s = self.ptycho.obj.S.get(oid)
-
- if s is None or s.model_initialized:
- continue
- else:
- logger.info('Initializing object storage %s using scan %s.'
- % (oid, scan.label))
-
- sample_pars = scan.p.sample
-
- if type(sample_pars) is u.Param:
- # Deep copy
- sample_pars = sample_pars.copy(depth=10)
-
- # Quickfix spectral contribution.
- if (scan.p.coherence.object_dispersion
- not in [None, 'achromatic']
- and scan.p.coherence.probe_dispersion
- in [None, 'achromatic']):
- logger.info(
- 'Applying spectral distribution input to object fill.')
- sample_pars['fill'] *= s.views[0].pod.geometry.p.spectral
-
-
- sample.init_storage(s, sample_pars)
- s.reformat() # maybe not needed
-
- s.model_initialized = True
-
- @staticmethod
- def _initialize_exit(pods):
- """
- Initializes exit waves using the pods.
- """
- logger.info('\n' + headerline('Creating exit waves', 'l'))
- for pod in pods:
- if not pod.active:
- continue
- pod.exit = pod.probe * pod.object
-
- def _create_pods(self, new_scans):
- """
- Create all pods associated with the scan labels in 'scans'.
-
- Return the list of new pods, probe and object ids (to allow for
- initialization).
- """
- logger.info('\n' + headerline('Creating PODS', 'l'))
- new_pods = []
- new_probe_ids = {}
- new_object_ids = {}
-
- # Get a list of probe and object that already exist
- existing_probes = self.ptycho.probe.storages.keys()
- # SC: delete? self.sharing_rules.probe_ids.keys()
- existing_objects = self.ptycho.obj.storages.keys()
- # SC: delete? self.sharing_rules.object_ids.keys()
- logger.info('Found these probes : ' + ', '.join(existing_probes))
- logger.info('Found these objects: ' + ', '.join(existing_objects))
-
- # Loop through scans
- for label in new_scans:
- scan = self.scans[label]
-
- positions = scan.new_positions
- di_views = scan.new_diff_views
- ma_views = scan.new_mask_views
-
- # Compute sharing rules
- share = scan.p.sharing
- alt_obj = share.object_share_with if share is not None else None
- alt_pr = share.probe_share_with if share is not None else None
-
- obj_label = label if alt_obj is None else alt_obj
- pr_label = label if alt_pr is None else alt_pr
-
- # Loop through diffraction patterns
- for i in range(len(di_views)):
- dv, mv = di_views.pop(0), ma_views.pop(0)
-
- index = dv.layer
-
- # Object and probe position
- pos_pr = u.expect2(0.0)
- pos_obj = positions[i] if 'empty' not in scan.p.tags else 0.0
-
- t, object_id = self.sharing_rules(obj_label, index)
- probe_id, t = self.sharing_rules(pr_label, index)
-
- # For multiwavelength reconstructions: loop here over
- # geometries, and modify probe_id and object_id.
- for ii, geometry in enumerate(scan.geometries):
- # Make new IDs and keep them in record
- # sharing_rules is not aware of IDs with suffix
-
- pdis = scan.p.coherence.probe_dispersion
-
- if pdis is None or str(pdis) == 'achromatic':
- gind = 0
- else:
- gind = ii
-
- probe_id_suf = probe_id + 'G%02d' % gind
- if (probe_id_suf not in new_probe_ids.keys()
- and probe_id_suf not in existing_probes):
- new_probe_ids[probe_id_suf] = (
- self.sharing_rules.probe_ids[probe_id])
-
- odis = scan.p.coherence.object_dispersion
-
- if odis is None or str(odis) == 'achromatic':
- gind = 0
- else:
- gind = ii
-
- object_id_suf = object_id + 'G%02d' % gind
- if (object_id_suf not in new_object_ids.keys()
- and object_id_suf not in existing_objects):
- new_object_ids[object_id_suf] = (
- self.sharing_rules.object_ids[object_id])
-
- # Loop through modes
- for pm in range(scan.p.coherence.num_probe_modes):
- for om in range(scan.p.coherence.num_object_modes):
- # Make a unique layer index for exit view
- # The actual number does not matter due to the
- # layermap access
- exit_index = index * 10000 + pm * 100 + om
-
- # Create views
- # Please note that mostly references are passed,
- # i.e. the views do mostly not own the accessrule
- # contents
- pv = View(container=self.ptycho.probe,
- accessrule={'shape': geometry.shape,
- 'psize': geometry.resolution,
- 'coord': pos_pr,
- 'storageID': probe_id_suf,
- 'layer': pm,
- 'active': True})
-
- ov = View(container=self.ptycho.obj,
- accessrule={'shape': geometry.shape,
- 'psize': geometry.resolution,
- 'coord': pos_obj,
- 'storageID': object_id_suf,
- 'layer': om,
- 'active': True})
-
- ev = View(container=self.ptycho.exit,
- accessrule={'shape': geometry.shape,
- 'psize': geometry.resolution,
- 'coord': pos_pr,
- 'storageID': (probe_id +
- object_id[1:] +
- 'G%02d' % ii),
- 'layer': exit_index,
- 'active': dv.active})
-
- views = {'probe': pv,
- 'obj': ov,
- 'diff': dv,
- 'mask': mv,
- 'exit': ev}
-
- pod = POD(ptycho=self.ptycho,
- ID=None,
- views=views,
- geometry=geometry) # , meta=meta)
-
- new_pods.append(pod)
-
- # If Empty Probe sharing is enabled,
- # adjust POD accordingly.
- if share is not None:
- pod.probe_weight = share.probe_share_power
- pod.object_weight = share.object_share_power
- if share.EP_sharing:
- pod.is_empty = True
- else:
- pod.is_empty = False
- else:
- pod.probe_weight = 1
- pod.object_weight = 1
-
-
- return new_pods, new_probe_ids, new_object_ids
From e3bdfee257b8ca2c7a011c31e876999b991849fc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 30 Oct 2017 18:18:55 +0100
Subject: [PATCH 171/363] Removed deprecated method in Ptycho
---
ptypy/core/ptycho.py | 80 --------------------------------------------
1 file changed, 80 deletions(-)
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index 74f028574..225007765 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -713,86 +713,6 @@ def finalize(self):
except BaseException:
pass
- def _run(self, run_label=None):
- """
- *deprecated*
- Start the reconstruction. Former method
- """
- # Time the initialization
- if self.runtime.get('start') is None:
- self.runtime.start = time.asctime()
-
- # Check if there is already a runtime info collector
- if self.runtime.get('iter_info') is None:
- self.runtime.iter_info = []
-
- # Note when the last autosave was carried out
- if self.runtime.get('last_save') is None:
- self.runtime.last_save = 0
-
- # Maybe not needed
- if self.runtime.get('last_plot') is None:
- self.runtime.last_plot = 0
-
- # Run all engines sequentially
- for run_label in self.run_labels:
-
- # Set a new engine
- engine = self.engines[run_label]
- # self.current_engine = engine
-
- # Prepare the engine
- engine.initialize()
-
- # Start the iteration loop
- while not engine.finished:
- # Check for client requests
- if parallel.master and self.interactor is not None:
- self.interactor.process_requests()
-
- parallel.barrier()
-
- # Check for new data
- self.modelm.new_data()
-
- # Last minute preparation before a contiguous block of
- # iterations
- engine.prepare()
-
- if self.p.autosave is not None and self.p.autosave.interval > 1:
- if engine.curiter % self.p.autosave.interval == 0:
- auto = self.paths.auto_file(self.runtime)
- logger.info(headerline('Autosaving'), 'l')
- self.save_run(auto, 'dump')
- self.runtime.last_save = engine.curiter
- logger.info(headerline())
-
- # One iteration
- engine.iterate()
-
- # Display runtime information and do saving
- if parallel.master:
- info = self.runtime.iter_info[-1]
- # Calculate error:
- err = np.array(info['error'].values()).mean(0)
- logger.info('Iteration #%(iteration)d of %(engine)s :: '
- 'Time %(duration).2f' % info)
- logger.info('Errors :: Fourier %.2e, Photons %.2e, '
- 'Exit %.2e' % tuple(err))
-
- parallel.barrier()
- # Done. Let the engine finish up
- engine.finalize()
-
- # Save
- # Deactivated for now as something fishy happens through MPI
- self.save_run()
-
- # Clean up - if needed.
-
- # Time the initialization
- self.runtime.stop = time.asctime()
-
@classmethod
def _from_dict(cls, dct):
# This method will be called from save_load on linking
From f4f51b2ee5222c44536acb77f44dd2b9321b43c2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 30 Oct 2017 18:20:06 +0100
Subject: [PATCH 172/363] More cleanup in Ptycho
---
ptypy/core/ptycho.py | 34 ----------------------------------
1 file changed, 34 deletions(-)
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index 225007765..3c5f10bca 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -463,40 +463,6 @@ def init_data(self, print_stats=True):
if print_stats:
self.print_stats()
- # Create plotting instance (maybe)
-
- def _init_engines(self):
- """
- * deprecated*
- Initialize engines from parameters. Sets :py:attr:`engines`
- """
- # Store the engines in a dict
- self.engines = {}
-
- # Store the run labels in a list to ensure precedence is preserved.
- self.run_labels = []
-
- # Loop through p.engines sub-dictionaries
- for run_label, pars in self.p.engines.iteritems():
- # Copy common parameters
- engine_pars = self.p.engine.common.copy()
-
- # Identify engine by name
- engine_class = engines.by_name(pars.name)
-
- # Update engine type specific parameters
- engine_pars.update(self.p.engine[pars.name])
-
- # Update engine instance specific parameters
- engine_pars.update(pars)
-
- # Create instance
- engine = engine_class(self, engine_pars)
-
- # Store info
- self.engines[run_label] = engine
- self.run_labels.append(run_label)
-
def init_engine(self, label=None, epars=None):
"""
Called on __init__ if ``level >= 4``.
From e4a48708ed515df6202732d7911e8ddc387d6a04 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 31 Oct 2017 09:00:01 +0100
Subject: [PATCH 173/363] Broke out container creation
---
ptypy/core/manager.py | 59 ++++++++++++++++++++++++++++---------------
ptypy/core/ptycho.py | 15 +++--------
2 files changed, 41 insertions(+), 33 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index db5d06b8d..00e234812 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -509,12 +509,14 @@ def __init__(self, ptycho=None, pars=None, label=None):
Parameters
----------
- specific_pars : dict or Param
- Input parameters specific to the given scan.
+ pars : dict or Param
+ Input parameter tree.
+
+ ptycho : Ptycho instance
+ Ptycho instance to which this scan belongs
- generic_pars : dict or Param
- Input parameters (see :py:attr:`DEFAULT`)
- If None uses defaults
+ label : str
+ Unique label
"""
from .. import experiment
@@ -526,18 +528,6 @@ def __init__(self, ptycho=None, pars=None, label=None):
self.label = label
self.ptycho = ptycho
- print(p)
-
- # Manage stand-alone cases
- if self.ptycho is None:
- self.Cdiff = Container(ptycho=self, ID='Cdiff', data_type='real')
- self.Cmask = Container(ptycho=self, ID='Cmask', data_type='bool')
- self.CType = CType
- self.FType = FType
- else:
- self.Cdiff = ptycho.diff
- self.Cmask = ptycho.mask
-
# Create Associated PtyScan object
self.ptyscan = experiment.makePtyScan(self.p.data)
@@ -555,8 +545,11 @@ def __init__(self, ptycho=None, pars=None, label=None):
self.shape = None
self.psize = None
+ # Object flags and constants
+ self.containers_initialized = False
self.data_available = True
-
+ self.CType = CType
+ self.FType = FType
self.frames_per_call = 100000
# Sharing dictionary that stores sharing behavior
@@ -596,12 +589,17 @@ def new_data(self):
if not self.geometries:
self._initialize_geo(dp['common'])
- sh = self.shape
+ # Create containers if not already done
+ if not self.containers_initialized:
+ self._initialize_containers()
+
+ # Generalized shape which works for 2d and 3d cases
+ sh = (1,) + tuple(self.shape)
# Storage generation if not already existing
if self.diff is None:
# This scan is brand new so we create storages for it
- self.diff = self.Cdiff.new_storage(shape=(1, sh[-2], sh[-1]), psize=self.psize, padonly=True,
+ self.diff = self.Cdiff.new_storage(shape=sh, psize=self.psize, padonly=True,
layermap=None)
old_diff_views = []
old_diff_layers = []
@@ -614,7 +612,7 @@ def new_data(self):
# Same for mask
if self.mask is None:
- self.mask = self.Cmask.new_storage(shape=(1, sh[-2], sh[-1]), psize=self.psize, padonly=True,
+ self.mask = self.Cmask.new_storage(shape=sh, psize=self.psize, padonly=True,
layermap=None)
old_mask_views = []
old_mask_layers = []
@@ -724,6 +722,25 @@ def new_data(self):
return True
+ def _initialize_containers(self):
+ """
+ Initialize containers appropriate for this model.
+ """
+ if self.ptycho is None:
+ # Stand-alone use
+ self.Cdiff = Container(ptycho=self, ID='Cdiff', data_type='real')
+ self.Cmask = Container(ptycho=self, ID='Cmask', data_type='bool')
+ else:
+ # Use with a Ptycho instance
+ self.ptycho.probe = Container(ptycho=self.ptycho, ID='Cprobe', data_type='complex')
+ self.ptycho.obj = Container(ptycho=self.ptycho, ID='Cobj', data_type='complex')
+ self.ptycho.exit = Container(ptycho=self.ptycho, ID='Cexit', data_type='complex')
+ self.ptycho.diff = Container(ptycho=self.ptycho, ID='Cdiff', data_type='real')
+ self.ptycho.mask = Container(ptycho=self.ptycho, ID='Cmask', data_type='bool')
+ self.Cdiff = self.ptycho.diff
+ self.Cmask = self.ptycho.mask
+ self.containers_initialized = True
+
def _create_pods(self):
"""
Create all pods associated with the scan labels in 'scans'.
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index 3c5f10bca..527ace9d1 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -433,18 +433,9 @@ def init_structures(self):
the exit waves, :py:attr:`diff` for diffraction data and
:py:attr:`mask` for detectors masks
"""
- # Initialize the reconstruction containers
- self.probe = Container(ptycho=self, ID='Cprobe', data_type='complex')
- self.obj = Container(ptycho=self, ID='Cobj', data_type='complex')
- self.exit = Container(ptycho=self, ID='Cexit', data_type='complex')
- self.diff = Container(ptycho=self, ID='Cdiff', data_type='real')
- self.mask = Container(ptycho=self, ID='Cmask', data_type='bool')
-
- ###################################
- # Initialize data sources load data
- ###################################
-
- # Initialize the model manager
+
+ # Initialize the model manager. This also initializes the
+ # containers.
self.modelm = ModelManager(self, self.p.scans)
def init_data(self, print_stats=True):
From ae371ee132db29b56b3c5c3d89dbf08279f5cc92 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 31 Oct 2017 09:15:05 +0100
Subject: [PATCH 174/363] Just clarified two comments
---
ptypy/core/manager.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 00e234812..91c61ab2e 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -707,6 +707,8 @@ def new_data(self):
self._update_stats()
+ # Create new views on object, probe, and exit wave, and connect
+ # these through new pods.
new_pods, new_probe_ids, new_object_ids = self._create_pods()
logger.info('Process %d created %d new PODs, %d new probes and %d new objects.' % (
parallel.rank, len(new_pods), len(new_probe_ids), len(new_object_ids)), extra={'allprocesses': True})
@@ -743,7 +745,9 @@ def _initialize_containers(self):
def _create_pods(self):
"""
- Create all pods associated with the scan labels in 'scans'.
+ Create all new pods as specified in the new_positions,
+ new_diff_views and new_mask_views object attributes. Also create
+ all necessary views on object, probe, and exit wave.
Return the list of new pods, probe and object ids (to allow for
initialization).
From 308f760150af2605401eb37954c91832e7769d82 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 31 Oct 2017 10:31:51 +0100
Subject: [PATCH 175/363] Broke out a scan model base class
---
ptypy/core/manager.py | 895 ++++++++++++++++++-----------------
ptypy/simulations/simscan.py | 4 +-
2 files changed, 465 insertions(+), 434 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 91c61ab2e..4bc3f1a99 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -35,12 +35,361 @@
FType = np.float64
CType = np.complex128
-__all__ = ['ModelManager', 'ScanModel']
+__all__ = ['ModelManager', 'BaseModel', 'Full', 'Vanilla']
+
+
+@defaults_tree.parse_doc('scanmodel.BaseModel')
+class BaseModel(object):
+ """
+ Abstract base class for models. Override at least these methods:
+ _create_pods(self)
+ _initialize_geo(self, common)
+ _initialize_probe(self, probe_ids)
+ _initialize_object(self, object_ids)
+
+ Defaults:
+
+ [tags]
+ default = ['dummy']
+ help = Comma separated string tags describing the data input
+ doc = [deprecated?]
+ type = list
+ userlevel = 2
+
+ [propagation]
+ type = str
+ default = farfield
+ help = Propagation type
+ doc = Either "farfield" or "nearfield"
+ userlevel = 1
+
+ [illumination]
+ type = Param
+ default =
+ help = Container for probe initialization model
+
+ [sample]
+ type = Param
+ default =
+ help = Container for sample initialization model
+
+ """
+ def __init__(self, ptycho=None, pars=None, label=None):
+ """
+ Create ScanModel object.
+
+ Parameters
+ ----------
+ pars : dict or Param
+ Input parameter tree.
+
+ ptycho : Ptycho instance
+ Ptycho instance to which this scan belongs
+
+ label : str
+ Unique label
+ """
+ from .. import experiment
+
+ # Update parameter structure
+ # Load default parameter structure
+ p = self.DEFAULT.copy(99)
+ p.update(pars, in_place_depth=4)
+ self.p = p
+ self.label = label
+ self.ptycho = ptycho
+
+ # Create Associated PtyScan object
+ self.ptyscan = experiment.makePtyScan(self.p.data)
+
+ # Initialize instance attributes
+ self.mask = None
+ self.diff = None
+ self.positions = []
+ self.mask_views = []
+ self.diff_views = []
+ self.new_positions = None
+ self.new_diff_views = None
+ self.new_mask_views = None
+
+ self.geometries = []
+ self.shape = None
+ self.psize = None
+
+ # Object flags and constants
+ self.containers_initialized = False
+ self.data_available = True
+ self.CType = CType
+ self.FType = FType
+ self.frames_per_call = 100000
+
+ def new_data(self):
+ """
+ Feed data from ptyscan object.
+ :return: None if no data is available, True otherwise.
+ """
+
+ # Initialize if that has not been done yet
+ if not self.ptyscan.is_initialized:
+ self.ptyscan.initialize()
+
+ # Get data
+ dp = self.ptyscan.auto(self.frames_per_call)
+
+ self.data_available = (dp != data.EOS)
+ logger.debug(u.verbose.report(dp))
+
+ if dp == data.WAIT or not self.data_available:
+ return None
+
+ label = self.label
+ logger.info('Importing data from scan %s.' % label)
+
+ # Prepare the scan geometry if not already done.
+ if not self.geometries:
+ self._initialize_geo(dp['common'])
+
+ # Create containers if not already done
+ if not self.containers_initialized:
+ self._initialize_containers()
+
+ # Generalized shape which works for 2d and 3d cases
+ sh = (1,) + tuple(self.shape)
+
+ # Storage generation if not already existing
+ if self.diff is None:
+ # This scan is brand new so we create storages for it
+ self.diff = self.Cdiff.new_storage(shape=sh, psize=self.psize, padonly=True,
+ layermap=None)
+ old_diff_views = []
+ old_diff_layers = []
+ else:
+ # ok storage exists already. Views most likely also. We store them so we can update their status later.
+ old_diff_views = self.Cdiff.views_in_storage(self.diff, active_only=False)
+ old_diff_layers = []
+ for v in old_diff_views:
+ old_diff_layers.append(v.layer)
+
+ # Same for mask
+ if self.mask is None:
+ self.mask = self.Cmask.new_storage(shape=sh, psize=self.psize, padonly=True,
+ layermap=None)
+ old_mask_views = []
+ old_mask_layers = []
+ else:
+ old_mask_views = self.Cmask.views_in_storage(self.mask, active_only=False)
+ old_mask_layers = []
+ for v in old_mask_views:
+ old_mask_layers.append(v.layer)
+
+ # Prepare for View generation
+ AR_diff_base = DEFAULT_ACCESSRULE.copy()
+ AR_diff_base.shape = self.shape
+ AR_diff_base.coord = 0.0
+ AR_diff_base.psize = self.psize
+ AR_mask_base = AR_diff_base.copy()
+ AR_diff_base.storageID = self.diff.ID
+ AR_mask_base.storageID = self.mask.ID
+
+ diff_views = []
+ mask_views = []
+ positions = []
+
+ # First pass: create or update views and reformat corresponding storage
+ for dct in dp['iterable']:
+
+ index = dct['index']
+ active = dct['data'] is not None
+
+ pos = dct.get('position')
+
+ if pos is None:
+ logger.warning('No position set to scan point %d of scan %s' % (index, label))
+
+ AR_diff = AR_diff_base
+ AR_mask = AR_mask_base
+ AR_diff.layer = index
+ AR_mask.layer = index
+ AR_diff.active = active
+ AR_mask.active = active
+
+ # check here: is there already a view to this layer? Is it active?
+ try:
+ old_view = old_diff_views[old_diff_layers.index(index)]
+ old_active = old_view.active
+ old_view.active = active
+
+ logger.debug(
+ 'Diff view with layer/index %s of scan %s exists. \nSetting view active state from %s to %s' % (
+ index, label, old_active, active))
+ except ValueError:
+ v = View(self.Cdiff, accessrule=AR_diff)
+ diff_views.append(v)
+ logger.debug(
+ 'Diff view with layer/index %s of scan %s does not exist. \nCreating view with ID %s and set active state to %s' % (
+ index, label, v.ID, active))
+ # append position also
+ positions.append(pos)
+
+ try:
+ old_view = old_mask_views[old_mask_layers.index(index)]
+ old_view.active = active
+ except ValueError:
+ v = View(self.Cmask, accessrule=AR_mask)
+ mask_views.append(v)
+
+ # so now we should have the right views to these storages. Let them reformat()
+ # that will create the right sizes and the datalist access
+ self.diff.reformat()
+ self.mask.reformat()
+
+ # Second pass: copy the data
+ for dct in dp['iterable']:
+ parallel.barrier()
+ if dct['data'] is None:
+ continue
+ diff_data = dct['data']
+ idx = dct['index']
+
+ # FIXME: Find a more transparent way than this.
+ self.diff.data[self.diff.layermap.index(idx)][:] = diff_data
+ self.mask.data[self.mask.layermap.index(idx)][:] = dct.get('mask', np.ones_like(diff_data))
+
+ self.diff.nlayers = parallel.MPImax(self.diff.layermap) + 1
+ self.mask.nlayers = parallel.MPImax(self.mask.layermap) + 1
+
+ self.new_positions = positions
+ self.new_diff_views = diff_views
+ self.new_mask_views = mask_views
+ self.positions += positions
+ self.diff_views += diff_views
+ self.mask_views += mask_views
+
+ self._update_stats()
+
+ # Create new views on object, probe, and exit wave, and connect
+ # these through new pods.
+ new_pods, new_probe_ids, new_object_ids = self._create_pods()
+ logger.info('Process %d created %d new PODs, %d new probes and %d new objects.' % (
+ parallel.rank, len(new_pods), len(new_probe_ids), len(new_object_ids)), extra={'allprocesses': True})
+
+ # Adjust storages
+ self.ptycho.probe.reformat(True)
+ self.ptycho.obj.reformat(True)
+ self.ptycho.exit.reformat()
+
+ self._initialize_probe(new_probe_ids)
+ self._initialize_object(new_object_ids)
+ self._initialize_exit(new_pods)
+
+ return True
+
+ def _initialize_containers(self):
+ """
+ Initialize containers appropriate for the model. This
+ implementation works for 2d models, override if necessary.
+ """
+ if self.ptycho is None:
+ # Stand-alone use
+ self.Cdiff = Container(ptycho=self, ID='Cdiff', data_type='real')
+ self.Cmask = Container(ptycho=self, ID='Cmask', data_type='bool')
+ else:
+ # Use with a Ptycho instance
+ self.ptycho.probe = Container(ptycho=self.ptycho, ID='Cprobe', data_type='complex')
+ self.ptycho.obj = Container(ptycho=self.ptycho, ID='Cobj', data_type='complex')
+ self.ptycho.exit = Container(ptycho=self.ptycho, ID='Cexit', data_type='complex')
+ self.ptycho.diff = Container(ptycho=self.ptycho, ID='Cdiff', data_type='real')
+ self.ptycho.mask = Container(ptycho=self.ptycho, ID='Cmask', data_type='bool')
+ self.Cdiff = self.ptycho.diff
+ self.Cmask = self.ptycho.mask
+ self.containers_initialized = True
+
+ @staticmethod
+ def _initialize_exit(pods):
+ """
+ Initializes exit waves using the pods.
+ """
+ logger.info('\n' + headerline('Creating exit waves', 'l'))
+ for pod in pods:
+ if not pod.active:
+ continue
+ pod.exit = pod.probe * pod.object
+
+ def _update_stats(self):
+ """
+ (Re)compute the statistics for the data stored in the scan.
+ These statistics are:
+ * Itotal: The integrated power per frame
+ * max/min/mean_frame: pixel-by-pixel maximum, minimum and
+ average among all frames.
+ """
+ mask_views = self.mask_views
+ diff_views = self.diff_views
+
+ # Nothing to do if no view exist
+ if not self.diff: return
+
+ # Reinitialize containers
+ Itotal = []
+ max_frame = np.zeros(self.diff_views[0].shape)
+ min_frame = np.zeros_like(max_frame)
+ mean_frame = np.zeros_like(max_frame)
+ norm = np.zeros_like(max_frame)
+
+ for maview, diview in zip(mask_views, diff_views):
+ if not diview.active:
+ continue
+ dv = diview.data
+ m = maview.data
+ v = m * dv
+ Itotal.append(np.sum(v))
+ max_frame[max_frame < v] = v[max_frame < v]
+ min_frame[min_frame > v] = v[min_frame > v]
+ mean_frame += v
+ norm += m
+
+ parallel.allreduce(mean_frame)
+ parallel.allreduce(norm)
+ parallel.allreduce(max_frame, parallel.MPI.MAX)
+ parallel.allreduce(max_frame, parallel.MPI.MIN)
+ mean_frame /= (norm + (norm == 0))
+
+ self.diff.norm = norm
+ self.diff.max_power = parallel.MPImax(Itotal)
+ self.diff.tot_power = parallel.MPIsum(Itotal)
+ self.diff.pbound_stub = self.diff.max_power / mean_frame.shape[-1]**2
+ self.diff.mean = mean_frame
+ self.diff.max = max_frame
+ self.diff.min = min_frame
+
+ info = {'label': self.label, 'max': self.diff.max_power, 'tot': self.diff.tot_power, 'mean': mean_frame.sum()}
+ logger.info(
+ '\n--- Scan %(label)s photon report ---\nTotal photons : %(tot).2e \nAverage photons : %(mean).2e\nMaximum photons : %(max).2e\n' % info + '-' * 29)
+
+ def _create_pods(self):
+ raise NotImplementedError
+
+ def _initialize_geo(self, common):
+ raise NotImplementedError
+
+ def _initialize_probe(self, probe_ids):
+ raise NotImplementedError
+
+ def _initialize_object(self, object_ids):
+ raise NotImplementedError
+
+
+@defaults_tree.parse_doc('scanmodel.Vanilla')
+class Vanilla(BaseModel):
+ """
+ Dummy for testing, there must be more than one for validate to react
+ to invalid names.
+ """
+ pass
-NO_DATA_FLAG = 'No data'
@defaults_tree.parse_doc('scanmodel.Full')
-class ScanModel(object):
+class Full(Vanilla):
"""
Manage a single scan model (sharing, coherence, propagation, ...)
@@ -52,13 +401,6 @@ class ScanModel(object):
help =
doc =
- [tags]
- default = ['dummy']
- help = Comma seperated string tags describing the data input
- doc = [deprecated?]
- type = list
- userlevel = 2
-
[sharing]
default =
help = Scan sharing options
@@ -164,21 +506,9 @@ class ScanModel(object):
- ``None`` or ``'achromatic'``: no dispersion
- ``'linear'``: linear response model
- ``'irregular'``: no assumption
- **[not implemented]**
- type = str
- userlevel = 2
-
- [propagation]
- type = str
- default = farfield
- help = Propagation type
- doc = Either "farfield" or "nearfield"
- userlevel = 1
-
- [illumination]
- type = Param
- default =
- help = Container for probe initialization model
+ **[not implemented]**
+ type = str
+ userlevel = 2
[illumination.aperture]
type = Param
@@ -351,11 +681,6 @@ class ScanModel(object):
help = Path to a ``.ptyr`` compatible file
userlevel = 0
- [sample]
- type = Param
- default =
- help = Container for sample initialization model
-
[sample.model]
default = None
help = Type of initial object model
@@ -394,354 +719,131 @@ class ScanModel(object):
[sample.stxm]
default =
- help = STXM analysis parameters
- doc =
- type = Param
- userlevel = 1
-
- [sample.stxm.label]
- default = None
- help = Scan label of diffraction that is to be used for probe estimate
- doc = ``None``, own scan label is used
- type = str
- userlevel = 1
-
- [sample.process]
- default = None
- help = Model processing parameters
- doc = Can be ``None``, i.e. no processing
- type = Param
- userlevel =
-
- [sample.process.offset]
- default = (0,0)
- help = Offset between center of object array and scan pattern
- doc =
- type = tuple
- userlevel = 2
- lowlim = 0
-
- [sample.process.zoom]
- default = None
- help = Zoom value for object simulation.
- doc = If ``None``, leave the array untouched. Otherwise the modeled or loaded image will be
- resized using :py:func:`zoom`.
- type = tuple
- userlevel = 2
- lowlim = 0
-
- [sample.process.formula]
- default = None
- help = Chemical formula
- doc = A Formula compatible with a cxro database query,e.g. ``'Au'`` or ``'NaCl'`` or ``'H2O'``
- type = str
- userlevel = 2
-
- [sample.process.density]
- default = 1
- help = Density in [g/ccm]
- doc = Only used if `formula` is not None
- type = float
- userlevel = 2
-
- [sample.process.thickness]
- default = 1.00E-06
- help = Maximum thickness of sample
- doc = If ``None``, the absolute values of loaded source array will be used
- type = float
- userlevel = 2
-
- [sample.process.ref_index]
- default = 0.5+0.j
- help = Assigned refractive index
- doc = If ``None``, treat source array as projection of refractive index. If a refractive index
- is provided the array's absolute value will be used to scale the refractive index.
- type = complex
- userlevel = 2
- lowlim = 0
-
- [sample.process.smoothing]
- default = 2
- help = Smoothing scale
- doc = Smooth the projection with gaussian kernel of width given by `smoothing_mfs`
- type = int
- userlevel = 2
- lowlim = 0
-
- [sample.diversity]
- default =
- help = Probe mode(s) diversity parameters
- doc = Can be ``None`` i.e. no diversity
- type = Param
- userlevel =
-
- [sample.diversity.noise]
- default = None
- help = Noise in the generated modes of the illumination
- doc = Can be either:
- - ``None`` : no noise
- - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
- - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
- type = tuple
- userlevel = 1
-
- [sample.diversity.power]
- default = 0.1
- help = Power of modes relative to main mode (zero-layer)
- doc =
- type = tuple, float
- userlevel = 1
-
- [sample.diversity.shift]
- default = None
- help = Lateral shift of modes relative to main mode
- doc = **[not implemented]**
- type = float
- userlevel = 2
-
- """
-
- _PREFIX = MODEL_PREFIX
-
- def __init__(self, ptycho=None, pars=None, label=None):
- """
- Create ScanModel object.
-
- Parameters
- ----------
- pars : dict or Param
- Input parameter tree.
-
- ptycho : Ptycho instance
- Ptycho instance to which this scan belongs
-
- label : str
- Unique label
- """
- from .. import experiment
-
- # Update parameter structure
- # Load default parameter structure
- p = self.DEFAULT.copy(99)
- p.update(pars, in_place_depth=4)
- self.p = p
- self.label = label
- self.ptycho = ptycho
-
- # Create Associated PtyScan object
- self.ptyscan = experiment.makePtyScan(self.p.data)
-
- # Initialize instance attributes
- self.mask = None
- self.diff = None
- self.positions = []
- self.mask_views = []
- self.diff_views = []
- self.new_positions = None
- self.new_diff_views = None
- self.new_mask_views = None
-
- self.geometries = []
- self.shape = None
- self.psize = None
-
- # Object flags and constants
- self.containers_initialized = False
- self.data_available = True
- self.CType = CType
- self.FType = FType
- self.frames_per_call = 100000
-
- # Sharing dictionary that stores sharing behavior
- self.sharing = {'probe_ids': {}, 'object_ids': {}}
-
- # REDESIGN: this will be replaced
- # Initialize sharing rules for POD creations
- sharing_pars = u.Param({'model_type': 'basic',
- 'scan_per_probe': 1,
- 'scan_per_object': 1,
- 'npts': None})
- self.sharing_rules = model.parse_model(sharing_pars, self.sharing)
-
- def new_data(self):
- """
- Feed data from ptyscan object.
- :return: None if no data is available, True otherwise.
- """
-
- # Initialize if that has not been done yet
- if not self.ptyscan.is_initialized:
- self.ptyscan.initialize()
-
- # Get data
- dp = self.ptyscan.auto(self.frames_per_call)
-
- self.data_available = (dp != data.EOS)
- logger.debug(u.verbose.report(dp))
-
- if dp == data.WAIT or not self.data_available:
- return None
-
- label = self.label
- logger.info('Importing data from scan %s.' % label)
-
- # Prepare the scan geometry if not already done.
- if not self.geometries:
- self._initialize_geo(dp['common'])
-
- # Create containers if not already done
- if not self.containers_initialized:
- self._initialize_containers()
-
- # Generalized shape which works for 2d and 3d cases
- sh = (1,) + tuple(self.shape)
-
- # Storage generation if not already existing
- if self.diff is None:
- # This scan is brand new so we create storages for it
- self.diff = self.Cdiff.new_storage(shape=sh, psize=self.psize, padonly=True,
- layermap=None)
- old_diff_views = []
- old_diff_layers = []
- else:
- # ok storage exists already. Views most likely also. We store them so we can update their status later.
- old_diff_views = self.Cdiff.views_in_storage(self.diff, active_only=False)
- old_diff_layers = []
- for v in old_diff_views:
- old_diff_layers.append(v.layer)
-
- # Same for mask
- if self.mask is None:
- self.mask = self.Cmask.new_storage(shape=sh, psize=self.psize, padonly=True,
- layermap=None)
- old_mask_views = []
- old_mask_layers = []
- else:
- old_mask_views = self.Cmask.views_in_storage(self.mask, active_only=False)
- old_mask_layers = []
- for v in old_mask_views:
- old_mask_layers.append(v.layer)
-
- # Prepare for View generation
- AR_diff_base = DEFAULT_ACCESSRULE.copy()
- AR_diff_base.shape = self.shape
- AR_diff_base.coord = 0.0
- AR_diff_base.psize = self.psize
- AR_mask_base = AR_diff_base.copy()
- AR_diff_base.storageID = self.diff.ID
- AR_mask_base.storageID = self.mask.ID
-
- diff_views = []
- mask_views = []
- positions = []
-
- # First pass: create or update views and reformat corresponding storage
- for dct in dp['iterable']:
-
- index = dct['index']
- active = dct['data'] is not None
-
- pos = dct.get('position')
+ help = STXM analysis parameters
+ doc =
+ type = Param
+ userlevel = 1
- if pos is None:
- logger.warning('No position set to scan point %d of scan %s' % (index, label))
+ [sample.stxm.label]
+ default = None
+ help = Scan label of diffraction that is to be used for probe estimate
+ doc = ``None``, own scan label is used
+ type = str
+ userlevel = 1
- AR_diff = AR_diff_base
- AR_mask = AR_mask_base
- AR_diff.layer = index
- AR_mask.layer = index
- AR_diff.active = active
- AR_mask.active = active
+ [sample.process]
+ default = None
+ help = Model processing parameters
+ doc = Can be ``None``, i.e. no processing
+ type = Param
+ userlevel =
- # check here: is there already a view to this layer? Is it active?
- try:
- old_view = old_diff_views[old_diff_layers.index(index)]
- old_active = old_view.active
- old_view.active = active
+ [sample.process.offset]
+ default = (0,0)
+ help = Offset between center of object array and scan pattern
+ doc =
+ type = tuple
+ userlevel = 2
+ lowlim = 0
- logger.debug(
- 'Diff view with layer/index %s of scan %s exists. \nSetting view active state from %s to %s' % (
- index, label, old_active, active))
- except ValueError:
- v = View(self.Cdiff, accessrule=AR_diff)
- diff_views.append(v)
- logger.debug(
- 'Diff view with layer/index %s of scan %s does not exist. \nCreating view with ID %s and set active state to %s' % (
- index, label, v.ID, active))
- # append position also
- positions.append(pos)
+ [sample.process.zoom]
+ default = None
+ help = Zoom value for object simulation.
+ doc = If ``None``, leave the array untouched. Otherwise the modeled or loaded image will be
+ resized using :py:func:`zoom`.
+ type = tuple
+ userlevel = 2
+ lowlim = 0
- try:
- old_view = old_mask_views[old_mask_layers.index(index)]
- old_view.active = active
- except ValueError:
- v = View(self.Cmask, accessrule=AR_mask)
- mask_views.append(v)
+ [sample.process.formula]
+ default = None
+ help = Chemical formula
+ doc = A Formula compatible with a cxro database query,e.g. ``'Au'`` or ``'NaCl'`` or ``'H2O'``
+ type = str
+ userlevel = 2
- # so now we should have the right views to this storages. Let them reformat()
- # that will create the right sizes and the datalist access
- self.diff.reformat()
- self.mask.reformat()
+ [sample.process.density]
+ default = 1
+ help = Density in [g/ccm]
+ doc = Only used if `formula` is not None
+ type = float
+ userlevel = 2
- # Second pass: copy the data
- for dct in dp['iterable']:
- parallel.barrier()
- if dct['data'] is None:
- continue
- diff_data = dct['data']
- idx = dct['index']
+ [sample.process.thickness]
+ default = 1.00E-06
+ help = Maximum thickness of sample
+ doc = If ``None``, the absolute values of loaded source array will be used
+ type = float
+ userlevel = 2
- # FIXME: Find a more transparent way than this.
- self.diff.data[self.diff.layermap.index(idx)][:] = diff_data
- self.mask.data[self.mask.layermap.index(idx)][:] = dct.get('mask', np.ones_like(diff_data))
+ [sample.process.ref_index]
+ default = 0.5+0.j
+ help = Assigned refractive index
+ doc = If ``None``, treat source array as projection of refractive index. If a refractive index
+ is provided the array's absolute value will be used to scale the refractive index.
+ type = complex
+ userlevel = 2
+ lowlim = 0
- self.diff.nlayers = parallel.MPImax(self.diff.layermap) + 1
- self.mask.nlayers = parallel.MPImax(self.mask.layermap) + 1
+ [sample.process.smoothing]
+ default = 2
+ help = Smoothing scale
+ doc = Smooth the projection with gaussian kernel of width given by `smoothing_mfs`
+ type = int
+ userlevel = 2
+ lowlim = 0
- self.new_positions = positions
- self.new_diff_views = diff_views
- self.new_mask_views = mask_views
- self.positions += positions
- self.diff_views += diff_views
- self.mask_views += mask_views
+ [sample.diversity]
+ default =
+ help = Probe mode(s) diversity parameters
+ doc = Can be ``None`` i.e. no diversity
+ type = Param
+ userlevel =
- self._update_stats()
+ [sample.diversity.noise]
+ default = None
+ help = Noise in the generated modes of the illumination
+ doc = Can be either:
+ - ``None`` : no noise
+ - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
+ - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
+ type = tuple
+ userlevel = 1
- # Create new views on object, probe, and exit wave, and connect
- # these through new pods.
- new_pods, new_probe_ids, new_object_ids = self._create_pods()
- logger.info('Process %d created %d new PODs, %d new probes and %d new objects.' % (
- parallel.rank, len(new_pods), len(new_probe_ids), len(new_object_ids)), extra={'allprocesses': True})
+ [sample.diversity.power]
+ default = 0.1
+ help = Power of modes relative to main mode (zero-layer)
+ doc =
+ type = tuple, float
+ userlevel = 1
- # Adjust storages
- self.ptycho.probe.reformat(True)
- self.ptycho.obj.reformat(True)
- self.ptycho.exit.reformat()
+ [sample.diversity.shift]
+ default = None
+ help = Lateral shift of modes relative to main mode
+ doc = **[not implemented]**
+ type = float
+ userlevel = 2
- self._initialize_probe(new_probe_ids)
- self._initialize_object(new_object_ids)
- self._initialize_exit(new_pods)
+ """
- return True
+ _PREFIX = MODEL_PREFIX
- def _initialize_containers(self):
+ def __init__(self, ptycho=None, pars=None, label=None):
"""
- Initialize containers appropriate for this model.
+ Override constructor to add sharing functionality.
"""
- if self.ptycho is None:
- # Stand-alone use
- self.Cdiff = Container(ptycho=self, ID='Cdiff', data_type='real')
- self.Cmask = Container(ptycho=self, ID='Cmask', data_type='bool')
- else:
- # Use with a Ptycho instance
- self.ptycho.probe = Container(ptycho=self.ptycho, ID='Cprobe', data_type='complex')
- self.ptycho.obj = Container(ptycho=self.ptycho, ID='Cobj', data_type='complex')
- self.ptycho.exit = Container(ptycho=self.ptycho, ID='Cexit', data_type='complex')
- self.ptycho.diff = Container(ptycho=self.ptycho, ID='Cdiff', data_type='real')
- self.ptycho.mask = Container(ptycho=self.ptycho, ID='Cmask', data_type='bool')
- self.Cdiff = self.ptycho.diff
- self.Cmask = self.ptycho.mask
- self.containers_initialized = True
+ super(Full, self).__init__(ptycho, pars, label)
+
+ # Sharing dictionary that stores sharing behavior
+ self.sharing = {'probe_ids': {}, 'object_ids': {}}
+
+ # REDESIGN: this will be replaced
+ # Initialize sharing rules for POD creations
+ sharing_pars = u.Param({'model_type': 'basic',
+ 'scan_per_probe': 1,
+ 'scan_per_object': 1,
+ 'npts': None})
+ self.sharing_rules = model.parse_model(sharing_pars, self.sharing)
def _create_pods(self):
"""
@@ -1005,77 +1107,6 @@ def _initialize_object(self, object_ids):
s.model_initialized = True
- @staticmethod
- def _initialize_exit(pods):
- """
- Initializes exit waves using the pods.
- """
- logger.info('\n' + headerline('Creating exit waves', 'l'))
- for pod in pods:
- if not pod.active:
- continue
- pod.exit = pod.probe * pod.object
-
- def _update_stats(self):
- """
- (Re)compute the statistics for the data stored in the scan.
- These statistics are:
- * Itotal: The integrated power per frame
- * max/min/mean_frame: pixel-by-pixel maximum, minimum and
- average among all frames.
- """
- mask_views = self.mask_views
- diff_views = self.diff_views
-
- # Nothing to do if no view exist
- if not self.diff: return
-
- # Reinitialize containers
- Itotal = []
- max_frame = np.zeros(self.diff_views[0].shape)
- min_frame = np.zeros_like(max_frame)
- mean_frame = np.zeros_like(max_frame)
- norm = np.zeros_like(max_frame)
-
- for maview, diview in zip(mask_views, diff_views):
- if not diview.active:
- continue
- dv = diview.data
- m = maview.data
- v = m * dv
- Itotal.append(np.sum(v))
- max_frame[max_frame < v] = v[max_frame < v]
- min_frame[min_frame > v] = v[min_frame > v]
- mean_frame += v
- norm += m
-
- parallel.allreduce(mean_frame)
- parallel.allreduce(norm)
- parallel.allreduce(max_frame, parallel.MPI.MAX)
- parallel.allreduce(max_frame, parallel.MPI.MIN)
- mean_frame /= (norm + (norm == 0))
-
- self.diff.norm = norm
- self.diff.max_power = parallel.MPImax(Itotal)
- self.diff.tot_power = parallel.MPIsum(Itotal)
- self.diff.pbound_stub = self.diff.max_power / mean_frame.shape[-1]**2
- self.diff.mean = mean_frame
- self.diff.max = max_frame
- self.diff.min = min_frame
-
- info = {'label': self.label, 'max': self.diff.max_power, 'tot': self.diff.tot_power, 'mean': mean_frame.sum()}
- logger.info(
- '\n--- Scan %(label)s photon report ---\nTotal photons : %(tot).2e \nAverage photons : %(mean).2e\nMaximum photons : %(max).2e\n' % info + '-' * 29)
-
-
-@defaults_tree.parse_doc('scanmodel.Vanilla')
-class ScanModel2(object):
- """
- Dummy for testing, there must be more than one for validate to react
- to invalid names.
- """
- pass
-
class ModelManager(object):
"""
@@ -1101,7 +1132,7 @@ def __init__(self, ptycho, pars):
# Create scan model objects
self.scans = OrderedDict()
for label, scan_pars in pars.iteritems():
- self.scans[label] = ScanModel(ptycho=self.ptycho, pars=scan_pars, label=label)
+ self.scans[label] = Full(ptycho=self.ptycho, pars=scan_pars, label=label)
def _to_dict(self):
# Delete the model class. We do not really need to store it.
diff --git a/ptypy/simulations/simscan.py b/ptypy/simulations/simscan.py
index dce27e421..66d875863 100644
--- a/ptypy/simulations/simscan.py
+++ b/ptypy/simulations/simscan.py
@@ -16,14 +16,14 @@
from detector import Detector, conv
from ptypy.core.data import PtyScan
from ptypy.core.ptycho import Ptycho
- from ptypy.core.manager import ScanModel
+ from ptypy.core.manager import Full as ScanModel
scan_DEFAULT = ScanModel.DEFAULT
else:
from .. import utils as u
from detector import Detector, conv
from ..core.data import PtyScan
from ..core.ptycho import Ptycho
- from ..core.manager import ScanModel
+ from ..core.manager import Full as ScanModel
scan_DEFAULT = ScanModel.DEFAULT
logger = u.verbose.logger
From 2328bdf2e832dd34682d9485c52400a5423698ed Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 31 Oct 2017 10:55:50 +0100
Subject: [PATCH 176/363] Models now in charge of the whole scan tree
---
ptypy/core/manager.py | 19 ++++++++++++++++---
ptypy/core/ptycho.py | 22 ++--------------------
2 files changed, 18 insertions(+), 23 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 4bc3f1a99..a0d353c17 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -38,7 +38,7 @@
__all__ = ['ModelManager', 'BaseModel', 'Full', 'Vanilla']
-@defaults_tree.parse_doc('scanmodel.BaseModel')
+@defaults_tree.parse_doc('scan.BaseModel')
class BaseModel(object):
"""
Abstract base class for models. Override at least these methods:
@@ -63,6 +63,17 @@ class BaseModel(object):
doc = Either "farfield" or "nearfield"
userlevel = 1
+ [data]
+ default =
+ type = @scandata.*
+ help = Link to container for data preparation
+ doc =
+
+ [data.name]
+ default =
+ type = str
+ help = Name of the PtyScan subclass to use
+
[illumination]
type = Param
default =
@@ -379,7 +390,7 @@ def _initialize_object(self, object_ids):
raise NotImplementedError
-@defaults_tree.parse_doc('scanmodel.Vanilla')
+@defaults_tree.parse_doc('scan.Vanilla')
class Vanilla(BaseModel):
"""
Dummy for testing, there must be more than one for validate to react
@@ -388,7 +399,7 @@ class Vanilla(BaseModel):
pass
-@defaults_tree.parse_doc('scanmodel.Full')
+@defaults_tree.parse_doc('scan.Full')
class Full(Vanilla):
"""
Manage a single scan model (sharing, coherence, propagation, ...)
@@ -834,6 +845,8 @@ def __init__(self, ptycho=None, pars=None, label=None):
"""
super(Full, self).__init__(ptycho, pars, label)
+ defaults_tree['scan.Full'].validate(self.p)
+
# Sharing dictionary that stores sharing behavior
self.sharing = {'probe_ids': {}, 'object_ids': {}}
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index 527ace9d1..a0b6ba488 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -215,8 +215,8 @@ class Ptycho(Base):
doc =
[scans.*]
- default = @scan
- type = @scan
+ default = @scan.Vanilla
+ type = @scan.*
help = Wildcard entry for list of scans to load. See :py:data:`scan`
[engines]
@@ -231,24 +231,6 @@ class Ptycho(Base):
help = Wildcard entry for list of engines to run. See :py:data:`engine`
doc = The value of engines.*.name is used to choose among the available engines.
- [scan]
- default = None
- type = Param
- help = Template for scan.* instances
- doc =
-
- [scan.data]
- default =
- type = @scandata.*
- help = Link to container for data preparation
- doc =
-
- [scan.model]
- default = @scanmodel.Full
- type = @scanmodel.*
- help = Link to container for data interpretation model
- doc =
-
"""
_PREFIX = PTYCHO_PREFIX
From 36918785714627e8312074d51a0c7070b837ca16 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 31 Oct 2017 14:55:03 +0100
Subject: [PATCH 177/363] Introduced a simple Vanilla ScanModel as well as
ScanModel subclass choice
---
ptypy/core/manager.py | 217 ++++++++++++++++++++++++++----
ptypy/core/ptycho.py | 2 +-
ptypy/experiment/__init__.py | 14 +-
ptypy/utils/misc.py | 16 ++-
templates/minimal_prep_and_run.py | 1 +
5 files changed, 210 insertions(+), 40 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index a0d353c17..d43499ff6 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -5,9 +5,8 @@
The main task of this module is to prepare the data structure for
reconstruction, taking a data feed and connecting individual diffraction
measurements to the other containers. The way this connection is done
-is defined by the user through a model definition. The connections are
-described by the POD objects. This module also takes care of initializing
-containers according to user-defined rules.
+is defined by ScanModel and its subclasses. The connections are
+described by the POD objects.
This file is part of the PTYPY package.
@@ -35,11 +34,11 @@
FType = np.float64
CType = np.complex128
-__all__ = ['ModelManager', 'BaseModel', 'Full', 'Vanilla']
+__all__ = ['ModelManager', 'ScanModel', 'Full', 'Vanilla']
-@defaults_tree.parse_doc('scan.BaseModel')
-class BaseModel(object):
+@defaults_tree.parse_doc('scan.ScanModel')
+class ScanModel(object):
"""
Abstract base class for models. Override at least these methods:
_create_pods(self)
@@ -87,7 +86,7 @@ class BaseModel(object):
"""
def __init__(self, ptycho=None, pars=None, label=None):
"""
- Create ScanModel object.
+ Create scan model object.
Parameters
----------
@@ -378,9 +377,25 @@ def _update_stats(self):
'\n--- Scan %(label)s photon report ---\nTotal photons : %(tot).2e \nAverage photons : %(mean).2e\nMaximum photons : %(max).2e\n' % info + '-' * 29)
def _create_pods(self):
+ """
+ Create all new pods as specified in the new_positions,
+ new_diff_views and new_mask_views object attributes. Also create
+ all necessary views on object, probe, and exit wave.
+
+ Return the list of new pods, and dicts of new probe and object
+ ids (to allow for initialization).
+ """
raise NotImplementedError
def _initialize_geo(self, common):
+ """
+ Initialize the geometry/geometries based on input data package
+ Parameters
+ ----------
+ common: dict
+ metadata part of the data package passed into new_data.
+
+ """
raise NotImplementedError
def _initialize_probe(self, probe_ids):
@@ -391,16 +406,163 @@ def _initialize_object(self, object_ids):
@defaults_tree.parse_doc('scan.Vanilla')
-class Vanilla(BaseModel):
+class Vanilla(ScanModel):
"""
Dummy for testing, there must be more than one for validate to react
to invalid names.
+
+ Defaults:
+
+ [name]
+ default = Vanilla
+ type = str
+ help =
+
+ [illumination.size]
+ default = None
+ type = float
+ help = Initial probe size
+ doc = The probe is initialized as a flat circle.
+
+ [sample.fill]
+ default = 1
+ type = float, complex
+ help = Initial sample value
+ doc = The sample is initialized with this value everywhere.
+
"""
- pass
+
+ def _create_pods(self):
+ """
+ Create all new pods as specified in the new_positions,
+ new_diff_views and new_mask_views object attributes.
+ """
+ logger.info('\n' + headerline('Creating PODS', 'l'))
+ new_pods = []
+ new_probe_ids = {}
+ new_object_ids = {}
+
+        # We can just decide what the storage IDs will be
+ ID ='S00G00'
+
+ # We need to return info on what storages are created
+ if not ID in self.ptycho.probe.storages.keys():
+ new_probe_ids[ID] = True
+ if not ID in self.ptycho.obj.storages.keys():
+ new_object_ids[ID] = True
+
+ geometry = self.geometries[0]
+
+ # Loop through diffraction patterns
+ for i in range(len(self.new_diff_views)):
+ dv, mv = self.new_diff_views.pop(0), self.new_mask_views.pop(0)
+
+
+
+ # Create views
+ pv = View(container=self.ptycho.probe,
+ accessrule={'shape': geometry.shape,
+ 'psize': geometry.resolution,
+ 'coord': u.expect2(0.0),
+ 'storageID': ID,
+ 'layer': 0,
+ 'active': True})
+
+ ov = View(container=self.ptycho.obj,
+ accessrule={'shape': geometry.shape,
+ 'psize': geometry.resolution,
+ 'coord': self.new_positions[i],
+ 'storageID': ID,
+ 'layer': 0,
+ 'active': True})
+
+ ev = View(container=self.ptycho.exit,
+ accessrule={'shape': geometry.shape,
+ 'psize': geometry.resolution,
+ 'coord': u.expect2(0.0),
+ 'storageID': ID,
+ 'layer': dv.layer,
+ 'active': dv.active})
+
+ views = {'probe': pv,
+ 'obj': ov,
+ 'diff': dv,
+ 'mask': mv,
+ 'exit': ev}
+
+ pod = POD(ptycho=self.ptycho,
+ ID=None,
+ views=views,
+ geometry=geometry)
+ pod.probe_weight = 1
+ pod.object_weight = 1
+
+ new_pods.append(pod)
+
+ return new_pods, new_probe_ids, new_object_ids
+
+ def _initialize_geo(self, common):
+ """
+        Initialize the geometry based on the metadata in the input
+        data package.
+ """
+
+ # Collect geometry parameters
+ get_keys = ['distance', 'center', 'energy', 'psize', 'shape']
+ geo_pars = u.Param({key: common[key] for key in get_keys})
+ geo_pars.propagation = self.p.propagation
+
+ # make a Geo instance and fix resolution
+ g = geometry.Geo(owner=self.ptycho, pars=geo_pars)
+ g.p.resolution_is_fix = True
+
+ # save the geometry
+ self.geometries = [g]
+
+ # Store frame shape
+ self.shape = np.array(common.get('shape', g.shape))
+ self.psize = g.psize
+
+ return
+
+ def _initialize_probe(self, probe_ids):
+ """
+ Initialize the probe storage referred to by probe_ids.keys()[0]
+ """
+ logger.info('\n'+headerline('Probe initialization', 'l'))
+
+ # pick storage from container, there's only one probe
+ pid = probe_ids.keys()[0]
+ s = self.ptycho.probe.S.get(pid)
+ logger.info('Initializing probe storage %s' % pid)
+
+ # use the illumination module as a utility
+ logger.info('Initializing as circle of size ' + str(self.p.illumination.size))
+ illu_pars = u.Param({'aperture':
+ {'form': 'circ', 'size': self.p.illumination.size}})
+ illumination.init_storage(s, illu_pars)
+
+ s.model_initialized = True
+
+ def _initialize_object(self, object_ids):
+ """
+        Initializes the object storage referred to by object_ids.keys()[0]
+ """
+ logger.info('\n'+headerline('Object initialization', 'l'))
+
+ # pick storage from container, there's only one object
+ oid = object_ids.keys()[0]
+ s = self.ptycho.obj.S.get(oid)
+        logger.info('Initializing object storage %s' % oid)
+
+ # simple fill, no need to use the sample module for this
+ s.fill(self.p.sample.fill)
+
+ s.model_initialized = True
@defaults_tree.parse_doc('scan.Full')
-class Full(Vanilla):
+class Full(ScanModel):
"""
Manage a single scan model (sharing, coherence, propagation, ...)
@@ -861,11 +1023,7 @@ def __init__(self, ptycho=None, pars=None, label=None):
def _create_pods(self):
"""
Create all new pods as specified in the new_positions,
- new_diff_views and new_mask_views object attributes. Also create
- all necessary views on object, probe, and exit wave.
-
- Return the list of new pods, probe and object ids (to allow for
- initialization).
+ new_diff_views and new_mask_views object attributes.
"""
logger.info('\n' + headerline('Creating PODS', 'l'))
new_pods = []
@@ -1009,12 +1167,7 @@ def _create_pods(self):
def _initialize_geo(self, common):
"""
- Initialize the geometry/geometries based on input data package
- Parameters
- ----------
- common: dict
- metadata part of the data package passed into new_data.
-
+ Initialize the geometry/geometries.
"""
# Extract necessary info from the received data package
get_keys = ['distance', 'center', 'energy', 'psize', 'shape']
@@ -1044,7 +1197,11 @@ def _initialize_geo(self, common):
def _initialize_probe(self, probe_ids):
"""
- Initialize the probe storages referred to by the probe_ids
+ Initialize the probe storages referred to by the probe_ids.
+
+ For this case the parameter interface of the illumination module
+ matches the illumination parameters of this class, so they are
+ just fed in directly.
"""
logger.info('\n'+headerline('Probe initialization', 'l'))
@@ -1124,8 +1281,7 @@ def _initialize_object(self, object_ids):
class ModelManager(object):
"""
Thin wrapper class which now just interfaces Ptycho with ScanModel.
- This should probably all be done directly in Ptycho and would take
- like 8 lines of code.
+ This should probably all be done directly in Ptycho.
"""
def __init__(self, ptycho, pars):
@@ -1145,7 +1301,18 @@ def __init__(self, ptycho, pars):
# Create scan model objects
self.scans = OrderedDict()
for label, scan_pars in pars.iteritems():
- self.scans[label] = Full(ptycho=self.ptycho, pars=scan_pars, label=label)
+ # this is not so pretty...
+ if not 'name' in scan_pars:
+ scan_pars.name = Full.DEFAULT.name
+
+ # find out which scan model class to instantiate
+ if scan_pars.name in u.all_subclasses(ScanModel, names=True):
+ cls = eval(scan_pars.name)
+ else:
+ raise RuntimeError('Could not manage model %s' % scan_pars.name)
+
+ # instantiate!
+ self.scans[label] = cls(ptycho=self.ptycho, pars=scan_pars, label=label)
def _to_dict(self):
# Delete the model class. We do not really need to store it.
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index a0b6ba488..592d01cf5 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -215,7 +215,7 @@ class Ptycho(Base):
doc =
[scans.*]
- default = @scan.Vanilla
+ default =
type = @scan.*
help = Wildcard entry for list of scans to load. See :py:data:`scan`
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index 4c3fcff16..fee5baaf8 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -37,19 +37,9 @@
from ptypy.core.data import PtydScan, MoonFlowerScan, PtyScan
else:
from ..utils.verbose import logger
+ from .. import utils as u
from ..core.data import PtydScan, MoonFlowerScan, PtyScan
-def all_subclasses(cls, names=False):
- """
- Helper function for finding all subclasses of a base class.
- """
- subs = cls.__subclasses__() + [g for s in cls.__subclasses__()
- for g in all_subclasses(s)]
- if names:
- return [c.__name__ for c in subs]
- else:
- return subs
-
def makePtyScan(pars, scanmodel=None):
"""
Factory for PtyScan object. Return an instance of the appropriate PtyScan subclass based on the
@@ -67,7 +57,7 @@ def makePtyScan(pars, scanmodel=None):
# Extract information on the type of object to build
name = pars.name
- if name in all_subclasses(PtyScan, names=True):
+ if name in u.all_subclasses(PtyScan, names=True):
ps_class = eval(name)
logger.info('Scan will be prepared with the PtyScan subclass "%s"' % name)
ps_instance = ps_class(pars)
diff --git a/ptypy/utils/misc.py b/ptypy/utils/misc.py
index a854c5b00..8c8e43b26 100644
--- a/ptypy/utils/misc.py
+++ b/ptypy/utils/misc.py
@@ -15,9 +15,21 @@
__all__ = ['str2int','str2range',\
'complex_overload','expect2','expect3',\
- 'keV2m','keV2nm','nm2keV', 'clean_path','unique_path','Table']
-
+ 'keV2m','keV2nm','nm2keV', 'clean_path','unique_path','Table',\
+ 'all_subclasses']
+def all_subclasses(cls, names=False):
+ """
+ Helper function for finding all subclasses of a base class.
+ If names is True, returns the names of the classes rather than
+ their object handles.
+ """
+ subs = cls.__subclasses__() + [g for s in cls.__subclasses__()
+ for g in all_subclasses(s)]
+ if names:
+ return [c.__name__ for c in subs]
+ else:
+ return subs
class Table(object):
"""
diff --git a/templates/minimal_prep_and_run.py b/templates/minimal_prep_and_run.py
index 07ba95d97..cb470611d 100644
--- a/templates/minimal_prep_and_run.py
+++ b/templates/minimal_prep_and_run.py
@@ -19,6 +19,7 @@
# max 100 frames (128x128px) of diffraction data
p.scans = u.Param()
p.scans.MF = u.Param()
+#p.scans.MF.name = 'Vanilla' # now you can specify which ScanModel subclass to use, default is 'Full'
p.scans.MF.data= u.Param()
p.scans.MF.data.name = 'MoonFlowerScan'
p.scans.MF.data.shape = 128
From 23f6120938352799d46fc2f9e6471eb95c3c1156 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 31 Oct 2017 15:41:53 +0100
Subject: [PATCH 178/363] Bug fix and slight cleanup in descriptor
---
ptypy/utils/descriptor.py | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/ptypy/utils/descriptor.py b/ptypy/utils/descriptor.py
index 2581f9bc3..d01dccd16 100644
--- a/ptypy/utils/descriptor.py
+++ b/ptypy/utils/descriptor.py
@@ -824,7 +824,7 @@ def _walk(self, depth=0, pars=None, ignore_symlinks=False, ignore_wildcards=Fals
for cname, c in children.items():
new_path = '.'.join([path, cname]) if path else cname
if pars:
- if cname not in pars:
+ if cname not in pars or pars[cname] is None:
yield {'d': c, 'path': path, 'status': 'nopar', 'info': cname}
else:
for x in c._walk(depth=depth-1, pars=pars[cname], ignore_symlinks=ignore_symlinks,
@@ -861,14 +861,14 @@ def check(self, pars, depth=99):
out[path]['type'] = CODES.PASS
if any([i in d._limtypes for i in d.type]):
lowlim, uplim = d.limits
- if lowlim is None or pars[res['path']] is None:
+ if lowlim is None or pars[path] is None:
out[path]['lowlim'] = CODES.PASS
else:
- out[path]['lowlim'] = CODES.PASS if (pars[res['path']] >= lowlim) else CODES.FAIL
- if uplim is None or pars[res['path']] is None:
+ out[path]['lowlim'] = CODES.PASS if (pars[path] >= lowlim) else CODES.FAIL
+ if uplim is None or pars[path] is None:
out[path]['uplim'] = CODES.PASS
else:
- out[path]['uplim'] = CODES.PASS if (pars[res['path']] <= uplim) else CODES.FAIL
+ out[path]['uplim'] = CODES.PASS if (pars[path] <= uplim) else CODES.FAIL
elif res['status'] == 'wrongtype':
# Wrong type
out[path]['type'] = CODES.INVALID
From 57e9c89d173b8f216fa31a029809d4b17300aff3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 31 Oct 2017 16:28:25 +0100
Subject: [PATCH 179/363] Fixed bug in descriptor._walk, it was missing bad
params
---
ptypy/utils/descriptor.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/ptypy/utils/descriptor.py b/ptypy/utils/descriptor.py
index d01dccd16..d3fe0a7c8 100644
--- a/ptypy/utils/descriptor.py
+++ b/ptypy/utils/descriptor.py
@@ -853,7 +853,8 @@ def check(self, pars, depth=99):
out = OrderedDict()
for res in self._walk(depth=depth, pars=pars):
path = res['path']
- out[path] = {}
+ if not path in out.keys():
+ out[path] = {}
# Switch through all possible statuses
if res['status'] == 'ok':
# Check limits
From e075446670ea6773e193fc1b4bd298d160a7fba6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 31 Oct 2017 16:41:55 +0100
Subject: [PATCH 180/363] Parameter validation for Ptycho
---
ptypy/core/manager.py | 7 -------
ptypy/core/ptycho.py | 3 +++
templates/minimal_prep_and_run.py | 4 +++-
3 files changed, 6 insertions(+), 8 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index d43499ff6..cdbb3b23a 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -1007,8 +1007,6 @@ def __init__(self, ptycho=None, pars=None, label=None):
"""
super(Full, self).__init__(ptycho, pars, label)
- defaults_tree['scan.Full'].validate(self.p)
-
# Sharing dictionary that stores sharing behavior
self.sharing = {'probe_ids': {}, 'object_ids': {}}
@@ -1301,16 +1299,11 @@ def __init__(self, ptycho, pars):
# Create scan model objects
self.scans = OrderedDict()
for label, scan_pars in pars.iteritems():
- # this is not so pretty...
- if not 'name' in scan_pars:
- scan_pars.name = Full.DEFAULT.name
-
# find out which scan model class to instantiate
if scan_pars.name in u.all_subclasses(ScanModel, names=True):
cls = eval(scan_pars.name)
else:
raise RuntimeError('Could not manage model %s' % scan_pars.name)
-
# instantiate!
self.scans[label] = cls(ptycho=self.ptycho, pars=scan_pars, label=label)
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index 592d01cf5..4737102a0 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -274,6 +274,9 @@ def __init__(self, pars=None, level=2, **kwargs):
# That may be a little dangerous
self.p.update(kwargs)
+ # Validate the incoming parameters
+ defaults_tree.validate(self.p)
+
# Instance attributes
# Structures
diff --git a/templates/minimal_prep_and_run.py b/templates/minimal_prep_and_run.py
index cb470611d..60f67b3a6 100644
--- a/templates/minimal_prep_and_run.py
+++ b/templates/minimal_prep_and_run.py
@@ -19,7 +19,9 @@
# max 100 frames (128x128px) of diffraction data
p.scans = u.Param()
p.scans.MF = u.Param()
-#p.scans.MF.name = 'Vanilla' # now you can specify which ScanModel subclass to use, default is 'Full'
+# now you have to specify which ScanModel to use with scans.XX.name,
+# just as you have to give 'name' for engines and PtyScan subclasses.
+p.scans.MF.name = 'Full' # or 'Vanilla'
p.scans.MF.data= u.Param()
p.scans.MF.data.name = 'MoonFlowerScan'
p.scans.MF.data.shape = 128
From c065533cf6b41eb6da7c5fd1f4c5eb5480aa0d87 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 31 Oct 2017 17:03:34 +0100
Subject: [PATCH 181/363] Added slightly more pedagogical output on
descriptor.validate()
---
ptypy/utils/descriptor.py | 12 ++++++++++--
1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/ptypy/utils/descriptor.py b/ptypy/utils/descriptor.py
index d3fe0a7c8..c9cc6c84f 100644
--- a/ptypy/utils/descriptor.py
+++ b/ptypy/utils/descriptor.py
@@ -919,12 +919,20 @@ def validate(self, pars, raisecodes=(CODES.FAIL, CODES.INVALID)):
d = self.check(pars)
do_raise = False
+ raise_reasons = []
for ep, v in d.items():
for tocheck, outcome in v.items():
logger.log(_logging_levels[CODE_LABEL[outcome]], '%-50s %-20s %7s' % (ep, tocheck, CODE_LABEL[outcome]))
- do_raise |= (outcome in raisecodes)
+ if outcome in raisecodes:
+ do_raise = True
+ reason = str(ep)
+ if tocheck == 'symlink':
+ reason += ' - make sure to specify the .name field'
+ else:
+ reason += ' - %s' % tocheck
+ raise_reasons.append(reason)
if do_raise:
- raise RuntimeError('Parameter validation failed.')
+ raise RuntimeError('Parameter validation failed:\n ' + '\n '.join(raise_reasons))
def sanity_check(self, depth=10):
"""
From 39054bd241ec83e546cc80744eb000641cc2882a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 6 Nov 2017 11:17:42 +0100
Subject: [PATCH 182/363] Engines now check that each POD's model is supported
---
ptypy/core/classes.py | 13 +++++++------
ptypy/core/manager.py | 4 ++++
ptypy/engines/DM.py | 3 +++
ptypy/engines/DM_simple.py | 3 +++
ptypy/engines/ML.py | 3 +++
ptypy/engines/base.py | 8 ++++++++
ptypy/engines/dummy.py | 3 +++
ptypy/engines/ePIE.py | 3 +++
8 files changed, 34 insertions(+), 6 deletions(-)
diff --git a/ptypy/core/classes.py b/ptypy/core/classes.py
index 59fd504e4..ba3a5f2c7 100644
--- a/ptypy/core/classes.py
+++ b/ptypy/core/classes.py
@@ -1907,14 +1907,18 @@ class POD(Base):
_PREFIX = POD_PREFIX
- def __init__(self, ptycho=None, ID=None, views=None, geometry=None,
- **kwargs):
+ def __init__(self, ptycho=None, model=None, ID=None, views=None,
+ geometry=None, **kwargs):
"""
Parameters
----------
ptycho : Ptycho
The instance of Ptycho associated with this pod.
+ model : ScanModel
+            The instance of ScanModel (or its subclasses) which describes
+ this pod.
+
ID : str or int
The pod ID, If None it is managed by the ptycho.
@@ -1926,10 +1930,7 @@ def __init__(self, ptycho=None, ID=None, views=None, geometry=None,
"""
super(POD, self).__init__(ptycho, ID, False)
- # if len(kwargs) > 0:
- # self._initialize(**kwargs)
-
- # def _initialize(self, views=None, geometry=None): #,meta=None):
+ self.model = model
# other defaults:
self.is_empty = False
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index cdbb3b23a..3db8a8578 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -280,6 +280,10 @@ def new_data(self):
# Create new views on object, probe, and exit wave, and connect
# these through new pods.
new_pods, new_probe_ids, new_object_ids = self._create_pods()
+ for pod_ in new_pods:
+ if pod_.model is not None:
+ continue
+ pod_.model = self
logger.info('Process %d created %d new PODs, %d new probes and %d new objects.' % (
parallel.rank, len(new_pods), len(new_probe_ids), len(new_object_ids)), extra={'allprocesses': True})
diff --git a/ptypy/engines/DM.py b/ptypy/engines/DM.py
index b9770e637..ab95711a6 100644
--- a/ptypy/engines/DM.py
+++ b/ptypy/engines/DM.py
@@ -15,6 +15,7 @@
from utils import basic_fourier_update
from . import BaseEngine
from ..utils.descriptor import defaults_tree
+from ..core.manager import Full, Vanilla
__all__ = ['DM']
@@ -112,6 +113,8 @@ class DM(BaseEngine):
"""
+ SUPPORTED_MODELS = [Full, Vanilla]
+
def __init__(self, ptycho_parent, pars=None):
"""
Difference map reconstruction engine.
diff --git a/ptypy/engines/DM_simple.py b/ptypy/engines/DM_simple.py
index 7a603cdc2..b730884bb 100644
--- a/ptypy/engines/DM_simple.py
+++ b/ptypy/engines/DM_simple.py
@@ -15,6 +15,7 @@
import numpy as np
from ..utils import parallel
from ..utils.descriptor import defaults_tree
+from ..core.manager import Full, Vanilla
__all__ = ['DM_simple']
@@ -57,6 +58,8 @@ class DM_simple(BaseEngine):
"""
+ SUPPORTED_MODELS = [Full, Vanilla]
+
def __init__(self, ptycho, pars=None):
"""
Simplest possible Difference map reconstruction engine.
diff --git a/ptypy/engines/ML.py b/ptypy/engines/ML.py
index 0d4132cc4..5c06e7405 100644
--- a/ptypy/engines/ML.py
+++ b/ptypy/engines/ML.py
@@ -18,6 +18,7 @@
from utils import Cnorm2, Cdot
from . import BaseEngine
from ..utils.descriptor import defaults_tree
+from ..core.manager import Full, Vanilla
__all__ = ['ML']
@@ -97,6 +98,8 @@ class ML(BaseEngine):
"""
+ SUPPORTED_MODELS = [Full, Vanilla]
+
def __init__(self, ptycho_parent, pars=None):
"""
Maximum likelihood reconstruction engine.
diff --git a/ptypy/engines/base.py b/ptypy/engines/base.py
index 22587370c..4f572d057 100644
--- a/ptypy/engines/base.py
+++ b/ptypy/engines/base.py
@@ -64,6 +64,9 @@ class BaseEngine(object):
"""
+ # Define with which models this engine can work.
+ COMPATIBLE_MODELS = []
+
def __init__(self, ptycho, pars=None):
"""
Base reconstruction engine.
@@ -149,6 +152,11 @@ def prepare(self):
support = (np.pi * (xx**2 + yy**2) < supp * sh[1] * sh[2])
self.probe_support[name] = support
+ # Make sure all the pods are supported
+ for label_, pod_ in self.pods.iteritems():
+ if not pod_.model.__class__ in self.SUPPORTED_MODELS:
+ raise Exception('Model %s not supported by engine' % pod_.model.__class__)
+
# Call engine specific preparation
self.engine_prepare()
diff --git a/ptypy/engines/dummy.py b/ptypy/engines/dummy.py
index ee24c32ad..162d9059c 100644
--- a/ptypy/engines/dummy.py
+++ b/ptypy/engines/dummy.py
@@ -16,6 +16,7 @@
from ..utils import parallel
from . import BaseEngine
from ..utils.descriptor import defaults_tree
+from ..core.manager import Full, Vanilla
__all__ = ['Dummy']
@@ -40,6 +41,8 @@ class Dummy(BaseEngine):
"""
+ SUPPORTED_MODELS = [Full, Vanilla]
+
def __init__(self, ptycho_parent, pars=None):
"""
Dummy reconstruction engine.
diff --git a/ptypy/engines/ePIE.py b/ptypy/engines/ePIE.py
index b69916f68..c418b74f6 100644
--- a/ptypy/engines/ePIE.py
+++ b/ptypy/engines/ePIE.py
@@ -30,6 +30,7 @@
from ..utils import parallel
from . import BaseEngine
from ..utils.descriptor import defaults_tree
+from ..core.manager import Full, Vanilla
__all__ = ['EPIE']
@@ -114,6 +115,8 @@ class EPIE(BaseEngine):
"""
+ SUPPORTED_MODELS = [Full, Vanilla]
+
def __init__(self, ptycho_parent, pars=None):
"""
ePIE reconstruction engine.
From abf4c7df8db2ac4896b0010dd0411b07e5f9514b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 6 Nov 2017 21:55:02 +0100
Subject: [PATCH 183/363] Less stupid Geo inheritance with new options
structure
---
ptypy/core/geometry.py | 13 ++----
ptypy/core/geometry_bragg.py | 77 ++++++++++++++++++++----------------
2 files changed, 46 insertions(+), 44 deletions(-)
diff --git a/ptypy/core/geometry.py b/ptypy/core/geometry.py
index d7a84b686..616063dc1 100644
--- a/ptypy/core/geometry.py
+++ b/ptypy/core/geometry.py
@@ -23,8 +23,6 @@
from classes import Base, GEO_PREFIX
from ..utils.descriptor import EvalDescriptor
-import numpy as np
-from scipy import fftpack
try:
import pyfftw
import pyfftw.interfaces.numpy_fft as fftw_np
@@ -147,8 +145,8 @@ class Geo(Base):
_keV2m = 1.23984193e-09
_PREFIX = GEO_PREFIX
-
- def __init__(self, owner=None, ID=None, pars=None, default_override=None, **kwargs):
+
+ def __init__(self, owner=None, ID=None, pars=None, **kwargs):
"""
Parameters
----------
@@ -167,10 +165,7 @@ def __init__(self, owner=None, ID=None, pars=None, default_override=None, **kwar
super(Geo, self).__init__(owner, ID)
# Starting parameters
- if default_override is not None:
- p = u.Param(default_override)
- else:
- p = self.DEFAULT.copy(99)
+ p = self.DEFAULT.copy(99)
if pars is not None:
p.update(pars)
for k, v in p.items():
@@ -179,7 +174,7 @@ def __init__(self, owner=None, ID=None, pars=None, default_override=None, **kwar
for k, v in kwargs.iteritems():
if k in p:
p[k] = v
-
+
self.p = p
self._initialize(p)
diff --git a/ptypy/core/geometry_bragg.py b/ptypy/core/geometry_bragg.py
index 8044706b5..44dbb11ab 100644
--- a/ptypy/core/geometry_bragg.py
+++ b/ptypy/core/geometry_bragg.py
@@ -1,51 +1,60 @@
"""
Geometry management and propagation for Bragg geometry.
-
-This class follows the naming convention of:
-Berenguer et al., Phys. Rev. B 88 (2013) 144101.
-
-Indexing into all q-space arrays and storages follows (q3, q1, q2),
-which corresponds to (r3, r1, r2) in the so-called natural real space
-coordinate system. These coordinates are transformed to (x, z, y) as
-described below.
-
"""
from .. import utils as u
from ..utils.verbose import logger
from geometry import Geo as _Geo
+from ..utils.descriptor import EvalDescriptor
from classes import Container, Storage, View
import numpy as np
from scipy.ndimage.interpolation import map_coordinates
__all__ = ['DEFAULT', 'Geo_Bragg']
-DEFAULT = u.Param(
- # Incident photon energy (in keV)
- energy=15.25,
- # Wavelength (in meters)
- lam=None,
- # Distance from object to screen
- distance=2.3,
- # Rocking curve step (in degrees) and pixel sizes (in meters).
- psize=(.065, 172e-6, 172e-6),
- # Bragg angle in degrees
- theta_bragg=6.89,
- # 3D sample pixel size (in meters) in the conjugate (natural) coordinate
- # system
- resolution=None,
- # Number of rocking curve positions and detector pixels
- shape=(31, 128, 128),
-)
-
+local_tree = EvalDescriptor('')
+@local_tree.parse_doc()
class Geo_Bragg(_Geo):
-
- DEFAULT = DEFAULT
-
- def __init__(self, owner=None, ID=None, pars=None, **kwargs):
- super(Geo_Bragg, self).__init__(
- owner, ID, pars, default_override=DEFAULT, **kwargs)
+ """
+ Class which presents a Geo analog valid for the 3d Bragg case.
+
+ This class follows the naming convention of:
+ Berenguer et al., Phys. Rev. B 88 (2013) 144101.
+
+ Indexing into all q-space arrays and storages follows (q3, q1, q2),
+ which corresponds to (r3, r1, r2) in the so-called natural real
+ space coordinate system. These coordinates are transformed to
+ (x, z, y) as described below.
+
+ Defaults:
+
+ [psize]
+ type = tuple
+ default = (.065, 172e-6, 172e-6)
+ help = Rocking curve step (in degrees) and pixel sizes (in meters)
+ doc = First element is the rocking curve step.
+
+ [propagation]
+ doc = Only "farfield" is valid for Bragg
+
+ [shape]
+ type = tuple
+ default = (31, 128, 128)
+ help = Number of rocking curve positions and detector pixels
+ doc = First element is the number of rocking curve positions.
+
+ [theta_bragg]
+ type = float
+ default = 6.89
+ help = Diffraction angle (theta, not two theta) in degrees
+
+ [resolution]
+ type = tuple
+ default = None
+ help = 3D sample pixel size (in meters)
+ doc = Refers to the conjugate (natural) coordinate system as (r3, r1, r2).
+ """
def _initialize(self, p):
"""
@@ -494,8 +503,6 @@ class BasicBragg3dPropagator(object):
magic applied here (at the moment).
"""
- DEFAULT = DEFAULT
-
def __init__(self, geo=None, ffttype='numpy'):
self.geo = geo
if ffttype == 'numpy':
From eca9b2eab41af262071a091857816ddd4142fb1c Mon Sep 17 00:00:00 2001
From: Benders
Date: Tue, 7 Nov 2017 19:44:15 -0800
Subject: [PATCH 184/363] Reverted the deletion of meta in PtyScan. Moved
makePtyScan into ScanModel to avoid crossimport. Adapted the rest to make
test work.
---
ptypy/core/data.py | 76 ++++++++++++-------
ptypy/core/manager.py | 32 +++++++-
ptypy/experiment/__init__.py | 33 --------
.../ptyscan_tests/on_the_fly_ptyd_test.py | 4 +-
4 files changed, 80 insertions(+), 65 deletions(-)
diff --git a/ptypy/core/data.py b/ptypy/core/data.py
index 6fccfd785..abca9413c 100644
--- a/ptypy/core/data.py
+++ b/ptypy/core/data.py
@@ -56,6 +56,7 @@
EOS: 'End of scan reached'}
+
__all__ = ['PtyScan', 'PTYD', 'PtydScan',
'MoonFlowerScan']
@@ -198,7 +199,7 @@ class PtyScan(object):
[center]
type = tuple, str
- default = 'fftshift'
+ default = None
help = Center (pixel) of the optical axes in raw data
doc = If ``None``, this parameter will be set by :py:data:`~.scan.data.auto_center` or elsewhere
userlevel = 1
@@ -232,6 +233,9 @@ class PtyScan(object):
EOS = EOS
CODES = CODES
+ METAKEYS = ['version', 'num_frames', 'label', 'shape', 'psize', 'energy', 'center', 'distance']
+ """ Keys to store in meta param """
+
def __init__(self, pars=None, **kwargs):
# filename='./foo.ptyd', shape=None, save=True):
"""
@@ -295,6 +299,9 @@ def __init__(self, pars=None, **kwargs):
self.dfile = None
self.save = self.info.save
+ # Construct meta
+ self.meta = u.Param({k: self.info[k] for k in self.METAKEYS})
+
self.orientation = self.info.orientation
self.rebin = self.info.rebin
@@ -315,7 +322,8 @@ def initialize(self):
* Sets :py:attr:`num_frames` if needed
* Calls :py:meth:`post_initialize`
"""
- logger.info(headerline('Enter PtyScan.initialize()', 'l'))
+ logger.info(headerline('Enter %s.initialize()'
+ % self.__class__.__name__, 'l'))
# Prepare writing to file
if self.info.save is not None:
@@ -363,6 +371,9 @@ def initialize(self):
if self.has_weight2d:
logger.info('shape = '.rjust(29) + str(self.weight2d.shape))
+ # FIXME: Saving weight to info. This is not ideal, info is optional
+ self.info.weight2d = self.has_weight2d
+
logger.info('All experimental positions : ' + str(self.has_positions))
if self.has_positions:
logger.info('shape = '.rjust(29) + str(positions.shape))
@@ -412,7 +423,7 @@ def initialize(self):
# A note about how much this scan class knows about the number
# of frames expected. PtydScan uses this information.
- self.info.num_frames_actual = self.num_frames
+ self.meta.num_frames = self.num_frames
parallel.barrier()
"""
#logger.info('####### MPI Report: ########\n')
@@ -671,7 +682,7 @@ def get_data_chunk(self, chunksize, start=None):
altweight = self.weight2d
else:
try:
- altweight = self.weight2d
+ altweight = self.info.weight2d
except:
altweight = np.ones(dsh)
weights = dict.fromkeys(data.keys(), altweight)
@@ -701,7 +712,7 @@ def get_data_chunk(self, chunksize, start=None):
cen = self.info.center
if str(cen) == cen:
- cen = geometry.translate_to_pix(sh, cen)
+ cen = geometry.translate_to_pix(dsh, cen)
auto = self.info.auto_center
# Get center in diffraction image
@@ -733,7 +744,7 @@ def get_data_chunk(self, chunksize, start=None):
# Make sure center is in the image frame
assert (cen > 0).all() and (dsh - cen > 0).all(), (
'Optical axes (center = (%.1f, %.1f) outside diffraction image '
- 'frame (%d, %d).' % tuple(cen) + tuple(dsh))
+ 'frame (%d, %d).' % (tuple(cen) + tuple(dsh)))
# Determine if the arrays require further processing
do_flip = (self.orientation is not None
@@ -806,11 +817,11 @@ def get_data_chunk(self, chunksize, start=None):
weights = dict(zip(indices.node, w))
# Adapt geometric info
- self.info.center = cen / float(self.rebin)
- self.info.shape = u.expect2(sh) / self.rebin
+ self.meta.center = cen / float(self.rebin)
+ self.meta.shape = u.expect2(sh) / self.rebin
if self.info.psize is not None:
- self.info.psize = u.expect2(self.info.psize) * self.rebin
+ self.meta.psize = u.expect2(self.info.psize) * self.rebin
# Prepare chunk of data
chunk = u.Param()
@@ -851,7 +862,7 @@ def get_data_chunk(self, chunksize, start=None):
# With first chunk we update info
if self.chunknum < 1:
if self.info.save is not None and parallel.master:
- io.h5append(self.dfile, meta=dict(self.info))
+ io.h5append(self.dfile, meta=dict(self.meta))
parallel.barrier()
@@ -903,9 +914,7 @@ def _make_data_package(self, chunk):
"""
# The "common" part
- keys = ['label', 'experimentID', 'version', 'shape', 'psize', 'energy', 'center', 'distance']
- common = u.Param({k: self.info[k] for k in keys})
- out = {'common': common}
+ out = {'common': self.meta}
# The "iterable" part
iterables = []
@@ -1215,10 +1224,10 @@ class PtydScan(PtyScan):
doc =
[source]
- default = 'scan.ptyd'
- type = str
- help = Input ptyd file
- doc =
+ default = 'file'
+ type = str, None
+ help = Alternate source file path if data is meant to be reprocessed.
+ doc = `None` for input shall be deprecated in future
"""
@@ -1229,7 +1238,7 @@ def __init__(self, pars=None, **kwargs):
# Create parameter set
p = self.DEFAULT.copy(99)
p.update(pars)
-
+ p.update(kwargs)
source = p.source
if source is None or str(source) == 'file':
@@ -1268,26 +1277,33 @@ def __init__(self, pars=None, **kwargs):
# At least ONE chunk must exist to ensure everything works
with h5py.File(source, 'r') as f:
check = f.get('chunks/0')
+ f.close()
# Get number of frames supposedly in the file
# FIXME: try/except clause only for backward compatibility
# for .ptyd files created prior to commit 2e626ff
- try:
- source_frames = f.get('info/num_frames_actual')[...].item()
- except TypeError:
- source_frames = len(f.get('info/positions_scan')[...])
- f.close()
+ #try:
+ # source_frames = f.get('info/num_frames_actual')[...].item()
+ #except TypeError:
+ # source_frames = len(f.get('info/positions_scan')[...])
+ #f.close()
if check is None:
raise IOError('Ptyd source %s contains no data. Load aborted'
% source)
+ """
if source_frames is None:
logger.warning('Ptyd source is not aware of the total'
'number of diffraction frames expected')
+ """
# Get meta information
meta = u.Param(io.h5read(self.source, 'meta')['meta'])
+ if meta.get('num_frames') is None:
+ logger.warning('Ptyd source is not aware of the total'
+ 'number of diffraction frames expected')
+
if len(meta) == 0:
logger.warning('There should be meta information in '
'%s. Something is odd here.' % source)
@@ -1297,12 +1313,19 @@ def __init__(self, pars=None, **kwargs):
p.update(meta)
else:
# Replace only None entries in p
+ # FIXME:
+ # BE: This was the former right way when the defaults
+ # were mostly None, now this no longer applies, unless
+ # defaults are overwritten to None. I guess it would be
+ # canonical now to overwrite the defaults in the
+ # docstring. But since reprocessing is rare
for k, v in meta.items():
if p.get(k) is None:
p[k] = v
- super(PtydScan, self).__init__(p, **kwargs)
+ super(PtydScan, self).__init__(p)
+ """
if source_frames is not None:
if self.num_frames is None:
self.num_frames = source_frames
@@ -1313,6 +1336,7 @@ def __init__(self, pars=None, **kwargs):
# but we cannot do anything about it. This should be dealt
# with with a flag in the meta package probably.
pass
+ """
# Other instance attributes
self._checked = {}
@@ -1471,9 +1495,7 @@ def __init__(self, pars=None, **kwargs):
super(MoonFlowerScan, self).__init__(p, **kwargs)
# Derive geometry from input
- keys = ['label', 'experimentID', 'version', 'shape', 'psize', 'energy', 'center', 'distance']
- geo_pars = u.Param({k: self.info[k] for k in keys})
- geo = geometry.Geo(pars=geo_pars)
+ geo = geometry.Geo(pars=self.meta)
# Derive scan pattern
pos = u.Param()
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 3db8a8578..a8750f5ab 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -99,8 +99,6 @@ def __init__(self, ptycho=None, pars=None, label=None):
label : str
Unique label
"""
- from .. import experiment
-
# Update parameter structure
# Load default parameter structure
p = self.DEFAULT.copy(99)
@@ -110,7 +108,7 @@ def __init__(self, ptycho=None, pars=None, label=None):
self.ptycho = ptycho
# Create Associated PtyScan object
- self.ptyscan = experiment.makePtyScan(self.p.data)
+ self.ptyscan = self.makePtyScan(self.p.data)
# Initialize instance attributes
self.mask = None
@@ -133,6 +131,34 @@ def __init__(self, ptycho=None, pars=None, label=None):
self.FType = FType
self.frames_per_call = 100000
+ @classmethod
+ def makePtyScan(cls, pars, scanmodel=None):
+ """
+ Factory for PtyScan object. Return an instance of the appropriate PtyScan subclass based on the
+ input parameters.
+
+ Parameters
+ ----------
+ pars: dict or Param
+ Input parameters according to :py:data:`.scan.data`.
+ scanmodel: ScanModel object
+ FIXME: This seems to be needed for simulations but broken for now.
+ """
+
+ # Extract information on the type of object to build
+ name = pars.name
+
+ from .. import experiment
+
+ if name in u.all_subclasses(PtyScan, names=True):
+ ps_class = eval(name)
+ logger.info('Scan will be prepared with the PtyScan subclass "%s"' % name)
+ ps_instance = ps_class(pars)
+ else:
+ raise RuntimeError('Could not manage source "%s"' % str(name))
+
+ return ps_instance
+
def new_data(self):
"""
Feed data from ptyscan object.
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index fee5baaf8..82fd1bd8f 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -32,36 +32,3 @@
from ALS_5321 import ALS5321Scan
-if __name__ == "__main__":
- from ptypy.utils.verbose import logger
- from ptypy.core.data import PtydScan, MoonFlowerScan, PtyScan
-else:
- from ..utils.verbose import logger
- from .. import utils as u
- from ..core.data import PtydScan, MoonFlowerScan, PtyScan
-
-def makePtyScan(pars, scanmodel=None):
- """
- Factory for PtyScan object. Return an instance of the appropriate PtyScan subclass based on the
- input parameters.
-
- Parameters
- ----------
- pars: dict or Param
- Input parameters according to :py:data:`.scan.data`.
-
- scanmodel: ScanModel object
- FIXME: This seems to be needed for simulations but broken for now.
- """
-
- # Extract information on the type of object to build
- name = pars.name
-
- if name in u.all_subclasses(PtyScan, names=True):
- ps_class = eval(name)
- logger.info('Scan will be prepared with the PtyScan subclass "%s"' % name)
- ps_instance = ps_class(pars)
- else:
- raise RuntimeError('Could not manage source "%s"' % str(name))
-
- return ps_instance
diff --git a/ptypy/test/ptyscan_tests/on_the_fly_ptyd_test.py b/ptypy/test/ptyscan_tests/on_the_fly_ptyd_test.py
index bb202dbe5..9a5e2ea2b 100644
--- a/ptypy/test/ptyscan_tests/on_the_fly_ptyd_test.py
+++ b/ptypy/test/ptyscan_tests/on_the_fly_ptyd_test.py
@@ -62,8 +62,8 @@ def _create_PtydScan(self, save='append', **kwargs):
dfile = str(data.dfile)
# Base parameters
- from ptypy.core.data import PtyScan
- data = PtyScan.DEFAULT.copy()
+ from ptypy.core.data import PtydScan
+ data = PtydScan.DEFAULT.copy()
data.dfile = dfile.replace('.ptyd', '_aggregated.ptyd')
data.save = save # maybe replace with merge in future
data.update(**kwargs)
From 2613f0f19073505f72aef7f25c308ad5bb5688e2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 9 Nov 2017 15:17:59 +0100
Subject: [PATCH 185/363] Previous commit broke makePtyScan and geometry.update
---
ptypy/core/data.py | 2 +-
ptypy/core/manager.py | 4 ++--
ptypy/experiment/__init__.py | 2 +-
3 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/ptypy/core/data.py b/ptypy/core/data.py
index abca9413c..052d5c6b5 100644
--- a/ptypy/core/data.py
+++ b/ptypy/core/data.py
@@ -199,7 +199,7 @@ class PtyScan(object):
[center]
type = tuple, str
- default = None
+ default = 'fftshift'
help = Center (pixel) of the optical axes in raw data
doc = If ``None``, this parameter will be set by :py:data:`~.scan.data.auto_center` or elsewhere
userlevel = 1
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index a8750f5ab..bc5f90a55 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -150,8 +150,8 @@ def makePtyScan(cls, pars, scanmodel=None):
from .. import experiment
- if name in u.all_subclasses(PtyScan, names=True):
- ps_class = eval(name)
+ if name in u.all_subclasses(data.PtyScan, names=True):
+ ps_class = eval('experiment.' + name)
logger.info('Scan will be prepared with the PtyScan subclass "%s"' % name)
ps_instance = ps_class(pars)
else:
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index 82fd1bd8f..c5ba47b5d 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -31,4 +31,4 @@
from nanomax import NanomaxStepscanMay2017, NanomaxStepscanNov2016, NanomaxFlyscanJune2017
from ALS_5321 import ALS5321Scan
-
+from ..core.data import MoonFlowerScan, PtydScan
From 54de7b9858d1aee155b4033bed64ea70b8592055 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 9 Nov 2017 15:20:39 +0100
Subject: [PATCH 186/363] minimal_load_and_run needs scans.MF.name
---
templates/minimal_load_and_run.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/templates/minimal_load_and_run.py b/templates/minimal_load_and_run.py
index 09f69ac73..2f815f2f4 100644
--- a/templates/minimal_load_and_run.py
+++ b/templates/minimal_load_and_run.py
@@ -15,6 +15,7 @@
p.scans = u.Param()
p.scans.MF = u.Param()
p.scans.MF.data= u.Param()
+p.scans.MF.name = 'Vanilla'
p.scans.MF.data.name = 'PtydScan'
p.scans.MF.data.source = '/tmp/ptypy/sample.ptyd'#'file'
p.scans.MF.data.dfile = 'out.ptyd'
From eb4a862f81943ede0e7999b4a7608c78912e46a1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 9 Nov 2017 16:02:14 +0100
Subject: [PATCH 187/363] Intermediate commit: commented out sharing and put in
workarounds
---
ptypy/core/manager.py | 193 +++++++++++++------------
ptypy/core/model.py | 319 +++++++++++++++++++++---------------------
ptypy/core/ptycho.py | 21 +--
3 files changed, 270 insertions(+), 263 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index bc5f90a55..87879e113 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -17,7 +17,8 @@
import illumination
import sample
import geometry
-import model
+### FIXME: reimplement sharing before 0.3 release
+# import model
import xy
import data
@@ -132,7 +133,7 @@ def __init__(self, ptycho=None, pars=None, label=None):
self.frames_per_call = 100000
@classmethod
- def makePtyScan(cls, pars, scanmodel=None):
+ def makePtyScan(cls, pars):
"""
Factory for PtyScan object. Return an instance of the appropriate PtyScan subclass based on the
input parameters.
@@ -141,8 +142,6 @@ def makePtyScan(cls, pars, scanmodel=None):
----------
pars: dict or Param
Input parameters according to :py:data:`.scan.data`.
- scanmodel: ScanModel object
- FIXME: This seems to be needed for simulations but broken for now.
"""
# Extract information on the type of object to build
@@ -604,54 +603,55 @@ class Full(ScanModel):
help =
doc =
- [sharing]
- default =
- help = Scan sharing options
- doc =
- type = Param
- userlevel =
-
- [sharing.object_share_with]
- default = None
- help = Label or index of scan to share object with.
- doc = Possible values:
- - ``None``: Do not share
- - *(string)*: Label of the scan to share with
- - *(int)*: Index of scan to share with
- type = str
- userlevel = 1
-
- [sharing.object_share_power]
- default = 1
- help = Relative power for object sharing
- doc =
- type = float
- userlevel = 1
- lowlim = 0
-
- [sharing.probe_share_with]
- default = None
- help = Label or index of scan to share probe with.
- doc = Possible values:
- - ``None``: Do not share
- - *(string)*: Label of the scan to share with
- - *(int)*: Index of scan to share with
- type = str
- userlevel = 1
-
- [sharing.probe_share_power]
- default = 1
- help = Relative power for probe sharing
- doc =
- type = float
- userlevel = 1
- lowlim = 0
-
- [sharing.EP_sharing]
- type = bool
- default = False
- help = Empty probe sharing switch
- doc =
+ ### FIXME: reimplement sharing before 0.3 release
+ # [sharing]
+ # default =
+ # help = Scan sharing options
+ # doc =
+ # type = Param
+ # userlevel =
+
+ # [sharing.object_share_with]
+ # default = None
+ # help = Label or index of scan to share object with.
+ # doc = Possible values:
+ # - ``None``: Do not share
+ # - *(string)*: Label of the scan to share with
+ # - *(int)*: Index of scan to share with
+ # type = str
+ # userlevel = 1
+
+ # [sharing.object_share_power]
+ # default = 1
+ # help = Relative power for object sharing
+ # doc =
+ # type = float
+ # userlevel = 1
+ # lowlim = 0
+
+ # [sharing.probe_share_with]
+ # default = None
+ # help = Label or index of scan to share probe with.
+ # doc = Possible values:
+ # - ``None``: Do not share
+ # - *(string)*: Label of the scan to share with
+ # - *(int)*: Index of scan to share with
+ # type = str
+ # userlevel = 1
+
+ # [sharing.probe_share_power]
+ # default = 1
+ # help = Relative power for probe sharing
+ # doc =
+ # type = float
+ # userlevel = 1
+ # lowlim = 0
+
+ # [sharing.EP_sharing]
+ # type = bool
+ # default = False
+ # help = Empty probe sharing switch
+ # doc =
[coherence]
default =
@@ -1031,22 +1031,23 @@ class Full(ScanModel):
_PREFIX = MODEL_PREFIX
- def __init__(self, ptycho=None, pars=None, label=None):
- """
- Override constructor to add sharing functionality.
- """
- super(Full, self).__init__(ptycho, pars, label)
+ ## FIXME: reimplement sharing before 0.3 release
+ # def __init__(self, ptycho=None, pars=None, label=None):
+ # """
+ # Override constructor to add sharing functionality.
+ # """
+ # super(Full, self).__init__(ptycho, pars, label)
- # Sharing dictionary that stores sharing behavior
- self.sharing = {'probe_ids': {}, 'object_ids': {}}
+ # # Sharing dictionary that stores sharing behavior
+ # self.sharing = {'probe_ids': {}, 'object_ids': {}}
- # REDESIGN: this will be replaced
- # Initialize sharing rules for POD creations
- sharing_pars = u.Param({'model_type': 'basic',
- 'scan_per_probe': 1,
- 'scan_per_object': 1,
- 'npts': None})
- self.sharing_rules = model.parse_model(sharing_pars, self.sharing)
+ # # REDESIGN: this will be replaced
+ # # Initialize sharing rules for POD creations
+ # sharing_pars = u.Param({'model_type': 'basic',
+ # 'scan_per_probe': 1,
+ # 'scan_per_object': 1,
+ # 'npts': None})
+ # self.sharing_rules = model.parse_model(sharing_pars, self.sharing)
def _create_pods(self):
"""
@@ -1062,9 +1063,7 @@ def _create_pods(self):
# Get a list of probe and object that already exist
existing_probes = self.ptycho.probe.storages.keys()
- # SC: delete? self.sharing_rules.probe_ids.keys()
existing_objects = self.ptycho.obj.storages.keys()
- # SC: delete? self.sharing_rules.object_ids.keys()
logger.info('Found these probes : ' + ', '.join(existing_probes))
logger.info('Found these objects: ' + ', '.join(existing_objects))
@@ -1074,10 +1073,13 @@ def _create_pods(self):
di_views = scan.new_diff_views
ma_views = scan.new_mask_views
- # Compute sharing rules
- share = scan.p.sharing
- alt_obj = share.object_share_with if share is not None else None
- alt_pr = share.probe_share_with if share is not None else None
+ ### FIXME: reimplement sharing before 0.3 release
+ alt_obj = None
+ alt_pr = None
+ # # Compute sharing rules
+ # share = scan.p.sharing
+ # alt_obj = share.object_share_with if share is not None else None
+ # alt_pr = share.probe_share_with if share is not None else None
obj_label = label if alt_obj is None else alt_obj
pr_label = label if alt_pr is None else alt_pr
@@ -1092,8 +1094,11 @@ def _create_pods(self):
pos_pr = u.expect2(0.0)
pos_obj = positions[i] if 'empty' not in scan.p.tags else 0.0
- t, object_id = self.sharing_rules(obj_label, index)
- probe_id, t = self.sharing_rules(pr_label, index)
+ ### FIXME: reimplement sharing before 0.3 release
+ object_id = 'S00'
+ probe_id = 'S00'
+ # t, object_id = self.sharing_rules(obj_label, index)
+ # probe_id, t = self.sharing_rules(pr_label, index)
# For multiwavelength reconstructions: loop here over
# geometries, and modify probe_id and object_id.
@@ -1112,7 +1117,9 @@ def _create_pods(self):
if (probe_id_suf not in new_probe_ids.keys()
and probe_id_suf not in existing_probes):
new_probe_ids[probe_id_suf] = (
- self.sharing_rules.probe_ids[probe_id])
+ ### FIXME: reimplement sharing before 0.3 release
+ [self.label,])
+ #self.sharing_rules.probe_ids[probe_id])
odis = scan.p.coherence.object_dispersion
@@ -1125,7 +1132,9 @@ def _create_pods(self):
if (object_id_suf not in new_object_ids.keys()
and object_id_suf not in existing_objects):
new_object_ids[object_id_suf] = (
- self.sharing_rules.object_ids[object_id])
+ ### FIXME: reimplement sharing before 0.3 release
+ [self.label,])
+ #self.sharing_rules.object_ids[object_id])
# Loop through modes
for pm in range(scan.p.coherence.num_probe_modes):
@@ -1178,18 +1187,21 @@ def _create_pods(self):
new_pods.append(pod)
- # If Empty Probe sharing is enabled,
- # adjust POD accordingly.
- if share is not None:
- pod.probe_weight = share.probe_share_power
- pod.object_weight = share.object_share_power
- if share.EP_sharing:
- pod.is_empty = True
- else:
- pod.is_empty = False
- else:
- pod.probe_weight = 1
- pod.object_weight = 1
+ ### FIXME: reimplement sharing before 0.3 release
+ pod.probe_weight = 1
+ pod.object_weight = 1
+ # # If Empty Probe sharing is enabled,
+ # # adjust POD accordingly.
+ # if share is not None:
+ # pod.probe_weight = share.probe_share_power
+ # pod.object_weight = share.object_share_power
+ # if share.EP_sharing:
+ # pod.is_empty = True
+ # else:
+ # pod.is_empty = False
+ # else:
+ # pod.probe_weight = 1
+ # pod.object_weight = 1
return new_pods, new_probe_ids, new_object_ids
@@ -1338,8 +1350,9 @@ def __init__(self, ptycho, pars):
self.scans[label] = cls(ptycho=self.ptycho, pars=scan_pars, label=label)
def _to_dict(self):
- # Delete the model class. We do not really need to store it.
- del self.sharing_rules
+ ### FIXME: reimplement sharing before 0.3 release
+ # # Delete the model class. We do not really need to store it.
+ # del self.sharing_rules
return self.__dict__.copy()
@classmethod
diff --git a/ptypy/core/model.py b/ptypy/core/model.py
index 9b4882de4..c2f7aea96 100644
--- a/ptypy/core/model.py
+++ b/ptypy/core/model.py
@@ -7,161 +7,164 @@
:copyright: Copyright 2014 by the PTYPY team, see AUTHORS.
:license: GPLv2, see LICENSE for details.
"""
-from .. import utils as u
-from ..utils.verbose import logger
-from classes import STORAGE_PREFIX
-
-__all__ = ['parse_model']
-
-DEFAULT = u.Param(
- model_type='basic',
- scan_per_probe=1,
- scan_per_object=1,
- npts=None
-)
-
-MAX_SCAN_COUNT = 100
-
-
-def parse_model(pars, sharing_dct):
- """
- This factory function takes a model description in the input parameters
- and returns an object that can be called with a scan_label (or index) and
- a diffraction pattern index, and returns probe and object ids.
- """
- p = u.Param(DEFAULT)
- p.update(pars)
- if p.model_type.lower() == 'basic':
- return BasicSharingModel(sharing_dct,
- p.scan_per_probe,
- p.scan_per_object,
- p.npts)
- else:
- raise RuntimeError('model type %s not supported.' % p.model_type)
-
-
-class BasicSharingModel(object):
- """
- BasicSharingModel: implements the most common scan-sharing patterns.
- """
-
- def __init__(self, sharing_dct, scan_per_probe, scan_per_object, npts=None):
- """
- BasicSharingModel: implements the most common scan-sharing patterns.
-
- Parameters:
- -----------
- scan_per_probe: float < 1 or int
- number of contiguous scans using the same probe. If a int, the
- number of scans. If a float < 0, split the scans into
- 1/scan_per_probe independent probes. For instance,
- scan_per_probe = .5 will split all scans in two and assign a
- different probe to each.
- scan_per_object: int
- number of contiguous scans using the same object.
- npts: int
- number of diffraction patterns in a given scan. Needed only if
- scan_per_probe < 1.
- """
- # Prepare probe sharing
- if scan_per_probe == 0:
- self.shared_probe = True
- self.single_probe = True
- logger.info('Sharing a single probe for ALL scans.')
- elif scan_per_probe >= 1:
- self.shared_probe = True
- self.single_probe = False
- self.scan_per_probe = int(scan_per_probe)
- logger.info(
- 'Model: sharing probe between scans '
- '(one new probe every %d scan)' % self.scan_per_probe)
- else:
- self.shared_probe = False
- self.single_probe = False
- # The following will fail if npts wasn't provided.
- self.diff_per_probe = int(npts * scan_per_probe)
- self.npts = npts
- logger.info(
- 'Model: splitting scans (every %d diffraction patter)'
- % self.diff_per_probe)
-
- # Prepare object sharing
- if scan_per_object == 0:
- self.single_object = True
- self.shared_object = True
- logger.info('Sharing a single object for ALL scans.')
- elif scan_per_object >= 1:
- self.single_object = False
- self.shared_object = True
- self.scan_per_object = int(scan_per_object)
- logger.info(
- 'Model: sharing object between scans '
- '(one new object every %d scan)' % self.scan_per_object)
- else:
- raise RuntimeError(
- 'scan_per_object < 1. not supported. What does it mean anyway?')
-
- self.scan_labels = []
- self.probe_ids = sharing_dct['probe_ids']
- self.object_ids = sharing_dct['object_ids']
-
- def __call__(self, scan_label, diff_index):
- """
- Return probe and object ids given a scan and diffraction pattern index.
-
- Parameters:
- -----------
- scan_label: str or int
- An identifier for a scan (label or index)
- diff_index: int
- The index of the diffraction pattern in the given scan.
- """
- # Get an index for the scan_label
- if str(scan_label) == scan_label:
- # If it is a string, look it up
- if scan_label in self.scan_labels:
- scan_index = self.scan_labels.index(scan_label)
- else:
- scan_index = len(self.scan_labels)
- self.scan_labels.append(scan_label)
- else:
- # Nothing to do if it is an index
- scan_index = scan_label
-
- # Apply the rules for probe sharing
- if self.single_probe:
- probe_id = 0
- elif self.shared_probe:
- probe_id = scan_index // self.scan_per_probe
- else:
- probe_id = (scan_index * (self.npts // self.diff_per_probe)
- + diff_index // self.diff_per_probe)
-
- # Follow the format specified in Viewmanager
- probe_id = STORAGE_PREFIX + '%02d' % probe_id
-
- # ... and the rules for object sharing
- if self.single_object:
- object_id = 0
- else:
- object_id = scan_index // self.scan_per_object
-
- # Follow the format specified in Viewmanager
- object_id = STORAGE_PREFIX + '%02d' % object_id
-
- logger.debug(
- "Model assigned frame %d of scan %s to probe %s & object %s"
- % (diff_index, str(scan_label), probe_id, object_id))
-
- # Store sharing info
- pl = self.probe_ids.get(probe_id, [])
- if scan_label not in pl:
- pl.append(scan_label)
- self.probe_ids[probe_id] = pl
-
- ol = self.object_ids.get(object_id, [])
- if scan_label not in ol:
- ol.append(scan_label)
- self.object_ids[object_id] = ol
-
- return probe_id, object_id
+
+### FIXME: reimplement sharing before 0.3 release
+raise Exception('This module is not currently used - don''t import!')
+# from .. import utils as u
+# from ..utils.verbose import logger
+# from classes import STORAGE_PREFIX
+
+# __all__ = ['parse_model']
+
+# DEFAULT = u.Param(
+# model_type='basic',
+# scan_per_probe=1,
+# scan_per_object=1,
+# npts=None
+# )
+
+# MAX_SCAN_COUNT = 100
+
+
+# def parse_model(pars, sharing_dct):
+# """
+# This factory function takes a model description in the input parameters
+# and returns an object that can be called with a scan_label (or index) and
+# a diffraction pattern index, and returns probe and object ids.
+# """
+# p = u.Param(DEFAULT)
+# p.update(pars)
+# if p.model_type.lower() == 'basic':
+# return BasicSharingModel(sharing_dct,
+# p.scan_per_probe,
+# p.scan_per_object,
+# p.npts)
+# else:
+# raise RuntimeError('model type %s not supported.' % p.model_type)
+
+
+# class BasicSharingModel(object):
+# """
+# BasicSharingModel: implements the most common scan-sharing patterns.
+# """
+
+# def __init__(self, sharing_dct, scan_per_probe, scan_per_object, npts=None):
+# """
+# BasicSharingModel: implements the most common scan-sharing patterns.
+
+# Parameters:
+# -----------
+# scan_per_probe: float < 1 or int
+# number of contiguous scans using the same probe. If a int, the
+# number of scans. If a float < 0, split the scans into
+# 1/scan_per_probe independent probes. For instance,
+# scan_per_probe = .5 will split all scans in two and assign a
+# different probe to each.
+# scan_per_object: int
+# number of contiguous scans using the same object.
+# npts: int
+# number of diffraction patterns in a given scan. Needed only if
+# scan_per_probe < 1.
+# """
+# # Prepare probe sharing
+# if scan_per_probe == 0:
+# self.shared_probe = True
+# self.single_probe = True
+# logger.info('Sharing a single probe for ALL scans.')
+# elif scan_per_probe >= 1:
+# self.shared_probe = True
+# self.single_probe = False
+# self.scan_per_probe = int(scan_per_probe)
+# logger.info(
+# 'Model: sharing probe between scans '
+# '(one new probe every %d scan)' % self.scan_per_probe)
+# else:
+# self.shared_probe = False
+# self.single_probe = False
+# # The following will fail if npts wasn't provided.
+# self.diff_per_probe = int(npts * scan_per_probe)
+# self.npts = npts
+# logger.info(
+# 'Model: splitting scans (every %d diffraction patter)'
+# % self.diff_per_probe)
+
+# # Prepare object sharing
+# if scan_per_object == 0:
+# self.single_object = True
+# self.shared_object = True
+# logger.info('Sharing a single object for ALL scans.')
+# elif scan_per_object >= 1:
+# self.single_object = False
+# self.shared_object = True
+# self.scan_per_object = int(scan_per_object)
+# logger.info(
+# 'Model: sharing object between scans '
+# '(one new object every %d scan)' % self.scan_per_object)
+# else:
+# raise RuntimeError(
+# 'scan_per_object < 1. not supported. What does it mean anyway?')
+
+# self.scan_labels = []
+# self.probe_ids = sharing_dct['probe_ids']
+# self.object_ids = sharing_dct['object_ids']
+
+# def __call__(self, scan_label, diff_index):
+# """
+# Return probe and object ids given a scan and diffraction pattern index.
+
+# Parameters:
+# -----------
+# scan_label: str or int
+# An identifier for a scan (label or index)
+# diff_index: int
+# The index of the diffraction pattern in the given scan.
+# """
+# # Get an index for the scan_label
+# if str(scan_label) == scan_label:
+# # If it is a string, look it up
+# if scan_label in self.scan_labels:
+# scan_index = self.scan_labels.index(scan_label)
+# else:
+# scan_index = len(self.scan_labels)
+# self.scan_labels.append(scan_label)
+# else:
+# # Nothing to do if it is an index
+# scan_index = scan_label
+
+# # Apply the rules for probe sharing
+# if self.single_probe:
+# probe_id = 0
+# elif self.shared_probe:
+# probe_id = scan_index // self.scan_per_probe
+# else:
+# probe_id = (scan_index * (self.npts // self.diff_per_probe)
+# + diff_index // self.diff_per_probe)
+
+# # Follow the format specified in Viewmanager
+# probe_id = STORAGE_PREFIX + '%02d' % probe_id
+
+# # ... and the rules for object sharing
+# if self.single_object:
+# object_id = 0
+# else:
+# object_id = scan_index // self.scan_per_object
+
+# # Follow the format specified in Viewmanager
+# object_id = STORAGE_PREFIX + '%02d' % object_id
+
+# logger.debug(
+# "Model assigned frame %d of scan %s to probe %s & object %s"
+# % (diff_index, str(scan_label), probe_id, object_id))
+
+# # Store sharing info
+# pl = self.probe_ids.get(probe_id, [])
+# if scan_label not in pl:
+# pl.append(scan_label)
+# self.probe_ids[probe_id] = pl
+
+# ol = self.object_ids.get(object_id, [])
+# if scan_label not in ol:
+# ol.append(scan_label)
+# self.object_ids[object_id] = ol
+
+# return probe_id, object_id
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index 4737102a0..eb613cfcf 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -21,7 +21,6 @@
from ..io import interaction
from classes import Base, Container, Storage, PTYCHO_PREFIX
from manager import ModelManager
-from . import model
from ..utils.descriptor import defaults_tree
__all__ = ['Ptycho']
@@ -723,25 +722,17 @@ def load_run(cls, runfile, load_data=True):
P._configure()
- logger.info('Reconfiguring sharing rules') # and loading data')
- print u.verbose.report(P.p)
- P.modelm.sharing_rules = model.parse_model(P.modelm.p['sharing'],
- P.modelm.sharing)
+ ### FIXME: removed sharing, to be reimplemented before 0.3
+ ### release, perhaps as a feature of the Full scan model.
+ # logger.info('Reconfiguring sharing rules') # and loading data')
+ # print u.verbose.report(P.p)
+ # P.modelm.sharing_rules = model.parse_model(P.modelm.p['sharing'],
+ # P.modelm.sharing)
logger.info('Regenerating exit waves')
P.exit.reformat()
P.modelm._initialize_exit(P.pods.values())
- """
- logger.info('Attaching datasource')
- P.datasource = P.modelm.make_datasource(P.p.data)
- logger.info('Reconfiguring sharing rules and loading data')
- P.modelm.sharing_rules = model.parse_model(P.p.model['sharing'],
- P.modelm.sharing)
- P.modelm.new_data()
-
-
- """
if load_data:
logger.info('Loading data')
P.init_data()
From 14d882d3086802dfed82d2dc2122ac791f25b3a7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Thu, 9 Nov 2017 16:13:12 +0100
Subject: [PATCH 188/363] Removed sharing altogether, to be reimplemented
 before release
---
ptypy/core/manager.py | 134 ++++-----------------------------
ptypy/core/model.py | 170 ------------------------------------------
ptypy/core/ptycho.py | 7 --
3 files changed, 13 insertions(+), 298 deletions(-)
delete mode 100644 ptypy/core/model.py
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 87879e113..549469e9b 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -17,8 +17,6 @@
import illumination
import sample
import geometry
-### FIXME: reimplement sharing before 0.3 release
-# import model
import xy
import data
@@ -603,56 +601,6 @@ class Full(ScanModel):
help =
doc =
- ### FIXME: reimplement sharing before 0.3 release
- # [sharing]
- # default =
- # help = Scan sharing options
- # doc =
- # type = Param
- # userlevel =
-
- # [sharing.object_share_with]
- # default = None
- # help = Label or index of scan to share object with.
- # doc = Possible values:
- # - ``None``: Do not share
- # - *(string)*: Label of the scan to share with
- # - *(int)*: Index of scan to share with
- # type = str
- # userlevel = 1
-
- # [sharing.object_share_power]
- # default = 1
- # help = Relative power for object sharing
- # doc =
- # type = float
- # userlevel = 1
- # lowlim = 0
-
- # [sharing.probe_share_with]
- # default = None
- # help = Label or index of scan to share probe with.
- # doc = Possible values:
- # - ``None``: Do not share
- # - *(string)*: Label of the scan to share with
- # - *(int)*: Index of scan to share with
- # type = str
- # userlevel = 1
-
- # [sharing.probe_share_power]
- # default = 1
- # help = Relative power for probe sharing
- # doc =
- # type = float
- # userlevel = 1
- # lowlim = 0
-
- # [sharing.EP_sharing]
- # type = bool
- # default = False
- # help = Empty probe sharing switch
- # doc =
-
[coherence]
default =
help = Coherence parameters
@@ -1031,24 +979,6 @@ class Full(ScanModel):
_PREFIX = MODEL_PREFIX
- ## FIXME: reimplement sharing before 0.3 release
- # def __init__(self, ptycho=None, pars=None, label=None):
- # """
- # Override constructor to add sharing functionality.
- # """
- # super(Full, self).__init__(ptycho, pars, label)
-
- # # Sharing dictionary that stores sharing behavior
- # self.sharing = {'probe_ids': {}, 'object_ids': {}}
-
- # # REDESIGN: this will be replaced
- # # Initialize sharing rules for POD creations
- # sharing_pars = u.Param({'model_type': 'basic',
- # 'scan_per_probe': 1,
- # 'scan_per_object': 1,
- # 'npts': None})
- # self.sharing_rules = model.parse_model(sharing_pars, self.sharing)
-
def _create_pods(self):
"""
Create all new pods as specified in the new_positions,
@@ -1067,22 +997,12 @@ def _create_pods(self):
logger.info('Found these probes : ' + ', '.join(existing_probes))
logger.info('Found these objects: ' + ', '.join(existing_objects))
- scan = self
+ object_id = 'S00'
+ probe_id = 'S00'
- positions = scan.new_positions
- di_views = scan.new_diff_views
- ma_views = scan.new_mask_views
-
- ### FIXME: reimplement sharing before 0.3 release
- alt_obj = None
- alt_pr = None
- # # Compute sharing rules
- # share = scan.p.sharing
- # alt_obj = share.object_share_with if share is not None else None
- # alt_pr = share.probe_share_with if share is not None else None
-
- obj_label = label if alt_obj is None else alt_obj
- pr_label = label if alt_pr is None else alt_pr
+ positions = self.new_positions
+ di_views = self.new_diff_views
+ ma_views = self.new_mask_views
# Loop through diffraction patterns
for i in range(len(di_views)):
@@ -1092,21 +1012,15 @@ def _create_pods(self):
# Object and probe position
pos_pr = u.expect2(0.0)
- pos_obj = positions[i] if 'empty' not in scan.p.tags else 0.0
-
- ### FIXME: reimplement sharing before 0.3 release
- object_id = 'S00'
- probe_id = 'S00'
- # t, object_id = self.sharing_rules(obj_label, index)
- # probe_id, t = self.sharing_rules(pr_label, index)
+ pos_obj = positions[i] if 'empty' not in self.p.tags else 0.0
# For multiwavelength reconstructions: loop here over
# geometries, and modify probe_id and object_id.
- for ii, geometry in enumerate(scan.geometries):
+ for ii, geometry in enumerate(self.geometries):
# Make new IDs and keep them in record
# sharing_rules is not aware of IDs with suffix
- pdis = scan.p.coherence.probe_dispersion
+ pdis = self.p.coherence.probe_dispersion
if pdis is None or str(pdis) == 'achromatic':
gind = 0
@@ -1116,12 +1030,9 @@ def _create_pods(self):
probe_id_suf = probe_id + 'G%02d' % gind
if (probe_id_suf not in new_probe_ids.keys()
and probe_id_suf not in existing_probes):
- new_probe_ids[probe_id_suf] = (
- ### FIXME: reimplement sharing before 0.3 release
- [self.label,])
- #self.sharing_rules.probe_ids[probe_id])
+ new_probe_ids[probe_id_suf] = True
- odis = scan.p.coherence.object_dispersion
+ odis = self.p.coherence.object_dispersion
if odis is None or str(odis) == 'achromatic':
gind = 0
@@ -1131,14 +1042,11 @@ def _create_pods(self):
object_id_suf = object_id + 'G%02d' % gind
if (object_id_suf not in new_object_ids.keys()
and object_id_suf not in existing_objects):
- new_object_ids[object_id_suf] = (
- ### FIXME: reimplement sharing before 0.3 release
- [self.label,])
- #self.sharing_rules.object_ids[object_id])
+ new_object_ids[object_id_suf] = True
# Loop through modes
- for pm in range(scan.p.coherence.num_probe_modes):
- for om in range(scan.p.coherence.num_object_modes):
+ for pm in range(self.p.coherence.num_probe_modes):
+ for om in range(self.p.coherence.num_object_modes):
# Make a unique layer index for exit view
# The actual number does not matter due to the
# layermap access
@@ -1187,21 +1095,8 @@ def _create_pods(self):
new_pods.append(pod)
- ### FIXME: reimplement sharing before 0.3 release
pod.probe_weight = 1
pod.object_weight = 1
- # # If Empty Probe sharing is enabled,
- # # adjust POD accordingly.
- # if share is not None:
- # pod.probe_weight = share.probe_share_power
- # pod.object_weight = share.object_share_power
- # if share.EP_sharing:
- # pod.is_empty = True
- # else:
- # pod.is_empty = False
- # else:
- # pod.probe_weight = 1
- # pod.object_weight = 1
return new_pods, new_probe_ids, new_object_ids
@@ -1350,9 +1245,6 @@ def __init__(self, ptycho, pars):
self.scans[label] = cls(ptycho=self.ptycho, pars=scan_pars, label=label)
def _to_dict(self):
- ### FIXME: reimplement sharing before 0.3 release
- # # Delete the model class. We do not really need to store it.
- # del self.sharing_rules
return self.__dict__.copy()
@classmethod
diff --git a/ptypy/core/model.py b/ptypy/core/model.py
deleted file mode 100644
index c2f7aea96..000000000
--- a/ptypy/core/model.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Data sharing models.
-
-This file is part of the PTYPY package.
-
- :copyright: Copyright 2014 by the PTYPY team, see AUTHORS.
- :license: GPLv2, see LICENSE for details.
-"""
-
-### FIXME: reimplement sharing before 0.3 release
-raise Exception('This module is not currently used - don''t import!')
-# from .. import utils as u
-# from ..utils.verbose import logger
-# from classes import STORAGE_PREFIX
-
-# __all__ = ['parse_model']
-
-# DEFAULT = u.Param(
-# model_type='basic',
-# scan_per_probe=1,
-# scan_per_object=1,
-# npts=None
-# )
-
-# MAX_SCAN_COUNT = 100
-
-
-# def parse_model(pars, sharing_dct):
-# """
-# This factory function takes a model description in the input parameters
-# and returns an object that can be called with a scan_label (or index) and
-# a diffraction pattern index, and returns probe and object ids.
-# """
-# p = u.Param(DEFAULT)
-# p.update(pars)
-# if p.model_type.lower() == 'basic':
-# return BasicSharingModel(sharing_dct,
-# p.scan_per_probe,
-# p.scan_per_object,
-# p.npts)
-# else:
-# raise RuntimeError('model type %s not supported.' % p.model_type)
-
-
-# class BasicSharingModel(object):
-# """
-# BasicSharingModel: implements the most common scan-sharing patterns.
-# """
-
-# def __init__(self, sharing_dct, scan_per_probe, scan_per_object, npts=None):
-# """
-# BasicSharingModel: implements the most common scan-sharing patterns.
-
-# Parameters:
-# -----------
-# scan_per_probe: float < 1 or int
-# number of contiguous scans using the same probe. If a int, the
-# number of scans. If a float < 0, split the scans into
-# 1/scan_per_probe independent probes. For instance,
-# scan_per_probe = .5 will split all scans in two and assign a
-# different probe to each.
-# scan_per_object: int
-# number of contiguous scans using the same object.
-# npts: int
-# number of diffraction patterns in a given scan. Needed only if
-# scan_per_probe < 1.
-# """
-# # Prepare probe sharing
-# if scan_per_probe == 0:
-# self.shared_probe = True
-# self.single_probe = True
-# logger.info('Sharing a single probe for ALL scans.')
-# elif scan_per_probe >= 1:
-# self.shared_probe = True
-# self.single_probe = False
-# self.scan_per_probe = int(scan_per_probe)
-# logger.info(
-# 'Model: sharing probe between scans '
-# '(one new probe every %d scan)' % self.scan_per_probe)
-# else:
-# self.shared_probe = False
-# self.single_probe = False
-# # The following will fail if npts wasn't provided.
-# self.diff_per_probe = int(npts * scan_per_probe)
-# self.npts = npts
-# logger.info(
-# 'Model: splitting scans (every %d diffraction patter)'
-# % self.diff_per_probe)
-
-# # Prepare object sharing
-# if scan_per_object == 0:
-# self.single_object = True
-# self.shared_object = True
-# logger.info('Sharing a single object for ALL scans.')
-# elif scan_per_object >= 1:
-# self.single_object = False
-# self.shared_object = True
-# self.scan_per_object = int(scan_per_object)
-# logger.info(
-# 'Model: sharing object between scans '
-# '(one new object every %d scan)' % self.scan_per_object)
-# else:
-# raise RuntimeError(
-# 'scan_per_object < 1. not supported. What does it mean anyway?')
-
-# self.scan_labels = []
-# self.probe_ids = sharing_dct['probe_ids']
-# self.object_ids = sharing_dct['object_ids']
-
-# def __call__(self, scan_label, diff_index):
-# """
-# Return probe and object ids given a scan and diffraction pattern index.
-
-# Parameters:
-# -----------
-# scan_label: str or int
-# An identifier for a scan (label or index)
-# diff_index: int
-# The index of the diffraction pattern in the given scan.
-# """
-# # Get an index for the scan_label
-# if str(scan_label) == scan_label:
-# # If it is a string, look it up
-# if scan_label in self.scan_labels:
-# scan_index = self.scan_labels.index(scan_label)
-# else:
-# scan_index = len(self.scan_labels)
-# self.scan_labels.append(scan_label)
-# else:
-# # Nothing to do if it is an index
-# scan_index = scan_label
-
-# # Apply the rules for probe sharing
-# if self.single_probe:
-# probe_id = 0
-# elif self.shared_probe:
-# probe_id = scan_index // self.scan_per_probe
-# else:
-# probe_id = (scan_index * (self.npts // self.diff_per_probe)
-# + diff_index // self.diff_per_probe)
-
-# # Follow the format specified in Viewmanager
-# probe_id = STORAGE_PREFIX + '%02d' % probe_id
-
-# # ... and the rules for object sharing
-# if self.single_object:
-# object_id = 0
-# else:
-# object_id = scan_index // self.scan_per_object
-
-# # Follow the format specified in Viewmanager
-# object_id = STORAGE_PREFIX + '%02d' % object_id
-
-# logger.debug(
-# "Model assigned frame %d of scan %s to probe %s & object %s"
-# % (diff_index, str(scan_label), probe_id, object_id))
-
-# # Store sharing info
-# pl = self.probe_ids.get(probe_id, [])
-# if scan_label not in pl:
-# pl.append(scan_label)
-# self.probe_ids[probe_id] = pl
-
-# ol = self.object_ids.get(object_id, [])
-# if scan_label not in ol:
-# ol.append(scan_label)
-# self.object_ids[object_id] = ol
-
-# return probe_id, object_id
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index eb613cfcf..69c08462e 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -722,13 +722,6 @@ def load_run(cls, runfile, load_data=True):
P._configure()
- ### FIXME: removed sharing, to be reimplemented before 0.3
- ### release, perhaps as a feature of the Full scan model.
- # logger.info('Reconfiguring sharing rules') # and loading data')
- # print u.verbose.report(P.p)
- # P.modelm.sharing_rules = model.parse_model(P.modelm.p['sharing'],
- # P.modelm.sharing)
-
logger.info('Regenerating exit waves')
P.exit.reformat()
P.modelm._initialize_exit(P.pods.values())
From 932b1f3853150532377066898c5bc44550582213 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Fri, 10 Nov 2017 16:32:28 +0100
Subject: [PATCH 189/363] Sketched a 3d Bragg PtyScan for simulation
---
ptypy/experiment/Bragg3dSim.py | 192 +++++++++++++++++++++++++++++++++
ptypy/experiment/__init__.py | 1 +
tutorial/bragg3d_initial.py | 4 +-
3 files changed, 195 insertions(+), 2 deletions(-)
create mode 100644 ptypy/experiment/Bragg3dSim.py
diff --git a/ptypy/experiment/Bragg3dSim.py b/ptypy/experiment/Bragg3dSim.py
new file mode 100644
index 000000000..5c8a3d988
--- /dev/null
+++ b/ptypy/experiment/Bragg3dSim.py
@@ -0,0 +1,192 @@
+"""
+This module provides simulated 3D Bragg data.
+"""
+
+import ptypy
+from ptypy.core.data import PtyScan
+import ptypy.utils as u
+from ptypy.utils.descriptor import defaults_tree
+from ptypy.core import geometry_bragg
+
+import numpy as np
+import time
+
+logger = u.verbose.logger
+
+
+@defaults_tree.parse_doc('scandata.Bragg3dSim')
+class Bragg3dSimScan(PtyScan):
+ """
+ Provides simulated 3D Bragg data based on the numerical
+ experiment in Berenguer et al., PRB 88 (2013) 144101.
+
+ Defaults:
+
+ [shape]
+ # Godard: default = 1024
+ default = 256
+
+ [distance]
+ default = 2
+
+ [psize]
+ default = 13e-6
+
+ [energy]
+ default = 8.5
+
+ [probe_fwhm]
+ default = 1e-6
+ type = float
+ lowlim = 0.0
+ help = FWHM of the gaussian probe
+
+ [rocking_step]
+ # Godard: default = .01
+ default = .0025
+ type = float
+ help = Step size in the rocking curve in degrees
+
+ [rocking_steps]
+ # Godard: default = 9
+ default = 40
+ type = int
+ help = Number of rocking positions
+
+ [theta_bragg]
+ default = 22.32
+ type = float
+ help = Bragg angle in degrees
+
+ """
+
+ def __init__(self, pars=None, **kwargs):
+ self.p = self.DEFAULT.copy(99)
+ self.p.update(pars)
+ super(Bragg3dSimScan, self).__init__(self.p)
+
+ # do the simulation
+ self.calculate()
+
+ def calculate(self):
+ # Set up a 3D geometry and a scan
+ # -------------------------------
+
+ shape = tuple(u.expect2(self.p.shape))
+ psize = tuple(u.expect2(self.p.psize))
+ g = ptypy.core.geometry_bragg.Geo_Bragg(
+ psize=(self.p.rocking_step,) + psize,
+ shape=(self.p.rocking_steps,) + shape,
+ energy=self.p.energy,
+ distance=self.p.distance,
+ theta_bragg=self.p.theta_bragg)
+
+ # The Geo_Bragg object contains mostly the same things as Geo, but in
+ # three dimensions. The third element of the shape is the number of
+ # rocking curve positions, the third element of the psize denotes theta
+ # step in degrees.
+ print g
+
+ # Set up scan positions along y, perpendicular to the incoming beam and
+ # to the thin layer stripes.
+ Npos = 11
+ positions = np.zeros((Npos,3))
+ positions[:, 2] = np.arange(Npos) - Npos/2.0
+ positions *= .43e-6
+
+ # Set up the object and its views
+ # -------------------------------
+
+ # Create a container for the object array, which will represent the
+ # object in the non-orthogonal coordinate system conjugate to the
+ # q-space measurement frame.
+ C = ptypy.core.Container(data_type=np.complex128, data_dims=3)
+
+ # For each scan position in the orthogonal coordinate system, find the
+ # natural coordinates and create a View instance there.
+ views = []
+ for pos in positions:
+ pos_ = g._r3r1r2(pos)
+ views.append(ptypy.core.View(C, storageID='Sobj', psize=g.resolution, coord=pos_, shape=g.shape))
+ S = C.storages['Sobj']
+ C.reformat()
+
+ # Define the test sample based on the orthogonal position of each voxel.
+ # First, the cartesian grid is obtained from the geometry object, then
+ # this grid is used as a condition for the sample's magnitude.
+ xx, zz, yy = g.transformed_grid(S, input_space='real', input_system='natural')
+ S.fill(0.0)
+ S.data[(zz >= -90e-9) & (zz < 90e-9) & (yy + .3*zz >= 1e-6) & (yy - .3*zz< 2e-6) & (xx < 1e-6)] = 1
+ S.data[(zz >= -90e-9) & (zz < 90e-9) & (yy + .3*zz >= -2e-6) & (yy - .3*zz < -1e-6)] = 1
+ #import matplotlib.pyplot as plt
+ #plt.imshow(np.abs(S.data[0, S.data.shape[0]/2, :, :]), interpolation='none')
+ #plt.show()
+
+ # Set up the probe and calculate diffraction patterns
+ # ---------------------------------------------------
+
+ # First set up a two-dimensional representation of the probe, with
+ # arbitrary pixel spacing. The probe here is defined as a 1.5 um by 3 um
+ # flat square, but this container will typically come from a 2d
+ # transmission ptycho scan of an easy test object.
+ Cprobe = ptypy.core.Container(data_dims=2, data_type='float')
+ Sprobe = Cprobe.new_storage(psize=10e-9, shape=500)
+ zi, yi = Sprobe.grids()
+
+ # gaussian probe
+ sigma = self.p.probe_fwhm / 2.3548
+ Sprobe.data = np.exp(-zi**2 / (2 * sigma**2) - yi**2 / (2 * sigma**2))
+
+ # The Bragg geometry has a method to prepare a 3d Storage by extruding
+ # the 2d probe and interpolating to the right grid. The returned storage
+ # contains a single view compatible with the object views.
+ Sprobe_3d = g.prepare_3d_probe(Sprobe, system='natural')
+ probeView = Sprobe_3d.views[0]
+
+ # Calculate diffraction patterns by using the geometry's propagator.
+ diff = []
+ for v in views:
+ diff.append(np.abs(g.propagator.fw(v.data * probeView.data))**2)
+
+ # stack the 2d diffraction patterns and save
+ self.diff = []
+ for i in range(len(diff)):
+ for j in range(len(diff[i])):
+ self.diff.append(diff[i][j,:,:])
+
+ # convert the positions from (x, z, y) to (angle, x, z, y) and
+ # save, we need the angle and in future we won't know in which
+ # plane the scan was done (although here it is in xy).
+ # these xyz axis still follow Berenguer et al PRB 2013.
+ self.positions = np.empty((g.shape[0] * Npos, 4), dtype=float)
+ angles = (np.arange(g.shape[0]) - g.shape[0] / 2) * g.psize[0]
+ for i in range(Npos):
+ for j in range(g.shape[0]):
+ self.positions[i * g.shape[0] + j, 1:] = positions[i, :]
+ self.positions[i * g.shape[0] + j, 0] = angles[j]
+
+ def load_positions(self):
+ return self.positions
+
+ def load(self, indices):
+ raw, positions, weights = {}, {}, {}
+
+ # pick out the requested indices
+ for i in indices:
+ raw[i] = self.diff[i]
+
+ return raw, positions, weights
+
+ def load_weight(self):
+ return np.ones_like(self.diff[0])
+
+
+if __name__ == '__main__':
+ u.verbose.set_level(3)
+ ps = Bragg3dSimScan()
+ ps.initialize()
+ while True:
+ msg = ps.auto(23)
+ logger.info('Got %d images' % len(msg['iterable']))
+ if msg == ps.EOS:
+ break
diff --git a/ptypy/experiment/__init__.py b/ptypy/experiment/__init__.py
index c5ba47b5d..8c45cf474 100644
--- a/ptypy/experiment/__init__.py
+++ b/ptypy/experiment/__init__.py
@@ -30,5 +30,6 @@
from UCL import UCLLaserScan
from nanomax import NanomaxStepscanMay2017, NanomaxStepscanNov2016, NanomaxFlyscanJune2017
from ALS_5321 import ALS5321Scan
+from Bragg3dSim import Bragg3dSimScan
from ..core.data import MoonFlowerScan, PtydScan
diff --git a/tutorial/bragg3d_initial.py b/tutorial/bragg3d_initial.py
index a704ac6b4..5f480cd71 100644
--- a/tutorial/bragg3d_initial.py
+++ b/tutorial/bragg3d_initial.py
@@ -22,8 +22,8 @@
g = ptypy.core.geometry_bragg.Geo_Bragg(psize=(0.01/4, 13e-6, 13e-6), shape=(9*4, 128, 128), energy=8.5, distance=2.0, theta_bragg=22.32)
# The Geo_Bragg object contains mostly the same things as Geo, but in
-# three dimensions. The third element of the shape is the number of
-# rocking curve positions, the third element of the psize denotes theta
+# three dimensions. The first element of the shape is the number of
+# rocking curve positions, the first element of the psize denotes theta
# step in degrees.
print g
From f5ba8dbf1dd5425b482c075836a020ffdd105eae Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Sat, 11 Nov 2017 22:42:24 +0100
Subject: [PATCH 190/363] WIP: sketched Bragg ScanModel
---
ptypy/core/data.py | 4 +-
ptypy/core/manager.py | 171 +++++++++++++++++++++++++++++++-
ptypy/experiment/Bragg3dSim.py | 38 +++++--
templates/bragg_prep_and_run.py | 17 ++++
4 files changed, 214 insertions(+), 16 deletions(-)
create mode 100644 templates/bragg_prep_and_run.py
diff --git a/ptypy/core/data.py b/ptypy/core/data.py
index 052d5c6b5..ff3b277f5 100644
--- a/ptypy/core/data.py
+++ b/ptypy/core/data.py
@@ -884,9 +884,9 @@ def auto(self, frames):
-------
variable
one of the following
- - None, if scan's end is not reached,
+ - WAIT, if scan's end is not reached,
but no data could be prepared yet
- - False, if scan's end is reached
+ - EOS, if scan's end is reached
- a data package otherwise
"""
# attempt to get data:
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 549469e9b..f9c34605a 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -33,7 +33,7 @@
FType = np.float64
CType = np.complex128
-__all__ = ['ModelManager', 'ScanModel', 'Full', 'Vanilla']
+__all__ = ['ModelManager', 'ScanModel', 'Full', 'Vanilla', 'Bragg3dModel']
@defaults_tree.parse_doc('scan.ScanModel')
@@ -156,6 +156,9 @@ def makePtyScan(cls, pars):
return ps_instance
+ def _extra_analysis(self):
+ return True
+
def new_data(self):
"""
Feed data from ptyscan object.
@@ -215,6 +218,11 @@ def new_data(self):
for v in old_mask_views:
old_mask_layers.append(v.layer)
+ # this is a hack for now
+ dp = self._new_data_extra_analysis(dp)
+ if dp is None:
+ return None
+
# Prepare for View generation
AR_diff_base = DEFAULT_ACCESSRULE.copy()
AR_diff_base.shape = self.shape
@@ -484,13 +492,12 @@ def _create_pods(self):
for i in range(len(self.new_diff_views)):
dv, mv = self.new_diff_views.pop(0), self.new_mask_views.pop(0)
-
-
# Create views
+ ndim = self.Cdiff.ndim
pv = View(container=self.ptycho.probe,
accessrule={'shape': geometry.shape,
'psize': geometry.resolution,
- 'coord': u.expect2(0.0),
+ 'coord': u.expectN(0.0, ndim),
'storageID': ID,
'layer': 0,
'active': True})
@@ -506,7 +513,7 @@ def _create_pods(self):
ev = View(container=self.ptycho.exit,
accessrule={'shape': geometry.shape,
'psize': geometry.resolution,
- 'coord': u.expect2(0.0),
+ 'coord': u.expectN(0.0, ndim),
'storageID': ID,
'layer': dv.layer,
'active': dv.active})
@@ -1213,6 +1220,160 @@ def _initialize_object(self, object_ids):
s.model_initialized = True
+import geometry_bragg
+@defaults_tree.parse_doc('scan.Bragg3dModel')
+class Bragg3dModel(Vanilla):
+ """
+ Model for 3D Bragg ptycho data, where a set of rocking angles are
+ measured for each scanning position. The result is pods carrying
+ 3D diffraction patterns and 3D Views into a 3D object.
+
+ Inherits from Vanilla because _create_pods and the probe/object
+ initializations are identical.
+
+ Defaults:
+
+ [name]
+ default = Bragg3dModel
+ type = str
+ help =
+
+ [illumination.size]
+ default = None
+ type = float
+ help = Initial probe size
+ doc = The probe is initialized as a flat circle.
+
+ [sample.fill]
+ default = 1
+ type = float, complex
+ help = Initial sample value
+ doc = The sample is initialized with this value everywhere.
+ """
+
+ def __init__(self, ptycho=None, pars=None, label=None):
+ super(Bragg3dModel, self).__init__(ptycho, pars, label)
+ # This model holds on to incoming frames until a complete 3d
+ # diffraction pattern can be built for that position.
+ self.buffered_frames = {}
+ self.buffered_positions = []
+ #self.frames_per_call = 100 # just for testing
+
+ def _new_data_extra_analysis(self, dp):
+ """
+ The heavy override is new_data. I've inserted an extra method
+ for now, to not have to duplicate all the new_data code.
+
+ The PtyScans give 2d diff images at 4d (angle, x, z, y)
+ positions in the sample frame. These need to be assembled into
+ 3d (q3, q1, q2) at 3d positions. This means receiving images,
+ holding on to them, and only calling _create_pods once a
+ complete 3d diff View has been created.
+ """
+
+ print '*** not managing positions properly, two measurements at the same positions would break this. Make self.buffered_positions a dict or something.'
+ # go through and buffer the new 2d frames
+ for dct in dp['iterable']:
+ pos = dct['position'][1:]
+ try:
+ # index into the frame buffer where this frame belongs
+ pos
+ idx = np.where(np.prod(np.isclose(pos, self.buffered_positions), axis=1))[0][0]
+ except:
+ # this position hasn't been encountered before, so create a buffer entry
+ idx = len(self.buffered_positions)
+ print 'Creating frame buffer entry %d for frame %d' % (idx, dct['index'])
+ self.buffered_positions.append(pos)
+ self.buffered_frames[idx] = {
+ 'position': pos,
+ 'frames': [],
+ 'masks': [],
+ 'angles': [],
+ }
+
+ # buffer the frame, mask, and angle
+ self.buffered_frames[idx]['frames'].append(dct['data'])
+ self.buffered_frames[idx]['masks'].append(dct['mask'])
+ self.buffered_frames[idx]['angles'].append(dct['position'][0])
+
+ # go through the buffer to see if any positions have all their
+ # 2d frames, and create a new dp-compatible structure with
+ # complete positions.
+ dp_new = {'iterable': []}
+ for idx, dct in self.buffered_frames.iteritems():
+ if len(dct['angles']) == self.geometries[0].shape[0]:
+ # this one is ready to go
+ print idx, 'ready'
+
+ # first sort the frames, in increasing order for now
+ order = [i[0] for i in sorted(enumerate(dct['angles']), key=lambda x:x[1])]
+ dct['frames'] = [dct['frames'][i] for i in order]
+ dct['masks'] = [dct['masks'][i] for i in order]
+
+ # then assemble the data and masks
+ dp_new['iterable'].append({
+ 'index': idx,
+ 'position': dct['position'],
+ 'data': np.array(dct['frames'], dtype=self.ptycho.FType),
+ 'mask': np.array(dct['masks'], dtype=bool),
+ })
+ else:
+ print idx, 'not ready, have', len(dct['angles']), 'frames'
+
+ # delete complete entries from the buffer
+ for dct in dp_new['iterable']:
+ del self.buffered_frames[dct['index']]
+
+ # continue to pod creation if there is data for it
+ if len(dp_new['iterable']):
+ return dp_new
+ else:
+ return None
+
+ def _initialize_containers(self):
+ """
+ Override to get 3D containers.
+ """
+ self.ptycho.probe = Container(ptycho=self.ptycho, ID='Cprobe', data_type='complex', data_dims=3)
+ self.ptycho.obj = Container(ptycho=self.ptycho, ID='Cobj', data_type='complex', data_dims=3)
+ self.ptycho.exit = Container(ptycho=self.ptycho, ID='Cexit', data_type='complex', data_dims=3)
+ self.ptycho.diff = Container(ptycho=self.ptycho, ID='Cdiff', data_type='real', data_dims=3)
+ self.ptycho.mask = Container(ptycho=self.ptycho, ID='Cmask', data_type='bool', data_dims=3)
+ self.Cdiff = self.ptycho.diff
+ self.Cmask = self.ptycho.mask
+ self.containers_initialized = True
+
+ def _initialize_geo(self, common):
+ """
+ Initialize the geometry based on parameters from a PtyScan.auto
+ data package. Now psize and shape change meanings: from referring
+ to raw data frames, they now refer to 3-dimensional diffraction
+ patterns as specified by Geo_Bragg.
+ """
+
+ # Collect and assemble geometric parameters
+ get_keys = ['distance', 'center', 'energy']
+ geo_pars = u.Param({key: common[key] for key in get_keys})
+ geo_pars.propagation = self.p.propagation
+ # take extra Bragg information into account
+ psize = tuple(common['psize'])
+ geo_pars.psize = (self.ptyscan.common.rocking_step,) + psize
+ sh = tuple(common['shape'])
+ geo_pars.shape = (self.ptyscan.common.n_rocking_positions,) + sh
+ geo_pars.theta_bragg = self.ptyscan.common.theta_bragg
+
+ # make a Geo instance and fix resolution
+ g = geometry_bragg.Geo_Bragg(owner=self.ptycho, pars=geo_pars)
+ g.p.resolution_is_fix = True
+
+ # save the geometry
+ self.geometries = [g]
+
+ # Store frame shape
+ self.shape = g.shape
+ self.psize = g.psize
+
+
class ModelManager(object):
"""
Thin wrapper class which now just interfaces Ptycho with ScanModel.
diff --git a/ptypy/experiment/Bragg3dSim.py b/ptypy/experiment/Bragg3dSim.py
index 5c8a3d988..2afd2939f 100644
--- a/ptypy/experiment/Bragg3dSim.py
+++ b/ptypy/experiment/Bragg3dSim.py
@@ -14,7 +14,7 @@
logger = u.verbose.logger
-@defaults_tree.parse_doc('scandata.Bragg3dSim')
+@defaults_tree.parse_doc('scandata.Bragg3dSimScan')
class Bragg3dSimScan(PtyScan):
"""
Provides simulated 3D Bragg data based on the numerical
@@ -22,6 +22,11 @@ class Bragg3dSimScan(PtyScan):
Defaults:
+ [name]
+ default = Bragg3dSimScan
+ type = str
+ help = PtyScan subclass identifier
+
[shape]
# Godard: default = 1024
default = 256
@@ -47,7 +52,7 @@ class Bragg3dSimScan(PtyScan):
type = float
help = Step size in the rocking curve in degrees
- [rocking_steps]
+ [n_rocking_positions]
# Godard: default = 9
default = 40
type = int
@@ -63,12 +68,13 @@ class Bragg3dSimScan(PtyScan):
def __init__(self, pars=None, **kwargs):
self.p = self.DEFAULT.copy(99)
self.p.update(pars)
+ self.p.update(kwargs)
super(Bragg3dSimScan, self).__init__(self.p)
-
+
# do the simulation
- self.calculate()
+ self.simulate()
- def calculate(self):
+ def simulate(self):
# Set up a 3D geometry and a scan
# -------------------------------
@@ -76,7 +82,7 @@ def calculate(self):
psize = tuple(u.expect2(self.p.psize))
g = ptypy.core.geometry_bragg.Geo_Bragg(
psize=(self.p.rocking_step,) + psize,
- shape=(self.p.rocking_steps,) + shape,
+ shape=(self.p.n_rocking_positions,) + shape,
energy=self.p.energy,
distance=self.p.distance,
theta_bragg=self.p.theta_bragg)
@@ -85,7 +91,8 @@ def calculate(self):
# three dimensions. The third element of the shape is the number of
# rocking curve positions, the third element of the psize denotes theta
# step in degrees.
- print g
+ logger.info('Data will be simulated with these geometric parameters:')
+ logger.info(g)
# Set up scan positions along y, perpendicular to the incoming beam and
# to the thin layer stripes.
@@ -131,6 +138,7 @@ def calculate(self):
# transmission ptycho scan of an easy test object.
Cprobe = ptypy.core.Container(data_dims=2, data_type='float')
Sprobe = Cprobe.new_storage(psize=10e-9, shape=500)
+ print 'WARNING! fix simulation probe extent'
zi, yi = Sprobe.grids()
# gaussian probe
@@ -159,12 +167,24 @@ def calculate(self):
# plane the scan was done (although here it is in xy).
# these xyz axis still follow Berenguer et al PRB 2013.
self.positions = np.empty((g.shape[0] * Npos, 4), dtype=float)
- angles = (np.arange(g.shape[0]) - g.shape[0] / 2) * g.psize[0]
+ angles = (np.arange(g.shape[0]) - g.shape[0] / 2.0 + 1.0/2) * g.psize[0]
for i in range(Npos):
for j in range(g.shape[0]):
self.positions[i * g.shape[0] + j, 1:] = positions[i, :]
self.positions[i * g.shape[0] + j, 0] = angles[j]
+ def load_common(self):
+ """
+ We have to communicate the number of rocking positions that the
+ model should expect, otherwise it never knows when there is data
+ for a complete POD.
+ """
+ return {
+ 'rocking_step': self.p.rocking_step,
+ 'n_rocking_positions': self.p.n_rocking_positions,
+ 'theta_bragg': self.p.theta_bragg,
+ }
+
def load_positions(self):
return self.positions
@@ -187,6 +207,6 @@ def load_weight(self):
ps.initialize()
while True:
msg = ps.auto(23)
- logger.info('Got %d images' % len(msg['iterable']))
if msg == ps.EOS:
break
+ logger.info('Got %d images' % len(msg['iterable']))
diff --git a/templates/bragg_prep_and_run.py b/templates/bragg_prep_and_run.py
new file mode 100644
index 000000000..6d5434f85
--- /dev/null
+++ b/templates/bragg_prep_and_run.py
@@ -0,0 +1,17 @@
+from ptypy.core import Ptycho
+from ptypy import utils as u
+
+p = u.Param()
+
+# for verbose output
+p.verbose_level = 3
+
+# max 100 frames (128x128px) of diffraction data
+p.scans = u.Param()
+p.scans.scan01 = u.Param()
+p.scans.scan01.name = 'Bragg3dModel'
+p.scans.scan01.data= u.Param()
+p.scans.scan01.data.name = 'Bragg3dSimScan'
+
+# prepare and run
+P = Ptycho(p,level=5)
From c52855ff774f57cd6bea716218ea294a82979257 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 13 Nov 2017 09:06:08 +0100
Subject: [PATCH 191/363] Fixed Ptycho's logging parameter descriptor
---
ptypy/core/ptycho.py | 13 +++++++------
1 file changed, 7 insertions(+), 6 deletions(-)
diff --git a/ptypy/core/ptycho.py b/ptypy/core/ptycho.py
index 69c08462e..b0b8b7345 100644
--- a/ptypy/core/ptycho.py
+++ b/ptypy/core/ptycho.py
@@ -70,15 +70,16 @@ class Ptycho(Base):
default = 1
help = Verbosity level
doc = Verbosity level for information logging.
- - ``0``: Only errors
- - ``1``: Warning
- - ``2``: Process Information
- - ``3``: Object Information
- - ``4``: Debug
+ - ``0``: Only critical errors
+ - ``1``: All errors
+ - ``2``: Warning
+ - ``3``: Process Information
+ - ``4``: Object Information
+ - ``5``: Debug
type = int
userlevel = 0
lowlim = 0
- uplim = 4
+ uplim = 5
[data_type]
default = 'single'
From 224643270259966c4feb79a103730ff29cbce49b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 13 Nov 2017 10:06:33 +0100
Subject: [PATCH 192/363] Clarifications and unit test for the Bragg scan model
---
ptypy/core/manager.py | 28 +++++++----
ptypy/experiment/Bragg3dSim.py | 32 +++++++++++++
ptypy/test/core_tests/bragg_scanmodel_test.py | 47 +++++++++++++++++++
3 files changed, 99 insertions(+), 8 deletions(-)
create mode 100644 ptypy/test/core_tests/bragg_scanmodel_test.py
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index f9c34605a..47f671c09 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -1231,6 +1231,10 @@ class Bragg3dModel(Vanilla):
Inherits from Vanilla because _create_pods and the probe/object
initializations are identical.
+ Frames for each position are assembled according to the actual
+ xyz data, so it will not work if two acquisitions are done at the
+ same position.
+
Defaults:
[name]
@@ -1257,32 +1261,37 @@ def __init__(self, ptycho=None, pars=None, label=None):
# diffraction pattern can be built for that position.
self.buffered_frames = {}
self.buffered_positions = []
- #self.frames_per_call = 100 # just for testing
+ #self.frames_per_call = 216 # just for testing
def _new_data_extra_analysis(self, dp):
"""
The heavy override is new_data. I've inserted an extra method
- for now, to not have to duplicate all the new_data code.
+ for now, so as not to duplicate all the new_data code.
The PtyScans give 2d diff images at 4d (angle, x, z, y)
positions in the sample frame. These need to be assembled into
3d (q3, q1, q2) at 3d positions. This means receiving images,
holding on to them, and only calling _create_pods once a
complete 3d diff View has been created.
+
+ The xyz axes are those specified in Geo_Bragg, and the angle
+ parameter defined such that a more positive angle corresponds to
+ a more positive q3. That is, it is the angle between the xy
+ plane of the sample with respect to the incident beam.
"""
- print '*** not managing positions properly, two measurements at the same positions would break this. Make self.buffered_positions a dict or something.'
# go through and buffer the new 2d frames
for dct in dp['iterable']:
pos = dct['position'][1:]
try:
# index into the frame buffer where this frame belongs
- pos
idx = np.where(np.prod(np.isclose(pos, self.buffered_positions), axis=1))[0][0]
+ logger.debug('Frame %d belongs in frame buffer %d'
+ % (dct['index'], idx))
except:
# this position hasn't been encountered before, so create a buffer entry
idx = len(self.buffered_positions)
- print 'Creating frame buffer entry %d for frame %d' % (idx, dct['index'])
+ logger.debug('Frame %d doesn\'t belong in an existing frame buffer, creating buffer %d' % (dct['index'], idx))
self.buffered_positions.append(pos)
self.buffered_frames[idx] = {
'position': pos,
@@ -1303,9 +1312,9 @@ def _new_data_extra_analysis(self, dp):
for idx, dct in self.buffered_frames.iteritems():
if len(dct['angles']) == self.geometries[0].shape[0]:
# this one is ready to go
- print idx, 'ready'
+ logger.debug('3d diffraction data for position %d ready, will create POD' % idx)
- # first sort the frames, in increasing order for now
+ # first sort the frames in increasing angle (increasing q3) order
order = [i[0] for i in sorted(enumerate(dct['angles']), key=lambda x:x[1])]
dct['frames'] = [dct['frames'][i] for i in order]
dct['masks'] = [dct['masks'][i] for i in order]
@@ -1318,7 +1327,8 @@ def _new_data_extra_analysis(self, dp):
'mask': np.array(dct['masks'], dtype=bool),
})
else:
- print idx, 'not ready, have', len(dct['angles']), 'frames'
+ logger.debug('3d diffraction data for position %d isn\'t ready, have %d out of %d frames'
+ % (idx, len(dct['angles']), self.geometries[0].shape[0]))
# delete complete entries from the buffer
for dct in dp_new['iterable']:
@@ -1326,6 +1336,8 @@ def _new_data_extra_analysis(self, dp):
# continue to pod creation if there is data for it
if len(dp_new['iterable']):
+ logger.debug('Will continue with POD creation for %d complete positions'
+ % len(dp_new['iterable']))
return dp_new
else:
return None
diff --git a/ptypy/experiment/Bragg3dSim.py b/ptypy/experiment/Bragg3dSim.py
index 2afd2939f..414f10c73 100644
--- a/ptypy/experiment/Bragg3dSim.py
+++ b/ptypy/experiment/Bragg3dSim.py
@@ -63,6 +63,17 @@ class Bragg3dSimScan(PtyScan):
type = float
help = Bragg angle in degrees
+ [shuffle]
+ default = False
+ type = bool
+ help = Shuffles all diffraction patterns
+ doc = Mainly to test that they are still assembled correctly.
+
+ [dump]
+ default = None
+ type = str
+ help = Dump raw simulated 3d diffraction data to npz file
+
"""
def __init__(self, pars=None, **kwargs):
@@ -156,6 +167,10 @@ def simulate(self):
for v in views:
diff.append(np.abs(g.propagator.fw(v.data * probeView.data))**2)
+ # dump the 3d arrays for testing
+ if self.p.dump is not None:
+ np.savez(self.p.dump, **{'diff%02d'%i : diff[i] for i in range(len(diff))})
+
# stack the 2d diffraction patterns and save
self.diff = []
for i in range(len(diff)):
@@ -173,6 +188,17 @@ def simulate(self):
self.positions[i * g.shape[0] + j, 1:] = positions[i, :]
self.positions[i * g.shape[0] + j, 0] = angles[j]
+ # shuffle everything as a test
+ if self.p.shuffle:
+ order = range(len(self.diff))
+ from random import shuffle
+ shuffle(order)
+ self.diff = [self.diff[i] for i in order]
+ new_pos = np.empty_like(self.positions)
+ for i in range(len(new_pos)):
+ new_pos[i] = self.positions[order[i]]
+ self.positions = new_pos
+
def load_common(self):
"""
We have to communicate the number of rocking positions that the
@@ -186,6 +212,12 @@ def load_common(self):
}
def load_positions(self):
+ """
+ For the 3d Bragg model, load_positions returns N-by-4 positions,
+ (angle, x, z, y). The angle can be relative or absolute, the
+ model doesn't care, but it does have to be uniformly spaced for
+ the analysis to make any sense.
+ """
return self.positions
def load(self, indices):
diff --git a/ptypy/test/core_tests/bragg_scanmodel_test.py b/ptypy/test/core_tests/bragg_scanmodel_test.py
new file mode 100644
index 000000000..fd9f351ed
--- /dev/null
+++ b/ptypy/test/core_tests/bragg_scanmodel_test.py
@@ -0,0 +1,47 @@
+"""
+Tests that the assembly of frames into 3d pods gives the original
+3d diffraction patterns.
+"""
+
+import unittest
+from ptypy.core import Ptycho
+from ptypy import utils as u
+import numpy as np
+
+class Bragg3dModelTest(unittest.TestCase):
+ def test_frame_assembly(self):
+
+ # parameter tree
+ p = u.Param()
+ p.scans = u.Param()
+ p.scans.scan01 = u.Param()
+ p.scans.scan01.name = 'Bragg3dModel'
+ p.scans.scan01.data= u.Param()
+ p.scans.scan01.data.name = 'Bragg3dSimScan'
+ p.scans.scan01.data.dump = '/tmp/tmp.npz'
+ p.scans.scan01.data.shuffle = True
+
+ # simulate and then load data
+ P = Ptycho(p,level=2)
+
+ # load raw simulation data
+ diff = np.load('/tmp/tmp.npz')
+
+ # check that the pods reflect the raw data
+ assert len(diff.keys()) == len(P.pods)
+ checked_pods = []
+ for i in range(len(diff.keys())):
+ diff_raw = diff['diff%02d'%i]
+ ok = False
+ for j in range(len(P.pods)):
+ if j in checked_pods:
+ continue
+ diff_pod = P.pods['P%04d'%j].diff
+ if np.allclose(diff_raw, diff_pod):
+ checked_pods.append(j)
+ ok = True
+ break
+ assert ok
+
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
From 9479097192f37b9de06bf49cc49f71d3807b9ce3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 13 Nov 2017 11:05:33 +0100
Subject: [PATCH 193/363] Updated classes test to reflect updated Container
---
ptypy/test/core_tests/test_classes.py | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/ptypy/test/core_tests/test_classes.py b/ptypy/test/core_tests/test_classes.py
index 370214980..befee52ab 100644
--- a/ptypy/test/core_tests/test_classes.py
+++ b/ptypy/test/core_tests/test_classes.py
@@ -55,7 +55,10 @@ def test_default_parameters(self):
self.assertEqual(c.DEFAULT_PSIZE, 1.,
'Default value changed.')
- self.assertEqual(c.DEFAULT_SHAPE, (1, 1, 1),
+ self.assertEqual(c.DEFAULT_SHAPE[2], (1, 1, 1),
+ 'Default value changed.')
+
+ self.assertEqual(c.DEFAULT_SHAPE[3], (1, 1, 1, 1),
'Default value changed.')
self.assertEqual(c.DEFAULT_ACCESSRULE.storageID, None,
@@ -806,13 +809,13 @@ def test_init(self):
self.assertEqual(
self.basic_storage_dpt.shape,
- c.DEFAULT_SHAPE,
+ c.DEFAULT_SHAPE[self.basic_container_dpt.ndim],
'Assigning of instance attribute shape failed.'
)
self.assertEqual(
self.basic_storage_dpt.data,
- np.empty(c.DEFAULT_SHAPE, np.complex128),
+ np.empty(c.DEFAULT_SHAPE[self.basic_container_dpt.ndim], np.complex128),
'Assigning and filling of instance attribute data failed.'
)
From 203767b0958a7cd08839ab600fab903f79b98853 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 13 Nov 2017 14:47:17 +0100
Subject: [PATCH 194/363] Container.copy() didnt understand about data dims
---
ptypy/core/classes.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/ptypy/core/classes.py b/ptypy/core/classes.py
index 8d86e4217..8c8e54fd6 100644
--- a/ptypy/core/classes.py
+++ b/ptypy/core/classes.py
@@ -1664,7 +1664,8 @@ def copy(self, ID=None, fill=None, dtype=None):
data_type = self.data_type if dtype is None else dtype
new_cont = type(self)(ptycho=self.owner,
ID=ID,
- data_type=data_type)
+ data_type=data_type,
+ data_dims=self.ndim)
new_cont.original = self
# If changing data type, avoid casting by producing empty buffers
From c65ee175ac0549b3f18d05797f2ca26943f4668a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 13 Nov 2017 16:07:34 +0100
Subject: [PATCH 195/363] Initializing 3d probe in scan model
---
ptypy/core/manager.py | 34 +++++++++++++++++++++++++++++++---
1 file changed, 31 insertions(+), 3 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 47f671c09..c2eccfe39 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -1228,8 +1228,8 @@ class Bragg3dModel(Vanilla):
measured for each scanning position. The result is pods carrying
3D diffraction patterns and 3D Views into a 3D object.
- Inherits from Vanilla because _create_pods and the probe/object
- initializations are identical.
+ Inherits from Vanilla because _create_pods and the object init
+ is identical.
Frames for each position are assembled according to the actual
xyz data, so it will not work if two acquisitions are done at the
@@ -1243,7 +1243,7 @@ class Bragg3dModel(Vanilla):
help =
[illumination.size]
- default = None
+ default = 1e-6
type = float
help = Initial probe size
doc = The probe is initialized as a flat circle.
@@ -1385,6 +1385,34 @@ def _initialize_geo(self, common):
self.shape = g.shape
self.psize = g.psize
+ def _initialize_probe(self, probe_ids):
+ """
+ Initialize the probe storage referred to by probe_ids.keys()[0]
+ """
+ logger.info('\n'+headerline('Probe initialization', 'l'))
+
+ # pick storage from container, there's only one probe
+ pid = probe_ids.keys()[0]
+ s = self.ptycho.probe.S.get(pid)
+ logger.info('Initializing probe storage %s' % pid)
+
+ # create an oversampled probe perpendicular to its incoming
+ # direction, using the illumination module as a utility.
+ logger.info('Initializing as circle of size ' + str(self.p.illumination.size))
+ Cprobe = Container(data_dims=2, data_type='float')
+ geo = self.geometries[0]
+ psize = min(geo.resolution) * .1
+ extent = int(np.ceil(self.p.illumination.size / psize))
+ Sprobe = Cprobe.new_storage(psize=10e-9, shape=extent)
+ illu_pars = u.Param({'aperture':
+ {'form': 'circ', 'size': self.p.illumination.size}})
+ illumination.init_storage(Sprobe, illu_pars)
+
+ # Extrude the incoming probe in the right direction and frame
+ s.data[:] = geo.prepare_3d_probe(Sprobe, system='natural').data
+
+ s.model_initialized = True
+
class ModelManager(object):
"""
From f5ec8b236b580a53df74d5d39ff8f4f2637b72c3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Mon, 13 Nov 2017 16:17:58 +0100
Subject: [PATCH 196/363] Base for 3d Bragg engines
---
ptypy/engines/base.py | 32 +++++++++++++++++++++++++++++++-
1 file changed, 31 insertions(+), 1 deletion(-)
diff --git a/ptypy/engines/base.py b/ptypy/engines/base.py
index 4f572d057..1b3a662de 100644
--- a/ptypy/engines/base.py
+++ b/ptypy/engines/base.py
@@ -15,7 +15,7 @@
from ..utils.verbose import logger, headerline
from ..utils.descriptor import defaults_tree
-__all__ = ['BaseEngine', 'DEFAULT_iter_info']
+__all__ = ['BaseEngine', 'Base3dBraggEngine', 'DEFAULT_iter_info']
DEFAULT_iter_info = u.Param(
iteration=0,
@@ -280,3 +280,33 @@ def engine_finalize(self):
self.finalize()
"""
raise NotImplementedError()
+
+
+class Base3dBraggEngine(BaseEngine):
+ """
+ 3d Bragg engines need a slightly different prepare() method, because
+ a 2d probe support makes no sense (at least not yet...)
+
+ Defaults:
+
+ [probe_support]
+ default = None
+ """
+
+ def prepare(self):
+ """
+ Last-minute preparation before iterating.
+ """
+ self.finished = False
+ # Simple 2d probe support isn't applicable to the 3d case.
+ supp = self.p.probe_support
+ if supp is not None:
+ raise NotImplementedError
+
+ # Make sure all the pods are supported
+ for label_, pod_ in self.pods.iteritems():
+ if not pod_.model.__class__ in self.SUPPORTED_MODELS:
+ raise Exception('Model %s not supported by engine' % pod_.model.__class__)
+
+ # Call engine specific preparation
+ self.engine_prepare()
From 851fe96c395a39cc5a8eadb1e47b712895df7f04 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 14 Nov 2017 14:47:24 +0100
Subject: [PATCH 197/363] Added a template for visualizing Bragg field of view
---
ptypy/experiment/Bragg3dSim.py | 15 +++++-
templates/bragg_display_field_of_view.py | 66 ++++++++++++++++++++++++
2 files changed, 79 insertions(+), 2 deletions(-)
create mode 100644 templates/bragg_display_field_of_view.py
diff --git a/ptypy/experiment/Bragg3dSim.py b/ptypy/experiment/Bragg3dSim.py
index 414f10c73..ad61f03cb 100644
--- a/ptypy/experiment/Bragg3dSim.py
+++ b/ptypy/experiment/Bragg3dSim.py
@@ -74,6 +74,12 @@ class Bragg3dSimScan(PtyScan):
type = str
help = Dump raw simulated 3d diffraction data to npz file
+ [dry_run]
+ default = False
+ type = bool
+ help = Don't calculate diffraction patterns
+ doc = Skips the heavy FFT and just returns empty diff patterns.
+
"""
def __init__(self, pars=None, **kwargs):
@@ -136,6 +142,7 @@ def simulate(self):
S.fill(0.0)
S.data[(zz >= -90e-9) & (zz < 90e-9) & (yy + .3*zz >= 1e-6) & (yy - .3*zz< 2e-6) & (xx < 1e-6)] = 1
S.data[(zz >= -90e-9) & (zz < 90e-9) & (yy + .3*zz >= -2e-6) & (yy - .3*zz < -1e-6)] = 1
+ self.simulated_object = S
#import matplotlib.pyplot as plt
#plt.imshow(np.abs(S.data[0, S.data.shape[0]/2, :, :]), interpolation='none')
#plt.show()
@@ -164,8 +171,12 @@ def simulate(self):
# Calculate diffraction patterns by using the geometry's propagator.
diff = []
- for v in views:
- diff.append(np.abs(g.propagator.fw(v.data * probeView.data))**2)
+ if self.p.dry_run:
+ for v in views:
+ diff.append(np.zeros(probeView.shape))
+ else:
+ for v in views:
+ diff.append(np.abs(g.propagator.fw(v.data * probeView.data))**2)
# dump the 3d arrays for testing
if self.p.dump is not None:
diff --git a/templates/bragg_display_field_of_view.py b/templates/bragg_display_field_of_view.py
new file mode 100644
index 000000000..b166bd422
--- /dev/null
+++ b/templates/bragg_display_field_of_view.py
@@ -0,0 +1,66 @@
+from ptypy.core import Ptycho
+from ptypy import utils as u
+import matplotlib.pyplot as plt
+import numpy as np
+
+# Set up a parameter tree
+p = u.Param()
+
+p.verbose_level = 3
+
+# these parameters determine the whole geometry (rocking steps, theta, energy, ...)
+p.scans = u.Param()
+p.scans.scan01 = u.Param()
+p.scans.scan01.name = 'Bragg3dModel' # 3d Bragg
+p.scans.scan01.data= u.Param()
+p.scans.scan01.data.name = 'Bragg3dSimScan' # PtyScan which provides simulated data
+p.scans.scan01.data.theta_bragg = 20.0 # the central Bragg angle
+p.scans.scan01.data.shape = 512
+p.scans.scan01.data.psize = 40e-6
+p.scans.scan01.data.n_rocking_positions = 40 # 40 rocking positions per scanning position
+p.scans.scan01.data.dry_run = True # Don't actually calculate diff patterns
+
+# Create a Ptycho instance, this creates a numerical sample and simulates
+# the diffraction experiment
+P = Ptycho(p,level=2)
+
+# This particular PtyScan also exports the object used for simulation as an attribute
+S_true = P.modelm.scans['scan01'].ptyscan.simulated_object
+
+# We can grab the object storage from the Ptycho instance
+S = P.obj.storages.values()[0]
+
+# Similarly, we can find a view of the probe
+probeView = P.probe.views.values()[0]
+
+# Let's define an object view to study
+objView = S.views[1]
+
+# In order to visualize the field of view, we'll create an empty copy of
+# the object and set its value to 1 where covered by the chosen view.
+S_display = S.copy(owner=S.owner, ID='Sdisplay')
+S_display.fill(0.0)
+S_display[objView] = 1
+
+# Then, to see how the probe is contained by this field of view, we add
+# the probe and the numerical sample itself to the above view.
+S_display[objView] += probeView.data
+S_display.data += S_true.data
+
+# Until now, we've been operating in the non-orthogonal 'natural'
+# coordinate system, which is good but hard to understand. We can
+# convert to orthogonal (z, x, y) space by using a method on the
+# geometry object, found from any of the pods.
+geo = P.pods.values()[0].geometry
+S_display_cart = geo.coordinate_shift(S_display, input_system='natural', input_space='real', keep_dims=True)
+
+# Plot some slices
+fig, ax = plt.subplots(nrows=1, ncols=3)
+x, z, y = S_display_cart.grids()
+ax[0].imshow(np.abs(S_display_cart.data[0][:,:,objView.dcoord[2]]).T, extent=[x.min(), x.max(), z.min(), z.max()], interpolation='none', origin='lower', vmin=0, vmax=3)
+plt.setp(ax[0], ylabel='z', xlabel='x', title='side view')
+ax[1].imshow(np.abs(S_display_cart.data[0][:,objView.dcoord[1],:]).T, extent=[x.min(), x.max(), y.min(), y.max()], interpolation='none', origin='lower', vmin=0, vmax=3)
+plt.setp(ax[1], ylabel='y', xlabel='x', title='top view')
+ax[2].imshow(np.abs(S_display_cart.data[0][objView.dcoord[0],:,:]), extent=[y.min(), y.max(), z.min(), z.max()], interpolation='none', origin='lower', vmin=0, vmax=3)
+plt.setp(ax[2], ylabel='z', xlabel='y', title='front view')
+plt.show()
From f6ddbb8e8dc391e7486582e4b48e436ba44acf85 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Tue, 14 Nov 2017 14:50:04 +0100
Subject: [PATCH 198/363] Full illumination init in Bragg3dSimScan and
Bragg3dModel
---
ptypy/core/manager.py | 181 +++++++++++++++++++++++++++++++--
ptypy/experiment/Bragg3dSim.py | 176 ++++++++++++++++++++++++++++++--
2 files changed, 343 insertions(+), 14 deletions(-)
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index c2eccfe39..c54adaf6e 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -1242,11 +1242,176 @@ class Bragg3dModel(Vanilla):
type = str
help =
- [illumination.size]
+ [illumination.aperture]
+ type = Param
+ default =
+ help = Beam aperture parameters
+
+ [illumination.aperture.rotate]
+ type = float
+ default = 0.
+ help = Rotate aperture by this value
+ doc =
+
+ [illumination.aperture.central_stop]
+ help = size of central stop as a fraction of aperture.size
+ default = None
+ doc = If not None: places a central beam stop in aperture. The value given here is the fraction of the beam stop compared to `size`
+ lowlim = 0.
+ uplim = 1.
+ userlevel = 1
+ type = float
+
+ [illumination.aperture.diffuser]
+ help = Noise in the transparent part of the aperture
+ default = None
+ doc = Can be either:
+ - ``None`` : no noise
+ - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
+ - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
+ userlevel = 2
+ type = tuple
+
+ [illumination.aperture.edge]
+ help = Edge width of aperture (in pixels!)
+ type = float
+ default = 2.0
+ userlevel = 2
+
+ [illumination.aperture.form]
+ default = circ
+ type = None, str
+ help = One of None, 'rect' or 'circ'
+ doc = One of:
+ - ``None`` : no aperture, this may be useful for nearfield
+ - ``'rect'`` : rectangular aperture
+ - ``'circ'`` : circular aperture
+ choices = None,'rect','circ'
+ userlevel = 2
+
+ [illumination.aperture.offset]
+ default = 0.
+ type = float, tuple
+ help = Offset between center of aperture and optical axes
+ doc = May also be a tuple (vertical,horizontal) for size in case of an asymmetric offset
+ userlevel = 2
+
+ [illumination.aperture.size]
default = 1e-6
type = float
- help = Initial probe size
- doc = The probe is initialized as a flat circle.
+ help = Aperture width or diameter
+ doc = May also be a tuple *(vertical,horizontal)* in case of an asymmetric aperture
+ lowlim = 0.
+ userlevel = 0
+
+ [illumination.diversity]
+ default = None
+ type = Param, None
+ help = Probe mode(s) diversity parameters
+ doc = Can be ``None`` i.e. no diversity
+ userlevel = 1
+
+ [illumination.diversity.noise]
+ default = None
+ type = tuple
+ help = Noise in the generated modes of the illumination
+ doc = Can be either:
+ - ``None`` : no noise
+ - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
+ - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
+ userlevel = 1
+
+ [illumination.diversity.power]
+ default = 0.1
+ type = tuple, float
+ help = Power of modes relative to main mode (zero-layer)
+ uplim = 1.0
+ lowlim = 0.0
+ userlevel = 1
+
+ [illumination.diversity.shift]
+ default = None
+ type = float
+ help = Lateral shift of modes relative to main mode
+ doc = **[not implemented]**
+ userlevel = 2
+
+ [illumination.model]
+ default = None
+ type = str
+ help = Type of illumination model
+ doc = One of:
+ - ``None`` : model initialization defaults to flat array filled with the specified number of photons
+ - ``'recon'`` : load model from previous reconstruction, see `recon` Parameters
+ - ``'stxm'`` : Estimate model from autocorrelation of mean diffraction data
+ - ** : one of ptypy's internal image resource strings
+ - ** : one of the templates in the illumination module
+
+ In script, you may pass a numpy.ndarray here directly as the model. It is considered as incoming wavefront and will be propagated according to `propagation` with an optional `aperture` applied before.
+ userlevel = 0
+
+ [illumination.photons]
+ type = int, None
+ default = None
+ help = Number of photons in the incident illumination
+ doc = A value specified here will take precedence over calculated statistics from the loaded data.
+ lowlim = 0
+ userlevel = 2
+
+ [illumination.propagation]
+ type = Param
+ default =
+ help = Parameters for propagation after aperture plane
+ doc = Propagation to focus takes precedence over parallel propagation if `focussed` is not ``None``
+
+ [illumination.propagation.antialiasing]
+ default = 1
+ type = float
+ help = Antialiasing factor
+ doc = Antialiasing factor used when generating the probe. (numbers larger than 2 or 3 are memory hungry)
+ **[Untested]**
+ userlevel = 2
+
+ [illumination.propagation.focussed]
+ default = None
+ type = None, float
+ lowlim =
+ help = Propagation distance from aperture to focus
+ doc = If ``None`` or ``0`` : No focus propagation
+ userlevel = 0
+
+ [illumination.propagation.parallel]
+ default = None
+ type = None, float
+ help = Parallel propagation distance
+ doc = If ``None`` or ``0`` : No parallel propagation
+ userlevel = 0
+
+ [illumination.propagation.spot_size]
+ default = None
+ type = None, float
+ help = Focal spot diameter
+ doc = If not ``None``, this parameter is used to generate the appropriate aperture size instead of :py:data:`size`
+ lowlim = 0
+ userlevel = 1
+
+ [illumination.recon]
+ default =
+ type = Param
+ help = Parameters to load from previous reconstruction
+
+ [illumination.recon.label]
+ default = None
+ type = None, str
+ help = Scan label of diffraction that is to be used for probe estimate
+ doc = If ``None``, own scan label is used
+ userlevel = 1
+
+ [illumination.recon.rfile]
+ default = \*.ptyr
+ type = str
+ help = Path to a ``.ptyr`` compatible file
+ userlevel = 0
[sample.fill]
default = 1
@@ -1398,15 +1563,15 @@ def _initialize_probe(self, probe_ids):
# create an oversampled probe perpendicular to its incoming
# direction, using the illumination module as a utility.
- logger.info('Initializing as circle of size ' + str(self.p.illumination.size))
+ logger.info('Initializing probe')
Cprobe = Container(data_dims=2, data_type='float')
geo = self.geometries[0]
psize = min(geo.resolution) * .1
- extent = int(np.ceil(self.p.illumination.size / psize))
+ extent = int(np.ceil(self.p.illumination.aperture.size / psize))
Sprobe = Cprobe.new_storage(psize=10e-9, shape=extent)
- illu_pars = u.Param({'aperture':
- {'form': 'circ', 'size': self.p.illumination.size}})
- illumination.init_storage(Sprobe, illu_pars)
+
+ # fill the incoming probe
+ illumination.init_storage(Sprobe, self.p.illumination)
# Extrude the incoming probe in the right direction and frame
s.data[:] = geo.prepare_3d_probe(Sprobe, system='natural').data
diff --git a/ptypy/experiment/Bragg3dSim.py b/ptypy/experiment/Bragg3dSim.py
index ad61f03cb..8c79a49bc 100644
--- a/ptypy/experiment/Bragg3dSim.py
+++ b/ptypy/experiment/Bragg3dSim.py
@@ -7,6 +7,7 @@
import ptypy.utils as u
from ptypy.utils.descriptor import defaults_tree
from ptypy.core import geometry_bragg
+from ptypy.core import illumination
import numpy as np
import time
@@ -40,12 +41,176 @@ class Bragg3dSimScan(PtyScan):
[energy]
default = 8.5
- [probe_fwhm]
- default = 1e-6
+ [illumination.aperture]
+ type = Param
+ default =
+ help = Beam aperture parameters
+
+ [illumination.aperture.rotate]
+ type = float
+ default = 0.
+ help = Rotate aperture by this value
+ doc =
+
+ [illumination.aperture.central_stop]
+ help = size of central stop as a fraction of aperture.size
+ default = None
+ doc = If not None: places a central beam stop in aperture. The value given here is the fraction of the beam stop compared to `size`
+ lowlim = 0.
+ uplim = 1.
+ userlevel = 1
type = float
+
+ [illumination.aperture.diffuser]
+ help = Noise in the transparent part of the aperture
+ default = None
+ doc = Can be either:
+ - ``None`` : no noise
+ - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
+ - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
+ userlevel = 2
+ type = tuple
+
+ [illumination.aperture.edge]
+ help = Edge width of aperture (in pixels!)
+ type = float
+ default = 2.0
+ userlevel = 2
+
+ [illumination.aperture.form]
+ default = circ
+ type = None, str
+ help = One of None, 'rect' or 'circ'
+ doc = One of:
+ - ``None`` : no aperture, this may be useful for nearfield
+ - ``'rect'`` : rectangular aperture
+ - ``'circ'`` : circular aperture
+ choices = None,'rect','circ'
+ userlevel = 2
+
+ [illumination.aperture.offset]
+ default = 0.
+ type = float, tuple
+ help = Offset between center of aperture and optical axes
+ doc = May also be a tuple (vertical,horizontal) for size in case of an asymmetric offset
+ userlevel = 2
+
+ [illumination.aperture.size]
+ default = None
+ type = float
+ help = Aperture width or diameter
+ doc = May also be a tuple *(vertical,horizontal)* in case of an asymmetric aperture
+ lowlim = 0.
+ userlevel = 0
+
+ [illumination.diversity]
+ default = None
+ type = Param, None
+ help = Probe mode(s) diversity parameters
+ doc = Can be ``None`` i.e. no diversity
+ userlevel = 1
+
+ [illumination.diversity.noise]
+ default = None
+ type = tuple
+ help = Noise in the generated modes of the illumination
+ doc = Can be either:
+ - ``None`` : no noise
+ - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
+ - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
+ userlevel = 1
+
+ [illumination.diversity.power]
+ default = 0.1
+ type = tuple, float
+ help = Power of modes relative to main mode (zero-layer)
+ uplim = 1.0
lowlim = 0.0
- help = FWHM of the gaussian probe
+ userlevel = 1
+ [illumination.diversity.shift]
+ default = None
+ type = float
+ help = Lateral shift of modes relative to main mode
+ doc = **[not implemented]**
+ userlevel = 2
+
+ [illumination.model]
+ default = None
+ type = str
+ help = Type of illumination model
+ doc = One of:
+ - ``None`` : model initialization defaults to flat array filled with the specified number of photons
+ - ``'recon'`` : load model from previous reconstruction, see `recon` Parameters
+ - ``'stxm'`` : Estimate model from autocorrelation of mean diffraction data
+ - ** : one of ptypy's internal image resource strings
+ - ** : one of the templates in the illumination module
+
+ In script, you may pass a numpy.ndarray here directly as the model. It is considered as incoming wavefront and will be propagated according to `propagation` with an optional `aperture` applied before.
+ userlevel = 0
+
+ [illumination.photons]
+ type = int, None
+ default = None
+ help = Number of photons in the incident illumination
+ doc = A value specified here will take precedence over calculated statistics from the loaded data.
+ lowlim = 0
+ userlevel = 2
+
+ [illumination.propagation]
+ type = Param
+ default =
+ help = Parameters for propagation after aperture plane
+ doc = Propagation to focus takes precedence over parallel propagation if `focussed` is not ``None``
+
+ [illumination.propagation.antialiasing]
+ default = 1
+ type = float
+ help = Antialiasing factor
+ doc = Antialiasing factor used when generating the probe. (numbers larger than 2 or 3 are memory hungry)
+ **[Untested]**
+ userlevel = 2
+
+ [illumination.propagation.focussed]
+ default = None
+ type = None, float
+ lowlim =
+ help = Propagation distance from aperture to focus
+ doc = If ``None`` or ``0`` : No focus propagation
+ userlevel = 0
+
+ [illumination.propagation.parallel]
+ default = None
+ type = None, float
+ help = Parallel propagation distance
+ doc = If ``None`` or ``0`` : No parallel propagation
+ userlevel = 0
+
+ [illumination.propagation.spot_size]
+ default = None
+ type = None, float
+ help = Focal spot diameter
+ doc = If not ``None``, this parameter is used to generate the appropriate aperture size instead of :py:data:`size`
+ lowlim = 0
+ userlevel = 1
+
+ [illumination.recon]
+ default =
+ type = Param
+ help = Parameters to load from previous reconstruction
+
+ [illumination.recon.label]
+ default = None
+ type = None, str
+ help = Scan label of diffraction that is to be used for probe estimate
+ doc = If ``None``, own scan label is used
+ userlevel = 1
+
+ [illumination.recon.rfile]
+ default = \*.ptyr
+ type = str
+ help = Path to a ``.ptyr`` compatible file
+ userlevel = 0
[rocking_step]
# Godard: default = .01
default = .0025
@@ -159,9 +324,8 @@ def simulate(self):
print 'WARNING! fix simulation probe extent'
zi, yi = Sprobe.grids()
- # gaussian probe
- sigma = self.p.probe_fwhm / 2.3548
- Sprobe.data = np.exp(-zi**2 / (2 * sigma**2) - yi**2 / (2 * sigma**2))
+ # fill the incoming probe
+ illumination.init_storage(Sprobe, self.p.illumination)
# The Bragg geometry has a method to prepare a 3d Storage by extruding
# the 2d probe and interpolating to the right grid. The returned storage
From 45179d82f892930d50198dc7440bd637b8bd044e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Bj=C3=B6rling?=
Date: Wed, 15 Nov 2017 08:59:56 +0100
Subject: [PATCH 199/363] Full(ScanModel) imports sample and illumination
defaults
---
ptypy/core/illumination.py | 15 +-
ptypy/core/manager.py | 324 ++-----------------------------------
ptypy/core/sample.py | 15 +-
3 files changed, 22 insertions(+), 332 deletions(-)
diff --git a/ptypy/core/illumination.py b/ptypy/core/illumination.py
index 89b628101..30b17f025 100644
--- a/ptypy/core/illumination.py
+++ b/ptypy/core/illumination.py
@@ -20,12 +20,10 @@
TEMPLATES = dict()
-local_tree = EvalDescriptor('')
-@local_tree.parse_doc('illumination')
-class DummyClass(object):
- """
- Defaults:
-
+# Local, module-level defaults. These can be appended to the defaults of
+# other classes.
+illumination_desc = EvalDescriptor('illumination')
+illumination_desc.from_string(r"""
[aperture]
type = Param
default =
@@ -196,10 +194,9 @@ class DummyClass(object):
type = str
help = Path to a ``.ptyr`` compatible file
userlevel = 0
- """
- pass
+ """)
-DEFAULT = DummyClass.DEFAULT
+DEFAULT = illumination_desc.make_default(99)
DEFAULT_aperture = DEFAULT.aperture
__all__ = ['init_storage', 'aperture']
diff --git a/ptypy/core/manager.py b/ptypy/core/manager.py
index 549469e9b..c0b3b271c 100644
--- a/ptypy/core/manager.py
+++ b/ptypy/core/manager.py
@@ -595,6 +595,9 @@ class Full(ScanModel):
Defaults:
+ # note: this class also imports the module-level defaults for sample
+ # and illumination, below.
+
[name]
default = Full
type = str
@@ -661,320 +664,6 @@ class Full(ScanModel):
type = str
userlevel = 2
- [illumination.aperture]
- type = Param
- default =
- help = Beam aperture parameters
-
- [illumination.aperture.rotate]
- type = float
- default = 0.
- help = Rotate aperture by this value
- doc =
-
- [illumination.aperture.central_stop]
- help = size of central stop as a fraction of aperture.size
- default = None
- doc = If not None: places a central beam stop in aperture. The value given here is the fraction of the beam stop compared to `size`
- lowlim = 0.
- uplim = 1.
- userlevel = 1
- type = float
-
- [illumination.aperture.diffuser]
- help = Noise in the transparen part of the aperture
- default = None
- doc = Can be either:
- - ``None`` : no noise
- - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
- - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
- userlevel = 2
- type = tuple
-
- [illumination.aperture.edge]
- help = Edge width of aperture (in pixels!)
- type = float
- default = 2.0
- userlevel = 2
-
- [illumination.aperture.form]
- default = circ
- type = None, str
- help = One of None, 'rect' or 'circ'
- doc = One of:
- - ``None`` : no aperture, this may be useful for nearfield
- - ``'rect'`` : rectangular aperture
- - ``'circ'`` : circular aperture
- choices = None,'rect','circ'
- userlevel = 2
-
- [illumination.aperture.offset]
- default = 0.
- type = float, tuple
- help = Offset between center of aperture and optical axes
- doc = May also be a tuple (vertical,horizontal) for size in case of an asymmetric offset
- userlevel = 2
-
- [illumination.aperture.size]
- default = None
- type = float
- help = Aperture width or diameter
- doc = May also be a tuple *(vertical,horizontal)* in case of an asymmetric aperture
- lowlim = 0.
- userlevel = 0
-
- [illumination.diversity]
- default = None
- type = Param, None
- help = Probe mode(s) diversity parameters
- doc = Can be ``None`` i.e. no diversity
- userlevel = 1
-
- [illumination.diversity.noise]
- default = None
- type = tuple
- help = Noise in the generated modes of the illumination
- doc = Can be either:
- - ``None`` : no noise
- - ``2-tuple`` : noise in phase (amplitude (rms), minimum feature size)
- - ``4-tuple`` : noise in phase & modulus (rms, mfs, rms_mod, mfs_mod)
- userlevel = 1
-
- [illumination.diversity.power]
- default = 0.1
- type = tuple, float
- help = Power of modes relative to main mode (zero-layer)
- uplim = 1.0
- lowlim = 0.0
- userlevel = 1
-
- [illumination.diversity.shift]
- default = None
- type = float
- help = Lateral shift of modes relative to main mode
- doc = **[not implemented]**
- userlevel = 2
-
- [illumination.model]
- default = None
- type = str
- help = Type of illumination model
- doc = One of:
- - ``None`` : model initialitziation defaults to flat array filled with the specified number of photons
- - ``'recon'`` : load model from previous reconstruction, see `recon` Parameters
- - ``'stxm'`` : Estimate model from autocorrelation of mean diffraction data
- - ** : one of ptypys internal image resource strings
- - ** : one of the templates inillumination module
-
- In script, you may pass a numpy.ndarray here directly as the model. It is considered as incoming wavefront and will be propagated according to `propagation` with an optional `aperture` applied before.
- userlevel = 0
-
- [illumination.photons]
- type = int, None
- default = None
- help = Number of photons in the incident illumination
- doc = A value specified here will take precedence over calculated statistics from the loaded data.
- lowlim = 0
- userlevel = 2
-
- [illumination.propagation]
- type = Param
- default =
- help = Parameters for propagation after aperture plane
- doc = Propagation to focus takes precedence to parallel propagation if `foccused` is not ``None``
-
- [illumination.propagation.antialiasing]
- default = 1
- type = float
- help = Antialiasing factor
- doc = Antialiasing factor used when generating the probe. (numbers larger than 2 or 3 are memory hungry)
- **[Untested]**
- userlevel = 2
-
- [illumination.propagation.focussed]
- default = None
- type = None, float
- lowlim =
- help = Propagation distance from aperture to focus
- doc = If ``None`` or ``0`` : No focus propagation
- userlevel = 0
-
- [illumination.propagation.parallel]
- default = None
- type = None, float
- help = Parallel propagation distance
- doc = If ``None`` or ``0`` : No parallel propagation
- userlevel = 0
-
- [illumination.propagation.spot_size]
- default = None
- type = None, float
- help = Focal spot diameter
- doc = If not ``None``, this parameter is used to generate the appropriate aperture size instead of :py:data:`size`
- lowlim = 0
- userlevel = 1
-
- [illumination.recon]
- default =
- type = Param
- help = Parameters to load from previous reconstruction
-
- [illumination.recon.label]
- default = None
- type = None, str
- help = Scan label of diffraction that is to be used for probe estimate
- doc = If ``None``, own scan label is used
- userlevel = 1
-
- [illumination.recon.rfile]
- default = \*.ptyr
- type = str
- help = Path to a ``.ptyr`` compatible file
- userlevel = 0
-
- [sample.model]
- default = None
- help = Type of initial object model
- doc = One of:
- - ``None`` : model initialitziation defaults to flat array filled `fill`
- - ``'recon'`` : load model from STXM analysis of diffraction data
- - ``'stxm'`` : Estimate model from autocorrelation of mean diffraction data
- - ** : one of ptypys internal model resource strings
- - *