diff --git a/.travis.yml b/.travis.yml index d546fdff4b..4a90c1ba33 100644 --- a/.travis.yml +++ b/.travis.yml @@ -91,7 +91,6 @@ install: - if [[ "$CI_MODE" == "test" ]]; then ./scripts/buildSundials.sh; fi - if [[ "$CI_MODE" == "test" ]]; then ./scripts/buildCpputest.sh; fi - if [[ "$CI_MODE" == "test" ]]; then ./scripts/buildBNGL.sh; fi - - if [[ "$CI_MODE" == "test" ]]; then export BNGPATH=$(pwd)/ThirdParty/BioNetGen-2.3.2; fi - if [[ "$CI_MODE" == "test" ]]; then ./scripts/buildAmici.sh; fi - if [[ "$CI_MODE" == "test" ]]; then ./scripts/installAmiciArchive.sh; fi - if [[ "$CI_MODE" == "test" ]]; then ./scripts/installAmiciSource.sh; fi diff --git a/include/amici/amici.h b/include/amici/amici.h index a94605cb4f..2252608d33 100644 --- a/include/amici/amici.h +++ b/include/amici/amici.h @@ -38,12 +38,14 @@ std::unique_ptr runAmiciSimulation(Solver &solver, const ExpData *ed * @param solver Solver instance * @param edatas experimental data objects * @param model model specification object + * @param failfast flag to allow early termination * @param num_threads number of threads for parallel execution * @return vector of pointers to return data objects */ std::vector> runAmiciSimulations(Solver const& solver, const std::vector &edatas, Model const& model, + const bool failfast, int num_threads); } // namespace amici diff --git a/include/amici/serialization.h b/include/amici/serialization.h index 03bee43a88..dfa02abb33 100644 --- a/include/amici/serialization.h +++ b/include/amici/serialization.h @@ -87,6 +87,7 @@ void serialize(Archive &ar, amici::Model &u, const unsigned int version) { ar &const_cast(u.nw); ar &const_cast(u.ndwdx); ar &const_cast(u.ndwdp); + ar &const_cast(u.ndxdotdw); ar &const_cast(u.nnz); ar &const_cast(u.nJ); ar &const_cast(u.ubw); diff --git a/include/amici/sundials_matrix_wrapper.h b/include/amici/sundials_matrix_wrapper.h index 8c0173845f..18561d6286 100644 --- a/include/amici/sundials_matrix_wrapper.h +++ b/include/amici/sundials_matrix_wrapper.h @@ -159,7 +159,12 @@ class SUNMatrixWrapper { void multiply(realtype *c, const realtype *b) const; private: + void update_ptrs(); + SUNMatrix matrix = nullptr; + realtype *data_ptr = nullptr; + sunindextype *indexptrs_ptr = nullptr; + sunindextype *indexvals_ptr = nullptr; }; } // namespace amici diff --git a/python/amici/__init__.py b/python/amici/__init__.py index 884ca04aa2..3908640c85 100644 --- a/python/amici/__init__.py +++ b/python/amici/__init__.py @@ -153,13 +153,15 @@ def ExpData(*args): return amici.ExpData(*args) -def runAmiciSimulations(model, solver, edata_list, num_threads=1): +def runAmiciSimulations(model, solver, edata_list, failfast=True, + num_threads=1): """ Convenience wrapper for loops of amici.runAmiciSimulation Arguments: model: Model instance solver: Solver instance, must be generated from Model.getSolver() edata_list: list of ExpData instances + failfast: returns as soon as an integration failure is encountered num_threads: number of threads to use (only used if compiled with openmp) @@ -174,5 +176,6 @@ def runAmiciSimulations(model, solver, edata_list, num_threads=1): rdata_ptr_list = amici.runAmiciSimulations(solver.get(), edata_ptr_vector, model.get(), + failfast, num_threads) return [numpy.rdataToNumPyArrays(r) for r in rdata_ptr_list] diff --git a/python/amici/gradient_check.py b/python/amici/gradient_check.py new file mode 100644 index 0000000000..8418aaa396 --- /dev/null +++ b/python/amici/gradient_check.py @@ -0,0 +1,137 @@ +from . 
import (runAmiciSimulation, SensitivityOrder_none, + SensitivityMethod_forward) +import numpy as np +import copy + + +def check_finite_difference(x0, model, solver, edata, ip, fields, + assert_fun, atol=1e-4, rtol=1e-4, epsilon=1e-3): + old_sensitivity_order = solver.getSensitivityOrder() + old_parameters = model.getParameters() + old_plist = model.getParameterList() + + # sensitivity + p = copy.deepcopy(x0) + plist = [ip] + + model.setParameters(p) + model.setParameterList(plist) + rdata = runAmiciSimulation(model, solver, edata) + + # finite difference + solver.setSensitivityOrder(SensitivityOrder_none) + + # forward: + p = copy.deepcopy(x0) + p[ip] += epsilon/2 + model.setParameters(p) + rdataf = runAmiciSimulation(model, solver, edata) + + # backward: + p = copy.deepcopy(x0) + p[ip] -= epsilon/2 + model.setParameters(p) + rdatab = runAmiciSimulation(model, solver, edata) + + for field in fields: + sensi_raw = rdata[f's{field}'] + fd = (rdataf[field]-rdatab[field])/epsilon + if len(sensi_raw.shape) == 1: + sensi = sensi_raw[0] + elif len(sensi_raw.shape) == 2: + sensi = sensi_raw[:, 0] + elif len(sensi_raw.shape) == 3: + sensi = sensi_raw[:, 0, :] + else: + assert_fun(False) # not implemented + + check_close(sensi, fd, assert_fun, atol, rtol, field, ip=ip) + + solver.setSensitivityOrder(old_sensitivity_order) + model.setParameters(old_parameters) + model.setParameterList(old_plist) + + +def check_derivatives(model, solver, edata, assert_fun, + atol=1e-4, rtol=1e-4, epsilon=1e-3): + """Finite differences check for likelihood gradient + + Arguments: + model: amici model + solver: amici solver + edata: exp data + atol: absolute tolerance + rtol: relative tolerance + epsilon: finite difference step-size + """ + from scipy.optimize import check_grad + + p = np.array(model.getParameters()) + + rdata = runAmiciSimulation(model, solver, edata) + + fields = ['llh'] + + leastsquares_applicable = \ + solver.getSensitivityMethod() == SensitivityMethod_forward + + if 'ssigmay' in rdata.keys(): + if rdata['ssigmay'] is not None: + if rdata['ssigmay'].any(): + leastsquares_applicable = False + + if leastsquares_applicable: + fields += ['res', 'x', 'y'] + + check_results(rdata, 'FIM', + np.dot(rdata['sres'].transpose(), rdata['sres']), + assert_fun, + 1e-8, 1e-4) + check_results(rdata, 'sllh', + -np.dot(rdata['res'].transpose(), rdata['sres']), + assert_fun, + 1e-8, 1e-4) + for ip in range(len(p)): + check_finite_difference(p, model, solver, edata, ip, fields, + assert_fun, atol=atol, rtol=rtol, + epsilon=epsilon) + + +def check_close(result, expected, assert_fun, atol, rtol, field, ip=None): + close = np.isclose(result, expected, atol=atol, rtol=rtol, equal_nan=True) + + if not close.all(): + if ip is None: + index_str = '' + check_type = 'Regression check ' + else: + index_str = f'at index ip={ip} ' + check_type = 'FD check ' + print(f'{check_type} failed for {field} {index_str}for ' + f'{close.sum()} indices:') + adev = np.abs(result - expected) + rdev = np.abs((result - expected)/(expected + atol)) + print(f'max(adev): {adev.max()}, max(rdev): {rdev.max()}') + + assert_fun(close.all()) + + +def check_results(rdata, field, expected, assert_fun, atol, rtol): + """ + checks whether rdata[field] agrees with expected according to provided + tolerances + + Arguments: + rdata: simulation results as returned by amici.runAmiciSimulation + field: name of the field to check + expected: expected test results + atol: absolute tolerance + rtol: relative tolerance + """ + + result = rdata[field] + if type(result) 
is float: + result = np.array(result) + + check_close(result, expected, assert_fun, atol, rtol, field) + diff --git a/python/amici/ode_export.py b/python/amici/ode_export.py index b0333818a8..8a225fa839 100644 --- a/python/amici/ode_export.py +++ b/python/amici/ode_export.py @@ -691,6 +691,9 @@ class ODEModel: _syms: carries symbolic identifiers of the symbolic variables of the model @type dict + _strippedsyms: carries symbolic identifiers that were stripped of + additional class information @type dict + _sparsesyms: carries linear list of all symbolic identifiers for sparsified variables @type dict @@ -759,6 +762,7 @@ def __init__(self): self._vals = dict() self._names = dict() self._syms = dict() + self._strippedsyms = dict() self._sparsesyms = dict() self._colptrs = dict() self._rowvals = dict() @@ -913,7 +917,7 @@ def add_conservation_law(self, state, total_abundance, state_expr, state_id = self._states[ix].get_id() self.add_component( - Expression(state_id, f'cl_{state_id}', state_expr) + Expression(state_id, str(state_id), state_expr) ) self.add_component( @@ -1004,22 +1008,31 @@ def np(self): """ return len(self.sym('p')) - def sym(self, name): + def sym(self, name, stripped=False): """Returns (and constructs if necessary) the identifiers for a symbolic entity. Arguments: name: name of the symbolic variable @type str + stripped: (optional) should additional class information be + stripped from the symbolic variables? @type bool Returns: containing the symbolic identifiers @type symengine.DenseMatrix Raises: - + ValueError if stripped symbols not available """ if name not in self._syms: self._generateSymbol(name) - return self._syms[name] + + if stripped: + if name not in self._variable_prototype: + raise ValueError('Stripped symbols only available for ' + 'variables from variable prototypes') + return self._strippedsyms[name] + else: + return self._syms[name] def sparsesym(self, name): """Returns (and constructs if necessary) the sparsified identifiers for @@ -1166,6 +1179,10 @@ def _generateSymbol(self, name): comp.get_id() for comp in getattr(self, component) ]) + self._strippedsyms[name] = sp.Matrix([ + sp.Symbol(comp.get_name()) + for comp in getattr(self, component) + ]) if name == 'y': self._syms['my'] = sp.Matrix([ sp.Symbol(f'm{strip_pysb(comp.get_id())}') @@ -1486,13 +1503,26 @@ def _derivative(self, eq, var, name=None): self._lock_total_derivative = False return + # this is the basic requirement check + needs_stripped_symbols = eq == 'xdot' and var != 'x' + # partial derivative if eq == 'Jy': eq = self.eq(eq).transpose() else: eq = self.eq(eq) - sym_var = self.sym(var) + if pysb is not None and needs_stripped_symbols: + needs_stripped_symbols = not any( + isinstance(sym, pysb.Component) + for sym in eq.free_symbols + ) + + # now check whether we are working with energy_modeling branch + # where pysb class info is already stripped + # TODO: fixme as soon as energy_modeling made it to the main pysb + # branch + sym_var = self.sym(var, needs_stripped_symbols) if min(eq.shape) and min(sym_var.shape) \ and eq.is_zero is not True and sym_var.is_zero is not True: diff --git a/python/amici/pysb_import.py b/python/amici/pysb_import.py index 682150136b..b708dcd875 100644 --- a/python/amici/pysb_import.py +++ b/python/amici/pysb_import.py @@ -793,7 +793,6 @@ def _greedy_target_index_update(cl_prototypes): # with the highest diff_fillin (note that the target index counts # are recomputed on the fly) - if prototype['diff_fillin'] > -1 \ and ( get_target_indices(cl_prototypes).count( 
diff --git a/python/sdist/MANIFEST.in b/python/sdist/MANIFEST.in index 4d4ae7dd1b..45b4d6bdd5 100644 --- a/python/sdist/MANIFEST.in +++ b/python/sdist/MANIFEST.in @@ -6,4 +6,5 @@ include amici/* include amici/src/*template* include amici/swig/CMakeLists_model.txt include setup_clibs.py -include version.txt \ No newline at end of file +include custom_commands.py +include version.txt diff --git a/python/sdist/amici/gradient_check.py b/python/sdist/amici/gradient_check.py new file mode 120000 index 0000000000..3402ef0822 --- /dev/null +++ b/python/sdist/amici/gradient_check.py @@ -0,0 +1 @@ +../../amici/gradient_check.py \ No newline at end of file diff --git a/python/sdist/custom_commands.py b/python/sdist/custom_commands.py new file mode 100644 index 0000000000..6aee6a70e7 --- /dev/null +++ b/python/sdist/custom_commands.py @@ -0,0 +1,230 @@ +"""Custom setuptools commands for AMICI installation""" + +import glob +import os +import sys +import subprocess +from shutil import copyfile + +from setuptools.command.build_ext import build_ext +from setuptools.command.sdist import sdist +from setuptools.command.install_lib import install_lib +from setuptools.command.develop import develop +from setuptools.command.install import install +from setuptools.command.build_clib import build_clib + +from amici.setuptools import generateSwigInterfaceFiles + + +class my_install(install): + """Custom install to handle extra arguments""" + + # Passing --no-clibs allows to install the Python-only part of AMICI + user_options = install.user_options + [ + ('no-clibs', None, "Don't build AMICI C++ extension"), + ] + + def initialize_options(self): + install.initialize_options(self) + self.no_clibs = False + + def finalize_options(self): + if self.no_clibs: + self.no_clibs = True + install.finalize_options(self) + + def run(self): + install.run(self) + + +def compile_parallel(self, sources, output_dir=None, macros=None, + include_dirs=None, debug=0, extra_preargs=None, + extra_postargs=None, depends=None): + """Parallelized version of distutils.ccompiler.compile""" + macros, objects, extra_postargs, pp_opts, build = \ + self._setup_compile(output_dir, macros, include_dirs, sources, + depends, extra_postargs) + cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) + + # parallel compilation + num_threads = 1 + if 'AMICI_PARALLEL_COMPILE' in os.environ: + max_threads = int(os.environ['AMICI_PARALLEL_COMPILE']) + num_threads = min(len(objects), max_threads) + num_threads = max(1, num_threads) + + def _single_compile(obj): + try: + src, ext = build[obj] + except KeyError: + return + self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) + + if num_threads > 1: + import multiprocessing.pool + # convert to list, imap is evaluated on-demand + list(multiprocessing.pool.ThreadPool(num_threads).imap( + _single_compile, objects)) + else: + for obj in objects: + _single_compile(obj) + + return objects + + +class my_build_clib(build_clib): + """Custom build_clib""" + + def build_libraries(self, libraries): + no_clibs = self.get_finalized_command('develop').no_clibs + no_clibs |= self.get_finalized_command('install').no_clibs + + if no_clibs: + return + + # Override for parallel compilation + import distutils.ccompiler + distutils.ccompiler.CCompiler.compile = compile_parallel + + build_clib.build_libraries(self, libraries) + + +class my_develop(develop): + """Custom develop to build clibs""" + + # Passing --no-clibs allows to install the Python-only part of AMICI + user_options = develop.user_options + [ + 
('no-clibs', None, "Don't build AMICI C++ extension"), + ] + + def initialize_options(self): + develop.initialize_options(self) + self.no_clibs = False + + def finalize_options(self): + if self.no_clibs: + self.no_clibs = True + develop.finalize_options(self) + + def run(self): + if not self.no_clibs: + generateSwigInterfaceFiles() + self.run_command('build') + + develop.run(self) + + +class my_install_lib(install_lib): + """Custom install to allow preserving of debug symbols""" + def run(self): + """strip debug symbols + + Returns: + + """ + if 'ENABLE_AMICI_DEBUGGING' in os.environ \ + and os.environ['ENABLE_AMICI_DEBUGGING'] == 'TRUE' \ + and sys.platform == 'darwin': + search_dir = os.path.join(os.getcwd(),self.build_dir,'amici') + for file in os.listdir(search_dir): + if file.endswith('.so'): + subprocess.run(['dsymutil',os.path.join(search_dir,file), + '-o',os.path.join(search_dir,file + '.dSYM')]) + + + # Continue with the actual installation + install_lib.run(self) + + +class my_build_ext(build_ext): + """Custom build_ext to allow keeping otherwise temporary static libs""" + + def run(self): + """Copy the generated clibs to the extensions folder to be included in the wheel + + Returns: + + """ + no_clibs = self.get_finalized_command('develop').no_clibs + no_clibs |= self.get_finalized_command('install').no_clibs + + if no_clibs: + return + + if not self.dry_run: # --dry-run + libraries = [] + build_clib = '' + if self.distribution.has_c_libraries(): + # get the previously built static libraries + build_clib = self.get_finalized_command('build_clib') + libraries = build_clib.get_library_names() or [] + library_dirs = build_clib.build_clib + + # Module build directory where we want to copy the generated libs + # to + if self.inplace == 0: + build_dir = self.build_lib + else: + build_dir = os.getcwd() + + target_dir = os.path.join(build_dir, 'amici', 'libs') + self.mkpath(target_dir) + + # Copy the generated libs + for lib in libraries: + libfilenames = glob.glob( + '%s%s*%s.*' % (build_clib.build_clib, os.sep, lib) + ) + assert len(libfilenames) == 1, \ + "Found unexpected number of files: " % libfilenames + + copyfile(libfilenames[0], + os.path.join(target_dir, os.path.basename(libfilenames[0]))) + + # Continue with the actual extension building + build_ext.run(self) + + +class my_sdist(sdist): + """Custom sdist to run swig and add the interface files to the source distribution + + Could have relied on letting build_ext run swig. However, that would require any user having swig installed + during package installation. This way we can postpone that until the package is used to compile generated models. + """ + + def run(self): + """Setuptools entry-point + + Returns: + + """ + self.runSwig() + self.saveGitVersion() + sdist.run(self) + + def runSwig(self): + """Run swig + + Returns: + + """ + + if not self.dry_run: # --dry-run + # We create two SWIG interfaces, one with HDF5 support, one without + generateSwigInterfaceFiles() + + def saveGitVersion(self): + """Create file with extended version string + + This requires git. We assume that whoever creates the sdist will work inside + a valid git repository. 
+ + Returns: + + """ + with open("amici/git_version.txt", "w") as f: + sp = subprocess.run(['git', 'describe', + '--abbrev=4', '--dirty=-dirty', + '--always', '--tags'], + stdout=f) + assert(sp.returncode == 0) diff --git a/python/sdist/setup.py b/python/sdist/setup.py index 1894b37580..94df7927df 100755 --- a/python/sdist/setup.py +++ b/python/sdist/setup.py @@ -13,20 +13,14 @@ """ from setuptools import find_packages, setup, Extension -from setuptools.command.build_ext import build_ext -from setuptools.command.sdist import sdist -from setuptools.command.install_lib import install_lib -from setuptools.command.develop import develop -from setuptools.command.install import install -from setuptools.command.build_clib import build_clib import os import sys -import glob import sysconfig import subprocess -from shutil import copyfile import setup_clibs # Must run from within containing directory +from custom_commands import (my_install, my_build_clib, my_develop, + my_install_lib, my_build_ext, my_sdist) def try_install(package): @@ -55,278 +49,104 @@ def try_install(package): getHdf5Config, addCoverageFlagsIfRequired, addDebugFlagsIfRequired, - generateSwigInterfaceFiles, ) # Python version check. We need >= 3.6 due to e.g. f-strings if sys.version_info < (3, 6): sys.exit('amici requires at least Python version 3.6') -# Extra compiler flags -cxx_flags = [] -amici_module_linker_flags = [] -define_macros = [] -blaspkgcfg = getBlasConfig() -amici_module_linker_flags.extend('-l%s' % l for l in blaspkgcfg['libraries']) - -h5pkgcfg = getHdf5Config() +def main(): + # Extra compiler flags + cxx_flags = [] + amici_module_linker_flags = [] + define_macros = [] -if h5pkgcfg['found']: - # Manually add linker flags. The libraries passed to Extension will - # end up in front of the clibs in the linker line and not after, where - # they are required. - print("HDF5 library found. Building AMICI with HDF5 support.") + blaspkgcfg = getBlasConfig() amici_module_linker_flags.extend( - ['-l%s' % l for l in ['hdf5_hl_cpp', 'hdf5_hl', 'hdf5_cpp', 'hdf5']]) - extension_sources = [ - 'amici/amici_wrap.cxx', # swig interface - ] -else: - print("HDF5 library NOT found. 
Building AMICI WITHOUT HDF5 support.") - extension_sources = [ - 'amici/amici_wrap_without_hdf5.cxx', # swig interface - ] - -addCoverageFlagsIfRequired( - cxx_flags, - amici_module_linker_flags, -) - -addDebugFlagsIfRequired( - cxx_flags, - amici_module_linker_flags, -) - -# compiler and linker flags for libamici -if 'AMICI_CXXFLAGS' in os.environ: - cxx_flags.extend(os.environ['AMICI_CXXFLAGS'].split(' ')) -if 'AMICI_LDFLAGS' in os.environ: - amici_module_linker_flags.extend(os.environ['AMICI_LDFLAGS'].split(' ')) - -libamici = setup_clibs.getLibAmici( - h5pkgcfg=h5pkgcfg, blaspkgcfg=blaspkgcfg, extra_compiler_flags=cxx_flags) -libsundials = setup_clibs.getLibSundials(extra_compiler_flags=cxx_flags) -libsuitesparse = setup_clibs.getLibSuiteSparse( - extra_compiler_flags=cxx_flags + ['-DDLONG'] -) - -# Build shared object -amici_module = Extension( - name='amici._amici', - sources=extension_sources, - include_dirs=['amici/include', - *libsundials[1]['include_dirs'], - *libsuitesparse[1]['include_dirs'], - *h5pkgcfg['include_dirs'], - *blaspkgcfg['include_dirs'], - np.get_include() - ], - # Cannot use here, see above - # libraries=[ - # 'hdf5_hl_cpp', 'hdf5_hl', 'hdf5_cpp', 'hdf5' - #], - define_macros=define_macros, - library_dirs=[ - *h5pkgcfg['library_dirs'], - *blaspkgcfg['library_dirs'], - 'amici/libs', # clib target directory - ], - extra_compile_args=['-std=c++11', *cxx_flags], - extra_link_args=amici_module_linker_flags -) - - -class my_install(install): - """Custom install to handle extra arguments""" - - # Passing --no-clibs allows to install the Python-only part of AMICI - user_options = install.user_options + [ - ('no-clibs', None, "Don't build AMICI C++ extension"), - ] - - def initialize_options(self): - install.initialize_options(self) - self.no_clibs = False - - def finalize_options(self): - if self.no_clibs: - self.no_clibs = True - install.finalize_options(self) - - def run(self): - install.run(self) - - -class my_build_clib(build_clib): - """Custom build_clib""" - - def build_libraries(self, libraries): - no_clibs = self.get_finalized_command('develop').no_clibs - no_clibs |= self.get_finalized_command('install').no_clibs - - if no_clibs: - return - - build_clib.build_libraries(self, libraries) - - -class my_develop(develop): - """Custom develop to build clibs""" - - # Passing --no-clibs allows to install the Python-only part of AMICI - user_options = develop.user_options + [ - ('no-clibs', None, "Don't build AMICI C++ extension"), - ] - - def initialize_options(self): - develop.initialize_options(self) - self.no_clibs = False - - def finalize_options(self): - if self.no_clibs: - self.no_clibs = True - develop.finalize_options(self) - - def run(self): - if not self.no_clibs: - generateSwigInterfaceFiles() - self.run_command('build') - - develop.run(self) - - -class my_install_lib(install_lib): - """Custom install to allow preserving of debug symbols""" - def run(self): - """strip debug symbols - - Returns: - - """ - if 'ENABLE_AMICI_DEBUGGING' in os.environ \ - and os.environ['ENABLE_AMICI_DEBUGGING'] == 'TRUE' \ - and sys.platform == 'darwin': - search_dir = os.path.join(os.getcwd(),self.build_dir,'amici') - for file in os.listdir(search_dir): - if file.endswith('.so'): - subprocess.run(['dsymutil',os.path.join(search_dir,file), - '-o',os.path.join(search_dir,file + '.dSYM')]) - - - # Continue with the actual installation - install_lib.run(self) - - -class my_build_ext(build_ext): - """Custom build_ext to allow keeping otherwise temporary static libs""" - - def 
run(self): - """Copy the generated clibs to the extensions folder to be included in the wheel - - Returns: - - """ - no_clibs = self.get_finalized_command('develop').no_clibs - no_clibs |= self.get_finalized_command('install').no_clibs - - if no_clibs: - return - - if not self.dry_run: # --dry-run - libraries = [] - build_clib = '' - if self.distribution.has_c_libraries(): - # get the previously built static libraries - build_clib = self.get_finalized_command('build_clib') - libraries = build_clib.get_library_names() or [] - library_dirs = build_clib.build_clib - - # Module build directory where we want to copy the generated libs - # to - if self.inplace == 0: - build_dir = self.build_lib - else: - build_dir = os.getcwd() - - target_dir = os.path.join(build_dir, 'amici', 'libs') - self.mkpath(target_dir) - - # Copy the generated libs - for lib in libraries: - libfilenames = glob.glob( - '%s%s*%s.*' % (build_clib.build_clib, os.sep, lib) - ) - assert len(libfilenames) == 1, \ - "Found unexpected number of files: " % libfilenames - - copyfile(libfilenames[0], - os.path.join(target_dir, os.path.basename(libfilenames[0]))) - - # Continue with the actual extension building - build_ext.run(self) - - -class my_sdist(sdist): - """Custom sdist to run swig and add the interface files to the source distribution - - Could have relied on letting build_ext run swig. However, that would require any user having swig installed - during package installation. This way we can postpone that until the package is used to compile generated models. - """ - - def run(self): - """Setuptools entry-point - - Returns: - - """ - self.runSwig() - self.saveGitVersion() - sdist.run(self) - - def runSwig(self): - """Run swig - - Returns: - - """ - - if not self.dry_run: # --dry-run - # We create two SWIG interfaces, one with HDF5 support, one without - generateSwigInterfaceFiles() - - - def saveGitVersion(self): - """Create file with extended version string - - This requires git. We assume that whoever creates the sdist will work inside - a valid git repository. - - Returns: + '-l%s' % l for l in blaspkgcfg['libraries']) + + h5pkgcfg = getHdf5Config() + + if h5pkgcfg['found']: + # Manually add linker flags. The libraries passed to Extension will + # end up in front of the clibs in the linker line and not after, where + # they are required. + print("HDF5 library found. Building AMICI with HDF5 support.") + amici_module_linker_flags.extend( + ['-l%s' % l for l in + ['hdf5_hl_cpp', 'hdf5_hl', 'hdf5_cpp', 'hdf5']]) + extension_sources = [ + 'amici/amici_wrap.cxx', # swig interface + ] + else: + print("HDF5 library NOT found. 
Building AMICI WITHOUT HDF5 support.") + extension_sources = [ + 'amici/amici_wrap_without_hdf5.cxx', # swig interface + ] + + addCoverageFlagsIfRequired( + cxx_flags, + amici_module_linker_flags, + ) - """ - with open("amici/git_version.txt", "w") as f: - sp = subprocess.run(['git', 'describe', - '--abbrev=4', '--dirty=-dirty', - '--always', '--tags'], - stdout=f) - assert(sp.returncode == 0) + addDebugFlagsIfRequired( + cxx_flags, + amici_module_linker_flags, + ) + # compiler and linker flags for libamici + if 'AMICI_CXXFLAGS' in os.environ: + cxx_flags.extend(os.environ['AMICI_CXXFLAGS'].split(' ')) + if 'AMICI_LDFLAGS' in os.environ: + amici_module_linker_flags.extend( + os.environ['AMICI_LDFLAGS'].split(' ')) + + libamici = setup_clibs.getLibAmici( + h5pkgcfg=h5pkgcfg, blaspkgcfg=blaspkgcfg, + extra_compiler_flags=cxx_flags) + libsundials = setup_clibs.getLibSundials(extra_compiler_flags=cxx_flags) + libsuitesparse = setup_clibs.getLibSuiteSparse( + extra_compiler_flags=cxx_flags + ['-DDLONG'] + ) -# Readme as long package description to go on PyPi -# (https://pypi.org/project/amici/) -with open("README.md", "r") as fh: - long_description = fh.read() + # Build shared object + amici_module = Extension( + name='amici._amici', + sources=extension_sources, + include_dirs=['amici/include', + *libsundials[1]['include_dirs'], + *libsuitesparse[1]['include_dirs'], + *h5pkgcfg['include_dirs'], + *blaspkgcfg['include_dirs'], + np.get_include() + ], + # Cannot use here, see above + # libraries=[ + # 'hdf5_hl_cpp', 'hdf5_hl', 'hdf5_cpp', 'hdf5' + # ], + define_macros=define_macros, + library_dirs=[ + *h5pkgcfg['library_dirs'], + *blaspkgcfg['library_dirs'], + 'amici/libs', # clib target directory + ], + extra_compile_args=['-std=c++11', *cxx_flags], + extra_link_args=amici_module_linker_flags + ) -# Remove the "-Wstrict-prototypes" compiler option, which isn't valid for -# C++ to fix warnings. -cfg_vars = sysconfig.get_config_vars() -for key, value in cfg_vars.items(): - if type(value) == str: - cfg_vars[key] = value.replace("-Wstrict-prototypes", "") + # Readme as long package description to go on PyPi + # (https://pypi.org/project/amici/) + with open("README.md", "r") as fh: + long_description = fh.read() + # Remove the "-Wstrict-prototypes" compiler option, which isn't valid for + # C++ to fix warnings. + cfg_vars = sysconfig.get_config_vars() + for key, value in cfg_vars.items(): + if type(value) == str: + cfg_vars[key] = value.replace("-Wstrict-prototypes", "") -def main(): # Install setup( name='amici', @@ -378,7 +198,7 @@ def main(): }, test_suite="tests", classifiers=[ - 'Development Status :: 3 - Alpha', + 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: BSD License', 'Operating System :: POSIX :: Linux', @@ -389,5 +209,6 @@ def main(): ], ) + if __name__ == '__main__': main() diff --git a/scripts/run-codecov.sh b/scripts/run-codecov.sh index c99fe5eeca..9af284cba5 100755 --- a/scripts/run-codecov.sh +++ b/scripts/run-codecov.sh @@ -7,6 +7,12 @@ AMICI_PATH=$(cd $SCRIPT_PATH/.. && pwd) source ${AMICI_PATH}/build/venv/bin/activate pip install coverage pip install -U git+https://github.com/pysb/pysb + + +if [[ -z "${BNGPATH}" ]]; then + export BNGPATH=${AMICI_PATH}/ThirdParty/BioNetGen-2.3.2 +fi + python ./tests/testCoverage.py ret=$? 
if [[ $ret != 0 ]]; then exit $ret; fi diff --git a/scripts/run-python-tests.sh b/scripts/run-python-tests.sh index a138875238..e4751a7af5 100755 --- a/scripts/run-python-tests.sh +++ b/scripts/run-python-tests.sh @@ -5,6 +5,11 @@ SCRIPT_PATH=$(dirname $BASH_SOURCE) AMICI_PATH=$(cd $SCRIPT_PATH/.. && pwd) set -e + +if [[ -z "${BNGPATH}" ]]; then + export BNGPATH=${AMICI_PATH}/ThirdParty/BioNetGen-2.3.2 +fi + cd ${AMICI_PATH}/tests source ${AMICI_PATH}/build/venv/bin/activate pip install scipy h5py diff --git a/src/amici.cpp b/src/amici.cpp index 89bd4bf3e4..319c356835 100644 --- a/src/amici.cpp +++ b/src/amici.cpp @@ -132,6 +132,7 @@ void printWarnMsgIdAndTxt(const char *identifier, const char *format, ...) { std::vector > runAmiciSimulations(const Solver &solver, const std::vector &edatas, const Model &model, + const bool failfast, #if defined(_OPENMP) int num_threads #else @@ -140,6 +141,7 @@ std::vector > runAmiciSimulations(const Solver &solv ) { std::vector > results(edatas.size()); + bool failed = false; #if defined(_OPENMP) #pragma omp parallel for num_threads(num_threads) @@ -148,7 +150,18 @@ std::vector > runAmiciSimulations(const Solver &solv auto mySolver = std::unique_ptr(solver.clone()); auto myModel = std::unique_ptr(model.clone()); + /* if we fail we need to write empty return datas for the python + interface */ + if (failed) { + ConditionContext conditionContext(myModel.get(), edatas[i]); + results[i] = + std::unique_ptr(new ReturnData(solver, &model)); + } + results[i] = runAmiciSimulation(*mySolver, edatas[i], *myModel); + + if (results[i]->status < 0 && failfast) + failed = true; } return results; diff --git a/src/model.cpp b/src/model.cpp index e9a25e41d2..9c189578cf 100644 --- a/src/model.cpp +++ b/src/model.cpp @@ -1037,7 +1037,7 @@ void Model::fdydp(const realtype t, const AmiVector *x) { // get dydp slice (ny) for current time and parameter if (wasPythonGenerated() && nw) dwdp_tmp = &dwdp.at(nw * ip); - + fdydp(&dydp.at(ip*ny), t, x->data(), @@ -1441,11 +1441,11 @@ void Model::fdwdp(const realtype t, const realtype *x) { std::fill(dwdp.begin(), dwdp.end(), 0.0); if (wasPythonGenerated()) { realtype *stcl = nullptr; - + // avoid bad memory access when slicing if (nw == 0) return; - + for (int ip = 0; ip < nplist(); ++ip) { if (ncl() > 0) stcl = &stotal_cl.at(plist(ip) * ncl()); @@ -1635,7 +1635,7 @@ bool operator ==(const Model &a, const Model &b) && (a.nw == b.nw) && (a.ndwdx == b.ndwdx) && (a.ndwdp == b.ndwdp) - && (a.ndxdotdw == a.ndxdotdw) + && (a.ndxdotdw == b.ndxdotdw) && (a.nnz == b.nnz) && (a.nJ == b.nJ) && (a.ubw == b.ubw) diff --git a/src/rdata.cpp b/src/rdata.cpp index 7848e0a3b5..7352444096 100644 --- a/src/rdata.cpp +++ b/src/rdata.cpp @@ -232,9 +232,9 @@ void ReturnData::applyChainRuleFactorToSimulationResults(const Model *model) { #define chainRule(QUANT, IND1, N1T, N1, IND2, N2) \ if (!s##QUANT.empty()) \ - for (int (IND1) = 0; (IND1) < (N1T); ++(IND1)) \ + for (int IND1 = 0; (IND1) < (N1T); ++(IND1)) \ for (int ip = 0; ip < nplist; ++ip) \ - for (int (IND2) = 0; (IND2) < (N2); ++(IND2)) { \ + for (int IND2 = 0; (IND2) < (N2); ++(IND2)) { \ s##QUANT.at(((IND2)*nplist + ip)*(N1) + (IND1)) *= \ pcoefficient.at(ip); \ } diff --git a/src/sundials_matrix_wrapper.cpp b/src/sundials_matrix_wrapper.cpp index 7b2c349bfa..9b2275fac9 100644 --- a/src/sundials_matrix_wrapper.cpp +++ b/src/sundials_matrix_wrapper.cpp @@ -16,18 +16,24 @@ SUNMatrixWrapper::SUNMatrixWrapper(int M, int N, int NNZ, int sparsetype) if (NNZ && !matrix) throw std::bad_alloc(); + + 
update_ptrs(); } SUNMatrixWrapper::SUNMatrixWrapper(int M, int N) : matrix(SUNDenseMatrix(M, N)) { if (M && N && !matrix) throw std::bad_alloc(); + + update_ptrs(); } SUNMatrixWrapper::SUNMatrixWrapper(int M, int ubw, int lbw) : matrix(SUNBandMatrix(M, ubw, lbw)) { if (M && !matrix) throw std::bad_alloc(); + + update_ptrs(); } SUNMatrixWrapper::SUNMatrixWrapper(const SUNMatrixWrapper &A, realtype droptol, @@ -50,9 +56,13 @@ SUNMatrixWrapper::SUNMatrixWrapper(const SUNMatrixWrapper &A, realtype droptol, if (!matrix) throw std::bad_alloc(); + + update_ptrs(); } -SUNMatrixWrapper::SUNMatrixWrapper(SUNMatrix mat) : matrix(mat) {} +SUNMatrixWrapper::SUNMatrixWrapper(SUNMatrix mat) : matrix(mat) { + update_ptrs(); +} SUNMatrixWrapper::~SUNMatrixWrapper() { if (matrix) @@ -68,10 +78,12 @@ SUNMatrixWrapper::SUNMatrixWrapper(const SUNMatrixWrapper &other) { throw std::bad_alloc(); SUNMatCopy(other.matrix, matrix); + update_ptrs(); } SUNMatrixWrapper::SUNMatrixWrapper(SUNMatrixWrapper &&other) noexcept { std::swap(matrix, other.matrix); + update_ptrs(); } SUNMatrixWrapper &SUNMatrixWrapper::operator=(const SUNMatrixWrapper &other) { @@ -81,31 +93,18 @@ SUNMatrixWrapper &SUNMatrixWrapper::operator=(const SUNMatrixWrapper &other) { SUNMatrixWrapper &SUNMatrixWrapper:: operator=(SUNMatrixWrapper &&other) noexcept { std::swap(matrix, other.matrix); + update_ptrs(); return *this; } realtype *SUNMatrixWrapper::data() const { - if (!matrix) - return nullptr; - - switch (SUNMatGetID(matrix)) { - case SUNMATRIX_DENSE: - return SM_DATA_D(matrix); - case SUNMATRIX_SPARSE: - return SM_DATA_S(matrix); - case SUNMATRIX_BAND: - return SM_DATA_B(matrix); - case SUNMATRIX_CUSTOM: - throw std::domain_error("Amici currently does not support custom matrix" - " types."); - } - return nullptr; // -Wreturn-type + return data_ptr; } sunindextype SUNMatrixWrapper::rows() const { if (!matrix) return 0; - + switch (SUNMatGetID(matrix)) { case SUNMATRIX_DENSE: return SM_ROWS_D(matrix); @@ -116,13 +115,15 @@ sunindextype SUNMatrixWrapper::rows() const { case SUNMATRIX_CUSTOM: throw std::domain_error("Amici currently does not support custom matrix" " types."); + default: + throw std::domain_error("Invalid SUNMatrix type."); } } sunindextype SUNMatrixWrapper::columns() const { if (!matrix) return 0; - + switch (SUNMatGetID(matrix)) { case SUNMATRIX_DENSE: return SM_COLUMNS_D(matrix); @@ -133,31 +134,17 @@ sunindextype SUNMatrixWrapper::columns() const { case SUNMATRIX_CUSTOM: throw std::domain_error("Amici currently does not support custom matrix" " types."); + default: + throw std::domain_error("Invalid SUNMatrix type."); } } sunindextype *SUNMatrixWrapper::indexvals() const { - if (!matrix) - return nullptr; - - switch (SUNMatGetID(matrix)) { - case SUNMATRIX_SPARSE: - return SM_INDEXVALS_S(matrix); - default: - throw std::domain_error("Function only available for sparse matrices"); - } + return indexvals_ptr; } sunindextype *SUNMatrixWrapper::indexptrs() const { - if (!matrix) - return nullptr; - - switch (SUNMatGetID(matrix)) { - case SUNMATRIX_SPARSE: - return SM_INDEXPTRS_S(matrix); - default: - throw std::domain_error("Function only available for sparse matrices"); - } + return indexptrs_ptr; } int SUNMatrixWrapper::sparsetype() const { @@ -166,7 +153,7 @@ int SUNMatrixWrapper::sparsetype() const { else throw std::domain_error("Function only available for sparse matrices"); } - + void SUNMatrixWrapper::reset() { if (matrix) SUNMatZero(matrix); @@ -200,7 +187,7 @@ void SUNMatrixWrapper::multiply(N_Vector c, const N_Vector 
b) const { void SUNMatrixWrapper::multiply(realtype *c, const realtype *b) const { if (!matrix) return; - + switch (SUNMatGetID(matrix)) { case SUNMATRIX_DENSE: amici_dgemv(BLASLayout::colMajor, BLASTranspose::noTrans, rows(), @@ -211,17 +198,17 @@ void SUNMatrixWrapper::multiply(realtype *c, const realtype *b) const { switch (sparsetype()) { case CSC_MAT: for (sunindextype i = 0; i < columns(); ++i) { - for (sunindextype k = indexptrs()[i]; k < indexptrs()[i + 1]; + for (sunindextype k = indexptrs_ptr[i]; k < indexptrs_ptr[i + 1]; ++k) { - c[indexvals()[k]] += data()[k] * b[i]; + c[indexvals_ptr[k]] += data_ptr[k] * b[i]; } } break; case CSR_MAT: for (sunindextype i = 0; i < rows(); ++i) { - for (sunindextype k = indexptrs()[i]; k < indexptrs()[i + 1]; + for (sunindextype k = indexptrs_ptr[i]; k < indexptrs_ptr[i + 1]; ++k) { - c[i] += data()[k] * b[indexvals()[k]]; + c[i] += data_ptr[k] * b[indexvals_ptr[k]]; } } break; @@ -235,6 +222,32 @@ void SUNMatrixWrapper::multiply(realtype *c, const realtype *b) const { } } +void SUNMatrixWrapper::update_ptrs() { + if(!matrix) + return; + + switch (SUNMatGetID(matrix)) { + case SUNMATRIX_DENSE: + if (columns() > 0 && rows() > 0) + data_ptr = SM_DATA_D(matrix); + break; + case SUNMATRIX_SPARSE: + if (SM_NNZ_S(matrix) > 0) { + data_ptr = SM_DATA_S(matrix); + indexptrs_ptr = SM_INDEXPTRS_S(matrix); + indexvals_ptr = SM_INDEXVALS_S(matrix); + } + break; + case SUNMATRIX_BAND: + if (columns() > 0 && rows() > 0) + data_ptr = SM_DATA_B(matrix); + break; + case SUNMATRIX_CUSTOM: + throw std::domain_error("Amici currently does not support" + "custom matrix types."); + } +} + SUNMatrix SUNMatrixWrapper::get() const { return matrix; } } // namespace amici diff --git a/tests/testModels.py b/tests/testModels.py index 5f9658f262..eb12f2e986 100755 --- a/tests/testModels.py +++ b/tests/testModels.py @@ -6,15 +6,15 @@ import unittest import importlib import os -import numpy as np import copy +from amici.gradient_check import check_derivatives, check_results class TestAmiciPregeneratedModel(unittest.TestCase): """ TestCase class for tests that were pregenerated using the the matlab code generation routines and cmake build routines - + NOTE: requires having run `make python-tests` in /build/ before to build the python modules for the test models """ @@ -104,10 +104,18 @@ def assert_fun(x): assert_fun, ) + if model_name == 'model_steadystate' and \ + case == 'sensiforwarderrorint': + edata = amici.amici.ExpData(self.model.get()) + if edata and model_name != 'model_neuron_o2': # Test runAmiciSimulations: ensure running twice # with same ExpData yields same results - edatas = [edata.get(), edata.get()] + if isinstance(edata, amici.amici.ExpData): + edatas = [edata, edata] + else: + edatas = [edata.get(), edata.get()] + rdatas = amici.runAmiciSimulations( self.model, self.solver, edatas, num_threads=2 ) @@ -129,124 +137,12 @@ def assert_fun(x): ) -def check_close(result, expected, assert_fun, atol, rtol, field, ip=None): - close = np.isclose(result, expected, atol=atol, rtol=rtol, equal_nan=True) - - if not close.all(): - if ip is None: - index_str = '' - check_type = 'Regression check ' - else: - index_str = f'at index ip={ip} ' - check_type = 'FD check ' - print(f'{check_type} failed for {field} {index_str}for ' - f'{close.sum()} indices:') - adev = np.abs(result - expected) - rdev = np.abs((result - expected)/(expected + atol)) - print(f'max(adev): {adev.max()}, max(rdev): {rdev.max()}') - - assert_fun(close.all()) - - -def check_finite_difference(x0, model, 
solver, edata, ip, fields, - assert_fun, atol=1e-4, rtol=1e-4, epsilon=1e-3): - old_sensitivity_order = solver.getSensitivityOrder() - old_parameters = model.getParameters() - old_plist = model.getParameterList() - - # sensitivity - p = copy.deepcopy(x0) - plist = [ip] - - model.setParameters(p) - model.setParameterList(plist) - rdata = amici.runAmiciSimulation(model, solver, edata) - - # finite difference - solver.setSensitivityOrder(amici.SensitivityOrder_none) - - # forward: - p = copy.deepcopy(x0) - p[ip] += epsilon/2 - model.setParameters(p) - rdataf = amici.runAmiciSimulation(model, solver, edata) - - # backward: - p = copy.deepcopy(x0) - p[ip] -= epsilon/2 - model.setParameters(p) - rdatab = amici.runAmiciSimulation(model, solver, edata) - - for field in fields: - sensi_raw = rdata[f's{field}'] - fd = (rdataf[field]-rdatab[field])/epsilon - if len(sensi_raw.shape) == 1: - sensi = sensi_raw[0] - elif len(sensi_raw.shape) == 2: - sensi = sensi_raw[:, 0] - elif len(sensi_raw.shape) == 3: - sensi = sensi_raw[:, 0, :] - else: - assert_fun(False) # not implemented - - check_close(sensi, fd, assert_fun, atol, rtol, field, ip=ip) - - solver.setSensitivityOrder(old_sensitivity_order) - model.setParameters(old_parameters) - model.setParameterList(old_plist) - - -def check_derivatives(model, solver, edata, assert_fun, - atol=1e-4, rtol=1e-4, epsilon=1e-3): - """Finite differences check for likelihood gradient - - Arguments: - model: amici model - solver: amici solver - edata: exp data - atol: absolute tolerance - rtol: relative tolerance - epsilon: finite difference step-size - """ - from scipy.optimize import check_grad - - p = np.array(model.getParameters()) - - rdata = amici.runAmiciSimulation(model, solver, edata) - - fields = ['llh'] - - leastsquares_applicable = \ - solver.getSensitivityMethod() == amici.SensitivityMethod_forward - - if 'ssigmay' in rdata.keys(): - if rdata['ssigmay'] is not None: - if rdata['ssigmay'].any(): - leastsquares_applicable = False - - if leastsquares_applicable: - fields += ['res', 'x', 'y'] - - check_results(rdata, 'FIM', - np.dot(rdata['sres'].transpose(), rdata['sres']), - assert_fun, - 1e-8, 1e-4) - check_results(rdata, 'sllh', - -np.dot(rdata['res'].transpose(), rdata['sres']), - assert_fun, - 1e-8, 1e-4) - for ip in range(len(p)): - check_finite_difference(p, model, solver, edata, ip, fields, - assert_fun, atol=atol, rtol=rtol, - epsilon=epsilon) - - def verify_simulation_results(rdata, expected_results, assert_fun, atol=1e-8, rtol=1e-4): """ compares all fields of the simulation results in rdata against the expectedResults using the provided tolerances - + Arguments: rdata: simulation results as returned by amici.runAmiciSimulation expected_results: stored test results @@ -277,26 +173,6 @@ def verify_simulation_results(rdata, expected_results, assert_fun, atol, rtol) -def check_results(rdata, field, expected, assert_fun, atol, rtol): - """ - checks whether rdata[field] agrees with expected according to provided - tolerances - - Arguments: - rdata: simulation results as returned by amici.runAmiciSimulation - field: name of the field to check - expected: expected test results - atol: absolute tolerance - rtol: relative tolerance - """ - - result = rdata[field] - if type(result) is float: - result = np.array(result) - - check_close(result, expected, assert_fun, atol, rtol, field) - - if __name__ == '__main__': suite = unittest.TestSuite() suite.addTest(TestAmiciPregeneratedModel()) diff --git a/version.txt b/version.txt index 5eef0f10e8..a3f5a8ed4d 100644 
--- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.10.2 +0.10.3
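
Usage sketch for the new `failfast` argument of `amici.runAmiciSimulations` (the pre-built model module `model_steadystate` and the empty `ExpData` objects are illustrative assumptions, not part of this change):

import amici
import model_steadystate as model_module  # hypothetical pre-built AMICI model

model = model_module.getModel()
solver = model.getSolver()

# two experimental conditions; in practice these would carry measurements
edata_list = [amici.amici.ExpData(model.get()),
              amici.amici.ExpData(model.get())]

# With failfast=True (the default), no further conditions are simulated once
# one simulation reports a negative status; skipped entries are returned as
# empty ReturnData objects so that indices still match edata_list.
rdata_list = amici.runAmiciSimulations(model, solver, edata_list,
                                       failfast=True, num_threads=2)
print([rdata['status'] for rdata in rdata_list])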
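
A sketch of driving the new `amici.gradient_check` helpers outside the shipped test suite, assuming a pre-built model module and the default tolerances:

import amici
from amici.gradient_check import check_derivatives
import model_steadystate as model_module  # hypothetical pre-built AMICI model

model = model_module.getModel()
solver = model.getSolver()
solver.setSensitivityOrder(amici.SensitivityOrder_first)
solver.setSensitivityMethod(amici.SensitivityMethod_forward)

edata = amici.amici.ExpData(model.get())

def assert_fun(condition):
    # inside a unittest.TestCase one would use self.assertTrue instead
    assert condition

# compares forward sensitivities against central finite differences and, for
# least-squares type models, additionally checks FIM and sllh consistency
check_derivatives(model, solver, edata, assert_fun,
                  atol=1e-4, rtol=1e-4, epsilon=1e-3)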
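
The custom `build_clib` command now honours an `AMICI_PARALLEL_COMPILE` environment variable; a sketch of requesting a parallel source build (the pip invocation and the thread count are illustrative assumptions):

import os
import subprocess
import sys

env = dict(os.environ, AMICI_PARALLEL_COMPILE='4')  # example: 4 compile threads
subprocess.run(
    [sys.executable, '-m', 'pip', 'install', '--no-binary', 'amici', 'amici'],
    env=env, check=True)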
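
The CI scripts now fall back to the bundled BioNetGen when BNGPATH is unset; the same fallback can be reproduced when running the PySB tests by hand (the checkout path below is an assumed location):

import os

amici_path = os.path.expanduser('~/src/AMICI')  # assumed repository location
os.environ.setdefault(
    'BNGPATH', os.path.join(amici_path, 'ThirdParty', 'BioNetGen-2.3.2'))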