diff --git a/C/CMakeLists.txt b/C/CMakeLists.txt index 4e589bcb..12882cfa 100644 --- a/C/CMakeLists.txt +++ b/C/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required (VERSION 3.12) -PROJECT (HDF5Examples_C) +project (HDF5Examples_C C) #----------------------------------------------------------------------------- # Build the C Examples diff --git a/C/H5D/CMakeLists.txt b/C/H5D/CMakeLists.txt index b530f4c3..bde15d19 100644 --- a/C/H5D/CMakeLists.txt +++ b/C/H5D/CMakeLists.txt @@ -221,33 +221,40 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - add_test ( - NAME ${EXAMPLE_VARNAME}_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) - if (HDF5_BUILD_TOOLS) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + else () add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} + NAME ${EXAMPLE_VARNAME}_${testname} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.ddl.out" -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname} 
PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}.ddl.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + endif () endif () endmacro () diff --git a/C/H5D/Makefile.am b/C/H5D/Makefile.am index 06cdf66d..dfccb4b7 100644 --- a/C/H5D/Makefile.am +++ b/C/H5D/Makefile.am @@ -5,12 +5,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. ## ## Makefile.am ## Run automake to generate a Makefile.in from this file. 
diff --git a/C/H5D/test.sh.in b/C/H5D/test.sh.in index f6e84273..ece23035 100755 --- a/C/H5D/test.sh.in +++ b/C/H5D/test.sh.in @@ -6,12 +6,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. srcdir=@srcdir@ diff --git a/C/H5FLT/CMakeLists.txt b/C/H5FLT/CMakeLists.txt index 9345b662..fb2c4fd4 100644 --- a/C/H5FLT/CMakeLists.txt +++ b/C/H5FLT/CMakeLists.txt @@ -24,6 +24,14 @@ else () set (BLOSC_AVAILABLE 0) endif () +option (ENABLE_BLOSC2 "Enable Library Building for blosc2 plugin" ON) +if (ENABLE_BLOSC2) + set (BLOSC2_AVAILABLE 1) + set (dyn_examples ${dyn_examples} h5ex_d_blosc2) +else () + set (BLOSC2_AVAILABLE 0) +endif () + option (ENABLE_BSHUF "Enable Library Building for bshuf plugin" ON) if (ENABLE_BSHUF) if (NOT CMAKE_C_COMPILER_ID STREQUAL "Intel") @@ -188,6 +196,7 @@ if (H5EX_BUILD_TESTING) add_test ( NAME ${EXAMPLE_VARNAME}_${testname}-ERR COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" -D "TEST_ARGS:STRING=${ARGN}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" @@ -215,6 +224,7 @@ if (H5EX_BUILD_TESTING) add_test ( NAME ${EXAMPLE_VARNAME}_${testname} COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" -D 
"TEST_ARGS:STRING=${ARGN}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" @@ -234,7 +244,8 @@ if (H5EX_BUILD_TESTING) add_test ( NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" -D "TEST_ARGS:STRING=--enable-error-stack;-p;${testname}.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" -D "TEST_OUTPUT=${testname}.ddl.out" @@ -251,6 +262,7 @@ if (H5EX_BUILD_TESTING) add_test ( NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" -D "TEST_ARGS:STRING=--enable-error-stack;-p;${testname}.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" @@ -287,11 +299,13 @@ if (H5EX_BUILD_TESTING) add_custom_target(${EXAMPLE_VARNAME}_example_files ALL COMMENT "Copying files needed by example tests" DEPENDS ${example_files_list}) foreach (h5_file ${dyn_examples}) - if (${h5_file} MATCHES "h5ex_d_zfp") - ## special filter - ADD_H5_TEST (h5ex_d_zfp FILTERALL) - else () - ADD_H5_TEST (${h5_file}) + if (NOT HDF5_USING_ANALYSIS_TOOL) + if (${h5_file} MATCHES "h5ex_d_zfp") + ## special filter + ADD_H5_TEST (h5ex_d_zfp FILTERALL) + else () + ADD_H5_TEST (${h5_file}) + endif () endif () endforeach () diff --git a/C/H5FLT/h5ex_d_blosc.c b/C/H5FLT/h5ex_d_blosc.c index 883b4593..3fc13654 100644 --- a/C/H5FLT/h5ex_d_blosc.c +++ b/C/H5FLT/h5ex_d_blosc.c @@ -42,7 +42,7 @@ main(void) herr_t status; htri_t avail; H5Z_filter_t filter_id = 0; - char filter_name[80]; + char filter_name[128]; hsize_t dims[2] = {DIM0, DIM1}, chunk[2] = {CHUNK0, CHUNK1}; size_t nelmts = 7; /* number of elements in cd_values */ unsigned int flags; diff --git a/C/H5FLT/h5ex_d_blosc2.c b/C/H5FLT/h5ex_d_blosc2.c new file mode 100644 index 00000000..dae0becb --- /dev/null +++ b/C/H5FLT/h5ex_d_blosc2.c @@ -0,0 +1,234 @@ +/* * * * * * * * * * * * * * * * * * * * * * * 
* * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of the HDF5 BLOSC2 filter plugin source. The full * + * copyright notice, including terms governing use, modification, and * + * redistribution, is contained in * + * the file COPYING, which can be found at the root of the BLOSC2 source code * + * distribution tree. If you do not have access to this file, you may * + * request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + + This example shows how to write data and read it from a dataset + using blosc2 compression. + blosc2 filter is not available in HDF5. + The example uses a new feature available in HDF5 version 1.8.11 + to discover, load and register filters at run time. + + ************************************************************/ + +#include "hdf5.h" +#include +#include + +#define FILE "h5ex_d_blosc2.h5" +#define DATASET "DS1" +#define DIM0 32 +#define DIM1 64 +#define CHUNK0 4 +#define CHUNK1 8 +#define H5Z_FILTER_BLOSC2 32026 + +int +main(void) +{ + hid_t file_id = H5I_INVALID_HID; /* Handles */ + hid_t space_id = H5I_INVALID_HID; /* Handles */ + hid_t dset_id = H5I_INVALID_HID; /* Handles */ + hid_t dcpl_id = H5I_INVALID_HID; /* Handles */ + herr_t status; + htri_t avail; + H5Z_filter_t filter_id = 0; + char filter_name[128]; + hsize_t dims[2] = {DIM0, DIM1}, chunk[2] = {CHUNK0, CHUNK1}; + size_t nelmts = 10; /* number of elements in cd_values */ + unsigned int flags; + unsigned filter_config; + const unsigned int cd_values[10] = {0, 0, 0, 0, 4, 1, 2, 2, 4, 8}; /* blosc parameters */ + unsigned int values_out[10] = {99, 99, 99, 99, 99, 99, 99, 99, 99, 99}; + int wdata[DIM0][DIM1], /* Write buffer */ + rdata[DIM0][DIM1], /* Read buffer */ + max; + hsize_t i, j; + int ret_value = 1; + + /* + * Initialize data. 
+ */ + for (i = 0; i < DIM0; i++) + for (j = 0; j < DIM1; j++) + wdata[i][j] = i * j - j; + + /* + * Create a new file using the default properties. + */ + file_id = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); + if (file_id < 0) + goto done; + + /* + * Create dataspace. Setting maximum size to NULL sets the maximum + * size to be the current size. + */ + space_id = H5Screate_simple(2, dims, NULL); + if (space_id < 0) + goto done; + + /* + * Create the dataset creation property list, add the gzip + * compression filter and set the chunk size. + */ + dcpl_id = H5Pcreate(H5P_DATASET_CREATE); + if (dcpl_id < 0) + goto done; + + status = H5Pset_filter(dcpl_id, H5Z_FILTER_BLOSC2, H5Z_FLAG_OPTIONAL, nelmts, cd_values); + if (status < 0) + goto done; + + /* + * Check that filter is registered with the library now. + * If it is registered, retrieve filter's configuration. + */ + avail = H5Zfilter_avail(H5Z_FILTER_BLOSC2); + if (avail) { + status = H5Zget_filter_info(H5Z_FILTER_BLOSC2, &filter_config); + if ((filter_config & H5Z_FILTER_CONFIG_ENCODE_ENABLED) && + (filter_config & H5Z_FILTER_CONFIG_DECODE_ENABLED)) + printf("blosc2 filter is available for encoding and decoding.\n"); + } + else { + printf("H5Zfilter_avail - not found.\n"); + goto done; + } + status = H5Pset_chunk(dcpl_id, 2, chunk); + if (status < 0) + printf("failed to set chunk.\n"); + + /* + * Create the dataset. + */ + printf("....Create dataset ................\n"); + dset_id = H5Dcreate(file_id, DATASET, H5T_STD_I32LE, space_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT); + if (dset_id < 0) { + printf("failed to create dataset.\n"); + goto done; + } + + /* + * Write the data to the dataset. + */ + printf("....Writing blosc2 compressed data ................\n"); + status = H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata[0]); + if (status < 0) + printf("failed to write data.\n"); + + /* + * Close and release resources. 
+ */ + H5Dclose(dset_id); + dset_id = -1; + H5Pclose(dcpl_id); + dcpl_id = -1; + H5Sclose(space_id); + space_id = -1; + H5Fclose(file_id); + file_id = -1; + status = H5close(); + if (status < 0) { + printf("\nFAILED to close library\n"); + goto done; + } + + printf("....Close the file and reopen for reading ........\n"); + /* + * Now we begin the read section of this example. + */ + + /* + * Open file and dataset using the default properties. + */ + file_id = H5Fopen(FILE, H5F_ACC_RDONLY, H5P_DEFAULT); + if (file_id < 0) + goto done; + + dset_id = H5Dopen(file_id, DATASET, H5P_DEFAULT); + if (dset_id < 0) + goto done; + + /* + * Retrieve dataset creation property list. + */ + dcpl_id = H5Dget_create_plist(dset_id); + if (dcpl_id < 0) + goto done; + + /* + * Retrieve and print the filter id, compression level and filter's name for blosc. + */ + filter_id = H5Pget_filter2(dcpl_id, (unsigned)0, &flags, &nelmts, values_out, sizeof(filter_name), + filter_name, NULL); + printf("Filter info is available from the dataset creation property\n "); + printf(" Filter identifier is "); + switch (filter_id) { + case H5Z_FILTER_BLOSC2: + printf("%d\n", filter_id); + printf(" Number of parameters is %d with the value %u %u %u\n", nelmts, values_out[4], + values_out[5], values_out[6]); + printf(" To find more about the filter check %s\n", filter_name); + break; + default: + printf("Not expected filter\n"); + break; + } + + /* + * Read the data using the default properties. + */ + printf("....Reading blosc2 compressed data ................\n"); + status = H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata[0]); + if (status < 0) + printf("failed to read data.\n"); + + /* + * Find the maximum value in the dataset, to verify that it was + * read correctly. + */ + max = rdata[0][0]; + for (i = 0; i < DIM0; i++) + for (j = 0; j < DIM1; j++) { + /*printf("%d \n", rdata[i][j]); */ + if (max < rdata[i][j]) + max = rdata[i][j]; + } + /* + * Print the maximum value. 
+ */ + printf("Maximum value in %s is %d\n", DATASET, max); + /* + * Check that filter is registered with the library now. + */ + avail = H5Zfilter_avail(H5Z_FILTER_BLOSC2); + if (avail) + printf("blosc2 filter is available now since H5Dread triggered loading of the filter.\n"); + + ret_value = 0; + +done: + /* + * Close and release resources. + */ + if (dcpl_id >= 0) + H5Pclose(dcpl_id); + if (dset_id >= 0) + H5Dclose(dset_id); + if (space_id >= 0) + H5Sclose(space_id); + if (file_id >= 0) + H5Fclose(file_id); + + return ret_value; +} diff --git a/C/H5FLT/h5ex_d_bshuf.c b/C/H5FLT/h5ex_d_bshuf.c index a5f3ed72..7b666da0 100644 --- a/C/H5FLT/h5ex_d_bshuf.c +++ b/C/H5FLT/h5ex_d_bshuf.c @@ -42,7 +42,7 @@ main(void) herr_t status; htri_t avail; H5Z_filter_t filter_id = 0; - char filter_name[80]; + char filter_name[128]; hsize_t dims[2] = {DIM0, DIM1}, chunk[2] = {CHUNK0, CHUNK1}; size_t nelmts = 3; /* number of elements in cd_values */ unsigned int flags; diff --git a/C/H5FLT/h5ex_d_bzip2.c b/C/H5FLT/h5ex_d_bzip2.c index 0c02b1da..421a1bf9 100644 --- a/C/H5FLT/h5ex_d_bzip2.c +++ b/C/H5FLT/h5ex_d_bzip2.c @@ -42,7 +42,7 @@ main(void) herr_t status; htri_t avail; H5Z_filter_t filter_id = 0; - char filter_name[80]; + char filter_name[128]; hsize_t dims[2] = {DIM0, DIM1}, chunk[2] = {CHUNK0, CHUNK1}; size_t nelmts = 1; /* number of elements in cd_values */ unsigned int flags; diff --git a/C/H5FLT/h5ex_d_jpeg.c b/C/H5FLT/h5ex_d_jpeg.c index 78a42528..a3931bb3 100644 --- a/C/H5FLT/h5ex_d_jpeg.c +++ b/C/H5FLT/h5ex_d_jpeg.c @@ -45,7 +45,7 @@ main(void) herr_t status; htri_t avail; H5Z_filter_t filter_id = 0; - char filter_name[80]; + char filter_name[128]; hsize_t dims[3] = {NUM_IMAGES, DIM0, DIM1}, chunk[3] = {CHUNK0, CHUNK1, CHUNK2}; size_t nelmts = 4; /* number of elements in cd_values */ unsigned int flags; diff --git a/C/H5FLT/h5ex_d_lz4.c b/C/H5FLT/h5ex_d_lz4.c index 9900b512..0bcfc876 100644 --- a/C/H5FLT/h5ex_d_lz4.c +++ b/C/H5FLT/h5ex_d_lz4.c @@ -42,7 +42,7 @@ 
main(void) herr_t status; htri_t avail; H5Z_filter_t filter_id = 0; - char filter_name[80]; + char filter_name[128]; hsize_t dims[2] = {DIM0, DIM1}, chunk[2] = {CHUNK0, CHUNK1}; size_t nelmts = 1; /* number of elements in cd_values */ unsigned int flags; diff --git a/C/H5FLT/h5ex_d_lzf.c b/C/H5FLT/h5ex_d_lzf.c index 0e74e790..a860d67e 100644 --- a/C/H5FLT/h5ex_d_lzf.c +++ b/C/H5FLT/h5ex_d_lzf.c @@ -42,7 +42,7 @@ main(void) herr_t status; htri_t avail; H5Z_filter_t filter_id = 0; - char filter_name[80]; + char filter_name[128]; hsize_t dims[2] = {DIM0, DIM1}, chunk[2] = {CHUNK0, CHUNK1}; size_t nelmts = 3; /* number of elements in cd_values */ unsigned int flags; diff --git a/C/H5FLT/h5ex_d_mafisc.c b/C/H5FLT/h5ex_d_mafisc.c index b04a5b55..5cee4a07 100644 --- a/C/H5FLT/h5ex_d_mafisc.c +++ b/C/H5FLT/h5ex_d_mafisc.c @@ -42,7 +42,7 @@ main(void) herr_t status; htri_t avail; H5Z_filter_t filter_id = 0; - char filter_name[80]; + char filter_name[128]; hsize_t dims[2] = {DIM0, DIM1}, chunk[2] = {CHUNK0, CHUNK1}; size_t nelmts = 8; /* number of elements in cd_values */ unsigned int flags; diff --git a/C/H5FLT/h5ex_d_zfp.c b/C/H5FLT/h5ex_d_zfp.c index cb7d24d7..dd0a9366 100644 --- a/C/H5FLT/h5ex_d_zfp.c +++ b/C/H5FLT/h5ex_d_zfp.c @@ -42,7 +42,7 @@ main(void) herr_t status; htri_t avail; H5Z_filter_t filter_id = 0; - char filter_name[80]; + char filter_name[128]; hsize_t dims[2] = {DIM0, DIM1}, chunk[2] = {CHUNK0, CHUNK1}; size_t nelmts = 3; /* number of elements in cd_values */ unsigned int flags; diff --git a/C/H5FLT/h5ex_d_zstd.c b/C/H5FLT/h5ex_d_zstd.c index 6891a710..28f084d9 100644 --- a/C/H5FLT/h5ex_d_zstd.c +++ b/C/H5FLT/h5ex_d_zstd.c @@ -44,7 +44,7 @@ main(void) herr_t status; htri_t avail; H5Z_filter_t filter_id = 0; - char filter_name[80]; + char filter_name[128]; hsize_t dims[3] = {NUM_IMAGES, DIM0, DIM1}, chunk[3] = {CHUNK0, CHUNK1, CHUNK2}; size_t nelmts = 1; /* number of elements in cd_values */ unsigned int flags; diff --git a/C/H5FLT/tfiles/h5ex_d_blosc.ddl 
b/C/H5FLT/tfiles/h5ex_d_blosc.ddl index 324fa1df..c14881d6 100644 --- a/C/H5FLT/tfiles/h5ex_d_blosc.ddl +++ b/C/H5FLT/tfiles/h5ex_d_blosc.ddl @@ -10,7 +10,7 @@ GROUP "/" { FILTERS { USER_DEFINED_FILTER { FILTER_ID 32001 - COMMENT HDF5 blosc filter; see http://www.hdfgroup.org/services/contributions.html + COMMENT HDF5 blosc filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md PARAMS { 2 2 4 128 4 1 2 } } } diff --git a/C/H5FLT/tfiles/h5ex_d_blosc.tst b/C/H5FLT/tfiles/h5ex_d_blosc.tst index ace1975e..4a349b3d 100644 --- a/C/H5FLT/tfiles/h5ex_d_blosc.tst +++ b/C/H5FLT/tfiles/h5ex_d_blosc.tst @@ -5,7 +5,7 @@ blosc filter is available for encoding and decoding. Filter info is available from the dataset creation property Filter identifier is 32001 Number of parameters is 7 with the value 4 1 2 - To find more about the filter check HDF5 blosc filter; see http://www.hdfgroup.org/services/contributions.html + To find more about the filter check HDF5 blosc filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md ....Reading blosc compressed data ................ Maximum value in DS1 is 1890 blosc filter is available now since H5Dread triggered loading of the filter. 
diff --git a/C/H5FLT/tfiles/h5ex_d_blosc2.ddl b/C/H5FLT/tfiles/h5ex_d_blosc2.ddl new file mode 100644 index 00000000..df86cd05 --- /dev/null +++ b/C/H5FLT/tfiles/h5ex_d_blosc2.ddl @@ -0,0 +1,209 @@ +HDF5 "h5ex_d_blosc2.h5" { +GROUP "/" { + DATASET "DS1" { + DATATYPE H5T_STD_I32LE + DATASPACE SIMPLE { ( 32, 64 ) / ( 32, 64 ) } + STORAGE_LAYOUT { + CHUNKED ( 4, 8 ) + SIZE 21247 (0.386:1 COMPRESSION) + } + FILTERS { + USER_DEFINED_FILTER { + FILTER_ID 32026 + COMMENT HDF5 blosc2 filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md + PARAMS { 1 0 4 128 4 1 2 2 4 8 } + } + } + FILLVALUE { + FILL_TIME H5D_FILL_TIME_IFSET + VALUE H5D_FILL_VALUE_DEFAULT + } + ALLOCATION_TIME { + H5D_ALLOC_TIME_INCR + } + DATA { + (0,0): 0, -1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, + (0,15): -15, -16, -17, -18, -19, -20, -21, -22, -23, -24, -25, -26, + (0,27): -27, -28, -29, -30, -31, -32, -33, -34, -35, -36, -37, -38, + (0,39): -39, -40, -41, -42, -43, -44, -45, -46, -47, -48, -49, -50, + (0,51): -51, -52, -53, -54, -55, -56, -57, -58, -59, -60, -61, -62, + (0,63): -63, + (1,0): 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + (1,21): 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + (1,42): 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + (1,63): 0, + (2,0): 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, + (2,18): 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, + (2,34): 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, + (2,50): 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, + (3,0): 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, + (3,17): 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, + (3,33): 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, + (3,49): 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, + (3,62): 124, 126, + (4,0): 0, 3, 6, 9, 12, 15, 18, 21, 24, 27, 
30, 33, 36, 39, 42, 45, 48, + (4,17): 51, 54, 57, 60, 63, 66, 69, 72, 75, 78, 81, 84, 87, 90, 93, 96, + (4,33): 99, 102, 105, 108, 111, 114, 117, 120, 123, 126, 129, 132, 135, + (4,46): 138, 141, 144, 147, 150, 153, 156, 159, 162, 165, 168, 171, + (4,58): 174, 177, 180, 183, 186, 189, + (5,0): 0, 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48, 52, 56, 60, 64, + (5,17): 68, 72, 76, 80, 84, 88, 92, 96, 100, 104, 108, 112, 116, 120, + (5,31): 124, 128, 132, 136, 140, 144, 148, 152, 156, 160, 164, 168, + (5,43): 172, 176, 180, 184, 188, 192, 196, 200, 204, 208, 212, 216, + (5,55): 220, 224, 228, 232, 236, 240, 244, 248, 252, + (6,0): 0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, + (6,16): 80, 85, 90, 95, 100, 105, 110, 115, 120, 125, 130, 135, 140, + (6,29): 145, 150, 155, 160, 165, 170, 175, 180, 185, 190, 195, 200, + (6,41): 205, 210, 215, 220, 225, 230, 235, 240, 245, 250, 255, 260, + (6,53): 265, 270, 275, 280, 285, 290, 295, 300, 305, 310, 315, + (7,0): 0, 6, 12, 18, 24, 30, 36, 42, 48, 54, 60, 66, 72, 78, 84, 90, + (7,16): 96, 102, 108, 114, 120, 126, 132, 138, 144, 150, 156, 162, 168, + (7,29): 174, 180, 186, 192, 198, 204, 210, 216, 222, 228, 234, 240, + (7,41): 246, 252, 258, 264, 270, 276, 282, 288, 294, 300, 306, 312, + (7,53): 318, 324, 330, 336, 342, 348, 354, 360, 366, 372, 378, + (8,0): 0, 7, 14, 21, 28, 35, 42, 49, 56, 63, 70, 77, 84, 91, 98, 105, + (8,16): 112, 119, 126, 133, 140, 147, 154, 161, 168, 175, 182, 189, + (8,28): 196, 203, 210, 217, 224, 231, 238, 245, 252, 259, 266, 273, + (8,40): 280, 287, 294, 301, 308, 315, 322, 329, 336, 343, 350, 357, + (8,52): 364, 371, 378, 385, 392, 399, 406, 413, 420, 427, 434, 441, + (9,0): 0, 8, 16, 24, 32, 40, 48, 56, 64, 72, 80, 88, 96, 104, 112, 120, + (9,16): 128, 136, 144, 152, 160, 168, 176, 184, 192, 200, 208, 216, + (9,28): 224, 232, 240, 248, 256, 264, 272, 280, 288, 296, 304, 312, + (9,40): 320, 328, 336, 344, 352, 360, 368, 376, 384, 392, 400, 408, + (9,52): 416, 424, 432, 440, 448, 456, 
464, 472, 480, 488, 496, 504, + (10,0): 0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 99, 108, 117, 126, + (10,15): 135, 144, 153, 162, 171, 180, 189, 198, 207, 216, 225, 234, + (10,27): 243, 252, 261, 270, 279, 288, 297, 306, 315, 324, 333, 342, + (10,39): 351, 360, 369, 378, 387, 396, 405, 414, 423, 432, 441, 450, + (10,51): 459, 468, 477, 486, 495, 504, 513, 522, 531, 540, 549, 558, + (10,63): 567, + (11,0): 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, + (11,15): 150, 160, 170, 180, 190, 200, 210, 220, 230, 240, 250, 260, + (11,27): 270, 280, 290, 300, 310, 320, 330, 340, 350, 360, 370, 380, + (11,39): 390, 400, 410, 420, 430, 440, 450, 460, 470, 480, 490, 500, + (11,51): 510, 520, 530, 540, 550, 560, 570, 580, 590, 600, 610, 620, + (11,63): 630, + (12,0): 0, 11, 22, 33, 44, 55, 66, 77, 88, 99, 110, 121, 132, 143, 154, + (12,15): 165, 176, 187, 198, 209, 220, 231, 242, 253, 264, 275, 286, + (12,27): 297, 308, 319, 330, 341, 352, 363, 374, 385, 396, 407, 418, + (12,39): 429, 440, 451, 462, 473, 484, 495, 506, 517, 528, 539, 550, + (12,51): 561, 572, 583, 594, 605, 616, 627, 638, 649, 660, 671, 682, + (12,63): 693, + (13,0): 0, 12, 24, 36, 48, 60, 72, 84, 96, 108, 120, 132, 144, 156, + (13,14): 168, 180, 192, 204, 216, 228, 240, 252, 264, 276, 288, 300, + (13,26): 312, 324, 336, 348, 360, 372, 384, 396, 408, 420, 432, 444, + (13,38): 456, 468, 480, 492, 504, 516, 528, 540, 552, 564, 576, 588, + (13,50): 600, 612, 624, 636, 648, 660, 672, 684, 696, 708, 720, 732, + (13,62): 744, 756, + (14,0): 0, 13, 26, 39, 52, 65, 78, 91, 104, 117, 130, 143, 156, 169, + (14,14): 182, 195, 208, 221, 234, 247, 260, 273, 286, 299, 312, 325, + (14,26): 338, 351, 364, 377, 390, 403, 416, 429, 442, 455, 468, 481, + (14,38): 494, 507, 520, 533, 546, 559, 572, 585, 598, 611, 624, 637, + (14,50): 650, 663, 676, 689, 702, 715, 728, 741, 754, 767, 780, 793, + (14,62): 806, 819, + (15,0): 0, 14, 28, 42, 56, 70, 84, 98, 112, 126, 140, 154, 168, 182, + (15,14): 196, 210, 224, 
238, 252, 266, 280, 294, 308, 322, 336, 350, + (15,26): 364, 378, 392, 406, 420, 434, 448, 462, 476, 490, 504, 518, + (15,38): 532, 546, 560, 574, 588, 602, 616, 630, 644, 658, 672, 686, + (15,50): 700, 714, 728, 742, 756, 770, 784, 798, 812, 826, 840, 854, + (15,62): 868, 882, + (16,0): 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, + (16,14): 210, 225, 240, 255, 270, 285, 300, 315, 330, 345, 360, 375, + (16,26): 390, 405, 420, 435, 450, 465, 480, 495, 510, 525, 540, 555, + (16,38): 570, 585, 600, 615, 630, 645, 660, 675, 690, 705, 720, 735, + (16,50): 750, 765, 780, 795, 810, 825, 840, 855, 870, 885, 900, 915, + (16,62): 930, 945, + (17,0): 0, 16, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, + (17,14): 224, 240, 256, 272, 288, 304, 320, 336, 352, 368, 384, 400, + (17,26): 416, 432, 448, 464, 480, 496, 512, 528, 544, 560, 576, 592, + (17,38): 608, 624, 640, 656, 672, 688, 704, 720, 736, 752, 768, 784, + (17,50): 800, 816, 832, 848, 864, 880, 896, 912, 928, 944, 960, 976, + (17,62): 992, 1008, + (18,0): 0, 17, 34, 51, 68, 85, 102, 119, 136, 153, 170, 187, 204, 221, + (18,14): 238, 255, 272, 289, 306, 323, 340, 357, 374, 391, 408, 425, + (18,26): 442, 459, 476, 493, 510, 527, 544, 561, 578, 595, 612, 629, + (18,38): 646, 663, 680, 697, 714, 731, 748, 765, 782, 799, 816, 833, + (18,50): 850, 867, 884, 901, 918, 935, 952, 969, 986, 1003, 1020, 1037, + (18,62): 1054, 1071, + (19,0): 0, 18, 36, 54, 72, 90, 108, 126, 144, 162, 180, 198, 216, 234, + (19,14): 252, 270, 288, 306, 324, 342, 360, 378, 396, 414, 432, 450, + (19,26): 468, 486, 504, 522, 540, 558, 576, 594, 612, 630, 648, 666, + (19,38): 684, 702, 720, 738, 756, 774, 792, 810, 828, 846, 864, 882, + (19,50): 900, 918, 936, 954, 972, 990, 1008, 1026, 1044, 1062, 1080, + (19,61): 1098, 1116, 1134, + (20,0): 0, 19, 38, 57, 76, 95, 114, 133, 152, 171, 190, 209, 228, 247, + (20,14): 266, 285, 304, 323, 342, 361, 380, 399, 418, 437, 456, 475, + (20,26): 494, 513, 532, 551, 570, 589, 608, 627, 
646, 665, 684, 703, + (20,38): 722, 741, 760, 779, 798, 817, 836, 855, 874, 893, 912, 931, + (20,50): 950, 969, 988, 1007, 1026, 1045, 1064, 1083, 1102, 1121, 1140, + (20,61): 1159, 1178, 1197, + (21,0): 0, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260, + (21,14): 280, 300, 320, 340, 360, 380, 400, 420, 440, 460, 480, 500, + (21,26): 520, 540, 560, 580, 600, 620, 640, 660, 680, 700, 720, 740, + (21,38): 760, 780, 800, 820, 840, 860, 880, 900, 920, 940, 960, 980, + (21,50): 1000, 1020, 1040, 1060, 1080, 1100, 1120, 1140, 1160, 1180, + (21,60): 1200, 1220, 1240, 1260, + (22,0): 0, 21, 42, 63, 84, 105, 126, 147, 168, 189, 210, 231, 252, 273, + (22,14): 294, 315, 336, 357, 378, 399, 420, 441, 462, 483, 504, 525, + (22,26): 546, 567, 588, 609, 630, 651, 672, 693, 714, 735, 756, 777, + (22,38): 798, 819, 840, 861, 882, 903, 924, 945, 966, 987, 1008, 1029, + (22,50): 1050, 1071, 1092, 1113, 1134, 1155, 1176, 1197, 1218, 1239, + (22,60): 1260, 1281, 1302, 1323, + (23,0): 0, 22, 44, 66, 88, 110, 132, 154, 176, 198, 220, 242, 264, 286, + (23,14): 308, 330, 352, 374, 396, 418, 440, 462, 484, 506, 528, 550, + (23,26): 572, 594, 616, 638, 660, 682, 704, 726, 748, 770, 792, 814, + (23,38): 836, 858, 880, 902, 924, 946, 968, 990, 1012, 1034, 1056, + (23,49): 1078, 1100, 1122, 1144, 1166, 1188, 1210, 1232, 1254, 1276, + (23,59): 1298, 1320, 1342, 1364, 1386, + (24,0): 0, 23, 46, 69, 92, 115, 138, 161, 184, 207, 230, 253, 276, 299, + (24,14): 322, 345, 368, 391, 414, 437, 460, 483, 506, 529, 552, 575, + (24,26): 598, 621, 644, 667, 690, 713, 736, 759, 782, 805, 828, 851, + (24,38): 874, 897, 920, 943, 966, 989, 1012, 1035, 1058, 1081, 1104, + (24,49): 1127, 1150, 1173, 1196, 1219, 1242, 1265, 1288, 1311, 1334, + (24,59): 1357, 1380, 1403, 1426, 1449, + (25,0): 0, 24, 48, 72, 96, 120, 144, 168, 192, 216, 240, 264, 288, 312, + (25,14): 336, 360, 384, 408, 432, 456, 480, 504, 528, 552, 576, 600, + (25,26): 624, 648, 672, 696, 720, 744, 768, 792, 816, 840, 864, 888, + 
(25,38): 912, 936, 960, 984, 1008, 1032, 1056, 1080, 1104, 1128, 1152, + (25,49): 1176, 1200, 1224, 1248, 1272, 1296, 1320, 1344, 1368, 1392, + (25,59): 1416, 1440, 1464, 1488, 1512, + (26,0): 0, 25, 50, 75, 100, 125, 150, 175, 200, 225, 250, 275, 300, + (26,13): 325, 350, 375, 400, 425, 450, 475, 500, 525, 550, 575, 600, + (26,25): 625, 650, 675, 700, 725, 750, 775, 800, 825, 850, 875, 900, + (26,37): 925, 950, 975, 1000, 1025, 1050, 1075, 1100, 1125, 1150, 1175, + (26,48): 1200, 1225, 1250, 1275, 1300, 1325, 1350, 1375, 1400, 1425, + (26,58): 1450, 1475, 1500, 1525, 1550, 1575, + (27,0): 0, 26, 52, 78, 104, 130, 156, 182, 208, 234, 260, 286, 312, + (27,13): 338, 364, 390, 416, 442, 468, 494, 520, 546, 572, 598, 624, + (27,25): 650, 676, 702, 728, 754, 780, 806, 832, 858, 884, 910, 936, + (27,37): 962, 988, 1014, 1040, 1066, 1092, 1118, 1144, 1170, 1196, + (27,47): 1222, 1248, 1274, 1300, 1326, 1352, 1378, 1404, 1430, 1456, + (27,57): 1482, 1508, 1534, 1560, 1586, 1612, 1638, + (28,0): 0, 27, 54, 81, 108, 135, 162, 189, 216, 243, 270, 297, 324, + (28,13): 351, 378, 405, 432, 459, 486, 513, 540, 567, 594, 621, 648, + (28,25): 675, 702, 729, 756, 783, 810, 837, 864, 891, 918, 945, 972, + (28,37): 999, 1026, 1053, 1080, 1107, 1134, 1161, 1188, 1215, 1242, + (28,47): 1269, 1296, 1323, 1350, 1377, 1404, 1431, 1458, 1485, 1512, + (28,57): 1539, 1566, 1593, 1620, 1647, 1674, 1701, + (29,0): 0, 28, 56, 84, 112, 140, 168, 196, 224, 252, 280, 308, 336, + (29,13): 364, 392, 420, 448, 476, 504, 532, 560, 588, 616, 644, 672, + (29,25): 700, 728, 756, 784, 812, 840, 868, 896, 924, 952, 980, 1008, + (29,37): 1036, 1064, 1092, 1120, 1148, 1176, 1204, 1232, 1260, 1288, + (29,47): 1316, 1344, 1372, 1400, 1428, 1456, 1484, 1512, 1540, 1568, + (29,57): 1596, 1624, 1652, 1680, 1708, 1736, 1764, + (30,0): 0, 29, 58, 87, 116, 145, 174, 203, 232, 261, 290, 319, 348, + (30,13): 377, 406, 435, 464, 493, 522, 551, 580, 609, 638, 667, 696, + (30,25): 725, 754, 783, 812, 841, 870, 899, 928, 
957, 986, 1015, 1044, + (30,37): 1073, 1102, 1131, 1160, 1189, 1218, 1247, 1276, 1305, 1334, + (30,47): 1363, 1392, 1421, 1450, 1479, 1508, 1537, 1566, 1595, 1624, + (30,57): 1653, 1682, 1711, 1740, 1769, 1798, 1827, + (31,0): 0, 30, 60, 90, 120, 150, 180, 210, 240, 270, 300, 330, 360, + (31,13): 390, 420, 450, 480, 510, 540, 570, 600, 630, 660, 690, 720, + (31,25): 750, 780, 810, 840, 870, 900, 930, 960, 990, 1020, 1050, 1080, + (31,37): 1110, 1140, 1170, 1200, 1230, 1260, 1290, 1320, 1350, 1380, + (31,47): 1410, 1440, 1470, 1500, 1530, 1560, 1590, 1620, 1650, 1680, + (31,57): 1710, 1740, 1770, 1800, 1830, 1860, 1890 + } + } +} +} diff --git a/C/H5FLT/tfiles/h5ex_d_blosc2.tst b/C/H5FLT/tfiles/h5ex_d_blosc2.tst new file mode 100644 index 00000000..de97edfa --- /dev/null +++ b/C/H5FLT/tfiles/h5ex_d_blosc2.tst @@ -0,0 +1,11 @@ +blosc2 filter is available for encoding and decoding. +....Create dataset ................ +....Writing blosc2 compressed data ................ +....Close the file and reopen for reading ........ +Filter info is available from the dataset creation property + Filter identifier is 32026 + Number of parameters is 10 with the value 4 1 2 + To find more about the filter check HDF5 blosc2 filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md +....Reading blosc2 compressed data ................ +Maximum value in DS1 is 1890 +blosc2 filter is available now since H5Dread triggered loading of the filter. 
diff --git a/C/H5FLT/tfiles/h5ex_d_bzip2.ddl b/C/H5FLT/tfiles/h5ex_d_bzip2.ddl index 5cf63389..32c55eb8 100644 --- a/C/H5FLT/tfiles/h5ex_d_bzip2.ddl +++ b/C/H5FLT/tfiles/h5ex_d_bzip2.ddl @@ -10,7 +10,7 @@ GROUP "/" { FILTERS { USER_DEFINED_FILTER { FILTER_ID 307 - COMMENT HDF5 bzip2 filter; see http://www.hdfgroup.org/services/contributions.html + COMMENT HDF5 bzip2 filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md PARAMS { 2 } } } diff --git a/C/H5FLT/tfiles/h5ex_d_bzip2.tst b/C/H5FLT/tfiles/h5ex_d_bzip2.tst index a628ae1d..9f71cce3 100644 --- a/C/H5FLT/tfiles/h5ex_d_bzip2.tst +++ b/C/H5FLT/tfiles/h5ex_d_bzip2.tst @@ -5,7 +5,7 @@ bzip2 filter is available for encoding and decoding. Filter info is available from the dataset creation property Filter identifier is 307 Number of parameters is 1 with the value 2 - To find more about the filter check HDF5 bzip2 filter; see http://www.hdfgroup.org/services/contributions.html + To find more about the filter check HDF5 bzip2 filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md ....Reading bzip2 compressed data ................ Maximum value in DS1 is 1890 bzip2 filter is available now since H5Dread triggered loading of the filter. 
diff --git a/C/H5FLT/tfiles/h5ex_d_jpeg.ddl b/C/H5FLT/tfiles/h5ex_d_jpeg.ddl index 1dc8928d..1cc2ec38 100644 --- a/C/H5FLT/tfiles/h5ex_d_jpeg.ddl +++ b/C/H5FLT/tfiles/h5ex_d_jpeg.ddl @@ -10,7 +10,7 @@ GROUP "/" { FILTERS { USER_DEFINED_FILTER { FILTER_ID 32019 - COMMENT HDF5 jpeg filter; see http://www.hdfgroup.org/services/contributions.html + COMMENT HDF5 jpeg filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md PARAMS { 100 1024 512 0 } } } diff --git a/C/H5FLT/tfiles/h5ex_d_jpeg.tst b/C/H5FLT/tfiles/h5ex_d_jpeg.tst index d3f3bf87..188aa5c2 100644 --- a/C/H5FLT/tfiles/h5ex_d_jpeg.tst +++ b/C/H5FLT/tfiles/h5ex_d_jpeg.tst @@ -5,7 +5,7 @@ jpeg filter is available for encoding and decoding. Filter info is available from the dataset creation property Filter identifier is 32019 Number of parameters is 4 with the value 100 - To find more about the filter check HDF5 jpeg filter; see http://www.hdfgroup.org/services/contributions.html + To find more about the filter check HDF5 jpeg filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md ....Reading jpeg compressed data ................ JPEG quality=100, percent of differing array elements=0.000000 jpeg filter is available now since H5Dread triggered loading of the filter. 
diff --git a/C/H5FLT/tfiles/h5ex_d_lz4.ddl b/C/H5FLT/tfiles/h5ex_d_lz4.ddl index f13c950f..a1fd1533 100755 --- a/C/H5FLT/tfiles/h5ex_d_lz4.ddl +++ b/C/H5FLT/tfiles/h5ex_d_lz4.ddl @@ -10,7 +10,7 @@ GROUP "/" { FILTERS { USER_DEFINED_FILTER { FILTER_ID 32004 - COMMENT HDF5 lz4 filter; see http://www.hdfgroup.org/services/contributions.html + COMMENT HDF5 lz4 filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md PARAMS { 3 } } } diff --git a/C/H5FLT/tfiles/h5ex_d_lz4.tst b/C/H5FLT/tfiles/h5ex_d_lz4.tst index 5e5e9d74..077e9dfc 100755 --- a/C/H5FLT/tfiles/h5ex_d_lz4.tst +++ b/C/H5FLT/tfiles/h5ex_d_lz4.tst @@ -5,7 +5,7 @@ lz4 filter is available for encoding and decoding. Filter info is available from the dataset creation property Filter identifier is 32004 Number of parameters is 1 with the value 3 - To find more about the filter check HDF5 lz4 filter; see http://www.hdfgroup.org/services/contributions.html + To find more about the filter check HDF5 lz4 filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md ....Reading lz4 compressed data ................ Maximum value in DS1 is 1890 lz4 filter is available now since H5Dread triggered loading of the filter. diff --git a/C/H5FLT/tfiles/h5ex_d_lzf.ddl b/C/H5FLT/tfiles/h5ex_d_lzf.ddl index 089a82aa..c565245e 100644 --- a/C/H5FLT/tfiles/h5ex_d_lzf.ddl +++ b/C/H5FLT/tfiles/h5ex_d_lzf.ddl @@ -10,7 +10,7 @@ GROUP "/" { FILTERS { USER_DEFINED_FILTER { FILTER_ID 32000 - COMMENT HDF5 lzf filter; see http://www.hdfgroup.org/services/contributions.html + COMMENT HDF5 lzf filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md PARAMS { 4 261 128 } } } diff --git a/C/H5FLT/tfiles/h5ex_d_lzf.tst b/C/H5FLT/tfiles/h5ex_d_lzf.tst index 96aca413..0eeff2fd 100755 --- a/C/H5FLT/tfiles/h5ex_d_lzf.tst +++ b/C/H5FLT/tfiles/h5ex_d_lzf.tst @@ -5,7 +5,7 @@ lzf filter is available for encoding and decoding. 
Filter info is available from the dataset creation property Filter identifier is 32000 Number of parameters is 3 with the value 4 - To find more about the filter check HDF5 lzf filter; see http://www.hdfgroup.org/services/contributions.html + To find more about the filter check HDF5 lzf filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md ....Reading lzf compressed data ................ Maximum value in DS1 is 1890 lzf filter is available now since H5Dread triggered loading of the filter. diff --git a/C/H5FLT/tfiles/h5ex_d_zstd.ddl b/C/H5FLT/tfiles/h5ex_d_zstd.ddl index c9513569..a53b1bce 100644 --- a/C/H5FLT/tfiles/h5ex_d_zstd.ddl +++ b/C/H5FLT/tfiles/h5ex_d_zstd.ddl @@ -10,7 +10,7 @@ GROUP "/" { FILTERS { USER_DEFINED_FILTER { FILTER_ID 32015 - COMMENT HDF5 zstd filter; see http://www.hdfgroup.org/services/contributions.html + COMMENT HDF5 zstd filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md PARAMS { 0 } } } diff --git a/C/H5FLT/tfiles/h5ex_d_zstd.tst b/C/H5FLT/tfiles/h5ex_d_zstd.tst index 3789a3eb..c66aef97 100644 --- a/C/H5FLT/tfiles/h5ex_d_zstd.tst +++ b/C/H5FLT/tfiles/h5ex_d_zstd.tst @@ -5,7 +5,7 @@ zstd filter is available for encoding and decoding. Filter info is available from the dataset creation property Filter identifier is 32015 Number of parameters is 1 with the value 0 - To find more about the filter check HDF5 zstd filter; see http://www.hdfgroup.org/services/contributions.html + To find more about the filter check HDF5 zstd filter; see https://github.com/HDFGroup/hdf5_plugins/blob/master/docs/RegisteredFilterPlugins.md ....Reading zstd compressed data ................ ZSTD number of differing array elements=0 zstd filter is available now since H5Dread triggered loading of the filter. 
diff --git a/C/H5G/CMakeLists.txt b/C/H5G/CMakeLists.txt index 308349aa..436bd3a0 100644 --- a/C/H5G/CMakeLists.txt +++ b/C/H5G/CMakeLists.txt @@ -233,63 +233,77 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - add_test ( - NAME ${EXAMPLE_VARNAME}_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_SKIP_COMPARE=TRUE" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) - if (HDF5_BUILD_TOOLS) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + else () add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} + NAME ${EXAMPLE_VARNAME}_${testname} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.out" -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_SKIP_COMPARE=TRUE" + -D "TEST_OUTPUT=${testname}.out" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D 
"TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + endif () endif () endmacro () macro (ADD_H5_DUMP_TEST testname) - add_test ( - NAME ${EXAMPLE_VARNAME}_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - if (HDF5_BUILD_TOOLS) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + else () add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} + NAME ${EXAMPLE_VARNAME}_${testname} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.ddl.out" -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} + COMMAND "${CMAKE_COMMAND}" 
+ -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}.ddl.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + endif () endif () endmacro () @@ -301,46 +315,54 @@ if (H5EX_BUILD_TESTING) ${testname}1.h5 ${testname}2.h5 ) - add_test ( - NAME ${EXAMPLE_VARNAME}_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) - if (HDF5_BUILD_TOOLS) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + else () add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname}1 + NAME ${EXAMPLE_VARNAME}_${testname} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${testname}1.h5" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}1.ddl.out" -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}1.ddl" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname}1 
PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) - add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname}2 - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${testname}2.h5" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}2.ddl.out" - -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}2.ddl" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname}2 PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_H5DUMP-${testname}1) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname}1 + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${testname}1.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}1.ddl.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}1.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname}1 PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname}2 + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${testname}2.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}2.ddl.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}2.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname}2 PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_H5DUMP-${testname}1) + endif 
() endif () endmacro () @@ -351,19 +373,25 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.out.tmp ) - add_test ( - NAME ${EXAMPLE_VARNAME}_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=${ARGN}" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + else () + add_test ( + NAME ${EXAMPLE_VARNAME}_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=${ARGN}" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_EXPECT=0" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + endif () endmacro () ADD_DUMP_TEST (h5ex_g_create) diff --git a/C/H5G/Makefile.am b/C/H5G/Makefile.am index 9dc4dcf7..8ab2b8ac 100644 --- a/C/H5G/Makefile.am +++ b/C/H5G/Makefile.am @@ -5,12 +5,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. 
COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. ## ## Makefile.am ## Run automake to generate a Makefile.in from this file. diff --git a/C/H5G/h5ex_g_traverse.c b/C/H5G/h5ex_g_traverse.c index 4962986f..e1099c89 100644 --- a/C/H5G/h5ex_g_traverse.c +++ b/C/H5G/h5ex_g_traverse.c @@ -25,18 +25,14 @@ struct opdata { unsigned recurs; /* Recursion level. 0=root */ struct opdata *prev; /* Pointer to previous opdata */ -#if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) - H5O_token_t token; -#else - haddr_t addr; /* Group address */ -#endif + haddr_t addr; /* Group address */ }; /* * Operator function to be called by H5Literate. */ #if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) -herr_t op_func(hid_t loc_id, const char *name, const H5L_info2_t *info, void *operator_data); +herr_t op_func(hid_t loc_id, const char *name, const H5L_info1_t *info, void *operator_data); #else herr_t op_func(hid_t loc_id, const char *name, const H5L_info_t *info, void *operator_data); #endif @@ -44,11 +40,7 @@ herr_t op_func(hid_t loc_id, const char *name, const H5L_info_t *info, void *ope /* * Function to check for duplicate groups in a path. 
*/ -#if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) -int group_check(hid_t loc_id, struct opdata *od, H5O_token_t target_tok); -#else int group_check(struct opdata *od, haddr_t target_addr); -#endif int main(void) @@ -56,7 +48,7 @@ main(void) hid_t file; /* Handle */ herr_t status; #if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) - H5O_info2_t infobuf; + H5O_info1_t infobuf; #else H5O_info_t infobuf; #endif @@ -67,24 +59,20 @@ main(void) */ file = H5Fopen(FILE, H5F_ACC_RDONLY, H5P_DEFAULT); #if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) - status = H5Oget_info3(file, &infobuf, H5O_INFO_ALL); + status = H5Oget_info2(file, &infobuf, H5O_INFO_ALL); #else status = H5Oget_info(file, &infobuf); #endif od.recurs = 0; od.prev = NULL; -#if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) - od.token = infobuf.token; -#else - od.addr = infobuf.addr; -#endif + od.addr = infobuf.addr; /* * Print the root group and formatting, begin iteration. 
*/ printf("/ {\n"); #if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) - status = H5Literate2(file, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, op_func, (void *)&od); + status = H5Literate1(file, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, op_func, (void *)&od); #else status = H5Literate(file, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, op_func, (void *)&od); #endif @@ -111,7 +99,7 @@ main(void) ************************************************************/ #if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) herr_t -op_func(hid_t loc_id, const char *name, const H5L_info2_t *info, void *operator_data) +op_func(hid_t loc_id, const char *name, const H5L_info1_t *info, void *operator_data) #else herr_t op_func(hid_t loc_id, const char *name, const H5L_info_t *info, void *operator_data) @@ -119,7 +107,7 @@ op_func(hid_t loc_id, const char *name, const H5L_info_t *info, void *operator_d { herr_t status, return_val = 0; #if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) - H5O_info2_t infobuf; + H5O_info1_t infobuf; #else H5O_info_t infobuf; #endif @@ -135,7 +123,7 @@ op_func(hid_t loc_id, const char *name, const H5L_info_t *info, void *operator_d * the Library. */ #if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) - status = H5Oget_info_by_name3(loc_id, name, &infobuf, H5O_INFO_ALL, H5P_DEFAULT); + status = H5Oget_info_by_name2(loc_id, name, &infobuf, H5O_INFO_ALL, H5P_DEFAULT); #else status = H5Oget_info_by_name(loc_id, name, &infobuf, H5P_DEFAULT); #endif @@ -145,7 +133,7 @@ op_func(hid_t loc_id, const char *name, const H5L_info_t *info, void *operator_d printf("Group: %s {\n", name); /* - * Check group address or token against linked list of operator + * Check group address against linked list of operator * data structures. 
We will always run the check, as the * reference count cannot be relied upon if there are * symbolic links, and H5Oget_info_by_name always follows @@ -155,11 +143,7 @@ op_func(hid_t loc_id, const char *name, const H5L_info_t *info, void *operator_d * reference count was manually manipulated with * H5Odecr_refcount. */ -#if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) - if (group_check(loc_id, od, infobuf.token)) { -#else if (group_check(od, infobuf.addr)) { -#endif printf("%*s Warning: Loop detected!\n", spaces, ""); } else { @@ -173,12 +157,11 @@ op_func(hid_t loc_id, const char *name, const H5L_info_t *info, void *operator_d struct opdata nextod; nextod.recurs = od->recurs + 1; nextod.prev = od; + nextod.addr = infobuf.addr; #if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) - nextod.token = infobuf.token; - return_val = H5Literate_by_name2(loc_id, name, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, op_func, - (void *)&nextod, H5P_DEFAULT); + return_val = H5Literate_by_name1(loc_id, name, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, op_func, + (void *)&nextod, H5P_DEFAULT); #else - nextod.addr = infobuf.addr; return_val = H5Literate_by_name(loc_id, name, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, op_func, (void *)&nextod, H5P_DEFAULT); #endif @@ -201,28 +184,11 @@ op_func(hid_t loc_id, const char *name, const H5L_info_t *info, void *operator_d /************************************************************ This function recursively searches the linked list of - opdata structures for one whose address or object token - matches target_addr or target_tok. Returns 1 if a match - is found, and 0 otherwise. + opdata structures for one whose address matches + target_addr. Returns 1 if a match is found, and 0 + otherwise. 
************************************************************/ - -#if H5_VERSION_GE(1, 12, 0) && !defined(H5_USE_110_API) && !defined(H5_USE_18_API) && !defined(H5_USE_16_API) -int -group_check(hid_t loc_id, struct opdata *od, H5O_token_t target_token) -{ - int cmp_value; - H5Otoken_cmp(loc_id, &(od->token), &target_token, &cmp_value); - if (cmp_value == 0) - return 1; /* Addresses match */ - else if (!od->recurs) - return 0; /* Root group reached with no matches */ - else - return group_check(loc_id, od->prev, target_token); - /* Recursively examine the next node */ -} - -#else int group_check(struct opdata *od, haddr_t target_addr) { @@ -234,4 +200,3 @@ group_check(struct opdata *od, haddr_t target_addr) return group_check(od->prev, target_addr); /* Recursively examine the next node */ } -#endif diff --git a/C/H5G/test.sh.in b/C/H5G/test.sh.in index f12c4a50..ea05ca3b 100755 --- a/C/H5G/test.sh.in +++ b/C/H5G/test.sh.in @@ -6,12 +6,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. 
srcdir=@srcdir@ diff --git a/C/H5PAR/CMakeLists.txt b/C/H5PAR/CMakeLists.txt index 6e569b4d..be48139f 100644 --- a/C/H5PAR/CMakeLists.txt +++ b/C/H5PAR/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required (VERSION 3.12) -PROJECT (H5PAR_C) +project (H5PAR_C C) #----------------------------------------------------------------------------- # Define Sources @@ -32,7 +32,7 @@ if (H5EX_BUILD_TESTING) if (last_test) set_tests_properties (${EXAMPLE_VARNAME}_${testname}-clearall PROPERTIES DEPENDS ${last_test}) endif () - add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND "${CMAKE_COMMAND}" + add_test (NAME MPI_TEST_${EXAMPLE_VARNAME}_${testname} COMMAND "${CMAKE_COMMAND}" -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE};${MPIEXEC_NUMPROC_FLAG};${mumprocs};${MPIEXEC_PREFLAGS};$;${MPIEXEC_POSTFLAGS}" -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" @@ -43,8 +43,8 @@ if (H5EX_BUILD_TESTING) -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/grepTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) - set (last_test "${EXAMPLE_VARNAME}_${testname}") + set_tests_properties (MPI_TEST_${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + set (last_test "MPI_TEST_${EXAMPLE_VARNAME}_${testname}") endmacro () # Ensure that 24 is a multiple of the number of processes. diff --git a/C/H5PAR/ph5_dataset.c b/C/H5PAR/ph5_dataset.c index 9b8e8a83..0c25fcc6 100644 --- a/C/H5PAR/ph5_dataset.c +++ b/C/H5PAR/ph5_dataset.c @@ -53,6 +53,24 @@ main(int argc, char **argv) plist_id = H5Pcreate(H5P_FILE_ACCESS); H5Pset_fapl_mpio(plist_id, comm, info); + /* + * OPTIONAL: It is generally recommended to set collective + * metadata reads on FAPL to perform metadata reads + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. 
+ */ + H5Pset_all_coll_metadata_ops(plist_id, true); + + /* + * OPTIONAL: It is generally recommended to set collective + * metadata writes on FAPL to perform metadata writes + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_coll_metadata_write(plist_id, true); + /* * Create a new file collectively and release property list identifier. */ diff --git a/C/H5PAR/ph5_file_create.c b/C/H5PAR/ph5_file_create.c index a3bd0a8d..10938f29 100644 --- a/C/H5PAR/ph5_file_create.c +++ b/C/H5PAR/ph5_file_create.c @@ -36,6 +36,24 @@ main(int argc, char **argv) plist_id = H5Pcreate(H5P_FILE_ACCESS); H5Pset_fapl_mpio(plist_id, comm, info); + /* + * OPTIONAL: It is generally recommended to set collective + * metadata reads on FAPL to perform metadata reads + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_all_coll_metadata_ops(plist_id, true); + + /* + * OPTIONAL: It is generally recommended to set collective + * metadata writes on FAPL to perform metadata writes + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_coll_metadata_write(plist_id, true); + /* * Create a new file collectively. */ diff --git a/C/H5PAR/ph5_filtered_writes.c b/C/H5PAR/ph5_filtered_writes.c index 104704a2..34ed2fbb 100644 --- a/C/H5PAR/ph5_filtered_writes.c +++ b/C/H5PAR/ph5_filtered_writes.c @@ -377,13 +377,23 @@ main(int argc, char **argv) H5Pset_fapl_mpio(fapl_id, comm, info); /* - * OPTIONAL: Set collective metadata reads on FAPL to allow - * parallel writes to filtered datasets to perform - * better at scale. While not strictly necessary, - * this is generally recommended. 
+ * OPTIONAL: It is generally recommended to set collective + * metadata reads on FAPL to perform metadata reads + * collectively, which usually allows filtered datasets + * to perform better at scale, although it is not + * strictly necessary. */ H5Pset_all_coll_metadata_ops(fapl_id, true); + /* + * OPTIONAL: It is generally recommended to set collective + * metadata writes on FAPL to perform metadata writes + * collectively, which usually allows filtered datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_coll_metadata_write(fapl_id, true); + /* * OPTIONAL: Set the latest file format version for HDF5 in * order to gain access to different dataset chunk diff --git a/C/H5PAR/ph5_filtered_writes_no_sel.c b/C/H5PAR/ph5_filtered_writes_no_sel.c index a4d9e169..d4f171ff 100644 --- a/C/H5PAR/ph5_filtered_writes_no_sel.c +++ b/C/H5PAR/ph5_filtered_writes_no_sel.c @@ -271,13 +271,23 @@ main(int argc, char **argv) H5Pset_fapl_mpio(fapl_id, comm, info); /* - * OPTIONAL: Set collective metadata reads on FAPL to allow - * parallel writes to filtered datasets to perform - * better at scale. While not strictly necessary, - * this is generally recommended. + * OPTIONAL: It is generally recommended to set collective + * metadata reads on FAPL to perform metadata reads + * collectively, which usually allows filtered datasets + * to perform better at scale, although it is not + * strictly necessary. */ H5Pset_all_coll_metadata_ops(fapl_id, true); + /* + * OPTIONAL: It is generally recommended to set collective + * metadata writes on FAPL to perform metadata writes + * collectively, which usually allows filtered datasets + * to perform better at scale, although it is not + * strictly necessary. 
+ */ + H5Pset_coll_metadata_write(fapl_id, true); + /* * OPTIONAL: Set the latest file format version for HDF5 in * order to gain access to different dataset chunk diff --git a/C/H5PAR/ph5_hyperslab_by_chunk.c b/C/H5PAR/ph5_hyperslab_by_chunk.c index a255b96e..e00a0ef6 100644 --- a/C/H5PAR/ph5_hyperslab_by_chunk.c +++ b/C/H5PAR/ph5_hyperslab_by_chunk.c @@ -64,6 +64,24 @@ main(int argc, char **argv) plist_id = H5Pcreate(H5P_FILE_ACCESS); H5Pset_fapl_mpio(plist_id, comm, info); + /* + * OPTIONAL: It is generally recommended to set collective + * metadata reads on FAPL to perform metadata reads + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_all_coll_metadata_ops(plist_id, true); + + /* + * OPTIONAL: It is generally recommended to set collective + * metadata writes on FAPL to perform metadata writes + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_coll_metadata_write(plist_id, true); + /* * Create a new file collectively and release property list identifier. */ diff --git a/C/H5PAR/ph5_hyperslab_by_col.c b/C/H5PAR/ph5_hyperslab_by_col.c index b397fcff..49e5ce30 100644 --- a/C/H5PAR/ph5_hyperslab_by_col.c +++ b/C/H5PAR/ph5_hyperslab_by_col.c @@ -59,6 +59,24 @@ main(int argc, char **argv) plist_id = H5Pcreate(H5P_FILE_ACCESS); H5Pset_fapl_mpio(plist_id, comm, info); + /* + * OPTIONAL: It is generally recommended to set collective + * metadata reads on FAPL to perform metadata reads + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_all_coll_metadata_ops(plist_id, true); + + /* + * OPTIONAL: It is generally recommended to set collective + * metadata writes on FAPL to perform metadata writes + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. 
+ */ + H5Pset_coll_metadata_write(plist_id, true); + /* * Create a new file collectively and release property list identifier. */ diff --git a/C/H5PAR/ph5_hyperslab_by_pattern.c b/C/H5PAR/ph5_hyperslab_by_pattern.c index 77f3bef2..bec3a2f8 100644 --- a/C/H5PAR/ph5_hyperslab_by_pattern.c +++ b/C/H5PAR/ph5_hyperslab_by_pattern.c @@ -64,6 +64,24 @@ main(int argc, char **argv) plist_id = H5Pcreate(H5P_FILE_ACCESS); H5Pset_fapl_mpio(plist_id, comm, info); + /* + * OPTIONAL: It is generally recommended to set collective + * metadata reads on FAPL to perform metadata reads + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_all_coll_metadata_ops(plist_id, true); + + /* + * OPTIONAL: It is generally recommended to set collective + * metadata writes on FAPL to perform metadata writes + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_coll_metadata_write(plist_id, true); + /* * Create a new file collectively and release property list identifier. */ diff --git a/C/H5PAR/ph5_hyperslab_by_row.c b/C/H5PAR/ph5_hyperslab_by_row.c index 50357867..1c08a32d 100644 --- a/C/H5PAR/ph5_hyperslab_by_row.c +++ b/C/H5PAR/ph5_hyperslab_by_row.c @@ -48,6 +48,24 @@ main(int argc, char **argv) plist_id = H5Pcreate(H5P_FILE_ACCESS); H5Pset_fapl_mpio(plist_id, comm, info); + /* + * OPTIONAL: It is generally recommended to set collective + * metadata reads on FAPL to perform metadata reads + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_all_coll_metadata_ops(plist_id, true); + + /* + * OPTIONAL: It is generally recommended to set collective + * metadata writes on FAPL to perform metadata writes + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. 
+ */ + H5Pset_coll_metadata_write(plist_id, true); + /* * Create a new file collectively and release property list identifier. */ diff --git a/C/H5PAR/ph5example.c b/C/H5PAR/ph5example.c new file mode 100644 index 00000000..37d5d68e --- /dev/null +++ b/C/H5PAR/ph5example.c @@ -0,0 +1,1118 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the COPYING file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * Example of using the parallel HDF5 library to access datasets. + * Last revised: April 24, 2001. + * + * This program contains two parts. In the first part, the mpi processes + * collectively create a new parallel HDF5 file and create two fixed + * dimension datasets in it. Then each process writes a hyperslab into + * each dataset in an independent mode. All processes collectively + * close the datasets and the file. + * In the second part, the processes collectively open the created file + * and the two datasets in it. Then each process reads a hyperslab from + * each dataset in an independent mode and prints them out. + * All processes collectively close the datasets and the file. + * + * The need of requirement of parallel file prefix is that in general + * the current working directory in which compiling is done, is not suitable + * for parallel I/O and there is no standard pathname for parallel file + * systems. In some cases, the parallel file name may even needs some + * parallel file type prefix such as: "pfs:/GF/...". 
Therefore, this + * example requires an explicit parallel file prefix. See the usage + * for more detail. + */ + +#include +#include "hdf5.h" +#include +#include + +#ifdef H5_HAVE_PARALLEL +/* Temporary source code */ +#define FAIL -1 +/* temporary code end */ + +/* Define some handy debugging shorthands, routines, ... */ +/* debugging tools */ +#define MESG(x) \ + do { \ + if (verbose) \ + printf("%s\n", x); \ + } while (0) + +#define MPI_BANNER(mesg) \ + do { \ + printf("--------------------------------\n"); \ + printf("Proc %d: ", mpi_rank); \ + printf("*** %s\n", mesg); \ + printf("--------------------------------\n"); \ + } while (0) + +#define SYNC(comm) \ + do { \ + MPI_BANNER("doing a SYNC"); \ + MPI_Barrier(comm); \ + MPI_BANNER("SYNC DONE"); \ + } while (0) +/* End of Define some handy debugging shorthands, routines, ... */ + +/* Constants definitions */ +/* 24 is a multiple of 2, 3, 4, 6, 8, 12. Neat for parallel tests. */ +#define SPACE1_DIM1 24 +#define SPACE1_DIM2 24 +#define SPACE1_RANK 2 +#define DATASETNAME1 "Data1" +#define DATASETNAME2 "Data2" +#define DATASETNAME3 "Data3" +/* hyperslab layout styles */ +#define BYROW 1 /* divide into slabs of rows */ +#define BYCOL 2 /* divide into blocks of columns */ + +#define PARAPREFIX "HDF5_PARAPREFIX" /* file prefix environment variable name */ + +/* dataset data type. Int's can be easily octo dumped. */ +typedef int DATATYPE; + +/* global variables */ +int nerrors = 0; /* errors count */ +#ifndef PATH_MAX +#define PATH_MAX 512 +#endif /* !PATH_MAX */ +char testfiles[2][PATH_MAX]; + +int mpi_size, mpi_rank; /* mpi variables */ + +/* option flags */ +int verbose = 0; /* verbose, default as no. 
*/ +int doread = 1; /* read test */ +int dowrite = 1; /* write test */ +int docleanup = 1; /* cleanup */ + +/* Prototypes */ +void slab_set(hsize_t start[], hsize_t count[], hsize_t stride[], int mode); +void dataset_fill(hsize_t start[], hsize_t count[], hsize_t stride[], DATATYPE *dataset); +void dataset_print(hsize_t start[], hsize_t count[], hsize_t stride[], DATATYPE *dataset); +int dataset_vrfy(hsize_t start[], hsize_t count[], hsize_t stride[], DATATYPE *dataset, DATATYPE *original); +void phdf5writeInd(char *filename); +void phdf5readInd(char *filename); +void phdf5writeAll(char *filename); +void phdf5readAll(char *filename); +void test_split_comm_access(char filenames[][PATH_MAX]); +int parse_options(int argc, char **argv); +void usage(void); +int mkfilenames(char *prefix); +void cleanup(void); + +/* + * Setup the dimensions of the hyperslab. + * Two modes--by rows or by columns. + * Assume dimension rank is 2. + */ +void +slab_set(hsize_t start[], hsize_t count[], hsize_t stride[], int mode) +{ + switch (mode) { + case BYROW: + /* Each process takes a slabs of rows. */ + stride[0] = 1; + stride[1] = 1; + count[0] = SPACE1_DIM1 / mpi_size; + count[1] = SPACE1_DIM2; + start[0] = mpi_rank * count[0]; + start[1] = 0; + break; + case BYCOL: + /* Each process takes a block of columns. */ + stride[0] = 1; + stride[1] = 1; + count[0] = SPACE1_DIM1; + count[1] = SPACE1_DIM2 / mpi_size; + start[0] = 0; + start[1] = mpi_rank * count[1]; + break; + default: + /* Unknown mode. Set it to cover the whole dataset. */ + printf("unknown slab_set mode (%d)\n", mode); + stride[0] = 1; + stride[1] = 1; + count[0] = SPACE1_DIM1; + count[1] = SPACE1_DIM2; + start[0] = 0; + start[1] = 0; + break; + } +} + +/* + * Fill the dataset with trivial data for testing. + * Assume dimension rank is 2 and data is stored contiguous. 
+ */ +void +dataset_fill(hsize_t start[], hsize_t count[], hsize_t stride[], DATATYPE *dataset) +{ + DATATYPE *dataptr = dataset; + hsize_t i, j; + + /* put some trivial data in the data_array */ + for (i = 0; i < count[0]; i++) { + for (j = 0; j < count[1]; j++) { + *dataptr++ = (i * stride[0] + start[0]) * 100 + (j * stride[1] + start[1] + 1); + } + } +} + +/* + * Print the content of the dataset. + */ +void +dataset_print(hsize_t start[], hsize_t count[], hsize_t stride[], DATATYPE *dataset) +{ + DATATYPE *dataptr = dataset; + hsize_t i, j; + + /* print the slab read */ + for (i = 0; i < count[0]; i++) { + printf("Row %lu: ", (unsigned long)(i * stride[0] + start[0])); + for (j = 0; j < count[1]; j++) { + printf("%03d ", *dataptr++); + } + printf("\n"); + } +} + +/* + * Print the content of the dataset. + */ +int +dataset_vrfy(hsize_t start[], hsize_t count[], hsize_t stride[], DATATYPE *dataset, DATATYPE *original) +{ +#define MAX_ERR_REPORT 10 /* Maximum number of errors reported */ + + hsize_t i, j; + int nerr; + + /* print it if verbose */ + if (verbose) + dataset_print(start, count, stride, dataset); + + nerr = 0; + for (i = 0; i < count[0]; i++) { + for (j = 0; j < count[1]; j++) { + if (*dataset++ != *original++) { + nerr++; + if (nerr <= MAX_ERR_REPORT) { + printf("Dataset Verify failed at [%lu][%lu](row %lu, col %lu): expect %d, got %d\n", + (unsigned long)i, (unsigned long)j, (unsigned long)(i * stride[0] + start[0]), + (unsigned long)(j * stride[1] + start[1]), *(dataset - 1), *(original - 1)); + } + } + } + } + if (nerr > MAX_ERR_REPORT) + printf("[more errors ...]\n"); + if (nerr) + printf("%d errors found in dataset_vrfy\n", nerr); + return (nerr); +} + +/* + * Example of using the parallel HDF5 library to create two datasets + * in one HDF5 files with parallel MPIO access support. + * The Datasets are of sizes (number-of-mpi-processes x DIM1) x DIM2. + * Each process controls only a slab of size DIM1 x DIM2 within each + * dataset. 
+ */ + +void +phdf5writeInd(char *filename) +{ + hid_t fid1; /* HDF5 file IDs */ + hid_t acc_tpl1; /* File access templates */ + hid_t sid1; /* Dataspace ID */ + hid_t file_dataspace; /* File dataspace ID */ + hid_t mem_dataspace; /* memory dataspace ID */ + hid_t dataset1, dataset2; /* Dataset ID */ + hsize_t dims1[SPACE1_RANK] = {SPACE1_DIM1, SPACE1_DIM2}; /* dataspace dim sizes */ + DATATYPE data_array1[SPACE1_DIM1][SPACE1_DIM2]; /* data buffer */ + + hsize_t start[SPACE1_RANK]; /* for hyperslab setting */ + hsize_t count[SPACE1_RANK], stride[SPACE1_RANK]; /* for hyperslab setting */ + + herr_t ret; /* Generic return value */ + + MPI_Comm comm = MPI_COMM_WORLD; + MPI_Info info = MPI_INFO_NULL; + + if (verbose) + printf("Independent write test on file %s\n", filename); + + /* ------------------- + * START AN HDF5 FILE + * -------------------*/ + /* setup file access template with parallel IO access. */ + acc_tpl1 = H5Pcreate(H5P_FILE_ACCESS); + assert(acc_tpl1 != FAIL); + MESG("H5Pcreate access succeed"); + /* set Parallel access with communicator */ + ret = H5Pset_fapl_mpio(acc_tpl1, comm, info); + assert(ret != FAIL); + MESG("H5Pset_fapl_mpio succeed"); + + /* + * OPTIONAL: It is generally recommended to set collective + * metadata reads on FAPL to perform metadata reads + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. + */ + H5Pset_all_coll_metadata_ops(acc_tpl1, true); + + /* + * OPTIONAL: It is generally recommended to set collective + * metadata writes on FAPL to perform metadata writes + * collectively, which usually allows datasets + * to perform better at scale, although it is not + * strictly necessary. 
+ */ + H5Pset_coll_metadata_write(acc_tpl1, true); + + /* create the file collectively */ + fid1 = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl1); + assert(fid1 != FAIL); + MESG("H5Fcreate succeed"); + + /* Release file-access template */ + ret = H5Pclose(acc_tpl1); + assert(ret != FAIL); + + /* -------------------------- + * Define the dimensions of the overall datasets + * and the slabs local to the MPI process. + * ------------------------- */ + /* setup dimensionality object */ + sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL); + assert(sid1 != FAIL); + MESG("H5Screate_simple succeed"); + + /* create a dataset collectively */ + dataset1 = H5Dcreate2(fid1, DATASETNAME1, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + assert(dataset1 != FAIL); + MESG("H5Dcreate2 succeed"); + + /* create another dataset collectively */ + dataset2 = H5Dcreate2(fid1, DATASETNAME2, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + assert(dataset2 != FAIL); + MESG("H5Dcreate2 succeed"); + + /* set up dimensions of the slab this process accesses */ + start[0] = mpi_rank * SPACE1_DIM1 / mpi_size; + start[1] = 0; + count[0] = SPACE1_DIM1 / mpi_size; + count[1] = SPACE1_DIM2; + stride[0] = 1; + stride[1] = 1; + if (verbose) + printf("start[]=(%lu,%lu), count[]=(%lu,%lu), total datapoints=%lu\n", (unsigned long)start[0], + (unsigned long)start[1], (unsigned long)count[0], (unsigned long)count[1], + (unsigned long)(count[0] * count[1])); + + /* put some trivial data in the data_array */ + dataset_fill(start, count, stride, &data_array1[0][0]); + MESG("data_array initialized"); + + /* create a file dataspace independently */ + file_dataspace = H5Dget_space(dataset1); + assert(file_dataspace != FAIL); + MESG("H5Dget_space succeed"); + ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, NULL); + assert(ret != FAIL); + MESG("H5Sset_hyperslab succeed"); + + /* create a memory dataspace independently */ + mem_dataspace = 
H5Screate_simple(SPACE1_RANK, count, NULL); + assert(mem_dataspace != FAIL); + + /* write data independently */ + ret = H5Dwrite(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1); + assert(ret != FAIL); + MESG("H5Dwrite succeed"); + + /* write data independently */ + ret = H5Dwrite(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1); + assert(ret != FAIL); + MESG("H5Dwrite succeed"); + + /* release dataspace ID */ + H5Sclose(file_dataspace); + + /* close dataset collectively */ + ret = H5Dclose(dataset1); + assert(ret != FAIL); + MESG("H5Dclose1 succeed"); + ret = H5Dclose(dataset2); + assert(ret != FAIL); + MESG("H5Dclose2 succeed"); + + /* release all IDs created */ + H5Sclose(sid1); + + /* close the file collectively */ + H5Fclose(fid1); +} + +/* Example of using the parallel HDF5 library to read a dataset */ +void +phdf5readInd(char *filename) +{ + hid_t fid1; /* HDF5 file IDs */ + hid_t acc_tpl1; /* File access templates */ + hid_t file_dataspace; /* File dataspace ID */ + hid_t mem_dataspace; /* memory dataspace ID */ + hid_t dataset1, dataset2; /* Dataset ID */ + DATATYPE data_array1[SPACE1_DIM1][SPACE1_DIM2]; /* data buffer */ + DATATYPE data_origin1[SPACE1_DIM1][SPACE1_DIM2]; /* expected data buffer */ + + hsize_t start[SPACE1_RANK]; /* for hyperslab setting */ + hsize_t count[SPACE1_RANK], stride[SPACE1_RANK]; /* for hyperslab setting */ + + herr_t ret; /* Generic return value */ + + MPI_Comm comm = MPI_COMM_WORLD; + MPI_Info info = MPI_INFO_NULL; + + if (verbose) + printf("Independent read test on file %s\n", filename); + + /* setup file access template */ + acc_tpl1 = H5Pcreate(H5P_FILE_ACCESS); + assert(acc_tpl1 != FAIL); + /* set Parallel access with communicator */ + ret = H5Pset_fapl_mpio(acc_tpl1, comm, info); + assert(ret != FAIL); + + /* open the file collectively */ + fid1 = H5Fopen(filename, H5F_ACC_RDWR, acc_tpl1); + assert(fid1 != FAIL); + + /* Release file-access template */ + 
ret = H5Pclose(acc_tpl1); + assert(ret != FAIL); + + /* open the dataset1 collectively */ + dataset1 = H5Dopen2(fid1, DATASETNAME1, H5P_DEFAULT); + assert(dataset1 != FAIL); + + /* open another dataset collectively */ + dataset2 = H5Dopen2(fid1, DATASETNAME1, H5P_DEFAULT); + assert(dataset2 != FAIL); + + /* set up dimensions of the slab this process accesses */ + start[0] = mpi_rank * SPACE1_DIM1 / mpi_size; + start[1] = 0; + count[0] = SPACE1_DIM1 / mpi_size; + count[1] = SPACE1_DIM2; + stride[0] = 1; + stride[1] = 1; + if (verbose) + printf("start[]=(%lu,%lu), count[]=(%lu,%lu), total datapoints=%lu\n", (unsigned long)start[0], + (unsigned long)start[1], (unsigned long)count[0], (unsigned long)count[1], + (unsigned long)(count[0] * count[1])); + + /* create a file dataspace independently */ + file_dataspace = H5Dget_space(dataset1); + assert(file_dataspace != FAIL); + ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, NULL); + assert(ret != FAIL); + + /* create a memory dataspace independently */ + mem_dataspace = H5Screate_simple(SPACE1_RANK, count, NULL); + assert(mem_dataspace != FAIL); + + /* fill dataset with test data */ + dataset_fill(start, count, stride, &data_origin1[0][0]); + + /* read data independently */ + ret = H5Dread(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1); + assert(ret != FAIL); + + /* verify the read data with original expected data */ + ret = dataset_vrfy(start, count, stride, &data_array1[0][0], &data_origin1[0][0]); + assert(ret != FAIL); + + /* read data independently */ + ret = H5Dread(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1); + assert(ret != FAIL); + + /* verify the read data with original expected data */ + ret = dataset_vrfy(start, count, stride, &data_array1[0][0], &data_origin1[0][0]); + assert(ret == 0); + + /* close dataset collectively */ + ret = H5Dclose(dataset1); + assert(ret != FAIL); + ret = H5Dclose(dataset2); + 
assert(ret != FAIL); + + /* release all IDs created */ + H5Sclose(file_dataspace); + + /* close the file collectively */ + H5Fclose(fid1); +} + +/* + * Example of using the parallel HDF5 library to create two datasets + * in one HDF5 file with collective parallel access support. + * The Datasets are of sizes (number-of-mpi-processes x DIM1) x DIM2. + * Each process controls only a slab of size DIM1 x DIM2 within each + * dataset. [Note: not so yet. Datasets are of sizes DIM1xDIM2 and + * each process controls a hyperslab within.] + */ + +void +phdf5writeAll(char *filename) +{ + hid_t fid1; /* HDF5 file IDs */ + hid_t acc_tpl1; /* File access templates */ + hid_t xfer_plist; /* Dataset transfer properties list */ + hid_t sid1; /* Dataspace ID */ + hid_t file_dataspace; /* File dataspace ID */ + hid_t mem_dataspace; /* memory dataspace ID */ + hid_t dataset1, dataset2; /* Dataset ID */ + hsize_t dims1[SPACE1_RANK] = {SPACE1_DIM1, SPACE1_DIM2}; /* dataspace dim sizes */ + DATATYPE data_array1[SPACE1_DIM1][SPACE1_DIM2]; /* data buffer */ + + hsize_t start[SPACE1_RANK]; /* for hyperslab setting */ + hsize_t count[SPACE1_RANK], stride[SPACE1_RANK]; /* for hyperslab setting */ + + herr_t ret; /* Generic return value */ + + MPI_Comm comm = MPI_COMM_WORLD; + MPI_Info info = MPI_INFO_NULL; + + if (verbose) + printf("Collective write test on file %s\n", filename); + + /* ------------------- + * START AN HDF5 FILE + * -------------------*/ + /* setup file access template with parallel IO access. 
*/ + acc_tpl1 = H5Pcreate(H5P_FILE_ACCESS); + assert(acc_tpl1 != FAIL); + MESG("H5Pcreate access succeed"); + /* set Parallel access with communicator */ + ret = H5Pset_fapl_mpio(acc_tpl1, comm, info); + assert(ret != FAIL); + MESG("H5Pset_fapl_mpio succeed"); + + /* create the file collectively */ + fid1 = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl1); + assert(fid1 != FAIL); + MESG("H5Fcreate succeed"); + + /* Release file-access template */ + ret = H5Pclose(acc_tpl1); + assert(ret != FAIL); + + /* -------------------------- + * Define the dimensions of the overall datasets + * and create the dataset + * ------------------------- */ + /* setup dimensionality object */ + sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL); + assert(sid1 != FAIL); + MESG("H5Screate_simple succeed"); + + /* create a dataset collectively */ + dataset1 = H5Dcreate2(fid1, DATASETNAME1, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + assert(dataset1 != FAIL); + MESG("H5Dcreate2 succeed"); + + /* create another dataset collectively */ + dataset2 = H5Dcreate2(fid1, DATASETNAME2, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + assert(dataset2 != FAIL); + MESG("H5Dcreate2 2 succeed"); + + /* + * Set up dimensions of the slab this process accesses. + */ + + /* Dataset1: each process takes a block of rows. 
*/ + slab_set(start, count, stride, BYROW); + if (verbose) + printf("start[]=(%lu,%lu), count[]=(%lu,%lu), total datapoints=%lu\n", (unsigned long)start[0], + (unsigned long)start[1], (unsigned long)count[0], (unsigned long)count[1], + (unsigned long)(count[0] * count[1])); + + /* create a file dataspace independently */ + file_dataspace = H5Dget_space(dataset1); + assert(file_dataspace != FAIL); + MESG("H5Dget_space succeed"); + ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, NULL); + assert(ret != FAIL); + MESG("H5Sset_hyperslab succeed"); + + /* create a memory dataspace independently */ + mem_dataspace = H5Screate_simple(SPACE1_RANK, count, NULL); + assert(mem_dataspace != FAIL); + + /* fill the local slab with some trivial data */ + dataset_fill(start, count, stride, &data_array1[0][0]); + MESG("data_array initialized"); + if (verbose) { + MESG("data_array created"); + dataset_print(start, count, stride, &data_array1[0][0]); + } + + /* set up the collective transfer properties list */ + xfer_plist = H5Pcreate(H5P_DATASET_XFER); + assert(xfer_plist != FAIL); + ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE); + assert(ret != FAIL); + MESG("H5Pcreate xfer succeed"); + + /* write data collectively */ + ret = H5Dwrite(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1); + assert(ret != FAIL); + MESG("H5Dwrite succeed"); + + /* release all temporary handles. */ + /* Could have used them for dataset2 but it is cleaner */ + /* to create them again.*/ + H5Sclose(file_dataspace); + H5Sclose(mem_dataspace); + H5Pclose(xfer_plist); + + /* Dataset2: each process takes a block of columns. 
*/ + slab_set(start, count, stride, BYCOL); + if (verbose) + printf("start[]=(%lu,%lu), count[]=(%lu,%lu), total datapoints=%lu\n", (unsigned long)start[0], + (unsigned long)start[1], (unsigned long)count[0], (unsigned long)count[1], + (unsigned long)(count[0] * count[1])); + + /* put some trivial data in the data_array */ + dataset_fill(start, count, stride, &data_array1[0][0]); + MESG("data_array initialized"); + if (verbose) { + MESG("data_array created"); + dataset_print(start, count, stride, &data_array1[0][0]); + } + + /* create a file dataspace independently */ + file_dataspace = H5Dget_space(dataset1); + assert(file_dataspace != FAIL); + MESG("H5Dget_space succeed"); + ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, NULL); + assert(ret != FAIL); + MESG("H5Sset_hyperslab succeed"); + + /* create a memory dataspace independently */ + mem_dataspace = H5Screate_simple(SPACE1_RANK, count, NULL); + assert(mem_dataspace != FAIL); + + /* fill the local slab with some trivial data */ + dataset_fill(start, count, stride, &data_array1[0][0]); + MESG("data_array initialized"); + if (verbose) { + MESG("data_array created"); + dataset_print(start, count, stride, &data_array1[0][0]); + } + + /* set up the collective transfer properties list */ + xfer_plist = H5Pcreate(H5P_DATASET_XFER); + assert(xfer_plist != FAIL); + ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE); + assert(ret != FAIL); + MESG("H5Pcreate xfer succeed"); + + /* write data independently */ + ret = H5Dwrite(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1); + assert(ret != FAIL); + MESG("H5Dwrite succeed"); + + /* release all temporary handles. */ + H5Sclose(file_dataspace); + H5Sclose(mem_dataspace); + H5Pclose(xfer_plist); + + /* + * All writes completed. 
Close datasets collectively + */ + ret = H5Dclose(dataset1); + assert(ret != FAIL); + MESG("H5Dclose1 succeed"); + ret = H5Dclose(dataset2); + assert(ret != FAIL); + MESG("H5Dclose2 succeed"); + + /* release all IDs created */ + H5Sclose(sid1); + + /* close the file collectively */ + H5Fclose(fid1); +} + +/* + * Example of using the parallel HDF5 library to read two datasets + * in one HDF5 file with collective parallel access support. + * The Datasets are of sizes (number-of-mpi-processes x DIM1) x DIM2. + * Each process controls only a slab of size DIM1 x DIM2 within each + * dataset. [Note: not so yet. Datasets are of sizes DIM1xDIM2 and + * each process controls a hyperslab within.] + */ + +void +phdf5readAll(char *filename) +{ + hid_t fid1; /* HDF5 file IDs */ + hid_t acc_tpl1; /* File access templates */ + hid_t xfer_plist; /* Dataset transfer properties list */ + hid_t file_dataspace; /* File dataspace ID */ + hid_t mem_dataspace; /* memory dataspace ID */ + hid_t dataset1, dataset2; /* Dataset ID */ + DATATYPE data_array1[SPACE1_DIM1][SPACE1_DIM2]; /* data buffer */ + DATATYPE data_origin1[SPACE1_DIM1][SPACE1_DIM2]; /* expected data buffer */ + + hsize_t start[SPACE1_RANK]; /* for hyperslab setting */ + hsize_t count[SPACE1_RANK], stride[SPACE1_RANK]; /* for hyperslab setting */ + + herr_t ret; /* Generic return value */ + + MPI_Comm comm = MPI_COMM_WORLD; + MPI_Info info = MPI_INFO_NULL; + + if (verbose) + printf("Collective read test on file %s\n", filename); + + /* ------------------- + * OPEN AN HDF5 FILE + * -------------------*/ + /* setup file access template with parallel IO access. 
*/ + acc_tpl1 = H5Pcreate(H5P_FILE_ACCESS); + assert(acc_tpl1 != FAIL); + MESG("H5Pcreate access succeed"); + /* set Parallel access with communicator */ + ret = H5Pset_fapl_mpio(acc_tpl1, comm, info); + assert(ret != FAIL); + MESG("H5Pset_fapl_mpio succeed"); + + /* open the file collectively */ + fid1 = H5Fopen(filename, H5F_ACC_RDWR, acc_tpl1); + assert(fid1 != FAIL); + MESG("H5Fopen succeed"); + + /* Release file-access template */ + ret = H5Pclose(acc_tpl1); + assert(ret != FAIL); + + /* -------------------------- + * Open the datasets in it + * ------------------------- */ + /* open the dataset1 collectively */ + dataset1 = H5Dopen2(fid1, DATASETNAME1, H5P_DEFAULT); + assert(dataset1 != FAIL); + MESG("H5Dopen2 succeed"); + + /* open another dataset collectively */ + dataset2 = H5Dopen2(fid1, DATASETNAME1, H5P_DEFAULT); + assert(dataset2 != FAIL); + MESG("H5Dopen2 2 succeed"); + + /* + * Set up dimensions of the slab this process accesses. + */ + + /* Dataset1: each process takes a block of columns. 
*/ + slab_set(start, count, stride, BYCOL); + if (verbose) + printf("start[]=(%lu,%lu), count[]=(%lu,%lu), total datapoints=%lu\n", (unsigned long)start[0], + (unsigned long)start[1], (unsigned long)count[0], (unsigned long)count[1], + (unsigned long)(count[0] * count[1])); + + /* create a file dataspace independently */ + file_dataspace = H5Dget_space(dataset1); + assert(file_dataspace != FAIL); + MESG("H5Dget_space succeed"); + ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, NULL); + assert(ret != FAIL); + MESG("H5Sset_hyperslab succeed"); + + /* create a memory dataspace independently */ + mem_dataspace = H5Screate_simple(SPACE1_RANK, count, NULL); + assert(mem_dataspace != FAIL); + + /* fill dataset with test data */ + dataset_fill(start, count, stride, &data_origin1[0][0]); + MESG("data_array initialized"); + if (verbose) { + MESG("data_array created"); + dataset_print(start, count, stride, &data_array1[0][0]); + } + + /* set up the collective transfer properties list */ + xfer_plist = H5Pcreate(H5P_DATASET_XFER); + assert(xfer_plist != FAIL); + ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE); + assert(ret != FAIL); + MESG("H5Pcreate xfer succeed"); + + /* read data collectively */ + ret = H5Dread(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1); + assert(ret != FAIL); + MESG("H5Dread succeed"); + + /* verify the read data with original expected data */ + ret = dataset_vrfy(start, count, stride, &data_array1[0][0], &data_origin1[0][0]); + assert(ret != FAIL); + + /* release all temporary handles. */ + /* Could have used them for dataset2 but it is cleaner */ + /* to create them again.*/ + H5Sclose(file_dataspace); + H5Sclose(mem_dataspace); + H5Pclose(xfer_plist); + + /* Dataset2: each process takes a block of rows. 
*/ + slab_set(start, count, stride, BYROW); + if (verbose) + printf("start[]=(%lu,%lu), count[]=(%lu,%lu), total datapoints=%lu\n", (unsigned long)start[0], + (unsigned long)start[1], (unsigned long)count[0], (unsigned long)count[1], + (unsigned long)(count[0] * count[1])); + + /* create a file dataspace independently */ + file_dataspace = H5Dget_space(dataset1); + assert(file_dataspace != FAIL); + MESG("H5Dget_space succeed"); + ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, NULL); + assert(ret != FAIL); + MESG("H5Sset_hyperslab succeed"); + + /* create a memory dataspace independently */ + mem_dataspace = H5Screate_simple(SPACE1_RANK, count, NULL); + assert(mem_dataspace != FAIL); + + /* fill dataset with test data */ + dataset_fill(start, count, stride, &data_origin1[0][0]); + MESG("data_array initialized"); + if (verbose) { + MESG("data_array created"); + dataset_print(start, count, stride, &data_array1[0][0]); + } + + /* set up the collective transfer properties list */ + xfer_plist = H5Pcreate(H5P_DATASET_XFER); + assert(xfer_plist != FAIL); + ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE); + assert(ret != FAIL); + MESG("H5Pcreate xfer succeed"); + + /* read data independently */ + ret = H5Dread(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1); + assert(ret != FAIL); + MESG("H5Dread succeed"); + + /* verify the read data with original expected data */ + ret = dataset_vrfy(start, count, stride, &data_array1[0][0], &data_origin1[0][0]); + assert(ret != FAIL); + + /* release all temporary handles. */ + H5Sclose(file_dataspace); + H5Sclose(mem_dataspace); + H5Pclose(xfer_plist); + + /* + * All reads completed. 
Close datasets collectively
+     */
+    ret = H5Dclose(dataset1);
+    assert(ret != FAIL);
+    MESG("H5Dclose1 succeed");
+    ret = H5Dclose(dataset2);
+    assert(ret != FAIL);
+    MESG("H5Dclose2 succeed");
+
+    /* close the file collectively */
+    H5Fclose(fid1);
+}
+
+/*
+ * test file access by communicator besides COMM_WORLD.
+ * Split COMM_WORLD into two, one (even_comm) contains the original
+ * processes of even ranks. The other (odd_comm) contains the original
+ * processes of odd ranks. Processes in even_comm create a file, then
+ * close it, using even_comm. Processes in odd_comm just do a barrier
+ * using odd_comm. Then they all do a barrier using COMM_WORLD.
+ * If the file creation and close does not do correct collective action
+ * according to the communicator argument, the processes will freeze up
+ * sooner or later due to a mixed-up barrier.
+ */
+void
+test_split_comm_access(char filenames[][PATH_MAX])
+{
+    MPI_Comm comm;
+    MPI_Info info = MPI_INFO_NULL;
+    int      color, mrc;
+    int      newrank, newprocs;
+    hid_t    fid;     /* file IDs */
+    hid_t    acc_tpl; /* File access properties */
+    herr_t   ret;     /* generic return value */
+
+    if (verbose)
+        printf("Independent write test on file %s %s\n", filenames[0], filenames[1]);
+
+    color = mpi_rank % 2;
+    mrc   = MPI_Comm_split(MPI_COMM_WORLD, color, mpi_rank, &comm);
+    assert(mrc == MPI_SUCCESS);
+    MPI_Comm_size(comm, &newprocs);
+    MPI_Comm_rank(comm, &newrank);
+
+    if (color) {
+        /* odd-rank processes */
+        mrc = MPI_Barrier(comm);
+        assert(mrc == MPI_SUCCESS);
+    }
+    else {
+        /* even-rank processes */
+        /* setup file access template */
+        acc_tpl = H5Pcreate(H5P_FILE_ACCESS);
+        assert(acc_tpl != FAIL);
+
+        /* set Parallel access with communicator */
+        ret = H5Pset_fapl_mpio(acc_tpl, comm, info);
+        assert(ret != FAIL);
+
+        /* create the file collectively */
+        fid = H5Fcreate(filenames[color], H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+        assert(fid != FAIL);
+        MESG("H5Fcreate succeed");
+
+        /* Release file-access template */
+        ret =
H5Pclose(acc_tpl); + assert(ret != FAIL); + + ret = H5Fclose(fid); + assert(ret != FAIL); + } + if (mpi_rank == 0) { + mrc = MPI_File_delete(filenames[color], info); + assert(mrc == MPI_SUCCESS); + } + MPI_Comm_free(&comm); +} + +/* + * Show command usage + */ +void +usage(void) +{ + printf("Usage: testphdf5 [-f ] [-r] [-w] [-v]\n"); + printf("\t-f\tfile prefix for parallel test files.\n"); + printf("\t \t e.g. pfs:/PFS/myname\n"); + printf("\t \tcan be set via $" PARAPREFIX ".\n"); + printf("\t \tDefault is current directory.\n"); + printf("\t-c\tno cleanup\n"); + printf("\t-r\tno read\n"); + printf("\t-w\tno write\n"); + printf("\t-v\tverbose on\n"); + printf("\tdefault do write then read\n"); + printf("\n"); +} + +/* + * compose the test filename with the prefix supplied. + * return code: 0 if no error + * 1 otherwise. + */ +int +mkfilenames(char *prefix) +{ + int i, n; + size_t strsize; + + /* filename will be prefix/ParaEgN.h5 where N is 0 to 9. */ + /* So, string must be big enough to hold the prefix, / and 10 more chars */ + /* and the terminating null. */ + strsize = strlen(prefix) + 12; + if (strsize > PATH_MAX) { + printf("File prefix too long; Use a short path name.\n"); + return (1); + } + n = sizeof(testfiles) / sizeof(testfiles[0]); + if (n > 9) { + printf("Warning: Too many entries in testfiles. 
" + "Need to adjust the code to accommodate the large size.\n"); + } + for (i = 0; i < n; i++) { + snprintf(testfiles[i], PATH_MAX, "%s/ParaEg%d.h5", prefix, i); + } + return (0); +} + +/* + * parse the command line options + */ +int +parse_options(int argc, char **argv) +{ + int i, n; + + /* initialize testfiles to nulls */ + n = sizeof(testfiles) / sizeof(testfiles[0]); + for (i = 0; i < n; i++) { + testfiles[i][0] = '\0'; + } + + while (--argc) { + if (**(++argv) != '-') { + break; + } + else { + switch (*(*argv + 1)) { + case 'f': + ++argv; + if (--argc < 1) { + usage(); + nerrors++; + return (1); + } + if (mkfilenames(*argv)) { + nerrors++; + return (1); + } + break; + case 'c': + docleanup = 0; /* no cleanup */ + break; + case 'r': + doread = 0; + break; + case 'w': + dowrite = 0; + break; + case 'v': + verbose = 1; + break; + default: + usage(); + nerrors++; + return (1); + } + } + } + + /* check the file prefix */ + if (testfiles[0][0] == '\0') { + /* try get it from environment variable HDF5_PARAPREFIX */ + char *env; + char *env_default = "."; /* default to current directory */ + if ((env = getenv(PARAPREFIX)) == NULL) { + env = env_default; + } + mkfilenames(env); + } + return (0); +} + +/* + * cleanup test files created + */ +void +cleanup(void) +{ + int i, n; + + n = sizeof(testfiles) / sizeof(testfiles[0]); + for (i = 0; i < n; i++) { + MPI_File_delete(testfiles[i], MPI_INFO_NULL); + } +} + +/* Main Program */ +int +main(int argc, char **argv) +{ + int mpi_namelen; + char mpi_name[MPI_MAX_PROCESSOR_NAME]; + int i, n; + + MPI_Init(&argc, &argv); + MPI_Comm_size(MPI_COMM_WORLD, &mpi_size); + MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank); + MPI_Get_processor_name(mpi_name, &mpi_namelen); + /* Make sure datasets can be divided into equal chunks by the processes */ + if ((SPACE1_DIM1 % mpi_size) || (SPACE1_DIM2 % mpi_size)) { + printf("DIM1(%d) and DIM2(%d) must be multiples of processes (%d)\n", SPACE1_DIM1, SPACE1_DIM2, + mpi_size); + nerrors++; + goto 
finish; + } + + if (parse_options(argc, argv) != 0) + goto finish; + + /* show test file names */ + if (mpi_rank == 0) { + n = sizeof(testfiles) / sizeof(testfiles[0]); + printf("Parallel test files are:\n"); + for (i = 0; i < n; i++) { + printf(" %s\n", testfiles[i]); + } + } + + if (dowrite) { + MPI_BANNER("testing PHDF5 dataset using split communicators..."); + test_split_comm_access(testfiles); + MPI_BANNER("testing PHDF5 dataset independent write..."); + phdf5writeInd(testfiles[0]); + MPI_BANNER("testing PHDF5 dataset collective write..."); + phdf5writeAll(testfiles[1]); + } + if (doread) { + MPI_BANNER("testing PHDF5 dataset independent read..."); + phdf5readInd(testfiles[0]); + MPI_BANNER("testing PHDF5 dataset collective read..."); + phdf5readAll(testfiles[1]); + } + + if (!(dowrite || doread)) { + usage(); + nerrors++; + } + +finish: + if (mpi_rank == 0) { /* only process 0 reports */ + if (nerrors) + printf("***PHDF5 example detected %d errors***\n", nerrors); + else { + printf("=====================================\n"); + printf("PHDF5 example finished with no errors\n"); + printf("=====================================\n"); + } + } + if (docleanup) + cleanup(); + MPI_Finalize(); + + return (nerrors); +} + +#else /* H5_HAVE_PARALLEL */ +/* dummy program since H5_HAVE_PARALLE is not configured in */ +int +main(void) +{ + printf("No PHDF5 example because parallel is not configured in\n"); + return (0); +} +#endif /* H5_HAVE_PARALLEL */ diff --git a/C/H5T/CMakeLists.txt b/C/H5T/CMakeLists.txt index 8c4084f1..2fd71ffc 100644 --- a/C/H5T/CMakeLists.txt +++ b/C/H5T/CMakeLists.txt @@ -115,7 +115,7 @@ if (HDF5_BUILD_TOOLS) if (NOT ${example_name} STREQUAL "h5ex_t_convert") if (${example_name} STREQUAL "h5ex_t_vlen" OR ${example_name} STREQUAL "h5ex_t_vlenatt") if (HDF5_VERSION_STRING VERSION_GREATER_EQUAL "1.14.3") - if (${H5_LIBVER_DIR} EQUAL 16 AND ${example_name} STREQUAL "h5ex_t_vlenatt") + if ((${EXAMPLE_VARNAME}_USE_16_API OR ${H5_LIBVER_DIR} EQUAL 16) AND 
${example_name} STREQUAL "h5ex_t_vlenatt") add_custom_command ( TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD @@ -127,10 +127,10 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/114/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/114/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () - elseif (${H5_LIBVER_DIR} EQUAL 16) + elseif (${EXAMPLE_VARNAME}_USE_16_API OR ${H5_LIBVER_DIR} EQUAL 16) add_custom_command ( TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD @@ -142,7 +142,7 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () elseif ((${example_name} STREQUAL "h5ex_t_objref" OR ${example_name} STREQUAL "h5ex_t_objrefatt") OR (${example_name} STREQUAL "h5ex_t_regref" OR ${example_name} STREQUAL "h5ex_t_regrefatt")) @@ -153,14 +153,14 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}21.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}21.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () elseif (HDF5_VERSION_MAJOR VERSION_EQUAL "1.10") @@ -169,18 +169,18 @@ if (HDF5_BUILD_TOOLS) TARGET 
${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}06.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}06.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () elseif (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.12") - if (${H5_LIBVER_DIR} EQUAL 16) + if (${EXAMPLE_VARNAME}_USE_16_API OR ${H5_LIBVER_DIR} EQUAL 16) add_custom_command ( TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD @@ -192,16 +192,25 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/112/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/112/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () else () - add_custom_command ( - TARGET ${EXAMPLE_VARNAME}_${example_name} - POST_BUILD - COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl - ) + if (${EXAMPLE_VARNAME}_USE_16_API OR ${H5_LIBVER_DIR} EQUAL 16) + add_custom_command ( + TARGET ${EXAMPLE_VARNAME}_${example_name} + POST_BUILD + COMMAND ${CMAKE_COMMAND} + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/16/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl + ) + else () + add_custom_command ( + TARGET ${EXAMPLE_VARNAME}_${example_name} + POST_BUILD + COMMAND ${CMAKE_COMMAND} + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl + ) + 
endif () endif () else () if (HDF5_VERSION_MAJOR VERSION_EQUAL "1.8") @@ -210,14 +219,14 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}21.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}21.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () elseif (HDF5_VERSION_MAJOR VERSION_EQUAL "1.10") @@ -226,14 +235,14 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}06.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}06.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () elseif (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.12") @@ -241,14 +250,14 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/112/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/112/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_${example_name} POST_BUILD COMMAND 
${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () endif () @@ -260,12 +269,21 @@ if (HDF5_BUILD_TOOLS) ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/16/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () - add_custom_command ( - TARGET ${EXAMPLE_VARNAME}_${example_name} - POST_BUILD - COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl - ) + if (${EXAMPLE_VARNAME}_USE_16_API) + add_custom_command ( + TARGET ${EXAMPLE_VARNAME}_${example_name} + POST_BUILD + COMMAND ${CMAKE_COMMAND} + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/16/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl + ) + else () + add_custom_command ( + TARGET ${EXAMPLE_VARNAME}_${example_name} + POST_BUILD + COMMAND ${CMAKE_COMMAND} + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl + ) + endif () endif () endif () endforeach () @@ -330,33 +348,40 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - add_test ( - NAME ${EXAMPLE_VARNAME}_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) - if (HDF5_BUILD_TOOLS) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS 
${EXAMPLE_VARNAME}_${testname}-clearall) + else () add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} + NAME ${EXAMPLE_VARNAME}_${testname} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.ddl.out" -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}.ddl.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + endif () endif () endmacro () @@ -367,19 +392,25 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - add_test ( - NAME ${EXAMPLE_VARNAME}_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=${ARGN}" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - 
) - set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + else () + add_test ( + NAME ${EXAMPLE_VARNAME}_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=${ARGN}" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_EXPECT=0" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + endif () endmacro () foreach (example_name ${common_examples}) diff --git a/C/H5T/Makefile.am b/C/H5T/Makefile.am index a4f4be5d..bc0d5d53 100644 --- a/C/H5T/Makefile.am +++ b/C/H5T/Makefile.am @@ -5,12 +5,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. 
## ## Makefile.am ## Run automake to generate a Makefile.in from this file. diff --git a/C/H5T/h5ex_t_array.c b/C/H5T/h5ex_t_array.c index b63e4e0e..a29ac455 100644 --- a/C/H5T/h5ex_t_array.c +++ b/C/H5T/h5ex_t_array.c @@ -144,7 +144,7 @@ main(void) * Output the data to the screen. */ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n", DATASET, i); + printf("%s[%" PRIuHSIZE "]:\n", DATASET, i); for (j = 0; j < adims[0]; j++) { printf(" ["); for (k = 0; k < adims[1]; k++) diff --git a/C/H5T/h5ex_t_arrayatt.c b/C/H5T/h5ex_t_arrayatt.c index a89f2b25..f0711286 100644 --- a/C/H5T/h5ex_t_arrayatt.c +++ b/C/H5T/h5ex_t_arrayatt.c @@ -155,7 +155,7 @@ main(void) * Output the data to the screen. */ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n", ATTRIBUTE, i); + printf("%s[%" PRIuHSIZE "]:\n", ATTRIBUTE, i); for (j = 0; j < adims[0]; j++) { printf(" ["); for (k = 0; k < adims[1]; k++) diff --git a/C/H5T/h5ex_t_cmpd.c b/C/H5T/h5ex_t_cmpd.c index 739d0616..44f15523 100644 --- a/C/H5T/h5ex_t_cmpd.c +++ b/C/H5T/h5ex_t_cmpd.c @@ -136,7 +136,7 @@ main(void) * Output the data to the screen. */ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n", DATASET, i); + printf("%s[%" PRIuHSIZE "]:\n", DATASET, i); printf("Serial number : %d\n", rdata[i].serial_no); printf("Location : %s\n", rdata[i].location); printf("Temperature (F) : %f\n", rdata[i].temperature); diff --git a/C/H5T/h5ex_t_cmpdatt.c b/C/H5T/h5ex_t_cmpdatt.c index 246537b1..04c72a51 100644 --- a/C/H5T/h5ex_t_cmpdatt.c +++ b/C/H5T/h5ex_t_cmpdatt.c @@ -146,7 +146,7 @@ main(void) * Output the data to the screen. 
*/ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n", ATTRIBUTE, i); + printf("%s[%" PRIuHSIZE "]:\n", ATTRIBUTE, i); printf("Serial number : %d\n", rdata[i].serial_no); printf("Location : %s\n", rdata[i].location); printf("Temperature (F) : %f\n", rdata[i].temperature); diff --git a/C/H5T/h5ex_t_convert.c b/C/H5T/h5ex_t_convert.c index b6f46b6f..b7036e3e 100644 --- a/C/H5T/h5ex_t_convert.c +++ b/C/H5T/h5ex_t_convert.c @@ -120,7 +120,7 @@ main(void) * Output the data to the screen. */ for (i = 0; i < DIM0; i++) { - printf("sensor[%d]:\n", i); + printf("sensor[%" PRIuHSIZE "]:\n", i); printf("Serial number : %d\n", sensor[i].serial_no); printf("Location : %s\n", sensor[i].location); printf("Temperature (F) : %f\n", sensor[i].temperature); diff --git a/C/H5T/h5ex_t_cpxcmpd.c b/C/H5T/h5ex_t_cpxcmpd.c index 8506c08c..370f7819 100644 --- a/C/H5T/h5ex_t_cpxcmpd.c +++ b/C/H5T/h5ex_t_cpxcmpd.c @@ -293,7 +293,7 @@ main(void) * Output the data to the screen. */ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n", DATASET, i); + printf("%s[%" PRIuHSIZE "]:\n", DATASET, i); printf(" Vehicle name :\n %s\n", rdata[i].name); printf(" Sensor locations :\n"); for (j = 0; j < rdata[i].sensors.len; j++) diff --git a/C/H5T/h5ex_t_cpxcmpdatt.c b/C/H5T/h5ex_t_cpxcmpdatt.c index c7efbce7..a55fb76f 100644 --- a/C/H5T/h5ex_t_cpxcmpdatt.c +++ b/C/H5T/h5ex_t_cpxcmpdatt.c @@ -304,7 +304,7 @@ main(void) * Output the data to the screen. 
*/ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n", ATTRIBUTE, i); + printf("%s[%" PRIuHSIZE "]:\n", ATTRIBUTE, i); printf(" Vehicle name :\n %s\n", rdata[i].name); printf(" Sensor locations :\n"); for (j = 0; j < rdata[i].sensors.len; j++) diff --git a/C/H5T/h5ex_t_objref.c b/C/H5T/h5ex_t_objref.c index 1109720c..e6c2de95 100644 --- a/C/H5T/h5ex_t_objref.c +++ b/C/H5T/h5ex_t_objref.c @@ -36,7 +36,7 @@ main(void) hid_t ref_type = H5T_STD_REF; /* Reference datatype */ H5R_ref_t wdata[DIM0]; /* buffer to write to disk */ H5R_ref_t *rdata = NULL; /* buffer to read into*/ - H5R_type_t objtype; /* Reference type */ + H5O_type_t objtype; /* Reference type */ #else hid_t ref_type = H5T_STD_REF_OBJ; /* Reference datatype */ hobj_ref_t wdata[DIM0]; /* Write buffer */ @@ -135,7 +135,7 @@ main(void) * Output the data to the screen. */ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n ->", DATASET, i); + printf("%s[%" PRIuHSIZE "]:\n ->", DATASET, i); /* * Open the referenced object, get its name and type. @@ -174,6 +174,9 @@ main(void) case H5O_TYPE_NAMED_DATATYPE: printf("Named Datatype"); break; + case H5O_TYPE_MAP: + printf("Map Object"); + break; case H5O_TYPE_UNKNOWN: case H5O_TYPE_NTYPES: printf("Unknown"); diff --git a/C/H5T/h5ex_t_objrefatt.c b/C/H5T/h5ex_t_objrefatt.c index a464e9e9..562364a2 100644 --- a/C/H5T/h5ex_t_objrefatt.c +++ b/C/H5T/h5ex_t_objrefatt.c @@ -38,7 +38,7 @@ main(void) hid_t ref_type = H5T_STD_REF; /* Reference datatype */ H5R_ref_t wdata[DIM0]; /* buffer to write to disk */ H5R_ref_t *rdata = NULL; /* buffer to read into*/ - H5R_type_t objtype; /* Reference type */ + H5O_type_t objtype; /* Reference type */ #else hid_t ref_type = H5T_STD_REF_OBJ; /* Reference datatype */ hobj_ref_t wdata[DIM0]; /* Write buffer */ @@ -147,7 +147,7 @@ main(void) * Output the data to the screen. 
*/ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n ->", ATTRIBUTE, i); + printf("%s[%" PRIuHSIZE "]:\n ->", ATTRIBUTE, i); /* * Open the referenced object, get its name and type. @@ -186,6 +186,9 @@ main(void) case H5O_TYPE_NAMED_DATATYPE: printf("Named Datatype"); break; + case H5O_TYPE_MAP: + printf("Map Object"); + break; case H5O_TYPE_UNKNOWN: case H5O_TYPE_NTYPES: printf("Unknown"); diff --git a/C/H5T/h5ex_t_opaque.c b/C/H5T/h5ex_t_opaque.c index 085183a1..11a58aea 100644 --- a/C/H5T/h5ex_t_opaque.c +++ b/C/H5T/h5ex_t_opaque.c @@ -111,7 +111,7 @@ main(void) */ printf("Datatype tag for %s is: \"%s\"\n", DATASET, tag); for (i = 0; i < dims[0]; i++) { - printf("%s[%u]: ", DATASET, i); + printf("%s[%" PRIuHSIZE "]: ", DATASET, i); for (j = 0; j < len; j++) printf("%c", rdata[j + i * len]); printf("\n"); diff --git a/C/H5T/h5ex_t_opaqueatt.c b/C/H5T/h5ex_t_opaqueatt.c index e88031ac..67294921 100644 --- a/C/H5T/h5ex_t_opaqueatt.c +++ b/C/H5T/h5ex_t_opaqueatt.c @@ -121,7 +121,7 @@ main(void) */ printf("Datatype tag for %s is: \"%s\"\n", ATTRIBUTE, tag); for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]: ", ATTRIBUTE, i); + printf("%s[%" PRIuHSIZE "]: ", ATTRIBUTE, i); for (j = 0; j < len; j++) printf("%c", rdata[j + i * len]); printf("\n"); diff --git a/C/H5T/h5ex_t_regref.c b/C/H5T/h5ex_t_regref.c index 39227259..e6d4cef2 100644 --- a/C/H5T/h5ex_t_regref.c +++ b/C/H5T/h5ex_t_regref.c @@ -168,7 +168,7 @@ main(void) * Output the data to the screen. */ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n ->", DATASET, i); + printf("%s[%" PRIuHSIZE "]:\n ->", DATASET, i); /* * Open the referenced object, retrieve its region as a diff --git a/C/H5T/h5ex_t_regrefatt.c b/C/H5T/h5ex_t_regrefatt.c index 5ed745d7..bb31b707 100644 --- a/C/H5T/h5ex_t_regrefatt.c +++ b/C/H5T/h5ex_t_regrefatt.c @@ -183,7 +183,7 @@ main(void) * Output the data to the screen. 
*/ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n ->", ATTRIBUTE, i); + printf("%s[%" PRIuHSIZE "]:\n ->", ATTRIBUTE, i); /* * Open the referenced object, retrieve its region as a diff --git a/C/H5T/h5ex_t_vlen.c b/C/H5T/h5ex_t_vlen.c index 7111a343..b5649729 100644 --- a/C/H5T/h5ex_t_vlen.c +++ b/C/H5T/h5ex_t_vlen.c @@ -118,7 +118,7 @@ main(void) * Output the variable-length data to the screen. */ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n {", DATASET, i); + printf("%s[%" PRIuHSIZE "]:\n {", DATASET, i); ptr = rdata[i].p; for (j = 0; j < rdata[i].len; j++) { printf(" %d", ptr[j]); diff --git a/C/H5T/h5ex_t_vlenatt.c b/C/H5T/h5ex_t_vlenatt.c index db69aea4..e173a20f 100644 --- a/C/H5T/h5ex_t_vlenatt.c +++ b/C/H5T/h5ex_t_vlenatt.c @@ -128,7 +128,7 @@ main(void) * Output the variable-length data to the screen. */ for (i = 0; i < dims[0]; i++) { - printf("%s[%llu]:\n {", ATTRIBUTE, i); + printf("%s[%" PRIuHSIZE "]:\n {", ATTRIBUTE, i); ptr = rdata[i].p; for (j = 0; j < rdata[i].len; j++) { printf(" %d", ptr[j]); diff --git a/C/H5T/test.sh.in b/C/H5T/test.sh.in index dc48ac99..d5c453b5 100755 --- a/C/H5T/test.sh.in +++ b/C/H5T/test.sh.in @@ -6,12 +6,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. 
srcdir=@srcdir@ diff --git a/C/H5VDS/CMakeLists.txt b/C/H5VDS/CMakeLists.txt index 87fcb9f4..33dafb47 100644 --- a/C/H5VDS/CMakeLists.txt +++ b/C/H5VDS/CMakeLists.txt @@ -128,33 +128,40 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}*.h5 ) - add_test ( - NAME ${EXAMPLE_VARNAME}_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) - if (HDF5_BUILD_TOOLS) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + else () add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} + NAME ${EXAMPLE_VARNAME}_${testname} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.ddl.out" -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-${testname} + COMMAND "${CMAKE_COMMAND}" + -D 
"TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}.ddl.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}) + endif () endif () endmacro () diff --git a/C/H5VDS/Makefile.am b/C/H5VDS/Makefile.am index e4f5320b..d9a5116a 100644 --- a/C/H5VDS/Makefile.am +++ b/C/H5VDS/Makefile.am @@ -5,12 +5,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. ## ## Makefile.am ## Run automake to generate a Makefile.in from this file. 
diff --git a/C/H5VDS/h5ex_vds-eiger.c b/C/H5VDS/h5ex_vds-eiger.c index 0629d734..8a41de6f 100644 --- a/C/H5VDS/h5ex_vds-eiger.c +++ b/C/H5VDS/h5ex_vds-eiger.c @@ -89,7 +89,7 @@ main(void) status = H5Pset_virtual(dcpl, vspace, "f-%b.h5", "/A", src_space); /* Create a virtual dataset */ - dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT); + dset = H5Dcreate2(file, DATASET, H5T_STD_I32LE, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT); status = H5Sclose(vspace); status = H5Sclose(src_space); status = H5Dclose(dset); diff --git a/C/H5VDS/h5ex_vds-exc.c b/C/H5VDS/h5ex_vds-exc.c index 10bb68c7..de26911b 100644 --- a/C/H5VDS/h5ex_vds-exc.c +++ b/C/H5VDS/h5ex_vds-exc.c @@ -120,7 +120,7 @@ main(void) } /* Create a virtual dataset */ - dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT); + dset = H5Dcreate2(file, DATASET, H5T_STD_I32LE, space, H5P_DEFAULT, dcpl, H5P_DEFAULT); status = H5Sclose(space); status = H5Sclose(nsrc_space); status = H5Sclose(lsrc_space); diff --git a/C/H5VDS/h5ex_vds-exclim.c b/C/H5VDS/h5ex_vds-exclim.c index 6bebe76e..61b3078c 100644 --- a/C/H5VDS/h5ex_vds-exclim.c +++ b/C/H5VDS/h5ex_vds-exclim.c @@ -114,7 +114,7 @@ main(void) } /* Create a virtual dataset */ - dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT); + dset = H5Dcreate2(file, DATASET, H5T_STD_I32LE, space, H5P_DEFAULT, dcpl, H5P_DEFAULT); status = H5Sclose(space); status = H5Sclose(nsrc_space); status = H5Sclose(lsrc_space); diff --git a/C/H5VDS/h5ex_vds-percival-unlim-maxmin.c b/C/H5VDS/h5ex_vds-percival-unlim-maxmin.c index 8ef996a8..9ca030a4 100644 --- a/C/H5VDS/h5ex_vds-percival-unlim-maxmin.c +++ b/C/H5VDS/h5ex_vds-percival-unlim-maxmin.c @@ -95,7 +95,7 @@ main(void) src_space = H5Screate_simple(RANK, dims, dims_max); dcpl = H5Pcreate(H5P_DATASET_CREATE); status = H5Pset_chunk(dcpl, RANK, chunk_dims); - dset = H5Dcreate2(file, SRC_DATASET[i], H5T_NATIVE_INT, src_space, 
H5P_DEFAULT, dcpl, H5P_DEFAULT); + dset = H5Dcreate2(file, SRC_DATASET[i], H5T_STD_I32LE, src_space, H5P_DEFAULT, dcpl, H5P_DEFAULT); status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata); status = H5Sclose(src_space); status = H5Pclose(dcpl); @@ -145,7 +145,7 @@ main(void) H5Sselect_none(vspace); /* Create a virtual dataset */ - vdset = H5Dcreate2(vfile, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT); + vdset = H5Dcreate2(vfile, DATASET, H5T_STD_I32LE, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT); status = H5Sclose(vspace); status = H5Sclose(src_space); status = H5Pclose(dcpl); diff --git a/C/H5VDS/h5ex_vds-percival-unlim.c b/C/H5VDS/h5ex_vds-percival-unlim.c index fadfb790..71a8ddaa 100644 --- a/C/H5VDS/h5ex_vds-percival-unlim.c +++ b/C/H5VDS/h5ex_vds-percival-unlim.c @@ -92,7 +92,7 @@ main(void) src_space = H5Screate_simple(RANK, dims, dims_max); dcpl = H5Pcreate(H5P_DATASET_CREATE); status = H5Pset_chunk(dcpl, RANK, chunk_dims); - dset = H5Dcreate2(file, SRC_DATASET[i], H5T_NATIVE_INT, src_space, H5P_DEFAULT, dcpl, H5P_DEFAULT); + dset = H5Dcreate2(file, SRC_DATASET[i], H5T_STD_I32LE, src_space, H5P_DEFAULT, dcpl, H5P_DEFAULT); status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata); status = H5Sclose(src_space); status = H5Pclose(dcpl); @@ -142,7 +142,7 @@ main(void) H5Sselect_none(vspace); /* Create a virtual dataset */ - vdset = H5Dcreate2(vfile, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT); + vdset = H5Dcreate2(vfile, DATASET, H5T_STD_I32LE, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT); status = H5Sclose(vspace); status = H5Sclose(src_space); status = H5Pclose(dcpl); diff --git a/C/H5VDS/h5ex_vds-percival.c b/C/H5VDS/h5ex_vds-percival.c index edbef18a..75e1653d 100644 --- a/C/H5VDS/h5ex_vds-percival.c +++ b/C/H5VDS/h5ex_vds-percival.c @@ -80,12 +80,12 @@ main(void) file = H5Fcreate(SRC_FILE[i], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); src_space = H5Screate_simple(RANK, dims, NULL); - 
dset = H5Dcreate2(file, SRC_DATASET[i], H5T_NATIVE_INT, src_space, H5P_DEFAULT, H5P_DEFAULT, - H5P_DEFAULT); - status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata); - status = H5Sclose(src_space); - status = H5Dclose(dset); - status = H5Fclose(file); + dset = + H5Dcreate2(file, SRC_DATASET[i], H5T_STD_I32LE, src_space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata); + status = H5Sclose(src_space); + status = H5Dclose(dset); + status = H5Fclose(file); } file = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); @@ -130,7 +130,7 @@ main(void) H5Sselect_none(vspace); /* Create a virtual dataset */ - dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT); + dset = H5Dcreate2(file, DATASET, H5T_STD_I32LE, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT); status = H5Sclose(vspace); status = H5Sclose(src_space); status = H5Dclose(dset); diff --git a/C/H5VDS/h5ex_vds-simpleIO.c b/C/H5VDS/h5ex_vds-simpleIO.c index 3bd3133e..52be8f5d 100644 --- a/C/H5VDS/h5ex_vds-simpleIO.c +++ b/C/H5VDS/h5ex_vds-simpleIO.c @@ -69,7 +69,7 @@ main(void) */ file = H5Fcreate(SRC_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); space = H5Screate_simple(RANK, dims, NULL); - dset = H5Dcreate2(file, SRC_DATASET, H5T_NATIVE_INT, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dset = H5Dcreate2(file, SRC_DATASET, H5T_STD_I32LE, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata[0]); status = H5Sclose(space); status = H5Dclose(dset); @@ -94,7 +94,7 @@ main(void) status = H5Pset_virtual(dcpl, vspace, SRC_FILE, SRC_DATASET, src_space); /* Create a virtual dataset */ - dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT); + dset = H5Dcreate2(file, DATASET, H5T_STD_I32LE, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT); status = H5Sclose(vspace); status = H5Sclose(src_space); 
status = H5Dclose(dset); diff --git a/C/H5VDS/h5ex_vds.c b/C/H5VDS/h5ex_vds.c index 96a81374..77219e87 100644 --- a/C/H5VDS/h5ex_vds.c +++ b/C/H5VDS/h5ex_vds.c @@ -87,7 +87,7 @@ main(void) file = H5Fcreate(SRC_FILE[i], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); space = H5Screate_simple(RANK1, dims, NULL); - dset = H5Dcreate2(file, SRC_DATASET[i], H5T_NATIVE_INT, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dset = H5Dcreate2(file, SRC_DATASET[i], H5T_STD_I32LE, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata); status = H5Sclose(space); status = H5Dclose(dset); @@ -127,7 +127,7 @@ main(void) } /* Create a virtual dataset. */ - dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT); + dset = H5Dcreate2(file, DATASET, H5T_STD_I32LE, space, H5P_DEFAULT, dcpl, H5P_DEFAULT); status = H5Sclose(space); status = H5Sclose(src_space); status = H5Dclose(dset); diff --git a/C/H5VDS/test.sh.in b/C/H5VDS/test.sh.in index 91888f55..983ef426 100644 --- a/C/H5VDS/test.sh.in +++ b/C/H5VDS/test.sh.in @@ -6,12 +6,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. 
srcdir=@srcdir@ diff --git a/C/Makefile.am b/C/Makefile.am index 0048db07..778f8022 100644 --- a/C/Makefile.am +++ b/C/Makefile.am @@ -5,12 +5,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. ## ## Makefile.am ## Run automake to generate a Makefile.in from this file. 
diff --git a/C/Perf/CMakeLists.txt b/C/Perf/CMakeLists.txt index e41def21..66f93273 100644 --- a/C/Perf/CMakeLists.txt +++ b/C/Perf/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required (VERSION 3.12) -PROJECT (HDF5Examples_C_PERFORM) +project (HDF5Examples_C_PERFORM C) #----------------------------------------------------------------------------- # Define Sources diff --git a/CMakeLists.txt b/CMakeLists.txt index 00adfc09..d8e02d50 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -36,10 +36,6 @@ message (STATUS "HDF5 H5_LIBVER_DIR: ${H5_LIBVER_DIR} HDF5_VERSION_MAJOR: ${HDF5 option (HDF_BUILD_JAVA "Build JAVA support" OFF) if (HDF_BUILD_JAVA) find_package (Java) - INCLUDE_DIRECTORIES ( - ${JAVA_INCLUDE_PATH} - ${JAVA_INCLUDE_PATH2} - ) include (${H5EX_RESOURCES_DIR}/UseJava.cmake) endif () @@ -120,7 +116,9 @@ if (HDF_ENABLE_THREADSAFE) set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} Threads::Threads) endif () -INCLUDE_DIRECTORIES (${H5EX_HDF5_INCLUDE_DIRS}) +set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES + "${H5EX_HDF5_INCLUDE_DIRS}" +) #----------------------------------------------------------------------------- # Dashboard and Testing Settings @@ -159,6 +157,7 @@ if (${H5_LIBVER_DIR} GREATER 16) endif () configure_file (${H5EX_F90_SRC_DIR}/H5D/h5_version.h.in ${PROJECT_BINARY_DIR}/FORTRAN/H5D/h5_version.h @ONLY) + configure_file (${H5EX_F90_SRC_DIR}/H5D/h5_version.h.in ${PROJECT_BINARY_DIR}/FORTRAN/H5G/h5_version.h @ONLY) else () set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build examples FORTRAN support" FORCE) endif () diff --git a/CMakePresets.json b/CMakePresets.json index 263ff293..d9fdd042 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -128,6 +128,23 @@ "ci-x64-Release-Clang" ] }, + { + "name": "ci-StdShar-OSX-Clang", + "configurePreset": "ci-StdShar-Clang", + "inherits": [ + "ci-x64-Release-Clang" + ], + "execution": { + "noTestsAction": "error", + "timeout": 180, + "jobs": 2 + }, + "condition": { + "type": "equals", + "lhs": 
"${hostSystemName}", + "rhs": "Darwin" + } + }, { "name": "ci-StdShar-GNUC", "configurePreset": "ci-StdShar-GNUC", @@ -135,6 +152,23 @@ "ci-x64-Release-GNUC" ] }, + { + "name": "ci-StdShar-win-Intel", + "configurePreset": "ci-StdShar-Intel", + "inherits": [ + "ci-x64-Release-Intel" + ], + "filter": { + "exclude": { + "name": "H5DUMP-tfloatsattrs" + } + }, + "condition": { + "type": "equals", + "lhs": "${hostSystemName}", + "rhs": "Windows" + } + }, { "name": "ci-StdShar-Intel", "configurePreset": "ci-StdShar-Intel", @@ -160,6 +194,14 @@ {"type": "test", "name": "ci-StdShar-Clang"} ] }, + { + "name": "ci-StdShar-OSX-Clang", + "steps": [ + {"type": "configure", "name": "ci-StdShar-Clang"}, + {"type": "build", "name": "ci-StdShar-Clang"}, + {"type": "test", "name": "ci-StdShar-OSX-Clang"} + ] + }, { "name": "ci-StdShar-GNUC", "steps": [ @@ -175,6 +217,14 @@ {"type": "build", "name": "ci-StdShar-Intel"}, {"type": "test", "name": "ci-StdShar-Intel"} ] + }, + { + "name": "ci-StdShar-win-Intel", + "steps": [ + {"type": "configure", "name": "ci-StdShar-Intel"}, + {"type": "build", "name": "ci-StdShar-Intel"}, + {"type": "test", "name": "ci-StdShar-win-Intel"} + ] } ] } diff --git a/CTestConfig.cmake b/CTestConfig.cmake index 44e26e28..aef6da8f 100644 --- a/CTestConfig.cmake +++ b/CTestConfig.cmake @@ -1,18 +1,32 @@ +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. +# ## This file should be placed in the root directory of your project. ## Then modify the CMakeLists.txt file in the root directory of your ## project to incorporate the testing dashboard. 
-## # The following are required to uses Dart and the Cdash dashboard +## # The following are required to use Dart and the CDash dashboard ## ENABLE_TESTING() ## INCLUDE(CTest) -set (CTEST_NIGHTLY_START_TIME "18:00:00 CST") set (CTEST_PROJECT_NAME "HDF5Examples") +set (CTEST_NIGHTLY_START_TIME "18:00:00 CST") set (CTEST_DROP_METHOD "https") -if (CDASH_LOCAL) - set (CTEST_DROP_SITE "cdash-internal.hdfgroup.org") - set (CTEST_DROP_LOCATION "/submit.php?project=HDF5Examples") +if (CTEST_DROP_SITE_INIT) + set (CTEST_DROP_SITE "${CTEST_DROP_SITE_INIT}") else () set (CTEST_DROP_SITE "cdash.hdfgroup.org") +endif () +if (CTEST_DROP_LOCATION_INIT) + set (CTEST_DROP_LOCATION "${CTEST_DROP_LOCATION_INIT}") +else () set (CTEST_DROP_LOCATION "/submit.php?project=HDF5Examples") endif () set (CTEST_DROP_SITE_CDASH TRUE) diff --git a/FORTRAN/H5D/CMakeLists.txt b/FORTRAN/H5D/CMakeLists.txt index 0d7f5b5f..3cb0c6e0 100644 --- a/FORTRAN/H5D/CMakeLists.txt +++ b/FORTRAN/H5D/CMakeLists.txt @@ -9,10 +9,8 @@ project (HDF5Examples_FORTRAN_H5D Fortran) #----------------------------------------------------------------------------- # Setup include Directories #----------------------------------------------------------------------------- -INCLUDE_DIRECTORIES ( - ${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT} - ${PROJECT_BINARY_DIR} - ${CMAKE_LIBRARY_OUTPUT_DIRECTORY} +set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES + "${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" ) #----------------------------------------------------------------------------- @@ -245,33 +243,40 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - add_test ( - NAME ${EXAMPLE_VARNAME}_f90_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D 
"TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) - if (HDF5_BUILD_TOOLS) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + else () add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} + NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.ddl.out" -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}.ddl.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} PROPERTIES DEPENDS 
${EXAMPLE_VARNAME}_f90_${testname}) + endif () endif () endmacro () diff --git a/FORTRAN/H5D/Makefile.am b/FORTRAN/H5D/Makefile.am index dc98f1fb..c76ce859 100644 --- a/FORTRAN/H5D/Makefile.am +++ b/FORTRAN/H5D/Makefile.am @@ -5,12 +5,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. ## ## Makefile.am ## Run automake to generate a Makefile.in from this file. diff --git a/FORTRAN/H5D/h5ex_d_checksum.F90 b/FORTRAN/H5D/h5ex_d_checksum.F90 index b0464a3e..cab742b7 100644 --- a/FORTRAN/H5D/h5ex_d_checksum.F90 +++ b/FORTRAN/H5D/h5ex_d_checksum.F90 @@ -120,15 +120,17 @@ PROGRAM main ! 
nelmts = 0 CALL H5Pget_filter_f(dcpl, 0, flags, nelmts, cd_values, MaxChrLen, name, filter_id, hdferr) - WRITE(*,'("Filter type is: ")', ADVANCE='NO') + WRITE(*,'(A,1X)', ADVANCE='NO') "Filter type is:" IF(filter_id.EQ.H5Z_FILTER_DEFLATE_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_DEFLATE_F")') + WRITE(*,'(A)') "H5Z_FILTER_DEFLATE_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SHUFFLE_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SHUFFLE_F")') + WRITE(*,'(A)') "H5Z_FILTER_SHUFFLE_F" ELSE IF(filter_id.EQ.H5Z_FILTER_FLETCHER32_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_FLETCHER32_F")') + WRITE(*,'(A)') "H5Z_FILTER_FLETCHER32_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SZIP_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SZIP_F")') + WRITE(*,'(A)') "H5Z_FILTER_SZIP_F" + ELSE + WRITE(*,'(A)') "UNKNOWN" ENDIF ! ! Read the data using the default properties. diff --git a/FORTRAN/H5D/h5ex_d_extern.F90 b/FORTRAN/H5D/h5ex_d_extern.F90 index faef3978..e44fdf0c 100644 --- a/FORTRAN/H5D/h5ex_d_extern.F90 +++ b/FORTRAN/H5D/h5ex_d_extern.F90 @@ -40,7 +40,7 @@ PROGRAM main INTEGER :: i, j ! This change was introduced in the 1.8.12 release #if H5_VERSION_GE(1,8,12) - INTEGER(OFF_T) :: offset = 0 ! Offset, in bytes, from thebeginning of the file to the + INTEGER(OFF_T) :: offset = 0 ! Offset, in bytes, from the beginning of the file to the ! location in the file where the data starts. #else INTEGER :: offset = 0 diff --git a/FORTRAN/H5D/h5ex_d_gzip.F90 b/FORTRAN/H5D/h5ex_d_gzip.F90 index b46e3fcd..7e7b6b5a 100644 --- a/FORTRAN/H5D/h5ex_d_gzip.F90 +++ b/FORTRAN/H5D/h5ex_d_gzip.F90 @@ -118,15 +118,17 @@ PROGRAM main ! 
nelmts = 1 CALL H5Pget_filter_f(dcpl, 0, flags, nelmts, cd_values, MaxChrLen, name, filter_id, hdferr) - WRITE(*,'("Filter type is: ")', ADVANCE='NO') + WRITE(*,'(A,1X)', ADVANCE='NO') "Filter type is:" IF(filter_id.EQ.H5Z_FILTER_DEFLATE_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_DEFLATE_F")') + WRITE(*,'(A)') "H5Z_FILTER_DEFLATE_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SHUFFLE_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SHUFFLE_F")') + WRITE(*,'(A)') "H5Z_FILTER_SHUFFLE_F" ELSE IF(filter_id.EQ.H5Z_FILTER_FLETCHER32_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_FLETCHER32_F")') + WRITE(*,'(A)') "H5Z_FILTER_FLETCHER32_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SZIP_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SZIP_F")') + WRITE(*,'(A)') "H5Z_FILTER_SZIP_F" + ELSE + WRITE(*,'(A)') "UNKNOWN" ENDIF ! ! Read the data using the default properties. diff --git a/FORTRAN/H5D/h5ex_d_nbit.F90 b/FORTRAN/H5D/h5ex_d_nbit.F90 index 27e4d52c..636898c1 100644 --- a/FORTRAN/H5D/h5ex_d_nbit.F90 +++ b/FORTRAN/H5D/h5ex_d_nbit.F90 @@ -125,17 +125,19 @@ PROGRAM main ! first filter because we know that we only added one filter. ! CALL H5Pget_filter_f(dcpl, 0, flags, nelmts, cd_values, INT(MaxChrLen, SIZE_T), name, filter_id, hdferr) - WRITE(*,'("Filter type is: ")', ADVANCE='NO') + WRITE(*,'(A,1X)', ADVANCE='NO') "Filter type is:" IF(filter_id.EQ.H5Z_FILTER_DEFLATE_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_DEFLATE_F")') + WRITE(*,'(A)') "H5Z_FILTER_DEFLATE_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SHUFFLE_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SHUFFLE_F")') + WRITE(*,'(A)') "H5Z_FILTER_SHUFFLE_F" ELSE IF(filter_id.EQ.H5Z_FILTER_FLETCHER32_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_FLETCHER32_F")') + WRITE(*,'(A)') "H5Z_FILTER_FLETCHER32_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SZIP_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SZIP_F")') + WRITE(*,'(A)') "H5Z_FILTER_SZIP_F" ELSE IF(filter_id.EQ.H5Z_FILTER_NBIT_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_NBIT_F")') + WRITE(*,'(A)') "H5Z_FILTER_NBIT_F" + ELSE + WRITE(*,'(A)') "UNKNOWN" ENDIF ! ! Read the data using the default properties. 
diff --git a/FORTRAN/H5D/h5ex_d_soint.F90 b/FORTRAN/H5D/h5ex_d_soint.F90 index 120e8966..e3bcc9f6 100644 --- a/FORTRAN/H5D/h5ex_d_soint.F90 +++ b/FORTRAN/H5D/h5ex_d_soint.F90 @@ -133,19 +133,21 @@ PROGRAM main ! nelmts = 1 CALL H5Pget_filter_f(dcpl, 0, flags, nelmts, cd_values, INT(MaxChrLen, SIZE_T), name, filter_id, hdferr) - WRITE(*,'("Filter type is: ")', ADVANCE='NO') + WRITE(*,'(A,1X)', ADVANCE='NO') "Filter type is:" IF(filter_id.EQ.H5Z_FILTER_DEFLATE_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_DEFLATE_F")') + WRITE(*,'(A)') "H5Z_FILTER_DEFLATE_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SHUFFLE_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SHUFFLE_F")') + WRITE(*,'(A)') "H5Z_FILTER_SHUFFLE_F" ELSE IF(filter_id.EQ.H5Z_FILTER_FLETCHER32_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_FLETCHER32_F")') + WRITE(*,'(A)') "H5Z_FILTER_FLETCHER32_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SZIP_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SZIP_F")') + WRITE(*,'(A)') "H5Z_FILTER_SZIP_F" ELSE IF(filter_id.EQ.H5Z_FILTER_NBIT_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_NBIT_F")') + WRITE(*,'(A)') "H5Z_FILTER_NBIT_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SCALEOFFSET_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SCALEOFFSET_F")') + WRITE(*,'(A)') "H5Z_FILTER_SCALEOFFSET_F" + ELSE + WRITE(*,'(A)') "UNKNOWN" ENDIF ! ! Read the data using the default properties. 
diff --git a/FORTRAN/H5D/h5ex_d_szip.F90 b/FORTRAN/H5D/h5ex_d_szip.F90 index f66036e3..fdd6ecf2 100644 --- a/FORTRAN/H5D/h5ex_d_szip.F90 +++ b/FORTRAN/H5D/h5ex_d_szip.F90 @@ -119,20 +119,22 @@ PROGRAM main nelmts = 1 CALL H5Pget_filter_f(dcpl, 0, flags, nelmts, cd_values, INT(MaxChrLen,SIZE_T), name, filter_id, hdferr) - WRITE(*,'("Filter type is: ")', ADVANCE='NO') + WRITE(*,'(A,1X)', ADVANCE='NO') "Filter type is:" IF(filter_id.EQ.H5Z_FILTER_DEFLATE_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_DEFLATE_F")') + WRITE(*,'(A)') "H5Z_FILTER_DEFLATE_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SHUFFLE_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SHUFFLE_F")') + WRITE(*,'(A)') "H5Z_FILTER_SHUFFLE_F" ELSE IF(filter_id.EQ.H5Z_FILTER_FLETCHER32_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_FLETCHER32_F")') + WRITE(*,'(A)') "H5Z_FILTER_FLETCHER32_F" ELSE IF(filter_id.EQ.H5Z_FILTER_SZIP_F)THEN - WRITE(*,'(T2,"H5Z_FILTER_SZIP_F")') + WRITE(*,'(A)') "H5Z_FILTER_SZIP_F" ! DEFINED ONLY IN F2003 hdf5 branch ! ELSE IF(filter_id.EQ.H5Z_FILTER_NBIT_F)THEN -! WRITE(*,'(T2,"H5Z_FILTER_NBIT_F")') +! WRITE(*,'(" H5Z_FILTER_NBIT_F")') ! ELSE IF(filter_id.EQ.H5Z_FILTER_SCALEOFFSET_F)THEN -! WRITE(*,'(T2,"H5Z_FILTER_SCALEOFFSET_F")') +! WRITE(*,'(" H5Z_FILTER_SCALEOFFSET_F")') + ELSE + WRITE(*,'(A)') "UNKNOWN" ENDIF ! ! Read the data using the default properties. diff --git a/FORTRAN/H5D/test.sh.in b/FORTRAN/H5D/test.sh.in index 76dbbcde..bdd17c28 100755 --- a/FORTRAN/H5D/test.sh.in +++ b/FORTRAN/H5D/test.sh.in @@ -6,12 +6,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. 
If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. srcdir=@srcdir@ diff --git a/FORTRAN/H5D/tfiles/18/h5ex_d_checksum.tst b/FORTRAN/H5D/tfiles/18/h5ex_d_checksum.tst index 01ed8661..d2690e3e 100644 --- a/FORTRAN/H5D/tfiles/18/h5ex_d_checksum.tst +++ b/FORTRAN/H5D/tfiles/18/h5ex_d_checksum.tst @@ -1,2 +1,2 @@ -Filter type is: H5Z_FILTER_FLETCHER32_F +Filter type is: H5Z_FILTER_FLETCHER32_F Maximum value in DS1 is: 1984 diff --git a/FORTRAN/H5D/tfiles/18/h5ex_d_gzip.tst b/FORTRAN/H5D/tfiles/18/h5ex_d_gzip.tst index 9efcd78a..6fbaba15 100644 --- a/FORTRAN/H5D/tfiles/18/h5ex_d_gzip.tst +++ b/FORTRAN/H5D/tfiles/18/h5ex_d_gzip.tst @@ -1,2 +1,2 @@ -Filter type is: H5Z_FILTER_DEFLATE_F +Filter type is: H5Z_FILTER_DEFLATE_F Maximum value in DS1 is: 1890 diff --git a/FORTRAN/H5D/tfiles/18/h5ex_d_nbit.tst b/FORTRAN/H5D/tfiles/18/h5ex_d_nbit.tst index 90f7a67b..49c46ba7 100644 --- a/FORTRAN/H5D/tfiles/18/h5ex_d_nbit.tst +++ b/FORTRAN/H5D/tfiles/18/h5ex_d_nbit.tst @@ -1,2 +1,2 @@ -Filter type is: H5Z_FILTER_NBIT_F +Filter type is: H5Z_FILTER_NBIT_F Maximum value in DS1 is: 1890 diff --git a/FORTRAN/H5D/tfiles/18/h5ex_d_soint.tst b/FORTRAN/H5D/tfiles/18/h5ex_d_soint.tst index ddf8b307..d3dad134 100644 --- a/FORTRAN/H5D/tfiles/18/h5ex_d_soint.tst +++ b/FORTRAN/H5D/tfiles/18/h5ex_d_soint.tst @@ -1,5 +1,5 @@ Maximum value in write buffer is: 1890 Minimum value in write buffer is: -63 -Filter type is: H5Z_FILTER_SCALEOFFSET_F +Filter type is: H5Z_FILTER_SCALEOFFSET_F Maximum value in DS1 is: 1890 Minimum value in DS1 is: -63 diff --git a/FORTRAN/H5D/tfiles/18/h5ex_d_szip.tst b/FORTRAN/H5D/tfiles/18/h5ex_d_szip.tst index 8f6ba902..bfd93d46 100644 --- a/FORTRAN/H5D/tfiles/18/h5ex_d_szip.tst +++ 
b/FORTRAN/H5D/tfiles/18/h5ex_d_szip.tst @@ -1,2 +1,2 @@ -Filter type is: H5Z_FILTER_SZIP_F +Filter type is: H5Z_FILTER_SZIP_F Maximum value in DS1 is: 1890 diff --git a/FORTRAN/H5G/CMakeLists.txt b/FORTRAN/H5G/CMakeLists.txt index 77bd0cbe..edc4b230 100644 --- a/FORTRAN/H5G/CMakeLists.txt +++ b/FORTRAN/H5G/CMakeLists.txt @@ -9,10 +9,8 @@ project (HDF5Examples_FORTRAN_H5G Fortran) #----------------------------------------------------------------------------- # Setup include Directories #----------------------------------------------------------------------------- -INCLUDE_DIRECTORIES ( - ${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT} - ${PROJECT_BINARY_DIR} - ${CMAKE_LIBRARY_OUTPUT_DIRECTORY} +set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES + "${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" ) #----------------------------------------------------------------------------- @@ -172,21 +170,19 @@ endif () #endif () if (H5EX_BUILD_TESTING) -# if (HDF_ENABLE_F2003) -# set (exfiles -# h5ex_g_iterate -# h5ex_g_traverse -# h5ex_g_visit -# ) -# foreach (example ${exfiles}) -# add_custom_command ( -# TARGET ${EXAMPLE_VARNAME}_f90_${example} -# POST_BUILD -# COMMAND ${CMAKE_COMMAND} -# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/${example}.h5 ${PROJECT_BINARY_DIR}/${example}.h5 -# ) -# endforeach () -# endif () + set (exfiles + h5ex_g_iterate + h5ex_g_traverse + h5ex_g_visit + ) + foreach (example ${exfiles}) + add_custom_command ( + TARGET ${EXAMPLE_VARNAME}_f90_${example} + POST_BUILD + COMMAND ${CMAKE_COMMAND} + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/${example}.h5 ${PROJECT_BINARY_DIR}/${example}.h5 + ) + endforeach () macro (ADD_DUMP_TEST testname) add_test ( @@ -195,63 +191,77 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - add_test ( - NAME ${EXAMPLE_VARNAME}_f90_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D 
"TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_SKIP_COMPARE=TRUE" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) - if (HDF5_BUILD_TOOLS) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + else () add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} + NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.out" -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_SKIP_COMPARE=TRUE" + -D "TEST_OUTPUT=${testname}.out" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P 
"${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}) + endif () endif () endmacro () macro (ADD_H5_DUMP_TEST testname) - add_test ( - NAME ${EXAMPLE_VARNAME}_f90_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - if (HDF5_BUILD_TOOLS) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + else () add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} + NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.ddl.out" -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}.ddl.out" + -D 
"TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}) + endif () endif () endmacro () @@ -263,25 +273,90 @@ if (H5EX_BUILD_TESTING) ${testname}1.h5 ${testname}2.h5 ) - if (${ARGN} STREQUAL "NULL") - add_test ( - NAME ${EXAMPLE_VARNAME}_f90_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_SKIP_COMPARE=TRUE" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + else () + if (${ARGN} STREQUAL "NULL") + add_test ( + NAME ${EXAMPLE_VARNAME}_f90_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_EXPECT=0" + -D "TEST_SKIP_COMPARE=TRUE" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + else () + add_test ( + NAME ${EXAMPLE_VARNAME}_f90_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_EXPECT=0" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + endif () + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} 
PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname}1 + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${testname}1.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}1.ddl.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}1.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname}1 PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname}2 + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${testname}2.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}2.ddl.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}2.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname}2 PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname}1) + endif () + endif () + endmacro () + + macro (ADD_H5_CMP_TEST testname) + add_test ( + NAME ${EXAMPLE_VARNAME}_f90_${testname}-clearall + COMMAND ${CMAKE_COMMAND} + -E remove + ${testname}.out.tmp + ) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) else () add_test ( NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" + -D "TEST_ARGS:STRING=${ARGN}" -D 
"TEST_FOLDER=${PROJECT_BINARY_DIR}" -D "TEST_EXPECT=0" -D "TEST_OUTPUT=${testname}.out" @@ -289,68 +364,29 @@ if (H5EX_BUILD_TESTING) -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) endif () - set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) - if (HDF5_BUILD_TOOLS) - add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname}1 - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${testname}1.h5" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}1.ddl.out" - -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}1.ddl" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname}1 PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}) - add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname}2 - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${testname}2.h5" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}2.ddl.out" - -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}2.ddl" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname}2 PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname}1) - endif () - endmacro () - - macro (ADD_H5_CMP_TEST testname) - add_test ( - NAME ${EXAMPLE_VARNAME}_f90_${testname}-clearall - COMMAND ${CMAKE_COMMAND} - -E remove - ${testname}.out.tmp - ) - add_test ( - NAME ${EXAMPLE_VARNAME}_f90_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=${ARGN}" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D 
"TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) endmacro () ADD_H5_DUMP2_TEST (h5ex_g_compact NULL) ADD_DUMP_TEST (h5ex_g_create) ADD_H5_CMP_TEST (h5ex_g_corder) ADD_H5_CMP_TEST (h5ex_g_phase) -# if (HDF_ENABLE_F2003) -# ADD_H5_CMP_TEST (h5ex_g_iterate_F03) -# ADD_H5_CMP_TEST (h5ex_g_traverse_F03) -# ADD_H5_CMP_TEST (h5ex_g_visit_F03) -# endif () + + if (HDF5_VERSION_STRING VERSION_GREATER_EQUAL "1.10.0") + ADD_H5_CMP_TEST (h5ex_g_intermediate) + ADD_H5_CMP_TEST (h5ex_g_iterate) + ADD_H5_CMP_TEST (h5ex_g_visit) + #if (HDF5_VERSION_STRING VERSION_GREATER_EQUAL "1.14.3") + #ADD_H5_CMP_TEST (h5ex_g_traverse) + #endif() + else () + if (HDF_ENABLE_F2003) + ADD_H5_CMP_TEST (h5ex_g_intermediate) + ADD_H5_CMP_TEST (h5ex_g_iterate) + # ADD_H5_CMP_TEST (h5ex_g_traverse) + ADD_H5_CMP_TEST (h5ex_g_visit) + endif () + endif () endif () diff --git a/FORTRAN/H5G/Fortran_sourcefiles.cmake b/FORTRAN/H5G/Fortran_sourcefiles.cmake index fa38fe6d..e2e8e9d4 100644 --- a/FORTRAN/H5G/Fortran_sourcefiles.cmake +++ b/FORTRAN/H5G/Fortran_sourcefiles.cmake @@ -9,9 +9,27 @@ set (common_examples h5ex_g_phase h5ex_g_create ) - -#set (f03_examples -# h5ex_g_iterate_F03 -# h5ex_g_traverse_F03 -# h5ex_g_visit_F03 -#) +if (HDF5_VERSION_STRING VERSION_GREATER_EQUAL "1.10.0") + set (common_examples + ${common_examples} + h5ex_g_intermediate + h5ex_g_iterate + h5ex_g_visit + ) + if (HDF5_VERSION_STRING VERSION_GREATER_EQUAL "1.14.3") + set (common_examples + ${common_examples} + h5ex_g_traverse + ) + endif() +else () + if (HDF_ENABLE_F2003) + set (common_examples + ${common_examples} + h5ex_g_intermediate + h5ex_g_iterate + h5ex_g_traverse + h5ex_g_visit + ) + endif () +endif () diff --git a/FORTRAN/H5G/Makefile.am 
b/FORTRAN/H5G/Makefile.am index 39f13c61..cdf1647f 100644 --- a/FORTRAN/H5G/Makefile.am +++ b/FORTRAN/H5G/Makefile.am @@ -5,12 +5,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. ## ## Makefile.am ## Run automake to generate a Makefile.in from this file. diff --git a/FORTRAN/H5G/h5ex_g_intermediate.F90 b/FORTRAN/H5G/h5ex_g_intermediate.F90 new file mode 100644 index 00000000..751b747f --- /dev/null +++ b/FORTRAN/H5G/h5ex_g_intermediate.F90 @@ -0,0 +1,130 @@ +!************************************************************ +! +! This example shows how to create intermediate groups with +! a single call to H5Gcreate. +! +!************************************************************/ + +MODULE g_intermediate + + USE HDF5 + USE ISO_C_BINDING + IMPLICIT NONE + +CONTAINS + +!************************************************************ +! +! Operator function for H5Ovisit. This function prints the +! name and type of the object passed to it. +! +!************************************************************ + + INTEGER FUNCTION op_func(loc_id, name, info, cptr) bind(C) + + USE HDF5 + USE ISO_C_BINDING + IMPLICIT NONE + + INTEGER(HID_T), VALUE :: loc_id + CHARACTER(LEN=1), DIMENSION(1:50) :: name ! 
We must have LEN=1 for bind(C) strings + ! in order to be standard compliant + TYPE(H5O_info_t) :: info + CHARACTER(LEN=50) :: name_string = ' ' + TYPE(C_PTR) :: cptr + INTEGER :: i + + DO i = 1, 50 + IF(name(i)(1:1).EQ.C_NULL_CHAR) EXIT ! Read up to the C NULL termination + name_string(i:i) = name(i)(1:1) + ENDDO + + WRITE(*,"('/')",ADVANCE="NO") ! Print root group in object path + ! + ! Check if the current object is the root group, and if not print + ! the full path name and type. + ! + IF(name(1)(1:1) .EQ. '.')THEN ! Root group, do not print '.' + WRITE(*,"(' (Group)')") + ELSE + IF(info%type.EQ.H5O_TYPE_GROUP_F)THEN + WRITE(*,'(A," (Group)")') TRIM(name_string) + ELSE IF(info%type.EQ.H5O_TYPE_DATASET_F)THEN + WRITE(*,'(A," (Dataset)")') TRIM(name_string) + ELSE IF(info%type.EQ.H5O_TYPE_NAMED_DATATYPE_F)THEN + WRITE(*,'(A," (Datatype)")') TRIM(name_string) + ELSE + WRITE(*,'(A," (Unknown)")') TRIM(name_string) + ENDIF + ENDIF + + op_func = 0 ! return successful + + END FUNCTION op_func + +END MODULE g_intermediate + +!************************************************************ +! +! Main program: create intermediate groups, then visit the file. +! +!************************************************************ +PROGRAM main + + USE HDF5 + USE ISO_C_BINDING + USE g_intermediate + + IMPLICIT NONE + + CHARACTER(LEN=22), PARAMETER :: filename = "h5ex_g_intermediate.h5" + INTEGER(HID_T) :: file + INTEGER(HID_T) :: group + INTEGER(HID_T) :: lcpl + INTEGER :: status + TYPE(C_FUNPTR) :: funptr + TYPE(C_PTR) :: f_ptr + INTEGER :: ret_value + + ! + ! Initialize FORTRAN interface. + ! + CALL H5open_f(status) + + file = H5I_INVALID_HID_F + group = H5I_INVALID_HID_F + lcpl = H5I_INVALID_HID_F + + ! + ! Create a new file using the default properties. + ! + CALL H5Fcreate_f(filename, H5F_ACC_TRUNC_F, file, status) + ! + ! Create link creation property list and set it to allow creation + ! of intermediate groups. + ! 
+ CALL H5Pcreate_f(H5P_LINK_CREATE_F, lcpl, status) + CALL H5Pset_create_inter_group_f(lcpl, 1, status) + ! + ! Create the group /G1/G2/G3. Note that /G1 and /G1/G2 do not + ! exist yet. This call would cause an error if we did not use the + ! previously created property list. + ! + CALL H5Gcreate_f(file, "/G1/G2/G3", group, status, lcpl_id=lcpl) + ! + ! Print all the objects in the files to show that intermediate + ! groups have been created. See h5ex_g_visit_f for more information + ! on how to use H5Ovisit_f. + ! + WRITE(*,'(A)') "Objects in the file:" + funptr = C_FUNLOC(op_func) + f_ptr = C_NULL_PTR + CALL H5Ovisit_f(file, H5_INDEX_NAME_F, H5_ITER_NATIVE_F, funptr, f_ptr, ret_value, status) + ! + ! Close and release resources. + ! + CALL H5Pclose_f(lcpl, status) + CALL H5Gclose_f(group, status) + CALL H5Fclose_f(file, status) + +END PROGRAM main diff --git a/FORTRAN/H5G/h5ex_g_iterate.F90 b/FORTRAN/H5G/h5ex_g_iterate.F90 new file mode 100644 index 00000000..ca434636 --- /dev/null +++ b/FORTRAN/H5G/h5ex_g_iterate.F90 @@ -0,0 +1,115 @@ +!************************************************************ +! +! This example shows how to iterate over group members using +! H5Literate. +! +!************************************************************ +MODULE g_iterate + + USE HDF5 + USE ISO_C_BINDING + IMPLICIT NONE + +CONTAINS + +!************************************************************ +! +! Operator function. Prints the name and type of the object +! being examined. +! +! ************************************************************ + + INTEGER FUNCTION op_func(loc_id, name, info, operator_data) bind(C) + + USE HDF5 + USE ISO_C_BINDING + IMPLICIT NONE + + INTEGER(HID_T), VALUE :: loc_id + CHARACTER(LEN=1), DIMENSION(1:10) :: name ! must have LEN=1 for bind(C) strings + TYPE(C_PTR) :: info + TYPE(C_PTR) :: operator_data + + INTEGER :: status, i, len + + TYPE(H5O_info_t), TARGET :: infobuf + TYPE(C_PTR) :: ptr + CHARACTER(LEN=10) :: name_string + + ! + ! 
Get type of the object and display its name and type. + ! The name of the object is passed to this FUNCTION by + ! the Library. + ! + + DO i = 1, 10 + name_string(i:i) = name(i)(1:1) + ENDDO + + CALL H5Oget_info_by_name_f(loc_id, name_string, infobuf, status) + + ! Include the string up to the C NULL CHARACTER + len = 0 + DO + IF(name_string(len+1:len+1).EQ.C_NULL_CHAR.OR.len.GE.10) EXIT + len = len + 1 + ENDDO + + IF(infobuf%type.EQ.H5O_TYPE_GROUP_F)THEN + WRITE(*,*) " Group: ", name_string(1:len) + ELSE IF(infobuf%type.EQ.H5O_TYPE_DATASET_F)THEN + WRITE(*,*) " Dataset: ", name_string(1:len) + ELSE IF(infobuf%type.EQ.H5O_TYPE_NAMED_DATATYPE_F)THEN + WRITE(*,*) " Datatype: ", name_string(1:len) + ELSE + WRITE(*,*) " Unknown: ", name_string(1:len) + ENDIF + + op_func = 0 ! return successful + + END FUNCTION op_func + +END MODULE g_iterate + + +PROGRAM main + + USE HDF5 + USE ISO_C_BINDING + USE g_iterate + + IMPLICIT NONE + + CHARACTER(LEN=17), PARAMETER :: filename = "h5ex_g_iterate.h5" + INTEGER(HID_T) :: file ! Handle + INTEGER :: status + TYPE(C_FUNPTR) :: funptr + TYPE(C_PTR) :: ptr + INTEGER(hsize_t) :: idx + INTEGER :: ret_value + ! + ! Initialize FORTRAN interface. + ! + CALL h5open_f(status) + ! + ! Open file. + ! + CALL H5Fopen_f(filename, H5F_ACC_RDONLY_F, file, status) + ! + ! Begin iteration. + ! + WRITE(*,'(A)') "Objects in root group:" + + idx = 0 + funptr = C_FUNLOC(op_func) ! call back function + ptr = C_NULL_PTR + + CALL H5Literate_f(file, H5_INDEX_NAME_F, H5_ITER_NATIVE_F, idx, funptr, ptr, ret_value, status) + + ! + ! Close and release resources. + ! 
+ CALL H5Fclose_f(file, status) + +END PROGRAM main + diff --git a/FORTRAN/H5G/h5ex_g_iterate.h5 b/FORTRAN/H5G/h5ex_g_iterate.h5 new file mode 100644 index 00000000..6576e8f8 Binary files /dev/null and b/FORTRAN/H5G/h5ex_g_iterate.h5 differ diff --git a/FORTRAN/H5G/h5ex_g_traverse.F90 b/FORTRAN/H5G/h5ex_g_traverse.F90 new file mode 100644 index 00000000..ab401ce9 --- /dev/null +++ b/FORTRAN/H5G/h5ex_g_traverse.F90 @@ -0,0 +1,251 @@ +!************************************************************ +! +! This example shows a way to recursively traverse the file +! using h5literate and h5literate_by_name_f. The method shown +! here guarantees that +! the recursion will not enter an infinite loop, but does +! not prevent objects from being visited more than once. +! The program prints the directory structure of the file +! specified in filename. The default file used by this example +! implements the structure described in the User's Guide, +! chapter 4, figure 26. +! +! ************************************************************ + +! An optional include to determine the correct HDF5 version +! for selecting the appropriate HDF5 API parameters. This is +! not part of the HDF5 library and is generally unnecessary. +#include "h5_version.h" + +MODULE g_traverse + + USE HDF5 + USE ISO_C_BINDING + IMPLICIT NONE + + CHARACTER(LEN=18) :: filename = "h5ex_g_traverse.h5" + + ! + ! Define operator data structure type for H5Literate callback. + ! During recursive iteration, these structures will form a + ! linked list that can be searched for duplicate groups, + ! preventing infinite recursion. + ! + TYPE :: opdata + INTEGER :: recurs ! Recursion level. 0=root + TYPE(opdata), POINTER :: prev ! Pointer to previous opdata +#if H5_VERSION_GE(1, 12, 0) + TYPE(H5O_TOKEN_T_F) :: token ! Group token +#else + INTEGER(haddr_t) :: token ! Group address +#endif + END TYPE opdata + +CONTAINS + + ! + ! OPERATOR FUNCTION TO BE CALLED BY H5LITERATE_F + ! + ! 
************************************************************ + ! + ! Operator function. This function prints the name and type + ! of the object passed to it. If the object is a group, it + ! is first checked against other groups in its path using + ! the group_check function, then if it is not a duplicate, + ! H5Literate is called for that group. This guarantees that + ! the program will not enter infinite recursion due to a + ! circular path in the file. + ! + ! ************************************************************ + + RECURSIVE INTEGER(KIND=C_INT) FUNCTION op_func(loc_id, name, info, operator_data) RESULT(ret_val) BIND(C) + + USE HDF5 + USE ISO_C_BINDING + IMPLICIT NONE + + INTEGER(hid_t), VALUE :: loc_id + CHARACTER(LEN=1), DIMENSION(1:10) :: name ! Must have LEN=1 for bind(C) strings + TYPE(C_PTR), VALUE :: info + TYPE(C_PTR), VALUE :: operator_data + + INTEGER :: status, return_val + TYPE(h5o_info_t), TARGET :: infobuf + TYPE(C_PTR) :: ptr + CHARACTER(LEN=10) :: name_string + INTEGER :: i + TYPE(opdata), POINTER :: od + TYPE(opdata), TARGET :: nextod + INTEGER(HSIZE_T) :: idx + + TYPE(C_PTR) :: ptr2 + TYPE(C_FUNPTR) :: funptr + + CHARACTER(LEN=10) :: space + INTEGER :: spaces ! Number of whitespaces to prepend to output + INTEGER :: len + INTEGER :: ret_val_func + + ret_val_func = 0 + ret_val = 0 + + name_string(1:10) = " " + len = 0 + DO + len = len + 1 + IF(name(len)(1:1).EQ.C_NULL_CHAR) EXIT + name_string(len:len) = name(len)(1:1) + ENDDO + len = len - 1 ! subtract NULL character + + space(1:10) = " " + + CALL C_F_POINTER(operator_data, od) + ! + ! Get type of the object and display its name and type. + ! The name of the object is passed to this function by + ! the Library. + ! + CALL H5Oget_info_by_name_f(loc_id, name_string, infobuf, status) + + spaces = 2*(od%recurs+1) + + WRITE(*,'(A)', ADVANCE='NO') space(1:spaces) ! Format output + + + IF(infobuf%type.EQ.H5O_TYPE_GROUP_F)THEN + + WRITE(*,'("Group: ",A," {")') name_string(1:len) + +! +! 
Check group address/token against linked list of operator +! data structures. We will always run the check, as the +! reference count cannot be relied upon if there are +! symbolic links, and H5Oget_info_by_name always follows +! symbolic links. Alternatively we could use H5Lget_info +! and never recurse on groups discovered by symbolic +! links, however it could still fail if an object's +! reference count was manually manipulated with +! H5Odecr_refcount. +! + + i = group_check(loc_id, od, infobuf%token) + + IF(i.EQ.1)THEN + WRITE(*,'(A)') space(1:spaces)//" Warning: Loop detected!" + ELSE + + nextod%recurs = od%recurs + 1 + nextod%prev => od + nextod%token = infobuf%token + idx = 0 + ptr2 = C_LOC(nextod%recurs) + funptr = C_FUNLOC(op_func) + CALL h5literate_by_name_f(loc_id, name_string, H5_INDEX_NAME_F, H5_ITER_NATIVE_F, idx, & + funptr, ptr2, ret_val_func, status) + ret_val = INT(ret_val_func,C_INT) + ENDIF + WRITE(*,'(A)') space(1:spaces)//"}" + RETURN + ELSE IF(infobuf%type.EQ.H5O_TYPE_DATASET_F)THEN + WRITE(*,'("Dataset: ",A)') name_string(1:len) + ELSE IF(infobuf%type.EQ.H5O_TYPE_NAMED_DATATYPE_F)THEN + WRITE(*,'("Datatype: ",A)') name_string(1:len) + ELSE + WRITE(*,'("Unknown: ",A)') name_string(1:len) + ENDIF + +END FUNCTION op_func + +!************************************************************ +! +! This function recursively searches the linked list of +! opdata structures for one whose address/token matches +! target_token. Returns 1 if a match is found, and 0 +! otherwise. +! +! 
************************************************************/ + + INTEGER RECURSIVE FUNCTION group_check(loc_id, od, target_token) result(g_c) + + IMPLICIT NONE + INTEGER :: i + TYPE(opdata), POINTER :: od + INTEGER(HID_T) :: loc_id + INTEGER :: cmp_value +#if H5_VERSION_GE(1, 14, 3) + TYPE(H5O_TOKEN_T_F) :: target_token + INTEGER :: status + CALL h5otoken_cmp_f(loc_id, od%token, target_token, cmp_value, status) +#else +#if H5_VERSION_GE(1, 12, 0) +#error "example only supports HDF5 versions < 1.12.0 and > 1.14.2" +#else + INTEGER(haddr_t) :: target_token + cmp_value = -1 + IF(od%token .EQ. target_token) cmp_value = 0 +#endif +#endif + IF (cmp_value.EQ.0)THEN + g_c = 1 ! Addresses/token match + ELSE IF (od%recurs.EQ.0)THEN + g_c = 0 ! Root group reached with no matches + ELSE + ! Recursively examine the next node + g_c = group_check(loc_id, od%prev, target_token) + END IF + END FUNCTION group_check + +END MODULE g_traverse + +PROGRAM main + + USE HDF5 + USE ISO_C_BINDING + + USE g_traverse + + IMPLICIT NONE + + INTEGER(hid_t) :: file ! Handle + INTEGER :: status + TYPE(h5o_info_t) :: infobuf + TYPE(opdata), TARGET :: od + TYPE(C_PTR) :: ptr + INTEGER(hsize_t) :: idx + INTEGER :: ret_value + TYPE(C_FUNPTR) :: funptr + ! + ! Initialize FORTRAN interface. + ! + CALL h5open_f(status) + ! + ! Open file and initialize the operator data structure. + ! + CALL H5Fopen_f(filename, H5F_ACC_RDONLY_F, file, status) + + CALL h5oget_info_by_name_f(file, "/", infobuf, status) + + od%recurs = 0 + od%prev => NULL() + od%token = infobuf%token + ! + ! Print the root group and formatting, begin iteration. + ! + idx = 0 + funptr = C_FUNLOC(op_func) + ptr = C_LOC(od) + + WRITE(*,'(A)') "/ {" + CALL H5Literate_f(file, H5_INDEX_NAME_F, H5_ITER_NATIVE_F, idx, funptr, ptr, ret_value, status) + WRITE(*,'(A)') "}" + + ! + ! Close and release resources. + ! 
+ CALL H5Fclose_f(file, status) + +END PROGRAM main + + + diff --git a/FORTRAN/H5G/h5ex_g_traverse.h5 b/FORTRAN/H5G/h5ex_g_traverse.h5 new file mode 100644 index 00000000..3d5d301e Binary files /dev/null and b/FORTRAN/H5G/h5ex_g_traverse.h5 differ diff --git a/FORTRAN/H5G/h5ex_g_visit.F90 b/FORTRAN/H5G/h5ex_g_visit.F90 new file mode 100644 index 00000000..97199478 --- /dev/null +++ b/FORTRAN/H5G/h5ex_g_visit.F90 @@ -0,0 +1,158 @@ +!************************************************************ +! +! This example shows how to recursively traverse a file +! using H5Ovisit. The program prints all of +! the objects in the file specified in FILE. The default +! file used by this example implements the structure described +! in the User's Guide, chapter 4, figure 26. +! +!************************************************************ + +MODULE g_visit + + USE HDF5 + USE ISO_C_BINDING + IMPLICIT NONE + +CONTAINS + +!************************************************************ +! +! Operator function for H5Ovisit. This function prints the +! name and type of the object passed to it. +! +!************************************************************ + + INTEGER FUNCTION op_func(loc_id, name, info, cptr) bind(C) + + USE HDF5 + USE ISO_C_BINDING + IMPLICIT NONE + + INTEGER(HID_T), VALUE :: loc_id + CHARACTER(LEN=1), DIMENSION(1:50) :: name ! We must have LEN=1 for bind(C) strings + ! in order to be standard compliant + TYPE(H5O_info_t) :: info + CHARACTER(LEN=50) :: name_string + TYPE(C_PTR) :: cptr + INTEGER :: i + + name_string(:) = " " + DO i = 1, 50 + IF(name(i)(1:1).EQ.C_NULL_CHAR) EXIT ! Read up to the C NULL termination + name_string(i:i) = name(i)(1:1) + ENDDO + + WRITE(*,"('/')",ADVANCE="NO") ! Print root group in object path + ! + ! Check if the current object is the root group, and if not print + ! the full path name and type. + ! + IF(name(1)(1:1) .EQ. '.')THEN ! Root group, do not print '.' 
+ WRITE(*,"(' (Group)')") + ELSE + IF(info%type.EQ.H5O_TYPE_GROUP_F)THEN + WRITE(*,'(A," (Group)")') TRIM(name_string) + ELSE IF(info%type.EQ.H5O_TYPE_DATASET_F)THEN + WRITE(*,'(A," (Dataset)")') TRIM(name_string) + ELSE IF(info%type.EQ.H5O_TYPE_NAMED_DATATYPE_F)THEN + WRITE(*,'(A," (Datatype)")') TRIM(name_string) + ELSE + WRITE(*,'(A," (Unknown)")') TRIM(name_string) + ENDIF + ENDIF + + op_func = 0 ! return successful + + END FUNCTION op_func + + +!************************************************************ +! +! Operator function for H5Lvisit_f. This function simply +! retrieves the info for the object the current link points +! to, and calls the operator function for H5Ovisit_f. +! +! ************************************************************/ + INTEGER FUNCTION op_func_L(loc_id, name, info, cptr) bind(C) + + USE HDF5 + USE ISO_C_BINDING + IMPLICIT NONE + + INTEGER(HID_T), VALUE :: loc_id + CHARACTER(LEN=1), DIMENSION(1:50) :: name ! We must have LEN=1 for bind(C) strings + ! in order to be standard compliant + TYPE(H5L_info_t) :: info + TYPE(C_PTR) :: cptr + + CHARACTER(LEN=50) :: name_string + INTEGER :: i + INTEGER :: status; + TYPE(H5O_info_t) :: infobuf + + TYPE(C_PTR) :: ptr + + name_string(:) = " " + DO i = 1, 50 + IF(name(i)(1:1).EQ.C_NULL_CHAR) EXIT ! Read up to the C NULL termination + name_string(i:i) = name(i)(1:1) + ENDDO + + ! + ! Get type of the object and display its name and type. + ! The name of the object is passed to this function by + ! the Library. + ! + CALL H5Oget_info_by_name_f(loc_id, name_string, infobuf, status); + + op_func_L = op_func(loc_id, name_string, infobuf, cptr) + + END FUNCTION op_func_L + +END MODULE g_visit + +PROGRAM main + + USE HDF5 + USE ISO_C_BINDING + USE g_visit + + IMPLICIT NONE + + CHARACTER(LEN=15), PARAMETER :: filename = "h5ex_g_visit.h5" + INTEGER(HID_T) :: file ! Handle + INTEGER :: status + TYPE(C_FUNPTR) :: funptr + TYPE(C_PTR) :: ptr + INTEGER :: ret_value + ! + ! Initialize FORTRAN interface. + ! 
+ CALL h5open_f(status) + + CALL H5Fopen_f(filename, H5F_ACC_RDONLY_F, file, status) + ! + ! Begin iteration using H5Ovisit + ! + WRITE(*,'(A)') "Objects in the file:" + + funptr = C_FUNLOC(op_func) + ptr = C_NULL_PTR + CALL H5Ovisit_f(file, H5_INDEX_NAME_F, H5_ITER_NATIVE_F, funptr, ptr, ret_value, status) + + ! + ! Repeat the same process using H5Lvisit + ! + WRITE(*,'(/,A)') "Links in the file:" + + funptr = C_FUNLOC(op_func_L) + ptr = C_NULL_PTR + CALL H5Lvisit_f(file, H5_INDEX_NAME_F, H5_ITER_NATIVE_F, funptr, ptr, ret_value, status) + + ! + ! Close and release resources. + ! + CALL H5Fclose_f(file, status) + +END PROGRAM main diff --git a/FORTRAN/H5G/h5ex_g_visit.h5 b/FORTRAN/H5G/h5ex_g_visit.h5 new file mode 100644 index 00000000..3d5d301e Binary files /dev/null and b/FORTRAN/H5G/h5ex_g_visit.h5 differ diff --git a/FORTRAN/H5G/test.sh.in b/FORTRAN/H5G/test.sh.in index e2082e99..d0906df8 100755 --- a/FORTRAN/H5G/test.sh.in +++ b/FORTRAN/H5G/test.sh.in @@ -6,12 +6,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. 
srcdir=@srcdir@ diff --git a/FORTRAN/H5G/tfiles/18/F03/h5ex_g_iterate_F03.tst b/FORTRAN/H5G/tfiles/18/F03/h5ex_g_iterate_F03.tst deleted file mode 100644 index 10eb2212..00000000 --- a/FORTRAN/H5G/tfiles/18/F03/h5ex_g_iterate_F03.tst +++ /dev/null @@ -1,5 +0,0 @@ -Objects in root group: - Dataset: DS1 - Datatype: DT1 - Group: G1 - Dataset: L1 diff --git a/FORTRAN/H5G/tfiles/18/F03/h5ex_g_visit_F03.tst b/FORTRAN/H5G/tfiles/18/F03/h5ex_g_visit_F03.tst deleted file mode 100644 index 1a1e8257..00000000 --- a/FORTRAN/H5G/tfiles/18/F03/h5ex_g_visit_F03.tst +++ /dev/null @@ -1,8 +0,0 @@ -Objects in the file: -/ (Group) -/group1 (Group) -/group1/dset1 (Dataset) -/group1/group3 (Group) -/group1/group3/group4 (Group) -/group1/group3/group4/group1 (Group) -/group1/group3/group4/group2 (Group) diff --git a/FORTRAN/H5G/tfiles/18/h5ex_g_intermediate.tst b/FORTRAN/H5G/tfiles/18/h5ex_g_intermediate.tst new file mode 100644 index 00000000..b524067c --- /dev/null +++ b/FORTRAN/H5G/tfiles/18/h5ex_g_intermediate.tst @@ -0,0 +1,5 @@ +Objects in the file: +/ (Group) +/G1 (Group) +/G1/G2 (Group) +/G1/G2/G3 (Group) diff --git a/FORTRAN/H5G/tfiles/18/h5ex_g_iterate.tst b/FORTRAN/H5G/tfiles/18/h5ex_g_iterate.tst new file mode 100644 index 00000000..66a4ae92 --- /dev/null +++ b/FORTRAN/H5G/tfiles/18/h5ex_g_iterate.tst @@ -0,0 +1,5 @@ +Objects in root group: + Dataset: DS1 + Datatype: DT1 + Group: G1 + Dataset: L1 diff --git a/FORTRAN/H5G/tfiles/18/h5ex_g_iterate_F03.tst b/FORTRAN/H5G/tfiles/18/h5ex_g_iterate_F03.tst deleted file mode 100644 index 10eb2212..00000000 --- a/FORTRAN/H5G/tfiles/18/h5ex_g_iterate_F03.tst +++ /dev/null @@ -1,5 +0,0 @@ -Objects in root group: - Dataset: DS1 - Datatype: DT1 - Group: G1 - Dataset: L1 diff --git a/FORTRAN/H5G/tfiles/18/F03/h5ex_g_traverse_F03.tst b/FORTRAN/H5G/tfiles/18/h5ex_g_traverse.tst similarity index 100% rename from FORTRAN/H5G/tfiles/18/F03/h5ex_g_traverse_F03.tst rename to FORTRAN/H5G/tfiles/18/h5ex_g_traverse.tst diff --git 
a/FORTRAN/H5G/tfiles/18/h5ex_g_traverse_F03.tst b/FORTRAN/H5G/tfiles/18/h5ex_g_traverse_F03.tst deleted file mode 100644 index 9d44d2f0..00000000 --- a/FORTRAN/H5G/tfiles/18/h5ex_g_traverse_F03.tst +++ /dev/null @@ -1,32 +0,0 @@ -/ { - Group: group1 { - Dataset: dset1 - Group: group3 { - Dataset: dset2 - Group: group4 { - Group: group1 { - Group: group5 { - Warning: Loop detected! - } - } - Group: group2 { - } - } - } - } - Group: group2 { - Dataset: dset2 - Group: group4 { - Group: group1 { - Group: group5 { - Dataset: dset1 - Group: group3 { - Warning: Loop detected! - } - } - } - Group: group2 { - } - } - } -} diff --git a/FORTRAN/H5G/tfiles/18/h5ex_g_visit.tst b/FORTRAN/H5G/tfiles/18/h5ex_g_visit.tst new file mode 100644 index 00000000..126a5888 --- /dev/null +++ b/FORTRAN/H5G/tfiles/18/h5ex_g_visit.tst @@ -0,0 +1,19 @@ +Objects in the file: +/ (Group) +/group1 (Group) +/group1/dset1 (Dataset) +/group1/group3 (Group) +/group1/group3/group4 (Group) +/group1/group3/group4/group1 (Group) +/group1/group3/group4/group2 (Group) + +Links in the file: +/group1 (Group) +/group1/dset1 (Dataset) +/group1/group3 (Group) +/group1/group3/dset2 (Dataset) +/group1/group3/group4 (Group) +/group1/group3/group4/group1 (Group) +/group1/group3/group4/group1/group5 (Group) +/group1/group3/group4/group2 (Group) +/group2 (Group) diff --git a/FORTRAN/H5G/tfiles/18/h5ex_g_visit_F03.tst b/FORTRAN/H5G/tfiles/18/h5ex_g_visit_F03.tst deleted file mode 100644 index 1a1e8257..00000000 --- a/FORTRAN/H5G/tfiles/18/h5ex_g_visit_F03.tst +++ /dev/null @@ -1,8 +0,0 @@ -Objects in the file: -/ (Group) -/group1 (Group) -/group1/dset1 (Dataset) -/group1/group3 (Group) -/group1/group3/group4 (Group) -/group1/group3/group4/group1 (Group) -/group1/group3/group4/group2 (Group) diff --git a/FORTRAN/H5PAR/CMakeLists.txt b/FORTRAN/H5PAR/CMakeLists.txt index 98ef271a..a9eee753 100644 --- a/FORTRAN/H5PAR/CMakeLists.txt +++ b/FORTRAN/H5PAR/CMakeLists.txt @@ -9,10 +9,8 @@ project (HDF5Examples_FORTRAN_H5PAR 
Fortran) #----------------------------------------------------------------------------- # Setup include Directories #----------------------------------------------------------------------------- -INCLUDE_DIRECTORIES ( - ${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT} - ${PROJECT_BINARY_DIR} - ${CMAKE_LIBRARY_OUTPUT_DIRECTORY} +set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES + "${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" ) #----------------------------------------------------------------------------- @@ -48,7 +46,7 @@ if (H5EX_BUILD_TESTING) if (last_test) set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname}-clearall PROPERTIES DEPENDS ${last_test}) endif () - add_test (NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND "${CMAKE_COMMAND}" + add_test (NAME MPI_TEST_${EXAMPLE_VARNAME}_f90_${testname} COMMAND "${CMAKE_COMMAND}" -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE};${MPIEXEC_NUMPROC_FLAG};${mumprocs};${MPIEXEC_PREFLAGS};$;${MPIEXEC_POSTFLAGS}" -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" @@ -59,18 +57,17 @@ if (H5EX_BUILD_TESTING) -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/grepTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) - set (last_test "${EXAMPLE_VARNAME}_f90_${testname}") + set_tests_properties (MPI_TEST_${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + set (last_test "MPI_TEST_${EXAMPLE_VARNAME}_f90_${testname}") endmacro () # Ensure that 24 is a multiple of the number of processes. 
# The number 24 corresponds to SPACE1_DIM1 and SPACE1_DIM2 defined in ph5example.c math(EXPR NUMPROCS "24 / ((24 + ${MPIEXEC_MAX_NUMPROCS} - 1) / ${MPIEXEC_MAX_NUMPROCS})") - foreach (example_name ${examples}) - if (${example_name} STREQUAL "ph5_hyperslab_by_col") + if (${example_name} STREQUAL "ph5_f90_hyperslab_by_row") ADD_GREP_TEST (${example_name} 2) - elseif (${example_name} STREQUAL "ph5_hyperslab_by_chunk" OR ${example_name} STREQUAL "ph5_hyperslab_by_pattern") + elseif (${example_name} STREQUAL "ph5_f90_hyperslab_by_chunk" OR ${example_name} STREQUAL "ph5_f90_hyperslab_by_pattern") ADD_GREP_TEST (${example_name} 4) else () ADD_GREP_TEST (${example_name} ${NUMPROCS}) diff --git a/FORTRAN/H5PAR/Fortran_sourcefiles.cmake b/FORTRAN/H5PAR/Fortran_sourcefiles.cmake index 39c8940b..af2bb574 100644 --- a/FORTRAN/H5PAR/Fortran_sourcefiles.cmake +++ b/FORTRAN/H5PAR/Fortran_sourcefiles.cmake @@ -9,3 +9,14 @@ set (examples ph5_f90_hyperslab_by_pattern ph5_f90_hyperslab_by_chunk ) + +if (HDF5_ENABLE_SUBFILING_VFD) + set (examples ${examples} + ph5_f90_subfiling + ) +endif() +if (HDF5_VERSION_STRING VERSION_GREATER_EQUAL "1.14.4") + set (examples ${examples} + ph5_f90_filtered_writes_no_sel + ) +endif() diff --git a/FORTRAN/H5PAR/ph5_f90_dataset.F90 b/FORTRAN/H5PAR/ph5_f90_dataset.F90 index ce126a34..b5c43a88 100644 --- a/FORTRAN/H5PAR/ph5_f90_dataset.F90 +++ b/FORTRAN/H5PAR/ph5_f90_dataset.F90 @@ -1,10 +1,9 @@ PROGRAM DATASET USE HDF5 ! This module contains all necessary modules + USE MPI IMPLICIT NONE - - INCLUDE 'mpif.h' CHARACTER(LEN=10), PARAMETER :: filename = "sds.h5" ! File name CHARACTER(LEN=8), PARAMETER :: dsetname = "IntArray" ! Dataset name @@ -26,9 +25,10 @@ PROGRAM DATASET ! ! MPI definitions and calls. ! - INTEGER :: mpierror ! MPI error flag - INTEGER :: comm, info - INTEGER :: mpi_size, mpi_rank + INTEGER(KIND=MPI_INTEGER_KIND) :: mpierror ! 
MPI error flag + INTEGER(KIND=MPI_INTEGER_KIND) :: comm, info + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_size, mpi_rank + comm = MPI_COMM_WORLD info = MPI_INFO_NULL CALL MPI_INIT(mpierror) @@ -101,6 +101,7 @@ PROGRAM DATASET ! Close FORTRAN interface ! CALL h5close_f(error) + IF(mpi_rank.EQ.0) WRITE(*,'(A)') "PHDF5 example finished with no errors" CALL MPI_FINALIZE(mpierror) diff --git a/FORTRAN/H5PAR/ph5_f90_file_create.F90 b/FORTRAN/H5PAR/ph5_f90_file_create.F90 index f330d4eb..a6965a1f 100644 --- a/FORTRAN/H5PAR/ph5_f90_file_create.F90 +++ b/FORTRAN/H5PAR/ph5_f90_file_create.F90 @@ -5,10 +5,10 @@ PROGRAM FILE_CREATE USE HDF5 ! This module contains all necessary modules + USE MPI IMPLICIT NONE - INCLUDE 'mpif.h' CHARACTER(LEN=10), PARAMETER :: filename = "sds.h5" ! File name INTEGER(HID_T) :: file_id ! File identifier @@ -18,9 +18,9 @@ PROGRAM FILE_CREATE ! ! MPI definitions and calls. ! - INTEGER :: mpierror ! MPI error flag - INTEGER :: comm, info - INTEGER :: mpi_size, mpi_rank + INTEGER(KIND=MPI_INTEGER_KIND) :: mpierror ! MPI error flag + INTEGER(KIND=MPI_INTEGER_KIND) :: comm, info + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_size, mpi_rank comm = MPI_COMM_WORLD info = MPI_INFO_NULL @@ -53,7 +53,7 @@ PROGRAM FILE_CREATE ! Close FORTRAN interface ! CALL h5close_f(error) - + IF(mpi_rank.EQ.0) WRITE(*,'(A)') "PHDF5 example finished with no errors" CALL MPI_FINALIZE(mpierror) END PROGRAM FILE_CREATE diff --git a/FORTRAN/H5PAR/ph5_f90_filtered_writes_no_sel.F90 b/FORTRAN/H5PAR/ph5_f90_filtered_writes_no_sel.F90 new file mode 100644 index 00000000..4a34d1f2 --- /dev/null +++ b/FORTRAN/H5PAR/ph5_f90_filtered_writes_no_sel.F90 @@ -0,0 +1,356 @@ +! +! Example of using the parallel HDF5 library to collectively write to +! datasets with filters applied to them when one or more MPI ranks do not +! have data to contribute to the dataset. +! +! If the HDF5_NOCLEANUP environment variable is set, the file that +! this example creates will not be removed as the example finishes. +! +! 
The reason a parallel file prefix may be required is that, in general, +! the current working directory in which compiling is done, is not suitable +! for parallel I/O and there is no standard pathname for parallel file +! systems. In some cases, the parallel file name may even need some +! parallel file type prefix such as: "pfs:/GF/...". Therefore, this +! example parses the HDF5_PARAPREFIX environment variable for a prefix, +! if one is needed. + +MODULE filter + USE HDF5 + USE MPI + + IMPLICIT NONE + + CHARACTER(LEN=29), PARAMETER :: EXAMPLE_FILE = "ph5_filtered_writes_no_sel.h5" + INTEGER , PARAMETER :: EXAMPLE_DSET_DIMS = 2 + CHARACTER(LEN=4) , PARAMETER :: EXAMPLE_DSET_NAME = "DSET" + INTEGER , PARAMETER :: EXAMPLE_DSET_CHUNK_DIM_SIZE = 10 + INTEGER , PARAMETER :: PATH_MAX = 512 + + ! Global variables + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_rank, mpi_size + +CONTAINS + ! + ! Routine to set an HDF5 filter on the given DCPL + ! + SUBROUTINE set_filter(dcpl_id) + + IMPLICIT NONE + INTEGER(HID_T) :: dcpl_id + LOGICAL :: filter_avail + INTEGER :: status + + ! + ! Check if 'deflate' filter is available + ! + CALL H5Zfilter_avail_f(H5Z_FILTER_DEFLATE_F, filter_avail, status) + IF(status .LT. 0)THEN + RETURN + ELSE IF(filter_avail)THEN + ! + ! Set 'deflate' filter with reasonable + ! compression level on DCPL + + CALL H5Pset_deflate_f(dcpl_id, 6, status) + ELSE + ! + ! Set Fletcher32 checksum filter on DCPL + ! since it is always available in HDF5 + CALL H5Pset_fletcher32_f(dcpl_id, status) + ENDIF + END SUBROUTINE set_filter + ! + ! Routine to fill a data buffer with data. Assumes + ! dimension rank is 2 and data is stored contiguously. + + + SUBROUTINE fill_databuf(start, count, stride, wdata) + + IMPLICIT NONE + INTEGER(HSIZE_T), DIMENSION(*) :: start, count, stride + INTEGER, DIMENSION(*) :: wdata + INTEGER(HSIZE_T) :: i, j, icnt + + ! 
Use MPI rank value for data + icnt = 1 + DO i = 1, COUNT(1) + DO j = 1, COUNT(2) + wdata(icnt) = mpi_rank + icnt = icnt + 1 + ENDDO + ENDDO + + END SUBROUTINE fill_databuf + ! + ! Cleanup created files + ! + SUBROUTINE cleanup(filename) + + IMPLICIT NONE + CHARACTER(*) :: filename + + LOGICAL :: do_cleanup + INTEGER :: status + INTEGER(KIND=MPI_INTEGER_KIND) :: mpierror + + CALL get_environment_variable("HDF5_NOCLEANUP", STATUS=status) + IF(status.EQ.0)THEN + CALL MPI_File_delete(filename, MPI_INFO_NULL, mpierror) + ENDIF + + END SUBROUTINE cleanup + ! + ! Routine to write to a dataset in a fashion + ! where no chunks in the dataset are written + ! to by more than 1 MPI rank. This will + ! generally give the best performance as the + ! MPI ranks will need the least amount of + ! inter-process communication. + + SUBROUTINE write_dataset_some_no_sel(file_id, dxpl_id) + + IMPLICIT NONE + INTEGER(HID_T) :: file_id, dxpl_id + + INTEGER, DIMENSION(1:EXAMPLE_DSET_CHUNK_DIM_SIZE, 4*EXAMPLE_DSET_CHUNK_DIM_SIZE), TARGET :: wdata + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: dataset_dims + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: chunk_dims + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: start + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: stride + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: count + LOGICAL :: no_selection = .FALSE. + INTEGER(hid_t) :: dset_id + INTEGER(hid_t) :: dcpl_id + INTEGER(hid_t) :: file_dataspace + INTEGER(hid_t) :: sel_type + TYPE(C_PTR) :: f_ptr + INTEGER :: status + + ! + ! ------------------------------------ + ! Setup Dataset Creation Property List + ! ------------------------------------ + + CALL H5Pcreate_f(H5P_DATASET_CREATE_F, dcpl_id, status) + + ! + ! REQUIRED: Dataset chunking must be enabled to + ! apply a data filter to the dataset. + ! Chunks in the dataset are of size + ! EXAMPLE_DSET_CHUNK_DIM_SIZE x EXAMPLE_DSET_CHUNK_DIM_SIZE. 
+ + chunk_dims(1) = EXAMPLE_DSET_CHUNK_DIM_SIZE + chunk_dims(2) = EXAMPLE_DSET_CHUNK_DIM_SIZE + CALL H5Pset_chunk_f(dcpl_id, EXAMPLE_DSET_DIMS, chunk_dims, status) + + ! Set filter to be applied to created datasets + CALL set_filter(dcpl_id) + + ! + ! ------------------------------------ + ! Define the dimensions of the dataset + ! and create it + ! ------------------------------------ + + ! Create a dataset composed of 4 chunks + ! per MPI rank. The first dataset dimension + ! scales according to the number of MPI ranks. + ! The second dataset dimension stays fixed + ! according to the chunk size. + + dataset_dims(1) = EXAMPLE_DSET_CHUNK_DIM_SIZE * mpi_size + dataset_dims(2) = 4 * EXAMPLE_DSET_CHUNK_DIM_SIZE + + CALL H5Screate_simple_f(EXAMPLE_DSET_DIMS, dataset_dims, file_dataspace, status) + + ! Create the dataset + CALL H5Dcreate_f(file_id, EXAMPLE_DSET_NAME, H5T_NATIVE_INTEGER, file_dataspace, dset_id, status, dcpl_id=dcpl_id) + + ! + ! ------------------------------------ + ! Setup selection in the dataset for + ! each MPI rank + ! ------------------------------------ + + ! + ! Odd rank value MPI ranks do not + ! contribute any data to the dataset. + + IF(MOD(mpi_rank, 2) .NE. 0) no_selection = .TRUE. + + IF(no_selection)THEN + ! + ! MPI ranks not contributing data to + ! the dataset should call H5Sselect_none + ! on the file dataspace that will be + ! passed to H5Dwrite. + + CALL H5Sselect_none_f(file_dataspace, status) + sel_type = H5S_BLOCK_F + ELSE + ! + ! Even MPI ranks contribute data to + ! the dataset. Each MPI rank's selection + ! covers a single chunk in the first dataset + ! dimension. Each MPI rank's selection + ! covers 4 chunks in the second dataset + ! dimension. This leads to each contributing + ! MPI rank writing to 4 chunks of the dataset. 
+ + start(1) = mpi_rank * EXAMPLE_DSET_CHUNK_DIM_SIZE + start(2) = 0 + stride(1) = 1 + stride(2) = 1 + count(1) = EXAMPLE_DSET_CHUNK_DIM_SIZE + count(2) = 4 * EXAMPLE_DSET_CHUNK_DIM_SIZE + + CALL H5Sselect_hyperslab_f(file_dataspace, H5S_SELECT_SET_F, start, count, status, stride=stride) + + sel_type = H5S_ALL_F + ! + ! -------------------------------------- + ! Fill data buffer with MPI rank's rank + ! value to make it easy to see which + ! part of the dataset each rank wrote to + ! -------------------------------------- + + CALL fill_databuf(start, count, stride, wdata) + ENDIF + + ! + ! --------------------------------- + ! Write to the dataset collectively + ! --------------------------------- + f_ptr = C_LOC(wdata) + CALL H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER, f_ptr, status, & + mem_space_id=sel_type, file_space_id=file_dataspace, xfer_prp=dxpl_id) + + ! + ! -------------- + ! Close HDF5 IDs + ! -------------- + + CALL H5Sclose_f(file_dataspace,status) + CALL H5Pclose_f(dcpl_id,status) + CALL H5Dclose_f(dset_id,status) + + END SUBROUTINE write_dataset_some_no_sel + END MODULE filter + + PROGRAM main + + USE filter + IMPLICIT NONE + + INTEGER(KIND=MPI_INTEGER_KIND) :: comm = MPI_COMM_WORLD + INTEGER(KIND=MPI_INTEGER_KIND) :: info = MPI_INFO_NULL + INTEGER(hid_t) :: file_id + INTEGER(hid_t) :: fapl_id + INTEGER(hid_t) :: dxpl_id + CHARACTER(LEN=PATH_MAX) :: par_prefix + CHARACTER(LEN=PATH_MAX) :: filename + INTEGER :: status + INTEGER(KIND=MPI_INTEGER_KIND) :: mpierror + + CALL MPI_Init(mpierror) + CALL MPI_Comm_size(comm, mpi_size, mpierror) + CALL MPI_Comm_rank(comm, mpi_rank, mpierror) + + ! + ! Initialize HDF5 library and Fortran interfaces. + ! + CALL h5open_f(status) + ! + ! ---------------------------------- + ! Start parallel access to HDF5 file + ! ---------------------------------- + + ! 
Setup File Access Property List with parallel I/O access + CALL H5Pcreate_f(H5P_FILE_ACCESS_F, fapl_id, status) + CALL H5Pset_fapl_mpio_f(fapl_id, comm, info, status) + + ! + ! OPTIONAL: Set collective metadata reads on FAPL to allow + ! parallel writes to filtered datasets to perform + ! better at scale. While not strictly necessary, + ! this is generally recommended. + + CALL H5Pset_all_coll_metadata_ops_f(fapl_id, .TRUE., status) + + ! + ! OPTIONAL: Set the latest file format version for HDF5 in + ! order to gain access to different dataset chunk + ! index types and better data encoding methods. + ! While not strictly necessary, this is generally + ! recommended. + + CALL H5Pset_libver_bounds_f(fapl_id, H5F_LIBVER_LATEST_F, H5F_LIBVER_LATEST_F, status) + + ! Parse any parallel prefix and create filename + par_prefix(:) = "" + CALL get_environment_variable("HDF5_PARAPREFIX", VALUE=par_prefix, STATUS=status) + filename = TRIM(par_prefix)//EXAMPLE_FILE + + ! Create HDF5 file + CALL H5Fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, status, access_prp = fapl_id) + + ! + ! -------------------------------------- + ! Setup Dataset Transfer Property List + ! with collective I/O + ! -------------------------------------- + + + CALL H5Pcreate_f(H5P_DATASET_XFER_F, dxpl_id, status) + + ! + ! REQUIRED: Setup collective I/O for the dataset + ! write operations. Parallel writes to + ! filtered datasets MUST be collective, + ! even if some ranks have no data to + ! contribute to the write operation. + + CALL H5Pset_dxpl_mpio_f(dxpl_id, H5FD_MPIO_COLLECTIVE_F, status) + + ! + ! -------------------------------- + ! Create and write to the dataset + ! -------------------------------- + + ! + ! Write to a dataset in a fashion where no + ! chunks in the dataset are written to by + ! more than 1 MPI rank and some MPI ranks + ! have nothing to contribute to the dataset. + ! In this case, the MPI ranks that have no + ! data to contribute must still participate + ! 
in the collective H5Dwrite call, but should + ! call H5Sselect_none on the file dataspace + ! passed to the H5Dwrite call. + + CALL write_dataset_some_no_sel(file_id, dxpl_id) + + ! + ! ------------------ + ! Close all HDF5 IDs + ! ------------------ + + CALL H5Pclose_f(dxpl_id, status) + CALL H5Pclose_f(fapl_id, status) + CALL H5Fclose_f(file_id, status) + ! + ! Close FORTRAN interfaces and HDF5 library. + ! + CALL h5close_f(status) + + IF(mpi_rank .EQ. 0) WRITE(*,"(A)") "PHDF5 example finished with no errors" + + ! + ! ------------------------------------ + ! Cleanup created HDF5 file and finish + ! ------------------------------------ + CALL cleanup(filename) + + CALL MPI_Finalize(mpierror) + +END PROGRAM main diff --git a/FORTRAN/H5PAR/ph5_f90_hyperslab_by_chunk.F90 b/FORTRAN/H5PAR/ph5_f90_hyperslab_by_chunk.F90 index 3a707afe..c4f67fee 100644 --- a/FORTRAN/H5PAR/ph5_f90_hyperslab_by_chunk.F90 +++ b/FORTRAN/H5PAR/ph5_f90_hyperslab_by_chunk.F90 @@ -4,11 +4,10 @@ PROGRAM DATASET_BY_CHUNK USE HDF5 ! This module contains all necessary modules -! USE MPI + USE MPI IMPLICIT NONE - include 'mpif.h' CHARACTER(LEN=11), PARAMETER :: filename = "sds_chnk.h5" ! File name CHARACTER(LEN=8), PARAMETER :: dsetname = "IntArray" ! Dataset name @@ -20,8 +19,7 @@ PROGRAM DATASET_BY_CHUNK INTEGER(HSIZE_T), DIMENSION(2) :: dimsf = (/4,8/) ! Dataset dimensions ! in the file. -! INTEGER, DIMENSION(7) :: dimsfi = (/4,8,0,0,0,0,0/) - INTEGER(HSIZE_T), DIMENSION (2) :: dimsfi = (/4,8/) + INTEGER(HSIZE_T), DIMENSION(2) :: dimsfi = (/4,8/) INTEGER(HSIZE_T), DIMENSION(2) :: chunk_dims = (/2,4/) ! Chunks dimensions INTEGER(HSIZE_T), DIMENSION(2) :: count @@ -36,9 +34,9 @@ PROGRAM DATASET_BY_CHUNK ! ! MPI definitions and calls. ! - INTEGER :: mpierror ! MPI error flag - INTEGER :: comm, info - INTEGER :: mpi_size, mpi_rank + INTEGER(KIND=MPI_INTEGER_KIND) :: mpierror ! 
MPI error flag + INTEGER(KIND=MPI_INTEGER_KIND) :: comm, info + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_size, mpi_rank comm = MPI_COMM_WORLD info = MPI_INFO_NULL @@ -163,7 +161,7 @@ PROGRAM DATASET_BY_CHUNK ! Close FORTRAN interfaces and HDF5 library. ! CALL h5close_f(error) - + IF(mpi_rank.EQ.0) WRITE(*,'(A)') "PHDF5 example finished with no errors" 100 continue CALL MPI_FINALIZE(mpierror) diff --git a/FORTRAN/H5PAR/ph5_f90_hyperslab_by_col.F90 b/FORTRAN/H5PAR/ph5_f90_hyperslab_by_col.F90 index b7b46e82..b7e0b25d 100644 --- a/FORTRAN/H5PAR/ph5_f90_hyperslab_by_col.F90 +++ b/FORTRAN/H5PAR/ph5_f90_hyperslab_by_col.F90 @@ -1,14 +1,13 @@ ! -! Number of processes is assumed to be 1 or multiples of 2 (1,2,4,6,8) +! Number of processes is assumed to be 1 or powers of 2 (2,4,8) ! - PROGRAM DATASET_BY_COL USE HDF5 ! This module contains all necessary modules + USE mpi IMPLICIT NONE - include 'mpif.h' CHARACTER(LEN=10), PARAMETER :: filename = "sds_col.h5" ! File name CHARACTER(LEN=8), PARAMETER :: dsetname = "IntArray" ! Dataset name @@ -31,9 +30,9 @@ PROGRAM DATASET_BY_COL ! ! MPI definitions and calls. ! - INTEGER :: mpierror ! MPI error flag - INTEGER :: comm, info - INTEGER :: mpi_size, mpi_rank + INTEGER(KIND=MPI_INTEGER_KIND) :: mpierror ! MPI error flag + INTEGER(KIND=MPI_INTEGER_KIND) :: comm, info + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_size, mpi_rank comm = MPI_COMM_WORLD info = MPI_INFO_NULL CALL MPI_INIT(mpierror) @@ -127,7 +126,7 @@ PROGRAM DATASET_BY_COL ! Close FORTRAN predefined datatypes. ! CALL h5close_f(error) - + IF(mpi_rank.EQ.0) WRITE(*,'(A)') "PHDF5 example finished with no errors" CALL MPI_FINALIZE(mpierror) END PROGRAM DATASET_BY_COL diff --git a/FORTRAN/H5PAR/ph5_f90_hyperslab_by_pattern.F90 b/FORTRAN/H5PAR/ph5_f90_hyperslab_by_pattern.F90 index 0f2077f1..f94c8192 100644 --- a/FORTRAN/H5PAR/ph5_f90_hyperslab_by_pattern.F90 +++ b/FORTRAN/H5PAR/ph5_f90_hyperslab_by_pattern.F90 @@ -5,10 +5,10 @@ PROGRAM DATASET_BY_PATTERN USE HDF5 ! 
This module contains all necessary modules + USE MPI IMPLICIT NONE - include 'mpif.h' CHARACTER(LEN=10), PARAMETER :: filename = "sds_pat.h5" ! File name CHARACTER(LEN=8), PARAMETER :: dsetname = "IntArray" ! Dataset name @@ -36,9 +36,9 @@ PROGRAM DATASET_BY_PATTERN ! ! MPI definitions and calls. ! - INTEGER :: mpierror ! MPI error flag - INTEGER :: comm, info - INTEGER :: mpi_size, mpi_rank + INTEGER(KIND=MPI_INTEGER_KIND) :: mpierror ! MPI error flag + INTEGER(KIND=MPI_INTEGER_KIND) :: comm, info + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_size, mpi_rank comm = MPI_COMM_WORLD info = MPI_INFO_NULL @@ -158,7 +158,7 @@ PROGRAM DATASET_BY_PATTERN ! Close FORTRAN interfaces and HDF5 library. ! CALL h5close_f(error) - + IF(mpi_rank.EQ.0) WRITE(*,'(A)') "PHDF5 example finished with no errors" 100 continue CALL MPI_FINALIZE(mpierror) diff --git a/FORTRAN/H5PAR/ph5_f90_hyperslab_by_row.F90 b/FORTRAN/H5PAR/ph5_f90_hyperslab_by_row.F90 index 398be23a..f607b482 100644 --- a/FORTRAN/H5PAR/ph5_f90_hyperslab_by_row.F90 +++ b/FORTRAN/H5PAR/ph5_f90_hyperslab_by_row.F90 @@ -4,10 +4,10 @@ PROGRAM DATASET_BY_ROW USE HDF5 ! This module contains all necessary modules + USE MPI IMPLICIT NONE - include 'mpif.h' CHARACTER(LEN=10), PARAMETER :: filename = "sds_row.h5" ! File name CHARACTER(LEN=8), PARAMETER :: dsetname = "IntArray" ! Dataset name @@ -35,9 +35,9 @@ PROGRAM DATASET_BY_ROW ! ! MPI definitions and calls. ! - INTEGER :: mpierror ! MPI error flag - INTEGER :: comm, info - INTEGER :: mpi_size, mpi_rank + INTEGER(KIND=MPI_INTEGER_KIND) :: mpierror ! MPI error flag + INTEGER(KIND=MPI_INTEGER_KIND) :: comm, info + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_size, mpi_rank comm = MPI_COMM_WORLD info = MPI_INFO_NULL @@ -147,8 +147,9 @@ PROGRAM DATASET_BY_ROW ! Close FORTRAN interfaces and HDF5 library. ! 
CALL h5close_f(error) - + IF(mpi_rank.EQ.0) WRITE(*,'(A)') "PHDF5 example finished with no errors" 100 continue + CALL MPI_FINALIZE(mpierror) END PROGRAM DATASET_BY_ROW diff --git a/FORTRAN/H5PAR/ph5_f90_subfiling.F90 b/FORTRAN/H5PAR/ph5_f90_subfiling.F90 new file mode 100644 index 00000000..866200f3 --- /dev/null +++ b/FORTRAN/H5PAR/ph5_f90_subfiling.F90 @@ -0,0 +1,523 @@ +! +! Example of using HDF5's Subfiling VFD to write to an +! HDF5 file that is striped across multiple subfiles +! +! If the HDF5_NOCLEANUP environment variable is set, the +! files that this example creates will not be removed as +! the example finishes. +! +! In general, the current working directory in which compiling +! is done, is not suitable for parallel I/O and there is no +! standard pathname for parallel file systems. In some cases, +! the parallel file name may even need some parallel file type +! prefix such as: "pfs:/GF/...". Therefore, this example parses +! the HDF5_PARAPREFIX environment variable for a prefix, if one +! is needed. +! + +MODULE subf + + USE HDF5 + USE MPI + + CHARACTER(LEN=31), PARAMETER :: EXAMPLE_FILE = "h5_subfiling_default_example.h5" + CHARACTER(LEN=30), PARAMETER :: EXAMPLE_FILE2 = "h5_subfiling_custom_example.h5" + CHARACTER(LEN=33), PARAMETER :: EXAMPLE_FILE3 = "h5_subfiling_precreate_example.h5" + + CHARACTER(LEN=4), PARAMETER :: EXAMPLE_DSET_NAME = "DSET" + INTEGER , PARAMETER :: EXAMPLE_DSET_DIMS = 2 + + ! Have each MPI rank write 16MiB of data + INTEGER, PARAMETER :: EXAMPLE_DSET_NY = 4194304 + +CONTAINS + + ! Cleanup created files + + SUBROUTINE cleanup(filename, fapl_id) + + IMPLICIT NONE + INTEGER(HID_T) :: fapl_id + CHARACTER(*) :: filename + + LOGICAL :: do_cleanup + INTEGER :: status + + CALL get_environment_variable("HDF5_NOCLEANUP", STATUS=status) + !IF(status.EQ.0) CALL H5Fdelete_f(filename, fapl_id, status) + IF(status.EQ.0)THEN + OPEN(UNIT=15, IOSTAT=status, FILE=filename, STATUS='old') + IF(status .EQ. 
0) CLOSE(15, STATUS='DELETE') + ENDIF + + END SUBROUTINE cleanup + + ! An example of using the HDF5 Subfiling VFD with + ! its default settings of 1 subfile per node, with + ! a stripe size of 32MiB + + SUBROUTINE subfiling_write_default(fapl_id, mpi_size, mpi_rank) + + IMPLICIT NONE + INTEGER(HID_T) :: fapl_id + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_size + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_rank + + INTEGER, DIMENSION(:), ALLOCATABLE, TARGET :: wdata + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: dset_dims + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: start + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: count + INTEGER(hid_t) :: file_id + INTEGER(hid_t) :: subfiling_fapl + INTEGER(hid_t) :: dset_id + INTEGER(hid_t) :: filespace + CHARACTER(LEN=512) :: filename, par_prefix + INTEGER :: status + INTEGER(SIZE_T) :: i + TYPE(C_PTR) :: f_ptr + + ! + ! Make a copy of the FAPL so we don't disturb + ! it for the other examples + ! + CALL H5Pcopy_f(fapl_id, subfiling_fapl, status) + + ! + ! Set Subfiling VFD on FAPL using default settings + ! (use IOC VFD, 1 IOC per node, 32MiB stripe size) + ! + ! Note that all of Subfiling's configuration settings + ! can be adjusted with environment variables as well + ! in this case. + ! + + CALL H5Pset_fapl_subfiling_f(subfiling_fapl, status) + + ! + ! OPTIONAL: Set alignment of objects in HDF5 file to + ! be equal to the Subfiling stripe size. + ! Choosing a Subfiling stripe size and HDF5 + ! object alignment value that are some + ! multiple of the disk block size can + ! generally help performance by ensuring + ! that I/O is well-aligned and doesn't + ! excessively cross stripe boundaries. + ! + ! Note that this option can substantially + ! increase the size of the resulting HDF5 + ! files, so it is a good idea to keep an eye + ! on this. + ! + + CALL H5Pset_alignment_f(subfiling_fapl, 0_HSIZE_T, 33554432_HSIZE_T, status) ! ALIGN to default 32MiB stripe size + + ! 
Parse any parallel prefix and create filename + par_prefix(:) = "" + CALL get_environment_variable("HDF5_PARAPREFIX", VALUE=par_prefix, STATUS=status) + filename = TRIM(par_prefix)//EXAMPLE_FILE + + ! Create a new file collectively + CALL H5Fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, status, access_prp = subfiling_fapl) + + ! Create the dataspace for the dataset. The second + ! dimension varies with the number of MPI ranks + ! while the first dimension is fixed. + + dset_dims(1) = EXAMPLE_DSET_NY + dset_dims(2) = mpi_size + CALL H5Screate_simple_f(EXAMPLE_DSET_DIMS, dset_dims, filespace, status) + + ! Create the dataset with default properties + + CALL H5Dcreate_f(file_id, EXAMPLE_DSET_NAME, H5T_NATIVE_INTEGER, filespace, dset_id, status) + ! Each MPI rank writes from a contiguous memory + ! region to the hyperslab in the file + + start(1) = 0 + start(2) = mpi_rank + count(1) = dset_dims(1) + count(2) = 1 + CALL H5Sselect_hyperslab_f(filespace, H5S_SELECT_SET_F, start, count, status) + + ! Initialize data buffer + ALLOCATE(wdata(COUNT(1)*COUNT(2))) + DO i = 1, COUNT(1)*COUNT(2) + wdata(i) = mpi_rank + ENDDO + + ! Write to dataset + f_ptr = C_LOC(wdata) + CALL H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER, f_ptr, status, mem_space_id=H5S_BLOCK_F, file_space_id=filespace) + + ! Close/release resources. + DEALLOCATE(wdata) + CALL H5Dclose_f(dset_id, status) + CALL H5Sclose_f(filespace, status) + + CALL H5Fclose_f(file_id, status) + + CALL cleanup(EXAMPLE_FILE, subfiling_fapl) + + CALL H5Pclose_f(subfiling_fapl, status) + + END SUBROUTINE subfiling_write_default + + ! + ! An example of using the HDF5 Subfiling VFD with + ! custom settings + ! 
+ + SUBROUTINE subfiling_write_custom(fapl_id, mpi_size, mpi_rank) + + IMPLICIT NONE + INTEGER(HID_T) :: fapl_id + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_size + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_rank + + INTEGER, DIMENSION(:), ALLOCATABLE, TARGET :: wdata + + TYPE(H5FD_subfiling_config_t) :: subf_config + TYPE(H5FD_ioc_config_t) :: ioc_config + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: dset_dims + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: start + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: count + INTEGER(hid_t) :: file_id + INTEGER(hid_t) :: subfiling_fapl + INTEGER(hid_t) :: dset_id + INTEGER(hid_t) :: filespace + CHARACTER(LEN=512) :: filename, par_prefix + INTEGER :: status + INTEGER(SIZE_T) :: i + TYPE(C_PTR) :: f_ptr + + ! Make a copy of the FAPL so we don't disturb + ! it for the other examples + + CALL H5Pcopy_f(fapl_id, subfiling_fapl, status) + + ! Get a default Subfiling and IOC configuration + CALL h5pget_fapl_subfiling_f(subfiling_fapl, subf_config, status) + CALL h5pget_fapl_ioc_f(subfiling_fapl,ioc_config, status) + + ! Set Subfiling configuration to use a 1MiB + ! stripe size and the SELECT_IOC_EVERY_NTH_RANK + ! selection method. By default, without a setting + ! in the H5FD_SUBFILING_IOC_SELECTION_CRITERIA + ! environment variable, this will use every MPI + ! rank as an I/O concentrator. + + subf_config%shared_cfg%stripe_size = 1048576 + subf_config%shared_cfg%ioc_selection = SELECT_IOC_EVERY_NTH_RANK_F + + ! Set IOC configuration to use 2 worker threads + ! per IOC instead of the default setting and + ! update IOC configuration with new subfiling + ! configuration. + + ioc_config%thread_pool_size = 2 + + ! Set our new configuration on the IOC + ! FAPL used for Subfiling + + CALL H5Pset_fapl_ioc_f(subf_config%ioc_fapl_id, status, ioc_config) + + ! Finally, set our new Subfiling configuration + ! on the original FAPL + + CALL H5Pset_fapl_subfiling_f(subfiling_fapl, status, subf_config) + ! + ! 
OPTIONAL: Set alignment of objects in HDF5 file to + ! be equal to the Subfiling stripe size. + ! Choosing a Subfiling stripe size and HDF5 + ! object alignment value that are some + ! multiple of the disk block size can + ! generally help performance by ensuring + ! that I/O is well-aligned and doesn't + ! excessively cross stripe boundaries. + ! + ! Note that this option can substantially + ! increase the size of the resulting HDF5 + ! files, so it is a good idea to keep an eye + ! on this. + ! + + CALL H5Pset_alignment_f(subfiling_fapl, 0_HSIZE_T, 33554432_HSIZE_T, status) ! ALIGN to default 32MiB stripe size + + ! Parse any parallel prefix and create filename + par_prefix(:) = "" + CALL get_environment_variable("HDF5_PARAPREFIX", VALUE=par_prefix, STATUS=status) + filename = TRIM(par_prefix)//EXAMPLE_FILE + + ! Create a new file collectively + CALL H5Fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, status, access_prp = subfiling_fapl) + + ! Create the dataspace for the dataset. The second + ! dimension varies with the number of MPI ranks + ! while the first dimension is fixed. + + dset_dims(1) = EXAMPLE_DSET_NY + dset_dims(2) = mpi_size + CALL H5Screate_simple_f(EXAMPLE_DSET_DIMS, dset_dims, filespace, status) + + ! Create the dataset with default properties + + CALL H5Dcreate_f(file_id, EXAMPLE_DSET_NAME, H5T_NATIVE_INTEGER, filespace, dset_id, status) + ! Each MPI rank writes from a contiguous memory + ! region to the hyperslab in the file + + start(1) = 0 + start(2) = mpi_rank + count(1) = dset_dims(1) + count(2) = 1 + CALL H5Sselect_hyperslab_f(filespace, H5S_SELECT_SET_F, start, count, status) + + ! Initialize data buffer + ALLOCATE(wdata(COUNT(1)*COUNT(2))) + DO i = 1, COUNT(1)*COUNT(2) + wdata(i) = mpi_rank + ENDDO + + ! Write to dataset + f_ptr = C_LOC(wdata) + CALL H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER, f_ptr, status, mem_space_id=H5S_BLOCK_F, file_space_id=filespace) + + ! Close/release resources. 
+ DEALLOCATE(wdata) + CALL H5Dclose_f(dset_id, status) + CALL H5Sclose_f(filespace, status) + + CALL H5Fclose_f(file_id, status) + + CALL cleanup(EXAMPLE_FILE, subfiling_fapl) + + CALL H5Pclose_f(subfiling_fapl, status) + + END SUBROUTINE subfiling_write_custom + + ! + ! An example of pre-creating an HDF5 file on MPI rank + ! 0 when using the HDF5 Subfiling VFD. In this case, + ! the subfiling stripe count must be set so that rank + ! 0 knows how many subfiles to pre-create. + + SUBROUTINE subfiling_write_precreate(fapl_id, mpi_size, mpi_rank) + + IMPLICIT NONE + INTEGER(HID_T) :: fapl_id + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_size + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_rank + + INTEGER, DIMENSION(:), ALLOCATABLE, TARGET :: wdata + TYPE(H5FD_subfiling_config_t) :: subf_config + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: dset_dims + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: start + INTEGER(hsize_t), DIMENSION(1:EXAMPLE_DSET_DIMS) :: count + INTEGER(hid_t) :: file_id + INTEGER(hid_t) :: subfiling_fapl + INTEGER(hid_t) :: dset_id + INTEGER(hid_t) :: filespace + CHARACTER(LEN=512) :: filename, par_prefix + INTEGER :: status + INTEGER(SIZE_T) :: i + TYPE(C_PTR) :: f_ptr + INTEGER(KIND=MPI_INTEGER_KIND) :: mpierror + + ! Make a copy of the FAPL so we don't disturb + ! it for the other examples + + CALL H5Pcopy_f(fapl_id, subfiling_fapl, status) + + ! Get a default Subfiling and IOC configuration + CALL h5pget_fapl_subfiling_f(subfiling_fapl, subf_config, status) + + ! + ! Set the Subfiling stripe count so that rank + ! 0 knows how many subfiles the logical HDF5 + ! file should consist of. In this case, use + ! 5 subfiles with a default stripe size of + ! 32MiB. + + subf_config%shared_cfg%stripe_count = 5 + ! + ! OPTIONAL: Set alignment of objects in HDF5 file to + ! be equal to the Subfiling stripe size. + ! Choosing a Subfiling stripe size and HDF5 + ! object alignment value that are some + ! multiple of the disk block size can + ! 
generally help performance by ensuring + ! that I/O is well-aligned and doesn't + ! excessively cross stripe boundaries. + ! + ! Note that this option can substantially + ! increase the size of the resulting HDF5 + ! files, so it is a good idea to keep an eye + ! on this. + ! + + CALL H5Pset_alignment_f(subfiling_fapl, 0_HSIZE_T, 1048576_HSIZE_T, status) ! Align to custom 1MiB stripe size + + ! Parse any parallel prefix and create filename + par_prefix(:) = "" + CALL get_environment_variable("HDF5_PARAPREFIX", VALUE=par_prefix, STATUS=status) + filename = TRIM(par_prefix)//EXAMPLE_FILE + + ! Set dataset dimensionality + dset_dims(1) = EXAMPLE_DSET_NY + dset_dims(2) = mpi_size + + IF (mpi_rank .EQ. 0) THEN + ! + ! Make sure only this rank opens the file + ! + CALL H5Pset_mpi_params_f(subfiling_fapl, MPI_COMM_SELF, MPI_INFO_NULL, status) + + ! + ! Set the Subfiling VFD on our FAPL using + ! our custom configuration + ! + CALL H5Pset_fapl_subfiling_f(subfiling_fapl, status, subf_config); + + ! + ! Create a new file on rank 0 + ! + CALL H5Fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, status, access_prp = subfiling_fapl) + + ! Create the dataspace for the dataset. The second + ! dimension varies with the number of MPI ranks + ! while the first dimension is fixed. + ! + CALL H5Screate_simple_f(EXAMPLE_DSET_DIMS, dset_dims, filespace, status) + + ! Create the dataset with default properties + + CALL H5Dcreate_f(file_id, EXAMPLE_DSET_NAME, H5T_NATIVE_INTEGER, filespace, dset_id, status) + + ! Initialize data buffer + ALLOCATE(wdata(dset_dims(1)*dset_dims(2))) + DO i = 1, dset_dims(1)*dset_dims(2) + wdata(i) = i + ENDDO + + ! + ! Rank 0 writes to the whole dataset + ! + f_ptr = C_LOC(wdata) + CALL H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER, f_ptr, status, mem_space_id=H5S_BLOCK_F, file_space_id=filespace) + + ! + ! Close/release resources. + ! 
+ DEALLOCATE(wdata) + CALL H5Dclose_f(dset_id, status) + CALL H5Sclose_f(filespace, status) + + CALL H5Fclose_f(file_id, status) + ENDIF + + CALL MPI_Barrier(MPI_COMM_WORLD, mpierror) + + ! + ! Use all MPI ranks to re-open the file and + ! read back the dataset that was created + ! + CALL H5Pset_mpi_params_f(subfiling_fapl, MPI_COMM_WORLD, MPI_INFO_NULL, status) + + ! + ! Use the same subfiling configuration as rank 0 + ! used to create the file + ! + CALL H5Pset_fapl_subfiling_f(subfiling_fapl, status, subf_config) + + ! + ! Re-open the file on all ranks + ! + + CALL H5Fopen_f(filename, H5F_ACC_RDONLY_F, file_id, status, access_prp=subfiling_fapl) + + ! + ! Open the dataset that was created + ! + CALL H5Dopen_f(file_id, EXAMPLE_DSET_NAME, dset_id, status) + + ! + ! Initialize data buffer + ! + + ALLOCATE(wdata(dset_dims(1)*dset_dims(2))) + ! + ! Read the dataset on all ranks + ! + f_ptr = C_LOC(wdata) + CALL H5Dread_f(dset_id, H5T_NATIVE_INTEGER, f_ptr, status, mem_space_id=H5S_BLOCK_F, file_space_id=H5S_ALL_F) + + DEALLOCATE(wdata) + + CALL H5Dclose_f(dset_id, status) + CALL H5Fclose_f(file_id, status) + + CALL cleanup(EXAMPLE_FILE, subfiling_fapl) + + CALL H5Pclose_f(subfiling_fapl, status) + + END SUBROUTINE subfiling_write_precreate + +END MODULE subf + +PROGRAM main + + USE SUBF + IMPLICIT NONE + + INTEGER(KIND=MPI_INTEGER_KIND) :: comm = MPI_COMM_WORLD + INTEGER(KIND=MPI_INTEGER_KIND) :: info = MPI_INFO_NULL + INTEGER(HID_T) :: fapl_id + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_size + INTEGER(KIND=MPI_INTEGER_KIND) :: mpi_rank + INTEGER(KIND=MPI_INTEGER_KIND) :: required + INTEGER(KIND=MPI_INTEGER_KIND) :: provided + INTEGER(KIND=MPI_INTEGER_KIND) :: mpierror + INTEGER :: status + + ! HDF5 Subfiling VFD requires MPI_Init_thread with MPI_THREAD_MULTIPLE + required = MPI_THREAD_MULTIPLE + provided = 0 + CALL mpi_init_thread(required, provided, mpierror) + IF (provided .NE. 
required) THEN + WRITE(*,*) "MPI doesn't support MPI_Init_thread with MPI_THREAD_MULTIPLE *FAILED*" + CALL MPI_Abort(comm, -1_MPI_INTEGER_KIND, mpierror) + ENDIF + + CALL MPI_Comm_size(comm, mpi_size, mpierror) + CALL MPI_Comm_rank(comm, mpi_rank, mpierror) + + ! + ! Initialize HDF5 library and Fortran interfaces. + ! + CALL h5open_f(status) + + ! + ! Set up File Access Property List with MPI + ! parameters for the Subfiling VFD to use + CALL h5pcreate_f(H5P_FILE_ACCESS_F, fapl_id, status) + CALL H5Pset_mpi_params_f(fapl_id, comm, info, status) + + ! Use Subfiling VFD with default settings + CALL subfiling_write_default(fapl_id, mpi_size, mpi_rank) + + ! Use Subfiling VFD with custom settings + CALL subfiling_write_custom(fapl_id, mpi_size, mpi_rank) + + ! Use Subfiling VFD to precreate the HDF5 file on MPI rank + CALL subfiling_write_precreate(fapl_id, mpi_size, mpi_rank) + + CALL H5Pclose_f(fapl_id, status) + ! + ! Close FORTRAN interfaces and HDF5 library. + ! + CALL h5close_f(status) + + IF(mpi_rank .EQ. 
0) WRITE(*,"(A)") "PHDF5 example finished with no errors" + + CALL MPI_Finalize(mpierror) + +END PROGRAM main diff --git a/FORTRAN/H5T/CMakeLists.txt b/FORTRAN/H5T/CMakeLists.txt index 8462bbf5..8fa5c57b 100644 --- a/FORTRAN/H5T/CMakeLists.txt +++ b/FORTRAN/H5T/CMakeLists.txt @@ -4,10 +4,8 @@ project (HDF5Examples_FORTRAN_H5T Fortran) #----------------------------------------------------------------------------- # Setup include Directories #----------------------------------------------------------------------------- -INCLUDE_DIRECTORIES ( - ${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT} - ${PROJECT_BINARY_DIR} - ${CMAKE_LIBRARY_OUTPUT_DIRECTORY} +set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES + "${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" ) #----------------------------------------------------------------------------- @@ -124,7 +122,7 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endforeach () @@ -137,14 +135,14 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/114/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/114/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () elseif 
((${example_name} STREQUAL "h5ex_t_objref_F03" OR ${example_name} STREQUAL "h5ex_t_objrefatt_F03") OR (${example_name} STREQUAL "h5ex_t_regref_F03" OR ${example_name} STREQUAL "h5ex_t_regrefatt_F03")) @@ -155,14 +153,14 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}21.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}21.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () elseif (HDF5_VERSION_MAJOR VERSION_EQUAL "1.10") @@ -171,14 +169,14 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}06.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}06.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () elseif (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.12") @@ -186,14 +184,14 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/112/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different 
${PROJECT_SOURCE_DIR}/tfiles/112/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () else () @@ -203,14 +201,14 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}21.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}21.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () elseif (HDF5_VERSION_MAJOR VERSION_EQUAL "1.10") @@ -219,14 +217,14 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}06.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}06.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () elseif (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.12") @@ -234,14 +232,14 @@ if (HDF5_BUILD_TOOLS) 
TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/112/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/112/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) else () add_custom_command ( TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () endif () @@ -250,7 +248,7 @@ if (HDF5_BUILD_TOOLS) TARGET ${EXAMPLE_VARNAME}_f90_${example_name} POST_BUILD COMMAND ${CMAKE_COMMAND} - ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${example_name}.ddl + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/18/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl ) endif () endif () @@ -307,33 +305,40 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - add_test ( - NAME ${EXAMPLE_VARNAME}_f90_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) - if (HDF5_BUILD_TOOLS) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + else () add_test ( - NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} + NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND 
"${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" - -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.ddl.out" -D "TEST_EXPECT=0" - -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" -P "${H5EX_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + if (HDF5_BUILD_TOOLS) + add_test ( + NAME ${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=${H5EX_HDF5_DUMP_EXECUTABLE}" + -D "TEST_ARGS:STRING=${ARGN};${testname}.h5" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=${testname}.ddl.out" + -D "TEST_EXPECT=0" + -D "TEST_REFERENCE=${testname}.ddl" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_H5DUMP-f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}) + endif () endif () endmacro () @@ -344,19 +349,25 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - add_test ( - NAME ${EXAMPLE_VARNAME}_f90_${testname} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=${ARGN}" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_EXPECT=0" - -D "TEST_OUTPUT=${testname}.out" - -D "TEST_REFERENCE=${testname}.tst" - -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" - -P "${H5EX_RESOURCES_DIR}/runTest.cmake" - ) - set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS 
${EXAMPLE_VARNAME}_f90_${testname}-clearall) + if (HDF5_USING_ANALYSIS_TOOL) + add_test (NAME ${EXAMPLE_VARNAME}_f90_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + else () + add_test ( + NAME ${EXAMPLE_VARNAME}_f90_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=${ARGN}" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_EXPECT=0" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_REFERENCE=${testname}.tst" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall) + endif () endmacro () macro (TEST_EXAMPLE example) diff --git a/FORTRAN/H5T/Makefile.am b/FORTRAN/H5T/Makefile.am index 79321f0e..7d9d96a8 100644 --- a/FORTRAN/H5T/Makefile.am +++ b/FORTRAN/H5T/Makefile.am @@ -5,12 +5,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. ## ## Makefile.am ## Run automake to generate a Makefile.in from this file. 
diff --git a/FORTRAN/H5T/h5ex_t_enum_F03.F90 b/FORTRAN/H5T/h5ex_t_enum_F03.F90 index b0ba276d..dd480738 100644 --- a/FORTRAN/H5T/h5ex_t_enum_F03.F90 +++ b/FORTRAN/H5T/h5ex_t_enum_F03.F90 @@ -74,14 +74,14 @@ PROGRAM main ! Insert enumerated value for memtype. ! val = i - CALL h5tenum_insert_f(memtype, TRIM(names(i+1)), val, hdferr) + f_ptr = C_LOC(val) + CALL h5tenum_insert_f(memtype, TRIM(names(i+1)), f_ptr, hdferr) ! ! Insert enumerated value for filetype. We must first convert ! the numerical value val to the base type of the destination. ! - f_ptr = C_LOC(val) CALL h5tconvert_f (M_BASET, F_BASET, INT(1,SIZE_T), f_ptr, hdferr) - CALL h5tenum_insert_f(filetype, TRIM(names(i+1)), val, hdferr) + CALL h5tenum_insert_f(filetype, TRIM(names(i+1)), f_ptr, hdferr) ENDDO ! ! Create dataspace. Setting maximum size to be the current size. @@ -129,7 +129,7 @@ PROGRAM main ! ! Get the name of the enumeration member. ! - CALL h5tenum_nameof_f( memtype, rdata(i,j), NAME_BUF_SIZE, name, hdferr) + CALL h5tenum_nameof_f( memtype, INT(rdata(i,j)), NAME_BUF_SIZE, name, hdferr) WRITE(*,'(" ", A6," ")', ADVANCE='NO') TRIM(NAME) ENDDO WRITE(*,'("]")') diff --git a/FORTRAN/H5T/h5ex_t_enumatt_F03.F90 b/FORTRAN/H5T/h5ex_t_enumatt_F03.F90 index ec641b3e..13a289bd 100644 --- a/FORTRAN/H5T/h5ex_t_enumatt_F03.F90 +++ b/FORTRAN/H5T/h5ex_t_enumatt_F03.F90 @@ -75,14 +75,15 @@ PROGRAM main ! Insert enumerated value for memtype. ! val = i - CALL h5tenum_insert_f(memtype, TRIM(names(i+1)), val, hdferr) + f_ptr = C_LOC(val) + CALL h5tenum_insert_f(memtype, TRIM(names(i+1)), f_ptr, hdferr) ! ! Insert enumerated value for filetype. We must first convert ! the numerical value val to the base type of the destination. ! f_ptr = C_LOC(val) CALL h5tconvert_f(M_BASET, F_BASET, INT(1,SIZE_T), f_ptr, hdferr) - CALL h5tenum_insert_f(filetype, TRIM(names(i+1)), val, hdferr) + CALL h5tenum_insert_f(filetype, TRIM(names(i+1)), f_ptr, hdferr) ENDDO ! ! Create dataspace with a null dataspace. 
@@ -137,7 +138,7 @@ PROGRAM main ! ! Get the name of the enumeration member. ! - CALL h5tenum_nameof_f( memtype, rdata(i,j), NAME_BUF_SIZE, name, hdferr) + CALL h5tenum_nameof_f( memtype, INT(rdata(i,j)), NAME_BUF_SIZE, name, hdferr) WRITE(*,'(" ",A6," ")', ADVANCE='NO') TRIM(NAME) ENDDO WRITE(*,'("]")') diff --git a/FORTRAN/H5T/test.sh.in b/FORTRAN/H5T/test.sh.in index 368b75aa..dc1f4349 100755 --- a/FORTRAN/H5T/test.sh.in +++ b/FORTRAN/H5T/test.sh.in @@ -6,12 +6,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. srcdir=@srcdir@ diff --git a/FORTRAN/Makefile.am b/FORTRAN/Makefile.am index 0048db07..778f8022 100644 --- a/FORTRAN/Makefile.am +++ b/FORTRAN/Makefile.am @@ -5,12 +5,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. 
If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. ## ## Makefile.am ## Run automake to generate a Makefile.in from this file. diff --git a/JAVA/CMakeLists.txt b/JAVA/CMakeLists.txt index 5063e5b2..080e5055 100644 --- a/JAVA/CMakeLists.txt +++ b/JAVA/CMakeLists.txt @@ -1,6 +1,10 @@ cmake_minimum_required (VERSION 3.18) project (HDFJAVA_EXAMPLES Java) +set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES + "${HDFJAVA_LIB_DIR};${JAVA_INCLUDE_PATH};${JAVA_INCLUDE_PATH2}" +) + add_subdirectory (H5D) add_subdirectory (H5T) add_subdirectory (H5G) diff --git a/JAVA/H5D/CMakeLists.txt b/JAVA/H5D/CMakeLists.txt index 017830bc..d5b3b85a 100644 --- a/JAVA/H5D/CMakeLists.txt +++ b/JAVA/H5D/CMakeLists.txt @@ -3,11 +3,6 @@ project (HDF5Examples_JAVA_H5D Java) set (CMAKE_VERBOSE_MAKEFILE 1) -set_directory_properties(PROPERTIES - INCLUDE_DIRECTORIES - "${HDFJAVA_LIB_DIR};${JAVA_INCLUDE_PATH};${JAVA_INCLUDE_PATH2}" -) - #----------------------------------------------------------------------------- # Define Sources #----------------------------------------------------------------------------- diff --git a/JAVA/H5G/CMakeLists.txt b/JAVA/H5G/CMakeLists.txt index bd43f2a7..ac35a3a2 100644 --- a/JAVA/H5G/CMakeLists.txt +++ b/JAVA/H5G/CMakeLists.txt @@ -3,11 +3,6 @@ project (HDF5Examples_JAVA_GROUPS Java) set (CMAKE_VERBOSE_MAKEFILE 1) -set_directory_properties(PROPERTIES - INCLUDE_DIRECTORIES - "${HDFJAVA_LIB_DIR};${JAVA_INCLUDE_PATH};${JAVA_INCLUDE_PATH2}" -) - #----------------------------------------------------------------------------- # Define Sources #----------------------------------------------------------------------------- diff --git a/JAVA/H5J/110/HDF5FileStructure.java b/JAVA/H5J/110/HDF5FileStructure.java 
index cddad575..326d9492 100644 --- a/JAVA/H5J/110/HDF5FileStructure.java +++ b/JAVA/H5J/110/HDF5FileStructure.java @@ -5,12 +5,10 @@ * * * This file is part of HDF5. The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * - * the files COPYING and Copyright.html. COPYING can be found at the root * - * of the source code distribution tree; Copyright.html can be found at the * - * root level of an installed copy of the electronic HDF5 document set and * - * is linked from the top-level documents page. It can also be found at * - * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * - * access to either file, you may request a copy from help@hdfgroup.org. * + * the COPYING file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ import hdf.hdf5lib.H5; diff --git a/JAVA/H5J/CMakeLists.txt b/JAVA/H5J/CMakeLists.txt index 1bd28a18..6d155652 100644 --- a/JAVA/H5J/CMakeLists.txt +++ b/JAVA/H5J/CMakeLists.txt @@ -3,11 +3,6 @@ project (HDF5Examples_JAVA_INTRO Java) set (CMAKE_VERBOSE_MAKEFILE 1) -set_directory_properties(PROPERTIES - INCLUDE_DIRECTORIES - "${HDFJAVA_LIB_DIR};${JAVA_INCLUDE_PATH};${JAVA_INCLUDE_PATH2}" -) - #----------------------------------------------------------------------------- # Define Sources #----------------------------------------------------------------------------- diff --git a/JAVA/H5J/HDF5FileStructure.java b/JAVA/H5J/HDF5FileStructure.java index 8ea80a80..1370340b 100644 --- a/JAVA/H5J/HDF5FileStructure.java +++ b/JAVA/H5J/HDF5FileStructure.java @@ -5,12 +5,10 @@ * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * - * the files COPYING and Copyright.html. COPYING can be found at the root * - * of the source code distribution tree; Copyright.html can be found at the * - * root level of an installed copy of the electronic HDF5 document set and * - * is linked from the top-level documents page. It can also be found at * - * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * - * access to either file, you may request a copy from help@hdfgroup.org. * + * the COPYING file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ import hdf.hdf5lib.H5; diff --git a/JAVA/H5J/HDF5SubsetSelect.java b/JAVA/H5J/HDF5SubsetSelect.java index a00f5bed..e302cd18 100644 --- a/JAVA/H5J/HDF5SubsetSelect.java +++ b/JAVA/H5J/HDF5SubsetSelect.java @@ -5,12 +5,10 @@ * * * This file is part of HDF5. The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * - * the files COPYING and Copyright.html. COPYING can be found at the root * - * of the source code distribution tree; Copyright.html can be found at the * - * root level of an installed copy of the electronic HDF5 document set and * - * is linked from the top-level documents page. It can also be found at * - * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * - * access to either file, you may request a copy from help@hdfgroup.org. * + * the COPYING file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ import hdf.hdf5lib.H5; diff --git a/JAVA/H5J/Makefile.am b/JAVA/H5J/Makefile.am index 6b615a2f..fedb82f9 100644 --- a/JAVA/H5J/Makefile.am +++ b/JAVA/H5J/Makefile.am @@ -4,12 +4,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. ## ## Makefile.am ## Run automake to generate a Makefile.in from this file. diff --git a/JAVA/H5J/runExample.sh.in b/JAVA/H5J/runExample.sh.in index 3a42b2b5..709613d1 100644 --- a/JAVA/H5J/runExample.sh.in +++ b/JAVA/H5J/runExample.sh.in @@ -6,12 +6,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. 
+# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. # top_builddir=@top_builddir@ diff --git a/JAVA/H5T/CMakeLists.txt b/JAVA/H5T/CMakeLists.txt index 1c06804b..d8c8220e 100644 --- a/JAVA/H5T/CMakeLists.txt +++ b/JAVA/H5T/CMakeLists.txt @@ -3,11 +3,6 @@ project (HDF5Examples_JAVA_H5T Java) set (CMAKE_VERBOSE_MAKEFILE 1) -set_directory_properties(PROPERTIES - INCLUDE_DIRECTORIES - "${HDFJAVA_LIB_DIR};${JAVA_INCLUDE_PATH};${JAVA_INCLUDE_PATH2}" -) - #----------------------------------------------------------------------------- # Define Sources #----------------------------------------------------------------------------- diff --git a/Makefile.am b/Makefile.am index 65d97ebd..d8019c95 100644 --- a/Makefile.am +++ b/Makefile.am @@ -5,15 +5,13 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. ## ## Makefile.am ## Run automake to generate a Makefile.in from this file. 
## -SUBDIRS = C FORTRAN \ No newline at end of file +SUBDIRS = C FORTRAN diff --git a/README.md b/README.md index 9a658365..2f0090ba 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ HELP AND SUPPORT ---------------- Information regarding Help Desk and Support services is available at - https://portal.hdfgroup.org/display/support/The+HDF+Help+Desk + https://hdfgroup.atlassian.net/servicedesk/customer/portals @@ -48,7 +48,7 @@ HDF5 SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE -------------------------------------------- Full Documentation and Programming Resources for this HDF5 can be found at - https://portal.hdfgroup.org/display/HDF5 + https://portal.hdfgroup.org/documentation/index.html Periodically development code snapshots are provided at the following URL: @@ -56,7 +56,7 @@ Periodically development code snapshots are provided at the following URL: Source packages for current and previous releases are located at: - https://portal.hdfgroup.org/display/support/Downloads + https://portal.hdfgroup.org/downloads/ Development code is available at our Github location: diff --git a/Using_CMake.txt b/Using_CMake.txt index df761cb2..778fa753 100644 --- a/Using_CMake.txt +++ b/Using_CMake.txt @@ -30,7 +30,7 @@ I. Preconditions 1. We suggest you obtain the latest CMake for windows from the Kitware web site. The HDF5 product requires a minimum CMake version - of 3.12. + of 3.18. 2. You have installed the HDF5 library built with CMake, by executing the HDF Install Utility (the *.msi file in the binary package for @@ -45,7 +45,7 @@ I. Preconditions (Note there are no quote characters used on Windows and all platforms use forward slashes) - 4. Created separate source and build directories. + 4. Create separate source and build directories. (CMake commands are executed in the build directory) @@ -90,11 +90,11 @@ These steps are described in more detail below. 
* MinGW Makefiles * NMake Makefiles * Unix Makefiles - * Visual Studio 15 - * Visual Studio 15 Win64 - * Visual Studio 17 - * Visual Studio 17 Win64 - * Visual Studio 19 + * Visual Studio 15 2017 + * Visual Studio 15 2017 Win64 + * Visual Studio 16 2019 + * Visual Studio 17 2022 + is: * H5EX_BUILD_TESTING:BOOL=ON diff --git a/config/cmake-presets/hidden-presets.json b/config/cmake-presets/hidden-presets.json index 883b903a..590e7ec7 100644 --- a/config/cmake-presets/hidden-presets.json +++ b/config/cmake-presets/hidden-presets.json @@ -274,7 +274,7 @@ "execution": { "noTestsAction": "error", "timeout": 600, - "jobs": 8 + "jobs": 4 } }, { @@ -357,37 +357,25 @@ "configurePreset": "ci-x64-Release-MSVC", "hidden": true, "inherits": "ci-base", - "generators": [ - "ZIP" - ], "configurations": ["RelWithDebInfo"] }, { "name": "ci-x64-Release-Clang", "configurePreset": "ci-x64-Release-Clang", "hidden": true, - "inherits": "ci-base", - "generators": [ - "TGZ" - ] + "inherits": "ci-base" }, { "name": "ci-x64-Release-GNUC", "configurePreset": "ci-x64-Release-GNUC", "hidden": true, - "inherits": "ci-base", - "generators": [ - "TGZ" - ] + "inherits": "ci-base" }, { "name": "ci-x64-Release-Intel", "configurePreset": "ci-x64-Release-Intel", "hidden": true, - "inherits": "ci-base", - "generators": [ - "TGZ" - ] + "inherits": "ci-base" } ] } diff --git a/config/cmake/HDFExampleMacros.cmake b/config/cmake/HDFExampleMacros.cmake index 3bb1d48d..03e4bc0b 100644 --- a/config/cmake/HDFExampleMacros.cmake +++ b/config/cmake/HDFExampleMacros.cmake @@ -45,10 +45,10 @@ macro (BASIC_SETTINGS varname) #----------------------------------------------------------------------------- # Compiler specific flags : Shouldn't there be compiler tests for these #----------------------------------------------------------------------------- - if (CMAKE_COMPILER_IS_GNUCC) + if (CMAKE_C_COMPILER_ID STREQUAL "GNU") set (CMAKE_C_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_C_FLAGS}") endif () - if 
(CMAKE_CXX_COMPILER_LOADED AND CMAKE_COMPILER_IS_GNUCXX) + if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_CXX_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_CXX_FLAGS}") endif () @@ -56,10 +56,10 @@ macro (BASIC_SETTINGS varname) # This is in here to help some of the GCC based IDES like Eclipse # and code blocks parse the compiler errors and warnings better. #----------------------------------------------------------------------------- - if (CMAKE_COMPILER_IS_GNUCC) + if (CMAKE_C_COMPILER_ID STREQUAL "GNU") set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fmessage-length=0") endif () - if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_COMPILER_IS_GNUCXX) + if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fmessage-length=0") endif () @@ -74,7 +74,7 @@ macro (BASIC_SETTINGS varname) set (HDF_WARNINGS_BLOCKED 1) string (REGEX REPLACE "(^| )([/-])W[0-9]( |$)" " " CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /w") - if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_COMPILER_IS_GNUCXX) + if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") string (REGEX REPLACE "(^| )([/-])W[0-9]( |$)" " " CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /w") endif () @@ -91,7 +91,7 @@ macro (BASIC_SETTINGS varname) # Most compilers use -w to suppress warnings. 
if (NOT HDF_WARNINGS_BLOCKED) set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w") - if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_COMPILER_IS_GNUCXX) + if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w") endif () endif () @@ -105,145 +105,214 @@ macro (BASIC_SETTINGS varname) ) endmacro () +macro (EXTERNAL_HDF5_LIBRARY compress_type) + set (HDF5_VERSION "1.15.0") + set (HDF5_VERSION_MAJOR "1.15") + set (HDF5LIB_TGZ_NAME "hdf5.tar.gz" CACHE STRING "Use HDF5LIB from compressed file" FORCE) + set (HDF5LIB_TGZ_ORIGPATH "https://github.com/HDFGroup/hdf5/releases/download/snapshot" CACHE STRING "Use HDF5LIB from original location" FORCE) + set (HDF5LIB_USE_LOCALCONTENT ON CACHE BOOL "Use local file for HDF5LIB FetchContent" FORCE) + set (HDF5LIB_GIT_URL "https://github.com/HDFGroup/hdf5.git" CACHE STRING "Use HDF5LIB from GitHub repository" FORCE) + set (HDF5LIB_GIT_BRANCH "develop" CACHE STRING "" FORCE) + if (NOT HDF5LIB_USE_LOCALCONTENT) + set (HDF5LIB_URL ${H5EXAMPLES_SOURCE_DIR}/${HDF5LIB_TGZ_NAME}) + else () + set (HDF5LIB_URL ${HDF5LIB_TGZ_ORIGPATH}/${HDF5LIB_TGZ_NAME}) + endif () + message (VERBOSE "Library HDF5LIB file is ${HDF5LIB_URL}") + if (${compress_type} MATCHES "GIT") + FetchContent_Declare (HDF5LIB + GIT_REPOSITORY ${HDF5LIB_GIT_URL} + GIT_TAG ${HDF5LIB_GIT_BRANCH} + ) + elseif (${compress_type} MATCHES "TGZ") + message (VERBOSE "Library HDF5LIB file ${HDF5LIB_URL}") + FetchContent_Declare (HDF5LIB + URL ${HDF5LIB_URL} + URL_HASH "" + ) + endif () + FetchContent_GetProperties(HDF5LIB) + if(NOT hdf5lib_POPULATED) + FetchContent_Populate(HDF5LIB) + + set (BUILD_SHARED_LIBS OFF CACHE BOOL "Build Shared Libraries" FORCE) + set (HDF5_BUILD_CPP_LIB OFF CACHE BOOL "Build C++ support" FORCE) + set (HDF5_BUILD_FORTRAN OFF CACHE BOOL "Build FORTRAN support" FORCE) + set (HDF5_BUILD_JAVA OFF CACHE BOOL "Build JAVA support" FORCE) + set (BUILD_TESTING OFF CACHE BOOL "Build JHDF5 Unit Testing" FORCE) + set 
(HDF5_BUILD_EXAMPLES OFF CACHE BOOL "Build JHDF5 Library Examples" FORCE) + set (HDF5_BUILD_HL_LIB OFF CACHE BOOL "Build JHDF5 HIGH Level HDF5 Library" FORCE) + set (HDF5_ENABLE_Z_LIB_SUPPORT OFF CACHE BOOL "Enable Zlib Filters" FORCE) + set (HDF5_ENABLE_SZIP_SUPPORT OFF CACHE BOOL "Use SZip Filter" FORCE) + + add_subdirectory(${hdf5lib_SOURCE_DIR} ${hdf5lib_BINARY_DIR}) + endif() + + add_library(${HDF_PACKAGE_NAMESPACE}hdf5lib-static ALIAS hdf5-static) + set (H5LIB_STATIC_LIBRARY "${HDF_PACKAGE_NAMESPACE}hdf5lib-static") + set (H5LIB_LIBRARIES ${H5LIB_STATIC_LIBRARY}) + set (H5LIB_TOOLS ${hdf5lib_BINARY_DIR}/bin) + + set (H5LIB_INCLUDE_DIR_GEN "${hdf5lib_BINARY_DIR}/src") + set (H5LIB_INCLUDE_DIR "${hdf5lib_SOURCE_DIR}/src") + set (H5LIB_FOUND 1) + set (H5LIB_INCLUDE_DIRS ${H5LIB_INCLUDE_DIR_GEN} ${H5LIB_INCLUDE_DIR}) + message (STATUS "HDF5-${HDF5_VERSION} found: INC=${H5LIB_INCLUDE_DIRS} TOOLS=${H5LIB_TOOLS}") +endmacro () + macro (HDF5_SUPPORT) set (CMAKE_MODULE_PATH ${H5EX_RESOURCES_DIR} ${CMAKE_MODULE_PATH}) option (USE_SHARED_LIBS "Use Shared Libraries" ON) - if (NOT H5EX_HDF5_HEADER) - if (USE_SHARED_LIBS) - set (FIND_HDF_COMPONENTS C shared) - else () - set (FIND_HDF_COMPONENTS C static) - set (HDF_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) - message (STATUS "Using static HDF5 - disable build of Java examples") - endif () - if (HDF_BUILD_FORTRAN) - set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Fortran) - endif () - if (HDF_BUILD_JAVA) - set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Java) - set (HDF5_Java_FOUND 1) #default setting for 1.10.1 and earlier - endif () - message (STATUS "HDF5 find comps: ${FIND_HDF_COMPONENTS}") - set (SEARCH_PACKAGE_NAME ${HDF5_PACKAGE_NAME}) - - find_package (HDF5 NAMES ${SEARCH_PACKAGE_NAME} COMPONENTS ${FIND_HDF_COMPONENTS}) - message (STATUS "HDF5 C libs:${HDF5_FOUND} static:${HDF5_static_C_FOUND} and shared:${HDF5_shared_C_FOUND}") - message (STATUS "HDF5 Fortran libs: static:${HDF5_static_Fortran_FOUND} 
and shared:${HDF5_shared_Fortran_FOUND}") - message (STATUS "HDF5 Java libs: ${HDF5_Java_FOUND}") - if (HDF5_FOUND) + if (HDF_FETCH_HDF5) + include (FetchContent) + EXTERNAL_HDF5_LIBRARY(GIT) + set (H5EX_HDF5_INCLUDE_DIRS ${H5LIB_INCLUDE_DIRS}) + set (H5EX_HDF5_HAVE_H5PUBCONF_H 1) + set (H5EX_HDF5_HAVE_HDF5 1) + set (H5EX_HDF5_HEADER "h5pubconf.h") + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${H5LIB_LIBRARIES}) + if (NOT TARGET h5dump) + add_executable (h5dump IMPORTED) + endif() + set (H5EX_HDF5_DUMP_EXECUTABLE $) + else () + if (NOT H5EX_HDF5_HEADER) if (USE_SHARED_LIBS) - if (NOT TARGET ${HDF5_NAMESPACE}h5dump-shared) - add_executable (${HDF5_NAMESPACE}h5dump-shared IMPORTED) - endif () - set (H5EX_HDF5_DUMP_EXECUTABLE $) + set (FIND_HDF_COMPONENTS C shared) else () - if (NOT TARGET ${HDF5_NAMESPACE}h5dump) - add_executable (${HDF5_NAMESPACE}h5dump IMPORTED) - endif() - set (H5EX_HDF5_DUMP_EXECUTABLE $) - endif() + set (FIND_HDF_COMPONENTS C static) + set (HDF_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) + message (STATUS "Using static HDF5 - disable build of Java examples") + endif () + if (HDF_BUILD_FORTRAN) + set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Fortran) + endif () + if (HDF_BUILD_JAVA) + set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Java) + set (HDF5_Java_FOUND 1) #default setting for 1.10.1 and earlier + endif () + message (STATUS "HDF5 find comps: ${FIND_HDF_COMPONENTS}") + set (SEARCH_PACKAGE_NAME ${HDF5_PACKAGE_NAME}) - if (NOT HDF5_static_C_FOUND AND NOT HDF5_shared_C_FOUND) - #find library from non-dual-binary package - set (FIND_HDF_COMPONENTS C) - if (HDF_BUILD_FORTRAN) - set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Fortran) - endif () - if (HDF_BUILD_JAVA) - set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Java) - endif () - message (STATUS "HDF5 find comps: ${FIND_HDF_COMPONENTS}") + find_package (HDF5 NAMES ${SEARCH_PACKAGE_NAME} COMPONENTS ${FIND_HDF_COMPONENTS}) + message (STATUS "HDF5 C libs:${HDF5_FOUND} 
static:${HDF5_static_C_FOUND} and shared:${HDF5_shared_C_FOUND}") + message (STATUS "HDF5 Fortran libs: static:${HDF5_static_Fortran_FOUND} and shared:${HDF5_shared_Fortran_FOUND}") + message (STATUS "HDF5 Java libs: ${HDF5_Java_FOUND}") + if (HDF5_FOUND) + if (NOT HDF5_static_C_FOUND AND NOT HDF5_shared_C_FOUND) + #find library from non-dual-binary package + set (FIND_HDF_COMPONENTS C) + if (HDF_BUILD_FORTRAN) + set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Fortran) + endif () + if (HDF_BUILD_JAVA) + set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Java) + endif () + message (STATUS "HDF5 find comps: ${FIND_HDF_COMPONENTS}") - find_package (HDF5 NAMES ${SEARCH_PACKAGE_NAME} COMPONENTS ${FIND_HDF_COMPONENTS}) - message (STATUS "HDF5 libs:${HDF5_FOUND} C:${HDF5_C_FOUND} Fortran:${HDF5_Fortran_FOUND} Java:${HDF5_Java_FOUND}") - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_LIBRARIES}) - if (HDF5_BUILD_SHARED_LIBS) - add_definitions (-DH5_BUILT_AS_DYNAMIC_LIB) - else () - add_definitions (-DH5_BUILT_AS_STATIC_LIB) - endif () - if (USE_SHARED_LIBS AND WIN32) - set_property (TARGET ${HDF5_NAMESPACE}h5dump PROPERTY IMPORTED_LOCATION "${HDF5_TOOLS_DIR}/h5dumpdll") - else () - set_property (TARGET ${HDF5_NAMESPACE}h5dump PROPERTY IMPORTED_LOCATION "${HDF5_TOOLS_DIR}/h5dump") - endif () - if (HDF_BUILD_JAVA) - set (CMAKE_JAVA_INCLUDE_PATH "${CMAKE_JAVA_INCLUDE_PATH};${HDF5_JAVA_INCLUDE_DIRS}") - message (STATUS "HDF5 jars:${HDF5_JAVA_INCLUDE_DIRS}") - endif () - set (H5EX_HDF5_DUMP_EXECUTABLE $) - else () - if (USE_SHARED_LIBS AND HDF5_shared_C_FOUND) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_C_SHARED_LIBRARY}) - set (HDF5_LIBRARY_PATH ${PACKAGE_PREFIX_DIR}/lib) - set_property (TARGET ${HDF5_NAMESPACE}h5dump-shared PROPERTY IMPORTED_LOCATION "${HDF5_TOOLS_DIR}/h5dump-shared") + find_package (HDF5 NAMES ${SEARCH_PACKAGE_NAME} COMPONENTS ${FIND_HDF_COMPONENTS}) + message (STATUS "HDF5 libs:${HDF5_FOUND} C:${HDF5_C_FOUND} Fortran:${HDF5_Fortran_FOUND} 
Java:${HDF5_Java_FOUND}") + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_LIBRARIES}) + if (HDF5_BUILD_SHARED_LIBS) + add_definitions (-DH5_BUILT_AS_DYNAMIC_LIB) + else () + add_definitions (-DH5_BUILT_AS_STATIC_LIB) + endif () + if (USE_SHARED_LIBS AND WIN32) + set_property (TARGET ${HDF5_NAMESPACE}h5dump PROPERTY IMPORTED_LOCATION "${HDF5_TOOLS_DIR}/h5dumpdll") + else () + set_property (TARGET ${HDF5_NAMESPACE}h5dump PROPERTY IMPORTED_LOCATION "${HDF5_TOOLS_DIR}/h5dump") + endif () + if (HDF_BUILD_JAVA) + set (CMAKE_JAVA_INCLUDE_PATH "${CMAKE_JAVA_INCLUDE_PATH};${HDF5_JAVA_INCLUDE_DIRS}") + message (STATUS "HDF5 jars:${HDF5_JAVA_INCLUDE_DIRS}") + endif () + set (H5EX_HDF5_DUMP_EXECUTABLE $) else () - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_C_STATIC_LIBRARY}) - set_property (TARGET ${HDF5_NAMESPACE}h5dump PROPERTY IMPORTED_LOCATION "${HDF5_TOOLS_DIR}/h5dump") - endif () - if (HDF_BUILD_FORTRAN AND ${HDF5_BUILD_FORTRAN}) - if (BUILD_SHARED_LIBS AND HDF5_shared_Fortran_FOUND) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_FORTRAN_SHARED_LIBRARY}) - elseif (HDF5_static_Fortran_FOUND) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_FORTRAN_STATIC_LIBRARY}) + if (USE_SHARED_LIBS AND HDF5_shared_C_FOUND) + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_C_SHARED_LIBRARY}) + set (HDF5_LIBRARY_PATH ${PACKAGE_PREFIX_DIR}/lib) + else () + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_C_STATIC_LIBRARY}) + endif () + if (HDF5_VERSION VERSION_LESS "1.14.4" AND NOT HDF5_shared_C_FOUND) + if (NOT TARGET ${HDF5_NAMESPACE}h5dump-shared) + add_executable (${HDF5_NAMESPACE}h5dump-shared IMPORTED) + endif () + set (H5EX_HDF5_DUMP_EXECUTABLE $) else () + if (NOT TARGET ${HDF5_NAMESPACE}h5dump) + add_executable (${HDF5_NAMESPACE}h5dump IMPORTED) + endif() + set (H5EX_HDF5_DUMP_EXECUTABLE $) + endif() + + if (NOT HDF5_static_Fortran_FOUND AND NOT HDF5_shared_Fortran_FOUND) set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build FORTRAN 
support" FORCE) message (STATUS "HDF5 Fortran libs not found - disable build of Fortran examples") + else () + if (HDF_BUILD_FORTRAN AND ${HDF5_BUILD_FORTRAN}) + if (BUILD_SHARED_LIBS AND HDF5_shared_Fortran_FOUND) + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_FORTRAN_SHARED_LIBRARY}) + elseif (HDF5_static_Fortran_FOUND) + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_FORTRAN_STATIC_LIBRARY}) + else () + set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build FORTRAN support" FORCE) + message (STATUS "HDF5 Fortran libs not found - disable build of Fortran examples") + endif () + endif () endif () - else () - set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build FORTRAN support" FORCE) - message (STATUS "HDF5 Fortran libs not found - disable build of Fortran examples") - endif () - if (HDF_BUILD_JAVA) - if (${HDF5_BUILD_JAVA} AND HDF5_Java_FOUND) - set (CMAKE_JAVA_INCLUDE_PATH "${CMAKE_JAVA_INCLUDE_PATH};${HDF5_JAVA_INCLUDE_DIRS}") - set (H5EX_JAVA_LIBRARY ${HDF5_JAVA_LIBRARY}) - set (H5EX_JAVA_LIBRARIES ${HDF5_JAVA_LIBRARY}) - message (STATUS "HDF5 lib:${H5EX_JAVA_LIBRARY} jars:${HDF5_JAVA_INCLUDE_DIRS}}") + if (HDF_BUILD_JAVA AND HDF5_Java_FOUND) + if (${HDF5_BUILD_JAVA}) + set (CMAKE_JAVA_INCLUDE_PATH "${CMAKE_JAVA_INCLUDE_PATH};${HDF5_JAVA_INCLUDE_DIRS}") + set (H5EX_JAVA_LIBRARY ${HDF5_JAVA_LIBRARY}) + set (H5EX_JAVA_LIBRARIES ${HDF5_JAVA_LIBRARY}) + message (STATUS "HDF5 lib:${H5EX_JAVA_LIBRARY} jars:${HDF5_JAVA_INCLUDE_DIRS}}") + else () + set (HDF_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) + message (STATUS "HDF5 Java libs not found - disable build of Java examples") + endif () else () set (HDF_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) - message (STATUS "HDF5 Java libs not found - disable build of Java examples") endif () - else () - set (HDF_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) endif () - endif () - else () - find_package (HDF5) # Legacy find - #Legacy find_package does not set HDF5_TOOLS_DIR, so we set it here - set 
(HDF5_TOOLS_DIR ${HDF5_LIBRARY_DIRS}/../bin) - #Legacy find_package does not set HDF5_BUILD_SHARED_LIBS, so we set it here - if (USE_SHARED_LIBS AND EXISTS "${HDF5_LIBRARY_DIRS}/libhdf5.so") - set (HDF5_BUILD_SHARED_LIBS 1) else () - set (HDF5_BUILD_SHARED_LIBS 0) + find_package (HDF5) # Legacy find + #Legacy find_package does not set HDF5_TOOLS_DIR, so we set it here + set (HDF5_TOOLS_DIR ${HDF5_LIBRARY_DIRS}/../bin) + #Legacy find_package does not set HDF5_BUILD_SHARED_LIBS, so we set it here + if (USE_SHARED_LIBS AND EXISTS "${HDF5_LIBRARY_DIRS}/libhdf5.so") + set (HDF5_BUILD_SHARED_LIBS 1) + else () + set (HDF5_BUILD_SHARED_LIBS 0) + endif () + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_LIBRARIES}) + add_executable (${HDF5_NAMESPACE}h5dump IMPORTED) + set_property (TARGET ${HDF5_NAMESPACE}h5dump PROPERTY IMPORTED_LOCATION "${HDF5_TOOLS_DIR}/h5dump") + set (H5EX_HDF5_DUMP_EXECUTABLE $) endif () - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_LIBRARIES}) - add_executable (${HDF5_NAMESPACE}h5dump IMPORTED) - set_property (TARGET ${HDF5_NAMESPACE}h5dump PROPERTY IMPORTED_LOCATION "${HDF5_TOOLS_DIR}/h5dump") - set (H5EX_HDF5_DUMP_EXECUTABLE $) - endif () - set (HDF5_PACKAGE_NAME ${SEARCH_PACKAGE_NAME}) + set (HDF5_PACKAGE_NAME ${SEARCH_PACKAGE_NAME}) - if (HDF5_FOUND) - set (H5EX_HDF5_INCLUDE_DIRS ${HDF5_INCLUDE_DIR}) + if (HDF5_FOUND) + set (H5EX_HDF5_INCLUDE_DIRS ${HDF5_INCLUDE_DIR}) + set (H5EX_HDF5_HAVE_H5PUBCONF_H 1) + set (H5EX_HDF5_HAVE_HDF5 1) + set (H5EX_HDF5_HEADER "h5pubconf.h") + message (STATUS "HDF5-${HDF5_VERSION_STRING} found: INC=${HDF5_INCLUDE_DIR} TOOLS=${HDF5_TOOLS_DIR}") + else () + message (FATAL_ERROR " HDF5 is Required for HDF5 Examples") + endif () + else () + # This project is being called from within another and HDF5 is already configured set (H5EX_HDF5_HAVE_H5PUBCONF_H 1) set (H5EX_HDF5_HAVE_HDF5 1) - set (H5EX_HDF5_HEADER "h5pubconf.h") - message (STATUS "HDF5-${HDF5_VERSION_STRING} found: INC=${HDF5_INCLUDE_DIR} 
TOOLS=${HDF5_TOOLS_DIR}") - else () - message (FATAL_ERROR " HDF5 is Required for HDF5 Examples") + message (STATUS "HDF5-${HDF5_VERSION_STRING} used") + endif () + if (HDF_BUILD_FORTRAN) + list (APPEND H5EX_HDF5_INCLUDE_DIRS ${HDF5_INCLUDE_DIR_FORTRAN}) endif () - else () - # This project is being called from within another and HDF5 is already configured - set (H5EX_HDF5_HAVE_H5PUBCONF_H 1) - set (H5EX_HDF5_HAVE_HDF5 1) - message (STATUS "HDF5-${HDF5_VERSION_STRING} used") - endif () - if (HDF_BUILD_FORTRAN) - list (APPEND H5EX_HDF5_INCLUDE_DIRS ${HDF5_INCLUDE_DIR_FORTRAN}) endif () message (STATUS "HDF5 link libs: ${H5EX_HDF5_LINK_LIBS} Includes: ${H5EX_HDF5_INCLUDE_DIRS}") endmacro () diff --git a/config/cmake/HDFMacros.cmake b/config/cmake/HDFMacros.cmake index 66a25aab..b9ef2df0 100644 --- a/config/cmake/HDFMacros.cmake +++ b/config/cmake/HDFMacros.cmake @@ -90,7 +90,7 @@ macro (HDFTEST_COPY_FILE src dest target) endmacro () macro (HDF_DIR_PATHS package_prefix) - option (H5EX_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables, FALSE to use historical settings" FALSE) + option (H5EX_USE_GNU_DIRS "ON to use GNU Coding Standard install directory variables, OFF to use historical settings" OFF) if (H5EX_USE_GNU_DIRS) include(GNUInstallDirs) if (NOT ${package_prefix}_INSTALL_BIN_DIR) @@ -121,7 +121,7 @@ macro (HDF_DIR_PATHS package_prefix) endif () if (APPLE) - option (${package_prefix}_BUILD_FRAMEWORKS "TRUE to build as frameworks libraries, FALSE to build according to BUILD_SHARED_LIBS" FALSE) + option (${package_prefix}_BUILD_FRAMEWORKS "ON to build as frameworks libraries, OFF to build according to BUILD_SHARED_LIBS" OFF) endif () if (NOT ${package_prefix}_INSTALL_BIN_DIR) @@ -170,10 +170,10 @@ macro (HDF_DIR_PATHS package_prefix) message(STATUS "Final: ${${package_prefix}_INSTALL_DOC_DIR}") # Always use full RPATH, i.e. 
don't skip the full RPATH for the build tree - set (CMAKE_SKIP_BUILD_RPATH FALSE) + set (CMAKE_SKIP_BUILD_RPATH OFF) # when building, don't use the install RPATH already # (but later on when installing) - set (CMAKE_INSTALL_RPATH_USE_LINK_PATH FALSE) + set (CMAKE_INSTALL_RPATH_USE_LINK_PATH OFF) # add the automatically determined parts of the RPATH # which point to directories outside the build tree to the install RPATH set (CMAKE_BUILD_WITH_INSTALL_RPATH ON) diff --git a/config/cmake/grepTest.cmake b/config/cmake/grepTest.cmake index b91c4a8b..4031a1bd 100644 --- a/config/cmake/grepTest.cmake +++ b/config/cmake/grepTest.cmake @@ -51,9 +51,7 @@ endif () if (TEST_ENV_VAR) set (ENV{${TEST_ENV_VAR}} "${TEST_ENV_VALUE}") - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (TRACE "ENV:${TEST_ENV_VAR}=$ENV{${TEST_ENV_VAR}}") - endif () + message (TRACE "ENV:${TEST_ENV_VAR}=$ENV{${TEST_ENV_VAR}}") endif () # run the test program, capture the stdout/stderr and the result var @@ -72,11 +70,18 @@ message (STATUS "COMMAND Result: ${TEST_RESULT}") message (STATUS "COMMAND Error: ${TEST_ERROR}") # remove special output -file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM) -string (FIND TEST_STREAM "_pmi_alps" TEST_FIND_RESULT) -if (TEST_FIND_RESULT GREATER -1) - string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}") - file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM}) +if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}") + file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM) + string (FIND "${TEST_STREAM}" "_pmi_alps" TEST_FIND_RESULT) + if (TEST_FIND_RESULT GREATER -1) + string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}") + file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}") + endif () + string (FIND "${TEST_STREAM}" "ulimit -s" TEST_FIND_RESULT) + if (TEST_FIND_RESULT GREATER -1) + string (REGEX REPLACE "^.*ulimit -s[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}") + file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} 
"${TEST_STREAM}") + endif () endif () # if the TEST_ERRREF exists grep the error output with the error reference @@ -114,7 +119,7 @@ if (TEST_ERRREF) if (NOT TEST_SORT_COMPARE) # now compare the output with the reference execute_process ( - COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_FOLDER}/${TEST_REFERENCE} + COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_FOLDER}/${TEST_REFERENCE} RESULT_VARIABLE TEST_COMPARE_RESULT ) else () diff --git a/config/cmake/jrunTest.cmake b/config/cmake/jrunTest.cmake index c0eaff33..d7d83d4f 100644 --- a/config/cmake/jrunTest.cmake +++ b/config/cmake/jrunTest.cmake @@ -1,3 +1,14 @@ +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. +# # jrunTest.cmake executes a command and captures the output in a file. File is then compared # against a reference file. Exit status of command can also be compared. 
cmake_policy(SET CMP0007 NEW) @@ -139,7 +150,7 @@ if (NOT TEST_SKIP_COMPARE) if (NOT TEST_SORT_COMPARE) # now compare the output with the reference execute_process ( - COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_FOLDER}/${TEST_REFERENCE} + COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_FOLDER}/${TEST_REFERENCE} RESULT_VARIABLE TEST_COMPARE_RESULT ) else () @@ -209,7 +220,7 @@ if (NOT TEST_SKIP_COMPARE) # now compare the error output with the error reference execute_process ( - COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${TEST_FOLDER}/${TEST_OUTPUT}.err ${TEST_FOLDER}/${TEST_ERRREF} + COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${TEST_FOLDER}/${TEST_OUTPUT}.err ${TEST_FOLDER}/${TEST_ERRREF} RESULT_VARIABLE TEST_ERRREF_RESULT ) if (TEST_ERRREF_RESULT) diff --git a/config/cmake/runTest.cmake b/config/cmake/runTest.cmake index b8abe921..cc433b1e 100644 --- a/config/cmake/runTest.cmake +++ b/config/cmake/runTest.cmake @@ -133,11 +133,18 @@ endif () message (STATUS "COMMAND Error: ${TEST_ERROR}") # remove special output -file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM) -string (FIND TEST_STREAM "_pmi_alps" TEST_FIND_RESULT) -if (TEST_FIND_RESULT GREATER -1) - string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}") - file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM}) +if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}") + file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM) + string (FIND "${TEST_STREAM}" "_pmi_alps" TEST_FIND_RESULT) + if (TEST_FIND_RESULT GREATER -1) + string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}") + file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}") + endif () + string (FIND "${TEST_STREAM}" "ulimit -s" TEST_FIND_RESULT) + if (TEST_FIND_RESULT GREATER -1) + string (REGEX REPLACE "^.*ulimit -s[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}") + file (WRITE 
${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}") + endif () endif () # remove special error output @@ -148,7 +155,7 @@ else () # the error stack remains in the .err file file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM) endif () -string (FIND TEST_STREAM "no version information available" TEST_FIND_RESULT) +string (FIND "${TEST_STREAM}" "no version information available" TEST_FIND_RESULT) if (TEST_FIND_RESULT GREATER -1) string (REGEX REPLACE "^.*no version information available[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}") # write back the changes to the original files @@ -160,7 +167,7 @@ if (TEST_FIND_RESULT GREATER -1) endif () # if the output file needs Storage text removed -if (TEST_MASK) +if (TEST_MASK_STORE) file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM) string (REGEX REPLACE "Storage:[^\n]+\n" "Storage:
\n" TEST_STREAM "${TEST_STREAM}") file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}") @@ -198,6 +205,13 @@ if (TEST_MASK_ERROR) endif () # remove text from the output file +if (TEST_MASK) + file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM) + string (REGEX REPLACE "${TEST_MASK}" "" TEST_STREAM "${TEST_STREAM}") + file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}") +endif () + +# replace text from the output file if (TEST_FILTER) file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM) string (REGEX REPLACE "${TEST_FILTER}" "${TEST_FILTER_REPLACE}" TEST_STREAM "${TEST_STREAM}") @@ -218,19 +232,11 @@ if (NOT TEST_SKIP_COMPARE) file (READ ${TEST_FOLDER}/${TEST_REFERENCE} TEST_STREAM) list (LENGTH TEST_STREAM test_len) if (test_len GREATER 0) - if (WIN32) - configure_file(${TEST_FOLDER}/${TEST_REFERENCE} ${TEST_FOLDER}/${TEST_REFERENCE}.tmp NEWLINE_STYLE CRLF) - if (EXISTS "${TEST_FOLDER}/${TEST_REFERENCE}.tmp") - file(RENAME ${TEST_FOLDER}/${TEST_REFERENCE}.tmp ${TEST_FOLDER}/${TEST_REFERENCE}) - endif () - #file (READ ${TEST_FOLDER}/${TEST_REFERENCE} TEST_STREAM) - #file (WRITE ${TEST_FOLDER}/${TEST_REFERENCE} "${TEST_STREAM}") - endif () if (NOT TEST_SORT_COMPARE) # now compare the output with the reference execute_process ( - COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_FOLDER}/${TEST_REFERENCE} + COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_FOLDER}/${TEST_REFERENCE} RESULT_VARIABLE TEST_COMPARE_RESULT ) else () @@ -293,18 +299,10 @@ if (NOT TEST_SKIP_COMPARE) file (READ ${TEST_FOLDER}/${TEST_ERRREF} TEST_STREAM) list (LENGTH TEST_STREAM test_len) if (test_len GREATER 0) - if (WIN32) - configure_file(${TEST_FOLDER}/${TEST_ERRREF} ${TEST_FOLDER}/${TEST_ERRREF}.tmp NEWLINE_STYLE CRLF) - if (EXISTS "${TEST_FOLDER}/${TEST_ERRREF}.tmp") - file(RENAME ${TEST_FOLDER}/${TEST_ERRREF}.tmp ${TEST_FOLDER}/${TEST_ERRREF}) - endif () - #file (READ 
${TEST_FOLDER}/${TEST_ERRREF} TEST_STREAM) - #file (WRITE ${TEST_FOLDER}/${TEST_ERRREF} "${TEST_STREAM}") - endif () # now compare the error output with the error reference execute_process ( - COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${TEST_FOLDER}/${TEST_OUTPUT}.err ${TEST_FOLDER}/${TEST_ERRREF} + COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${TEST_FOLDER}/${TEST_OUTPUT}.err ${TEST_FOLDER}/${TEST_ERRREF} RESULT_VARIABLE TEST_ERRREF_RESULT ) if (TEST_ERRREF_RESULT) diff --git a/configure.ac b/configure.ac index 80f0b683..babfdecc 100644 --- a/configure.ac +++ b/configure.ac @@ -7,12 +7,10 @@ # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. AC_PREREQ(2.69) AC_INIT(HDF5-examples, 0.1, help@hdfgroup.org)