/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
#include "H5_api_test_util.h"
#include "H5_api_test_parallel.h"
#include "H5_api_attribute_test_parallel.h"
#include "H5_api_dataset_test_parallel.h"
#include "H5_api_datatype_test_parallel.h"
#include "H5_api_file_test_parallel.h"
#include "H5_api_group_test_parallel.h"
#include "H5_api_link_test_parallel.h"
#include "H5_api_misc_test_parallel.h"
#include "H5_api_object_test_parallel.h"
#ifdef H5_API_TEST_HAVE_ASYNC
#include "H5_api_async_test_parallel.h"
#endif
/* Name of the container file used by the parallel API tests (filled in by main()) */
char H5_api_test_parallel_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
/* Optional path prefix for test files, taken from HDF5_API_TEST_PATH_PREFIX ("" if unset) */
const char *test_path_prefix;
/* Global test statistics, updated by the individual test routines */
size_t n_tests_run_g;
size_t n_tests_passed_g;
size_t n_tests_failed_g;
size_t n_tests_skipped_g;
/* Size of MPI_COMM_WORLD and this process' rank within it (set in main()) */
int mpi_size;
int mpi_rank;
/* X-macro to define the following for each test:
 * - enum type
 * - name
 * - test function
 * - enabled by default
 *
 * The NULL and MAX entries are sentinels: NULL maps unrecognized names,
 * MAX bounds iteration over the generated tables.
 */
#ifdef H5_API_TEST_HAVE_ASYNC
#define H5_API_PARALLEL_TESTS \
X(H5_API_TEST_NULL, "", NULL, 0) \
X(H5_API_TEST_FILE, "file", H5_api_file_test_parallel, 1) \
X(H5_API_TEST_GROUP, "group", H5_api_group_test_parallel, 1) \
X(H5_API_TEST_DATASET, "dataset", H5_api_dataset_test_parallel, 1) \
X(H5_API_TEST_DATATYPE, "datatype", H5_api_datatype_test_parallel, 1) \
X(H5_API_TEST_ATTRIBUTE, "attribute", H5_api_attribute_test_parallel, 1) \
X(H5_API_TEST_LINK, "link", H5_api_link_test_parallel, 1) \
X(H5_API_TEST_OBJECT, "object", H5_api_object_test_parallel, 1) \
X(H5_API_TEST_MISC, "misc", H5_api_misc_test_parallel, 1) \
X(H5_API_TEST_ASYNC, "async", H5_api_async_test_parallel, 1) \
X(H5_API_TEST_MAX, "", NULL, 0)
#else
#define H5_API_PARALLEL_TESTS \
X(H5_API_TEST_NULL, "", NULL, 0) \
X(H5_API_TEST_FILE, "file", H5_api_file_test_parallel, 1) \
X(H5_API_TEST_GROUP, "group", H5_api_group_test_parallel, 1) \
X(H5_API_TEST_DATASET, "dataset", H5_api_dataset_test_parallel, 1) \
X(H5_API_TEST_DATATYPE, "datatype", H5_api_datatype_test_parallel, 1) \
X(H5_API_TEST_ATTRIBUTE, "attribute", H5_api_attribute_test_parallel, 1) \
X(H5_API_TEST_LINK, "link", H5_api_link_test_parallel, 1) \
X(H5_API_TEST_OBJECT, "object", H5_api_object_test_parallel, 1) \
X(H5_API_TEST_MISC, "misc", H5_api_misc_test_parallel, 1) \
X(H5_API_TEST_MAX, "", NULL, 0)
#endif
/* Generate the enum of test IDs */
#define X(a, b, c, d) a,
enum H5_api_test_type { H5_API_PARALLEL_TESTS };
#undef X
/* Generate the table of test names, indexed by test ID */
#define X(a, b, c, d) b,
static const char *const H5_api_test_name[] = {H5_API_PARALLEL_TESTS};
#undef X
/* Generate the table of test entry points, indexed by test ID */
#define X(a, b, c, d) c,
static int (*H5_api_test_func[])(void) = {H5_API_PARALLEL_TESTS};
#undef X
/* Generate the enabled/disabled flags, indexed by test ID */
#define X(a, b, c, d) d,
static int H5_api_test_enabled[] = {H5_API_PARALLEL_TESTS};
#undef X
static enum H5_api_test_type
H5_api_test_name_to_type(const char *test_name)
{
enum H5_api_test_type i = 0;
while (strcmp(H5_api_test_name[i], test_name) && i != H5_API_TEST_MAX)
i++;
return ((i == H5_API_TEST_MAX) ? H5_API_TEST_NULL : i);
}
static void
H5_api_test_run(void)
{
enum H5_api_test_type i;
for (i = H5_API_TEST_FILE; i < H5_API_TEST_MAX; i++)
if (H5_api_test_enabled[i])
(void)H5_api_test_func[i]();
}
/*
 * Create a file access property list (FAPL) configured for MPI I/O.
 *
 * comm         - MPI communicator to use for file access
 * info         - MPI info object to use for file access
 * coll_md_read - whether to require collective metadata reads
 *
 * Collective metadata writes are always enabled.
 *
 * Returns the new FAPL ID on success, or H5I_INVALID_HID on failure.
 * The caller owns the returned FAPL and must H5Pclose() it.
 */
hid_t
create_mpi_fapl(MPI_Comm comm, MPI_Info info, bool coll_md_read)
{
    hid_t ret_pl = H5I_INVALID_HID;

    /* Refresh the cached rank of this process; also set in main() */
    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);

    if ((ret_pl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        goto error;

    if (H5Pset_fapl_mpio(ret_pl, comm, info) < 0)
        goto error;
    if (H5Pset_all_coll_metadata_ops(ret_pl, coll_md_read) < 0)
        goto error;
    if (H5Pset_coll_metadata_write(ret_pl, true) < 0)
        goto error;

    return ret_pl;

error:
    /* Close the partially-configured FAPL instead of leaking it
     * when one of the H5Pset_* calls above fails.
     */
    if (ret_pl >= 0)
        H5Pclose(ret_pl);

    return H5I_INVALID_HID;
} /* end create_mpi_fapl() */
/*
 * Generates random dimensions for a dataspace. The first dimension
 * is always `mpi_size` to allow for convenient subsetting; the rest
 * of the dimensions are randomized.
 *
 * space_rank - number of dimensions to generate (must be > 0)
 * dims_out   - receives a malloc'd array of `space_rank` dimensions;
 *              the caller must free() it
 *
 * Collective: rank 0 chooses the dimensions and broadcasts them so
 * they are uniform across all MPI ranks.
 *
 * Returns 0 on success, -1 on failure.
 */
int
generate_random_parallel_dimensions(int space_rank, hsize_t **dims_out)
{
    hsize_t *dims = NULL;
    size_t i;

    /* Reject invalid rank and a NULL output pointer up front */
    if (space_rank <= 0 || NULL == dims_out)
        goto error;

    if (NULL == (dims = malloc((size_t)space_rank * sizeof(hsize_t))))
        goto error;

    if (MAINPROCESS) {
        for (i = 0; i < (size_t)space_rank; i++) {
            if (i == 0)
                dims[i] = (hsize_t)mpi_size;
            else
                dims[i] = (hsize_t)((rand() % MAX_DIM_SIZE) + 1);
        }
    }

    /*
     * Ensure that the dataset dimensions are uniform across ranks.
     * NOTE(review): assumes hsize_t is an unsigned long long — matches
     * the existing use of MPI_UNSIGNED_LONG_LONG here.
     */
    if (MPI_SUCCESS != MPI_Bcast(dims, space_rank, MPI_UNSIGNED_LONG_LONG, 0, MPI_COMM_WORLD))
        goto error;

    *dims_out = dims;

    return 0;

error:
    free(dims);
    return -1;
}
int
main(int argc, char **argv)
{
const char *vol_connector_string;
const char *vol_connector_name;
unsigned seed;
hid_t fapl_id = H5I_INVALID_HID;
hid_t default_con_id = H5I_INVALID_HID;
hid_t registered_con_id = H5I_INVALID_HID;
char *vol_connector_string_copy = NULL;
char *vol_connector_info = NULL;
int required = MPI_THREAD_MULTIPLE;
int provided;
/*
* Attempt to initialize with MPI_THREAD_MULTIPLE for VOL connectors
* that require that level of threading support in MPI
*/
if (MPI_SUCCESS != MPI_Init_thread(&argc, &argv, required, &provided)) {
fprintf(stderr, "MPI_Init_thread failed\n");
exit(EXIT_FAILURE);
}
MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
if (provided < required) {
if (MAINPROCESS)
printf("** INFO: couldn't initialize with MPI_THREAD_MULTIPLE threading support **\n");
}
/* Simple argument checking, TODO can improve that later */
if (argc > 1) {
enum H5_api_test_type i = H5_api_test_name_to_type(argv[1]);
if (i != H5_API_TEST_NULL) {
/* Run only specific API test */
memset(H5_api_test_enabled, 0, sizeof(H5_api_test_enabled));
H5_api_test_enabled[i] = 1;
}
}
/*
* Make sure that HDF5 is initialized on all MPI ranks before proceeding.
* This is important for certain VOL connectors which may require a
* collective initialization.
*/
H5open();
n_tests_run_g = 0;
n_tests_passed_g = 0;
n_tests_failed_g = 0;
n_tests_skipped_g = 0;
if (MAINPROCESS) {
seed = (unsigned)time(NULL);
}
if (mpi_size > 1) {
if (MPI_SUCCESS != MPI_Bcast(&seed, 1, MPI_UNSIGNED, 0, MPI_COMM_WORLD)) {
if (MAINPROCESS)
fprintf(stderr, "Couldn't broadcast test seed\n");
goto error;
}
}
srand(seed);
if (NULL == (test_path_prefix = getenv(HDF5_API_TEST_PATH_PREFIX)))
test_path_prefix = "";
snprintf(H5_api_test_parallel_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s%s", test_path_prefix,
PARALLEL_TEST_FILE_NAME);
if (NULL == (vol_connector_string = getenv(HDF5_VOL_CONNECTOR))) {
if (MAINPROCESS)
printf("No VOL connector selected; using native VOL connector\n");
vol_connector_name = "native";
vol_connector_info = NULL;
}
else {
char *token = NULL;
BEGIN_INDEPENDENT_OP(copy_connector_string)
{
if (NULL == (vol_connector_string_copy = strdup(vol_connector_string))) {
if (MAINPROCESS)
fprintf(stderr, "Unable to copy VOL connector string\n");
INDEPENDENT_OP_ERROR(copy_connector_string);
}
}
END_INDEPENDENT_OP(copy_connector_string);
BEGIN_INDEPENDENT_OP(get_connector_name)
{
if (NULL == (token = strtok(vol_connector_string_copy, " "))) {
if (MAINPROCESS)
fprintf(stderr, "Error while parsing VOL connector string\n");
INDEPENDENT_OP_ERROR(get_connector_name);
}
}
END_INDEPENDENT_OP(get_connector_name);
vol_connector_name = token;
if (NULL != (token = strtok(NULL, " "))) {
vol_connector_info = token;
}
}
if (MAINPROCESS) {
printf("Running parallel API tests with VOL connector '%s' and info string '%s'\n\n",
vol_connector_name, vol_connector_info ? vol_connector_info : "");
printf("Test parameters:\n");
printf(" - Test file name: '%s'\n", H5_api_test_parallel_filename);
printf(" - Number of MPI ranks: %d\n", mpi_size);
printf(" - Test seed: %u\n", seed);
printf("\n\n");
}
BEGIN_INDEPENDENT_OP(create_fapl)
{
if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, false)) < 0) {
if (MAINPROCESS)
fprintf(stderr, "Unable to create FAPL\n");
INDEPENDENT_OP_ERROR(create_fapl);
}
}
END_INDEPENDENT_OP(create_fapl);
BEGIN_INDEPENDENT_OP(check_vol_register)
{
/*
* If using a VOL connector other than the native
* connector, check whether the VOL connector was
* successfully registered before running the tests.
* Otherwise, HDF5 will default to running the tests
* with the native connector, which could be misleading.
*/
if (0 != strcmp(vol_connector_name, "native")) {
htri_t is_registered;
if ((is_registered = H5VLis_connector_registered_by_name(vol_connector_name)) < 0) {
if (MAINPROCESS)
fprintf(stderr, "Unable to determine if VOL connector is registered\n");
INDEPENDENT_OP_ERROR(check_vol_register);
}
if (!is_registered) {
if (MAINPROCESS)
fprintf(stderr, "Specified VOL connector '%s' wasn't correctly registered!\n",
vol_connector_name);
INDEPENDENT_OP_ERROR(check_vol_register);
}
else {
/*
* If the connector was successfully registered, check that
* the connector ID set on the default FAPL matches the ID
* for the registered connector before running the tests.
*/
if (H5Pget_vol_id(fapl_id, &default_con_id) < 0) {
if (MAINPROCESS)
fprintf(stderr, "Couldn't retrieve ID of VOL connector set on default FAPL\n");
INDEPENDENT_OP_ERROR(check_vol_register);
}
if ((registered_con_id = H5VLget_connector_id_by_name(vol_connector_name)) < 0) {
if (MAINPROCESS)
fprintf(stderr, "Couldn't retrieve ID of registered VOL connector\n");
INDEPENDENT_OP_ERROR(check_vol_register);
}
if (default_con_id != registered_con_id) {
if (MAINPROCESS)
fprintf(stderr,
"VOL connector set on default FAPL didn't match specified VOL connector\n");
INDEPENDENT_OP_ERROR(check_vol_register);
}
}
}
}
END_INDEPENDENT_OP(check_vol_register);
/* Retrieve the VOL cap flags - work around an HDF5
* library issue by creating a FAPL
*/
BEGIN_INDEPENDENT_OP(get_capability_flags)
{
vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0) {
if (MAINPROCESS)
fprintf(stderr, "Unable to retrieve VOL connector capability flags\n");
INDEPENDENT_OP_ERROR(get_capability_flags);
}
}
END_INDEPENDENT_OP(get_capability_flags);
/*
* Create the file that will be used for all of the tests,
* except for those which test file creation.
*/
BEGIN_INDEPENDENT_OP(create_test_container)
{
if (MAINPROCESS) {
if (create_test_container(H5_api_test_parallel_filename, vol_cap_flags_g) < 0) {
fprintf(stderr, " failed to create testing container file '%s'\n",
H5_api_test_parallel_filename);
INDEPENDENT_OP_ERROR(create_test_container);
}
}
}
END_INDEPENDENT_OP(create_test_container);
/* Run all the tests that are enabled */
H5_api_test_run();
if (MAINPROCESS)
printf("Cleaning up testing files\n");
H5Fdelete(H5_api_test_parallel_filename, fapl_id);
if (n_tests_run_g > 0) {
if (MAINPROCESS)
printf("The below statistics are minimum values due to the possibility of some ranks failing a "
"test while others pass:\n");
if (MPI_SUCCESS != MPI_Allreduce(MPI_IN_PLACE, &n_tests_passed_g, 1, MPI_UNSIGNED_LONG_LONG, MPI_MIN,
MPI_COMM_WORLD)) {
if (MAINPROCESS)
printf(" failed to collect consensus about the minimum number of tests that passed -- "
"reporting rank 0's (possibly inaccurate) value\n");
}
if (MAINPROCESS)
printf("%s%zu/%zu (%.2f%%) API tests passed across all ranks with VOL connector '%s'\n",
n_tests_passed_g > 0 ? "At least " : "", n_tests_passed_g, n_tests_run_g,
((double)n_tests_passed_g / (double)n_tests_run_g * 100.0), vol_connector_name);
if (MPI_SUCCESS != MPI_Allreduce(MPI_IN_PLACE, &n_tests_failed_g, 1, MPI_UNSIGNED_LONG_LONG, MPI_MIN,
MPI_COMM_WORLD)) {
if (MAINPROCESS)
printf(" failed to collect consensus about the minimum number of tests that failed -- "
"reporting rank 0's (possibly inaccurate) value\n");
}
if (MAINPROCESS) {
printf("%s%zu/%zu (%.2f%%) API tests did not pass across all ranks with VOL connector '%s'\n",
n_tests_failed_g > 0 ? "At least " : "", n_tests_failed_g, n_tests_run_g,
((double)n_tests_failed_g / (double)n_tests_run_g * 100.0), vol_connector_name);
printf("%zu/%zu (%.2f%%) API tests were skipped with VOL connector '%s'\n", n_tests_skipped_g,
n_tests_run_g, ((double)n_tests_skipped_g / (double)n_tests_run_g * 100.0),
vol_connector_name);
}
}
if (default_con_id >= 0 && H5VLclose(default_con_id) < 0) {
if (MAINPROCESS)
fprintf(stderr, " failed to close VOL connector ID\n");
}
if (registered_con_id >= 0 && H5VLclose(registered_con_id) < 0) {
if (MAINPROCESS)
fprintf(stderr, " failed to close VOL connector ID\n");
}
if (fapl_id >= 0 && H5Pclose(fapl_id) < 0) {
if (MAINPROCESS)
fprintf(stderr, " failed to close MPI FAPL\n");
}
H5close();
MPI_Finalize();
exit(EXIT_SUCCESS);
error:
free(vol_connector_string_copy);
H5E_BEGIN_TRY
{
H5VLclose(default_con_id);
H5VLclose(registered_con_id);
H5Pclose(fapl_id);
}
H5E_END_TRY
MPI_Finalize();
exit(EXIT_FAILURE);
}