API test updates (#3018)

* Remove macros from api tests (#2929)
* Remove macros and undefined callbacks (#2959)
* Remove remaining macros from H5_api_tests_disabled.h (#2968)
* Put some vol capability checks in testpar tests and remove remaining warnings (#2995)
* API tests datatype generation cleanup
* Clean up API tests' random datatype generation and fix bug with enum
datatype generation
* Init parallel API tests with MPI_THREAD_MULTIPLE
* HDF5 API tests - Check VOL connector registration
* Determine whether a VOL connector failed to load before running API
tests
* Cleanup some usages of H5VL_CAP_FLAG_CREATION_ORDER in API tests
* Remove some now-unused macros from H5_api_tests_disabled.h
* Enable HDF5 API tests by default
* Implement CMake option to install HDF5 API tests
* Check for invalid AAPL from H5Acreate
* Enable building of VOL connectors alongside HDF5 in CMake
* Prepend CMake VOL URL option indices with 0s so they come in order
* Don't turn on API tests by default yet
* Document VOL connector FetchContent functionality
* Add release note for API test updates
* Only install testing library if API tests are installed
* Fix grammar
Author: jhendersonHDF, 2023-05-26 15:29:02 -05:00 (committed by GitHub)
parent 77e64e0df4
commit 79bb60c3f6
37 changed files with 4310 additions and 3754 deletions


@ -834,6 +834,11 @@ include (UserMacros.cmake)
#-----------------------------------------------------------------------------
include (CMakeFilters.cmake)
#-----------------------------------------------------------------------------
# Include external VOL connectors
#-----------------------------------------------------------------------------
include (CMakeVOL.cmake)
#-----------------------------------------------------------------------------
# Option for external libraries on windows
#-----------------------------------------------------------------------------

CMakeVOL.cmake (new file, 180 lines)

@ -0,0 +1,180 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
include (FetchContent)
# Function to retrieve all of the CMake targets generated
# in a directory and all its subdirectories
function (get_generated_cmake_targets out_var dir)
get_directory_property (dir_targets DIRECTORY "${dir}" BUILDSYSTEM_TARGETS)
get_directory_property (dir_subdirs DIRECTORY "${dir}" SUBDIRECTORIES)
foreach (subdir ${dir_subdirs})
get_generated_cmake_targets(subdir_targets "${subdir}")
list (APPEND dir_targets "${subdir_targets}")
endforeach()
set (${out_var} "${dir_targets}" PARENT_SCOPE)
endfunction ()
# For now, only support building of external VOL connectors with FetchContent
option (HDF5_VOL_ALLOW_EXTERNAL "Allow building of external HDF5 VOL connectors with FetchContent" OFF)
mark_as_advanced (HDF5_VOL_ALLOW_EXTERNAL)
if (HDF5_VOL_ALLOW_EXTERNAL)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "NO" OR NOT HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT")
message (FATAL_ERROR "HDF5_ALLOW_EXTERNAL_SUPPORT must be set to 'GIT' to allow building of external HDF5 VOL connectors")
endif ()
# For compatibility, set some variables that projects would
# typically look for after calling find_package(HDF5)
set (HDF5_FOUND 1)
set (HDF5_LIBRARIES "${HDF5_LIBSH_TARGET};${LINK_LIBS};${LINK_COMP_LIBS};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>")
set (HDF5_INCLUDE_DIRS "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
set (HDF5_MAX_EXTERNAL_VOLS 10)
set (HDF5_EXTERNAL_VOL_TARGETS "")
foreach (vol_idx RANGE 1 ${HDF5_MAX_EXTERNAL_VOLS})
# Generate fixed-width index number prepended with 0s
# so URLs come in order from 1 - HDF5_MAX_EXTERNAL_VOLS
set (vol_idx_num_digits 2) # Based on HDF5_MAX_EXTERNAL_VOLS
set (vol_idx_fixed "${vol_idx}")
string (LENGTH "${vol_idx_fixed}" vol_idx_len)
while (vol_idx_len LESS vol_idx_num_digits)
string (PREPEND vol_idx_fixed "0")
math (EXPR vol_idx_len "${vol_idx_len}+1")
endwhile ()
set (HDF5_VOL_URL${vol_idx_fixed} "" CACHE STRING "Git repository URL of an external HDF5 VOL connector to build")
mark_as_advanced (HDF5_VOL_URL${vol_idx_fixed})
if (NOT "${HDF5_VOL_URL${vol_idx_fixed}}" STREQUAL "")
# Extract the name of the VOL connector
string (FIND "${HDF5_VOL_URL${vol_idx_fixed}}" "/" hdf5_vol_name_pos REVERSE)
if (hdf5_vol_name_pos EQUAL -1)
message (SEND_ERROR "Invalid URL '${HDF5_VOL_URL${vol_idx_fixed}}' specified for HDF5_VOL_URL${vol_idx_fixed}")
endif ()
math (EXPR hdf5_vol_name_pos "${hdf5_vol_name_pos}+1")
string (SUBSTRING "${HDF5_VOL_URL${vol_idx_fixed}}" ${hdf5_vol_name_pos} -1 hdf5_vol_name)
string (REPLACE ".git" "" hdf5_vol_name "${hdf5_vol_name}")
string (STRIP "${hdf5_vol_name}" hdf5_vol_name)
string (TOUPPER "${hdf5_vol_name}" hdf5_vol_name_upper)
string (TOLOWER "${hdf5_vol_name}" hdf5_vol_name_lower)
message (VERBOSE "Building VOL connector '${hdf5_vol_name}' with FetchContent")
# Set some cache variables that can be set by users when building
set ("HDF5_VOL_${hdf5_vol_name_upper}_NAME" "" CACHE STRING "Name of VOL connector to set for the HDF5_VOL_CONNECTOR environment variable")
set ("HDF5_VOL_${hdf5_vol_name_upper}_BRANCH" "main" CACHE STRING "Git branch (or tag) to use when building VOL connector '${hdf5_vol_name}'")
option ("HDF5_VOL_${hdf5_vol_name_upper}_TEST_PARALLEL" "Whether to test VOL connector '${hdf5_vol_name}' against the parallel API tests" OFF)
mark_as_advanced ("HDF5_VOL_${hdf5_vol_name_upper}_NAME")
mark_as_advanced ("HDF5_VOL_${hdf5_vol_name_upper}_BRANCH")
mark_as_advanced ("HDF5_VOL_${hdf5_vol_name_upper}_TEST_PARALLEL")
if (HDF5_TEST_API)
if ("${HDF5_VOL_${hdf5_vol_name_upper}_NAME}" STREQUAL "")
message (SEND_ERROR "HDF5_VOL_${hdf5_vol_name_upper}_NAME must be set to a valid connector name to use VOL connector '${hdf5_vol_name}' for testing")
endif ()
endif ()
if ("${HDF5_VOL_${hdf5_vol_name_upper}_BRANCH}" STREQUAL "")
message (SEND_ERROR "HDF5_VOL_${hdf5_vol_name_upper}_BRANCH must be set to a valid git branch name (or git tag) to build VOL connector '${hdf5_vol_name}'")
endif ()
FetchContent_Declare (HDF5_VOL_${hdf5_vol_name_lower}
GIT_REPOSITORY "${HDF5_VOL_URL${vol_idx_fixed}}"
GIT_TAG "${HDF5_VOL_${hdf5_vol_name_upper}_BRANCH}"
)
FetchContent_GetProperties(HDF5_VOL_${hdf5_vol_name_lower})
if (NOT hdf5_vol_${hdf5_vol_name_lower}_POPULATED)
FetchContent_Populate(HDF5_VOL_${hdf5_vol_name_lower})
if (NOT EXISTS "${hdf5_vol_${hdf5_vol_name_lower}_SOURCE_DIR}/CMakeLists.txt")
message (SEND_ERROR "The git repository branch '${HDF5_VOL_${hdf5_vol_name_upper}_BRANCH}' for VOL connector '${hdf5_vol_name}' does not appear to contain a CMakeLists.txt file")
endif ()
# If there are any calls to find_package(HDF5) in the connector's
# CMakeLists.txt files, remove those since any found HDF5 targets
# will conflict with targets being generated by this build of HDF5
if (EXISTS "${hdf5_vol_${hdf5_vol_name_lower}_SOURCE_DIR}/CMakeLists.txt")
file (READ "${hdf5_vol_${hdf5_vol_name_lower}_SOURCE_DIR}/CMakeLists.txt" vol_cmake_contents)
string (REGEX REPLACE "[ \t]*find_package[ \t]*\\([ \t]*HDF5[^\r\n\\)]*\\)[ \t]*[\r\n]+" "" vol_cmake_contents "${vol_cmake_contents}")
file (WRITE "${hdf5_vol_${hdf5_vol_name_lower}_SOURCE_DIR}/CMakeLists.txt" "${vol_cmake_contents}")
endif ()
if (EXISTS "${hdf5_vol_${hdf5_vol_name_lower}_SOURCE_DIR}/src/CMakeLists.txt")
file (READ "${hdf5_vol_${hdf5_vol_name_lower}_SOURCE_DIR}/src/CMakeLists.txt" vol_cmake_contents)
string (REGEX REPLACE "[ \t]*find_package[ \t]*\\([ \t]*HDF5[^\r\n\\)]*\\)[ \t]*[\r\n]+" "" vol_cmake_contents "${vol_cmake_contents}")
file (WRITE "${hdf5_vol_${hdf5_vol_name_lower}_SOURCE_DIR}/src/CMakeLists.txt" "${vol_cmake_contents}")
endif ()
add_subdirectory (${hdf5_vol_${hdf5_vol_name_lower}_SOURCE_DIR} ${hdf5_vol_${hdf5_vol_name_lower}_BINARY_DIR})
# Get list of targets generated by build of connector
get_generated_cmake_targets (connector_targets ${hdf5_vol_${hdf5_vol_name_lower}_SOURCE_DIR})
# Create a custom target for the connector to encompass all its
# targets and other custom properties set by us for later use
add_custom_target ("HDF5_VOL_${hdf5_vol_name_lower}")
# Define and set a custom property on the VOL connector target to
# capture all of the connector's generated targets
define_property (
TARGET
PROPERTY HDF5_VOL_TARGETS
)
set_target_properties (
"HDF5_VOL_${hdf5_vol_name_lower}"
PROPERTIES
HDF5_VOL_TARGETS "${connector_targets}"
)
# Define and set a custom property on the VOL connector target to
# capture the connector's name to set for the HDF5_VOL_CONNECTOR
# environment variable for testing
define_property (
TARGET
PROPERTY HDF5_VOL_NAME
BRIEF_DOCS "VOL connector name to use for the HDF5_VOL_CONNECTOR environment variable when testing"
)
set_target_properties (
"HDF5_VOL_${hdf5_vol_name_lower}"
PROPERTIES
HDF5_VOL_NAME "${HDF5_VOL_${hdf5_vol_name_upper}_NAME}"
)
# Define and set a custom property on the VOL connector target to
# capture whether the connector should be tested with the parallel
# API tests
define_property (
TARGET
PROPERTY HDF5_VOL_TEST_PARALLEL
BRIEF_DOCS "Whether the VOL connector should be tested with the parallel API tests"
)
set_target_properties (
"HDF5_VOL_${hdf5_vol_name_lower}"
PROPERTIES
HDF5_VOL_TEST_PARALLEL ${HDF5_VOL_${hdf5_vol_name_upper}_TEST_PARALLEL}
)
# Add this connector's target to the list of external connector targets
list (APPEND HDF5_EXTERNAL_VOL_TARGETS "HDF5_VOL_${hdf5_vol_name_lower}")
endif ()
endif ()
endforeach ()
endif ()

doc/cmake-vols-fetchcontent.md (new file, 195 lines)

@ -0,0 +1,195 @@
# Building and testing HDF5 VOL connectors with CMake FetchContent
This document details the process of using CMake options to build and test
an HDF5 VOL connector alongside the HDF5 library when building HDF5 from
source. This approach can provide several benefits, including the following:
* A VOL connector built this way can be tested at the same time as
HDF5 itself, which eliminates the multi-step build process where one
builds HDF5, uses it to build the VOL connector, and then uses the
external [HDF5 VOL tests](https://github.com/hdfGroup/vol-tests)
repository to test the connector.
* Building VOL connectors in this manner will usually install the built
connector library alongside the HDF5 library, allowing future opportunities
for HDF5 to set a default plugin path such that the HDF5_PLUGIN_PATH
environment variable doesn't need to be set.
## Building
To enable building of an HDF5 VOL connector using HDF5's CMake functionality,
two CMake variables must first be set:
HDF5_VOL_ALLOW_EXTERNAL (Default: OFF)
This variable determines whether building of external HDF5 VOL connectors
is enabled.
HDF5_ALLOW_EXTERNAL_SUPPORT (Default: "NO")
This variable is a string that specifies the manner in which the source code for
an external VOL connector will be retrieved. Currently, this variable must be set
to "GIT" for building external VOL connectors.
Once the `HDF5_VOL_ALLOW_EXTERNAL` option is set to ON and the `HDF5_ALLOW_EXTERNAL_SUPPORT`
variable is set to "GIT", the CMake cache will be populated with a predefined
number (currently 10) of new variables, named:
HDF5_VOL_URL01
HDF5_VOL_URL02
HDF5_VOL_URL03
...
For each of these variables, a URL that points to an HDF5 VOL connector Git
repository can be specified. These URLs should currently be HTTPS URLs. For
example, to specify the HDF5 Asynchronous I/O VOL Connector developed by the
ECP team, one can provide the following option to `cmake`:
-DHDF5_VOL_URL01=https://github.com/hpc-io/vol-async.git
For each URL specified, HDF5's CMake code will attempt to use CMake's
[FetchContent](https://cmake.org/cmake/help/latest/module/FetchContent.html)
functionality to retrieve the source code for a VOL connector pointed to by
that URL and will try to build that VOL connector as part of the HDF5 library
build process. The VOL connector must be buildable with CMake and currently
must have a CMakeLists.txt file at the top level of its source tree in order
to be built by this process. If the source code for a VOL connector is successfully
retrieved, the HDF5 build's CMake cache will be populated with variables from
the VOL connector's CMake code, as if one were building the connector by itself.
This gives one the ability to customize the build of the connector as usual.
The CMake cache will also be populated with a few new variables for each VOL
connector that was successfully retrieved from a given URL. To generate these
variables, the CMake code first creates an internal name for the VOL connector
by stripping off the last part of the Git repository URL given for the connector,
removing the ".git" suffix and any whitespace and then upper-casing the result.
For example, the name of the VOL connector located at the URL
https://github.com/hpc-io/vol-async.git would become "VOL-ASYNC". Then, the following
new variables get created:
HDF5_VOL_<VOL name>_BRANCH (Default: "main")
This variable specifies the git branch name or tag to use when fetching
the source code for the VOL connector with the CMake-internal name
'<VOL name>'.
HDF5_VOL_<VOL name>_NAME (Default: "")
This variable specifies the string that should be used when setting the
HDF5_VOL_CONNECTOR environment variable for testing the VOL connector
with the CMake-internal name '<VOL name>'. The value for this variable
can be determined according to the canonical name given to the connector
by the connector's author(s), as well as any extra info that needs to be
passed to the connector for its configuration (see example below). This
variable must be set in order for the VOL connector to be testable with
HDF5's tests.
HDF5_VOL_<VOL name>_TEST_PARALLEL (Default: OFF)
This variable determines whether the VOL connector with the CMake-internal
name '<VOL name>' should be tested against HDF5's parallel tests.
As an example, this would create the following variables for the
previously-mentioned VOL connector:
HDF5_VOL_VOL-ASYNC_BRANCH
HDF5_VOL_VOL-ASYNC_NAME
HDF5_VOL_VOL-ASYNC_TEST_PARALLEL
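The derivation above mirrors the logic in CMakeVOL.cmake. As a standalone
illustration, here is a minimal CMake sketch of the same transformation (the
variable names below are illustrative only and are not part of HDF5's CMake code):
set (vol_url "https://github.com/hpc-io/vol-async.git")
string (FIND "${vol_url}" "/" name_pos REVERSE)          # index of the last '/'
math (EXPR name_pos "${name_pos}+1")
string (SUBSTRING "${vol_url}" ${name_pos} -1 vol_name)  # "vol-async.git"
string (REPLACE ".git" "" vol_name "${vol_name}")        # "vol-async"
string (STRIP "${vol_name}" vol_name)
string (TOUPPER "${vol_name}" vol_name)                  # "VOL-ASYNC"
message (STATUS "Derived connector name: ${vol_name}")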
**NOTE**
If a VOL connector requires extra information to be passed in its
HDF5_VOL_<VOL name>_NAME variable and that information contains any semicolons,
those semicolons should be escaped with a single backslash so that CMake
doesn't parse the string as a list. If `cmake` is run from a shell, extra care
may need to be taken when escaping the semicolons depending on how the
shell interprets backslashes.
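For example, a connector name string of async under_vol=0;under_info={} (taken
from the example below) would need to be passed to `cmake` with the semicolon
escaped:
-DHDF5_VOL_VOL-ASYNC_NAME="async under_vol=0\;under_info={}"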
### Example - Build and test HDF5 Asynchronous I/O VOL connector
Assuming that the HDF5 source code has been checked out and a build directory
has been created, running the following cmake command from that build directory
will retrieve, build and test the HDF5 Asynchronous I/O VOL connector while
building HDF5. Note that `[hdf5 options]` represents other build options that
would typically be passed when building HDF5, such as `CMAKE_INSTALL_PREFIX`,
`HDF5_BUILD_CPP_LIB`, etc.
cmake [hdf5 options]
-DHDF5_ENABLE_THREADSAFE=ON
-DHDF5_ENABLE_PARALLEL=ON
-DALLOW_UNSUPPORTED=ON
-DHDF5_TEST_API=ON
-DHDF5_VOL_ALLOW_EXTERNAL=ON
-DHDF5_ALLOW_EXTERNAL_SUPPORT="GIT"
-DHDF5_VOL_URL01=https://github.com/hpc-io/vol-async.git
-DHDF5_VOL_VOL-ASYNC_BRANCH=develop
-DHDF5_VOL_VOL-ASYNC_NAME="async under_vol=0\;under_info={}"
-DHDF5_VOL_VOL-ASYNC_TEST_PARALLEL=ON ..
Here, we are specifying that:
* HDF5 should be built with thread-safety enabled (required by Async VOL connector)
* HDF5 should be built with parallel enabled (required by Async VOL connector)
* Unsupported option combinations should be allowed (thread-safety together with
the high-level library, which is built by default, is normally an unsupported combination)
* Enable the API tests so that they can be tested with the Async VOL connector
* Build and use the HDF5 Asynchronous I/O VOL connector, located at
https://github.com/hpc-io/vol-async.git
* Clone the Asynchronous I/O VOL connector from the repository's 'develop' branch
* When testing the Asynchronous I/O VOL connector, the `HDF5_VOL_CONNECTOR` environment
variable should be set to "async under_vol=0\;under_info={}", which
specifies that the VOL connector with the canonical name "async" should
be loaded and it should be passed the string "under_vol=0;under_info={}"
for its configuration
* The Asynchronous I/O VOL connector should be tested against HDF5's parallel API tests
Note that this also assumes that the Asynchronous I/O VOL connector's
[other dependencies](https://hdf5-vol-async.readthedocs.io/en/latest/gettingstarted.html#preparation)
are installed on the system in a way that CMake can find them. If that is not
the case, the locations for these dependencies may need to be provided to CMake
by passing extra options, such as:
-DABT_INCLUDE_DIR=/path/to/argobots/build/include
-DABT_LIBRARY=/path/to/argobots/build/lib/libabt.so
which would help CMake find an argobots installation in a non-standard location.
## Testing
To facilitate testing of HDF5 VOL connectors when building HDF5, tests from
the [HDF5 VOL tests](https://github.com/hdfGroup/vol-tests) repository were
integrated back into the library and the following new CMake options were
added to HDF5 builds for the 1.14.1 release:
HDF5_TEST_API (Default: OFF)
This variable determines whether the HDF5 API tests will be built and tested.
HDF5_TEST_API_INSTALL (Default: OFF)
This variable determines whether the HDF5 API test executables will be installed
on the system alongside the HDF5 library.
HDF5_TEST_API_ENABLE_ASYNC (Default: OFF)
This variable determines whether the HDF5 Asynchronous I/O API tests will be
built and tested. These tests will only run if a VOL connector reports that
it supports asynchronous I/O operations when queried via the H5Pget_vol_cap_flags
API routine.
HDF5_TEST_API_ENABLE_DRIVER (Default: OFF)
This variable determines whether the HDF5 API test driver program will be
built and used for testing. This driver program is useful when a VOL connector
uses a client/server model where the server program needs to be up and running
before the VOL connector can function. This option is currently not functional.
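As a minimal sketch, enabling the API tests and installing them alongside the
library would add the following options to the `cmake` command line:
-DHDF5_TEST_API=ON
-DHDF5_TEST_API_INSTALL=ON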
When the `HDF5_TEST_API` option is set to ON, HDF5's CMake code builds and tests
the new API tests using the native VOL connector. When one or more external VOL
connectors are built successfully with the process described in this document,
the CMake code will duplicate some of these API tests by adding, for each VOL
connector that was built, separate versions of the tests that set the
`HDF5_VOL_CONNECTOR` environment variable to the value specified for that
connector's HDF5_VOL_<VOL name>_NAME variable at build time. Running the `ctest`
command will then run these new tests, which load and run against each VOL
connector that was built. When run via `ctest`, the new tests typically follow
the naming scheme:
HDF5_VOL_<VOL name lowercase>-h5_api_test_<test name>
HDF5_VOL_<VOL name lowercase>-h5_api_test_parallel_<test name>
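For example, assuming the vol-async connector from the earlier example was built,
one could run just its API tests by matching on the test name prefix:
ctest -R "HDF5_VOL_vol-async-h5_api_test" --output-on-failure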
**NOTE**
If dependencies of a built VOL connector are installed on the system in
a non-standard location that would typically require one to set `LD_LIBRARY_PATH`
or similar, one should ensure that those environment variables are set before
running tests. Otherwise, the tests that run with that connector will likely
fail due to being unable to load the necessary libraries for its dependencies.
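For example, if a connector's dependencies were installed under a hypothetical
/opt/vol-deps prefix, one might run the following before invoking `ctest`:
export LD_LIBRARY_PATH=/opt/vol-deps/lib:$LD_LIBRARY_PATH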


@ -47,6 +47,15 @@ New Features
Configuration:
-------------
- Updated HDF5 API tests CMake code to support VOL connectors
* Implemented support for fetching, building and testing HDF5
VOL connectors during the library build process and documented
the feature under doc/cmake-vols-fetchcontent.md
* Implemented the HDF5_TEST_API_INSTALL option that enables
installation of the HDF5 API tests on the system
- Added new CMake options for building and running HDF5 API tests
(Experimental)
@ -216,6 +225,13 @@ Bug Fixes since HDF5-1.14.0 release
===================================
Library
-------
- Added an AAPL check to H5Acreate
A check was added to H5Acreate to ensure that a failure is correctly
returned when an invalid Attribute Access Property List is passed to
the function. The HDF5 API tests were failing for certain
build types due to this condition not being checked previously.
- Fixed a bug in H5Ocopy that could generate invalid HDF5 files
H5Ocopy was missing a check to determine whether the new object's
@ -542,7 +558,23 @@ Bug Fixes since HDF5-1.14.0 release
Testing
-------
-
- A bug was fixed in the HDF5 API test random datatype generation code
A bug in the random datatype generation code could cause test failures
when trying to generate an enumeration datatype that has duplicated
name/value pairs in it. This has now been fixed.
- A bug was fixed in the HDF5 API test VOL connector registration checking code
The HDF5 API test code checks to see if the VOL connector specified by the
HDF5_VOL_CONNECTOR environment variable (if any) is registered with the library
before attempting to run tests with it so that testing can be skipped and an
error can be returned when a VOL connector fails to register successfully.
Previously, this code didn't account for VOL connectors that specify extra
configuration information in the HDF5_VOL_CONNECTOR environment variable and
would incorrectly report that the specified VOL connector isn't registered,
since it included the configuration information as part of the VOL connector
name when checking registration status. This has now been fixed.
Platforms Tested


@ -76,13 +76,14 @@ H5VL__native_attr_create(void *obj, const H5VL_loc_params_t *loc_params, const c
hid_t space_id, hid_t acpl_id, hid_t H5_ATTR_UNUSED aapl_id,
hid_t H5_ATTR_UNUSED dxpl_id, void H5_ATTR_UNUSED **req)
{
H5G_loc_t loc; /* Object location */
H5G_loc_t obj_loc; /* Location used to open group */
hbool_t loc_found = FALSE;
H5T_t *type, *dt; /* Datatype to use for attribute */
H5S_t *space; /* Dataspace to use for attribute */
H5A_t *attr = NULL;
void *ret_value = NULL;
H5P_genplist_t *plist;
H5G_loc_t loc; /* Object location */
H5G_loc_t obj_loc; /* Location used to open group */
hbool_t loc_found = FALSE;
H5T_t *type, *dt; /* Datatype to use for attribute */
H5S_t *space; /* Dataspace to use for attribute */
H5A_t *attr = NULL;
void *ret_value = NULL;
FUNC_ENTER_PACKAGE
@ -91,6 +92,9 @@ H5VL__native_attr_create(void *obj, const H5VL_loc_params_t *loc_params, const c
if (0 == (H5F_INTENT(loc.oloc->file) & H5F_ACC_RDWR))
HGOTO_ERROR(H5E_ARGS, H5E_WRITEERROR, NULL, "no write intent on file")
if (NULL == (plist = H5P_object_verify(aapl_id, H5P_ATTRIBUTE_ACCESS)))
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "AAPL is not an attribute access property list")
if (NULL == (dt = (H5T_t *)H5I_object_verify(type_id, H5I_DATATYPE)))
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "not a datatype")
/* If this is a named datatype, get the connector's pointer to the datatype */


@ -161,6 +161,21 @@ if (HDF5_ENABLE_FORMATTERS)
clang_format (HDF5_TEST_h5_api_test_FORMAT h5_api_test)
endif ()
if (HDF5_TEST_API_INSTALL)
install (
TARGETS
h5_api_test
EXPORT
${HDF5_EXPORTED_TARGETS}
DESTINATION
${HDF5_INSTALL_BIN_DIR}
PERMISSIONS
OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
COMPONENT
tests
)
endif ()
#-----------------------------------------------------------------------------
# Build the ported HDF5 test executables
#-----------------------------------------------------------------------------
@ -232,12 +247,32 @@ foreach (api_test_extra ${HDF5_API_TESTS_EXTRA})
if (HDF5_ENABLE_FORMATTERS)
clang_format (HDF5_TEST_h5_api_test_${api_test_extra}_FORMAT h5_api_test_${api_test_extra})
endif ()
if (HDF5_TEST_API_INSTALL)
install (
TARGETS
h5_api_test_${api_test_extra}
EXPORT
${HDF5_EXPORTED_TARGETS}
DESTINATION
${HDF5_INSTALL_BIN_DIR}
PERMISSIONS
OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
COMPONENT
tests
)
endif ()
endforeach ()
#-----------------------------------------------------------------------------
# Add tests if HDF5 serial testing is enabled
#-----------------------------------------------------------------------------
if (HDF5_TEST_SERIAL)
# Setup working directories for any external VOL connectors to be tested
foreach (external_vol_tgt ${HDF5_EXTERNAL_VOL_TARGETS})
file (MAKE_DIRECTORY "${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}")
endforeach ()
if (HDF5_TEST_API_ENABLE_DRIVER)
if ("${HDF5_TEST_API_SERVER}" STREQUAL "")
message (FATAL_ERROR "Please set HDF5_TEST_API_SERVER to point to a server executable for the test driver program.")
@ -258,7 +293,7 @@ if (HDF5_TEST_SERIAL)
)
endif ()
set(last_api_test "")
set (last_api_test "")
foreach (api_test ${HDF5_API_TESTS})
add_test (
NAME "h5_api_test_${api_test}"
@ -269,9 +304,9 @@ if (HDF5_TEST_SERIAL)
${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
)
set_tests_properties("h5_api_test_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
set_tests_properties ("h5_api_test_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
set(last_api_test "h5_api_test_${api_test}")
set (last_api_test "h5_api_test_${api_test}")
endforeach ()
foreach (hdf5_test ${HDF5_API_TESTS_EXTRA})
@ -296,17 +331,104 @@ if (HDF5_TEST_SERIAL)
${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
)
endforeach ()
# Add tests for each external VOL connector that was built
foreach (external_vol_tgt ${HDF5_EXTERNAL_VOL_TARGETS})
# Determine environment variables that need to be set for testing
set (vol_test_env "")
set (vol_plugin_paths "${CMAKE_BINARY_DIR}/${HDF5_INSTALL_BIN_DIR}")
get_target_property (vol_test_string "${external_vol_tgt}" HDF5_VOL_NAME)
list (APPEND vol_test_env "HDF5_VOL_CONNECTOR=${vol_test_string}")
get_target_property (vol_lib_targets "${external_vol_tgt}" HDF5_VOL_TARGETS)
foreach (lib_target ${vol_lib_targets})
get_target_property (lib_target_output_dir "${lib_target}" LIBRARY_OUTPUT_DIRECTORY)
if (NOT "${lib_target_output_dir}" STREQUAL "lib_target_output_dir-NOTFOUND"
AND NOT "${lib_target_output_dir}" STREQUAL ""
AND NOT "${lib_target_output_dir}" STREQUAL "${CMAKE_BINARY_DIR}/${HDF5_INSTALL_BIN_DIR}")
set (vol_plugin_paths "${vol_plugin_paths}${CMAKE_SEP}${lib_target_output_dir}")
endif ()
endforeach ()
list (APPEND vol_test_env "HDF5_PLUGIN_PATH=${vol_plugin_paths}")
# Add main API tests
set (last_api_test "")
foreach (api_test ${HDF5_API_TESTS})
add_test (
NAME "${external_vol_tgt}-h5_api_test_${api_test}"
COMMAND $<TARGET_FILE:h5_api_test_driver>
--server ${HDF5_TEST_API_SERVER}
--client $<TARGET_FILE:h5_api_test> "${api_test}"
--serial
${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
)
set_tests_properties (
"${external_vol_tgt}-h5_api_test_${api_test}"
PROPERTIES
ENVIRONMENT
"${vol_test_env}"
WORKING_DIRECTORY
"${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}"
DEPENDS
"${last_api_test}"
)
set (last_api_test "${external_vol_tgt}-h5_api_test_${api_test}")
endforeach ()
# Add any extra HDF5 tests
foreach (hdf5_test ${HDF5_API_TESTS_EXTRA})
add_test (
NAME "${external_vol_tgt}-h5_api_test_${hdf5_test}"
COMMAND $<TARGET_FILE:h5_api_test_driver>
--server ${HDF5_TEST_API_SERVER}
--client $<TARGET_FILE:h5_api_test_${hdf5_test}>
--serial
${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
)
set_tests_properties (
"${external_vol_tgt}-h5_api_test_${hdf5_test}"
PROPERTIES
ENVIRONMENT
"${vol_test_env}"
WORKING_DIRECTORY
"${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}"
)
endforeach ()
# Hook external tests to same test suite
foreach (ext_api_test ${HDF5_API_EXT_SERIAL_TESTS})
add_test (
NAME "${external_vol_tgt}-h5_api_ext_test_${ext_api_test}"
COMMAND $<TARGET_FILE:h5_api_test_driver>
--server ${HDF5_TEST_API_SERVER}
--client $<TARGET_FILE:${ext_api_test}>
--serial
${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
)
set_tests_properties (
"${external_vol_tgt}-h5_api_ext_test_${ext_api_test}"
PROPERTIES
ENVIRONMENT
"${vol_test_env}"
WORKING_DIRECTORY
"${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}"
)
endforeach ()
endforeach ()
else ()
set(last_api_test "")
set (last_api_test "")
foreach (api_test ${HDF5_API_TESTS})
add_test (
NAME "h5_api_test_${api_test}"
COMMAND $<TARGET_FILE:h5_api_test> "${api_test}"
)
set_tests_properties("h5_api_test_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
set_tests_properties ("h5_api_test_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
set(last_api_test "h5_api_test_${api_test}")
set (last_api_test "h5_api_test_${api_test}")
endforeach ()
foreach (hdf5_test ${HDF5_API_TESTS_EXTRA})
@ -315,5 +437,64 @@ if (HDF5_TEST_SERIAL)
COMMAND $<TARGET_FILE:h5_api_test_${hdf5_test}>
)
endforeach ()
# Add tests for each external VOL connector that was built
foreach (external_vol_tgt ${HDF5_EXTERNAL_VOL_TARGETS})
# Determine environment variables that need to be set for testing
set (vol_test_env "")
set (vol_plugin_paths "${CMAKE_BINARY_DIR}/${HDF5_INSTALL_BIN_DIR}")
get_target_property (vol_test_string "${external_vol_tgt}" HDF5_VOL_NAME)
list (APPEND vol_test_env "HDF5_VOL_CONNECTOR=${vol_test_string}")
get_target_property (vol_lib_targets "${external_vol_tgt}" HDF5_VOL_TARGETS)
foreach (lib_target ${vol_lib_targets})
get_target_property (lib_target_output_dir "${lib_target}" LIBRARY_OUTPUT_DIRECTORY)
if (NOT "${lib_target_output_dir}" STREQUAL "lib_target_output_dir-NOTFOUND"
AND NOT "${lib_target_output_dir}" STREQUAL ""
AND NOT "${lib_target_output_dir}" STREQUAL "${CMAKE_BINARY_DIR}/${HDF5_INSTALL_BIN_DIR}")
set (vol_plugin_paths "${vol_plugin_paths}${CMAKE_SEP}${lib_target_output_dir}")
endif ()
endforeach ()
list (APPEND vol_test_env "HDF5_PLUGIN_PATH=${vol_plugin_paths}")
# Add main API tests
set (last_api_test "")
foreach (api_test ${HDF5_API_TESTS})
add_test (
NAME "${external_vol_tgt}-h5_api_test_${api_test}"
COMMAND $<TARGET_FILE:h5_api_test> "${api_test}"
)
set_tests_properties (
"${external_vol_tgt}-h5_api_test_${api_test}"
PROPERTIES
ENVIRONMENT
"${vol_test_env}"
WORKING_DIRECTORY
"${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}"
DEPENDS
"${last_api_test}"
)
set (last_api_test "${external_vol_tgt}-h5_api_test_${api_test}")
endforeach ()
# Add any extra HDF5 tests
foreach (hdf5_test ${HDF5_API_TESTS_EXTRA})
add_test (
NAME "${external_vol_tgt}-h5_api_test_${hdf5_test}"
COMMAND $<TARGET_FILE:h5_api_test_${hdf5_test}>
)
set_tests_properties (
"${external_vol_tgt}-h5_api_test_${hdf5_test}"
PROPERTIES
ENVIRONMENT
"${vol_test_env}"
WORKING_DIRECTORY
"${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}"
)
endforeach ()
endforeach ()
endif ()
endif ()


@ -2141,10 +2141,9 @@ test_group(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
SKIPPED();
HDprintf(" API functions for basic file, group, group more, or creation order aren't supported "
HDprintf(" API functions for basic file, group, or group more aren't supported "
"with this connector\n");
return 0;
}
@ -2153,9 +2152,11 @@ test_group(void)
if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0)
TEST_ERROR;
/* Track creation order */
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0)
TEST_ERROR;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
/* Track creation order */
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0)
TEST_ERROR;
}
/* Create event stack */
if ((es_id = H5EScreate()) < 0)
@ -2219,10 +2220,12 @@ test_group(void)
if (H5Gget_info_async(group_id, &info1, es_id) < 0)
TEST_ERROR;
/* Test H5Gget_info_by_idx_async */
if (H5Gget_info_by_idx_async(parent_group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &info2,
H5P_DEFAULT, es_id) < 0)
TEST_ERROR;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
/* Test H5Gget_info_by_idx_async */
if (H5Gget_info_by_idx_async(parent_group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &info2,
H5P_DEFAULT, es_id) < 0)
TEST_ERROR;
}
/* Test H5Gget_info_by_name_async */
if (H5Gget_info_by_name_async(parent_group_id, "group3", &info3, H5P_DEFAULT, es_id) < 0)
@ -2237,8 +2240,10 @@ test_group(void)
/* Verify group infos */
if (info1.nlinks != 0)
FAIL_PUTS_ERROR(" incorrect number of links");
if (info2.nlinks != 1)
FAIL_PUTS_ERROR(" incorrect number of links");
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (info2.nlinks != 1)
FAIL_PUTS_ERROR(" incorrect number of links");
}
if (info3.nlinks != 2)
FAIL_PUTS_ERROR(" incorrect number of links");


@ -1114,7 +1114,7 @@ test_create_attribute_invalid_params(void)
PART_BEGIN(H5Acreate_invalid_aapl)
{
TESTING_2("H5Acreate with an invalid AAPL");
#ifndef NO_INVALID_PROPERTY_LIST_TESTS
H5E_BEGIN_TRY
{
attr_id = H5Acreate2(group_id, ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, attr_dtype,
@ -1130,10 +1130,6 @@ test_create_attribute_invalid_params(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Acreate_invalid_aapl);
#endif
}
PART_END(H5Acreate_invalid_aapl);
@ -1306,7 +1302,7 @@ test_create_attribute_invalid_params(void)
PART_BEGIN(H5Acreate_by_name_invalid_aapl)
{
TESTING_2("H5Acreate_by_name with invalid AAPL");
#ifndef NO_INVALID_PROPERTY_LIST_TESTS
H5E_BEGIN_TRY
{
attr_id = H5Acreate_by_name(container_group, ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME,
@ -1323,10 +1319,6 @@ test_create_attribute_invalid_params(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Acreate_by_name_invalid_aapl);
#endif
}
PART_END(H5Acreate_by_name_invalid_aapl);
@ -1406,8 +1398,8 @@ test_open_attribute(void)
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
SKIPPED();
HDprintf(
" API functions for basic file, group, or attribute aren't supported with this connector\n");
HDprintf(" API functions for basic file, group, or attribute aren't supported "
"with this connector\n");
return 0;
}
@ -1431,10 +1423,12 @@ test_open_attribute(void)
goto error;
}
if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
}
}
if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
@ -1531,6 +1525,12 @@ test_open_attribute(void)
{
TESTING_2("H5Aopen_by_idx by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking is not supported with this VOL connector\n");
PART_EMPTY(H5Aopen_by_idx_crt_order_increasing);
}
if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
H5_ITER_INC, 0, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
@ -1584,6 +1584,12 @@ test_open_attribute(void)
{
TESTING_2("H5Aopen_by_idx by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking is not supported with this VOL connector\n");
PART_EMPTY(H5Aopen_by_idx_crt_order_decreasing);
}
if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
H5_ITER_DEC, 2, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
@ -1689,11 +1695,11 @@ test_open_attribute(void)
PART_BEGIN(H5Aopen_by_idx_name_order_decreasing)
{
TESTING_2("H5Aopen_by_idx by alphabetical order in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
H5_ITER_DEC, 2, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open attribute '%s' at index %lld using H5Aopen_by_idx by "
HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by "
"alphabetical order in decreasing order\n",
ATTRIBUTE_OPEN_TEST_ATTR_NAME, 2);
PART_ERROR(H5Aopen_by_idx_name_order_decreasing);
@ -1708,7 +1714,7 @@ test_open_attribute(void)
if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
H5_ITER_DEC, 1, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open attribute '%s' at index %lld using H5Aopen_by_idx by "
HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by "
"alphabetical order in decreasing order\n",
ATTRIBUTE_OPEN_TEST_ATTR_NAME2, 1);
PART_ERROR(H5Aopen_by_idx_name_order_decreasing);
@ -1723,7 +1729,7 @@ test_open_attribute(void)
if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
H5_ITER_DEC, 0, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open attribute '%s' at index %lld using H5Aopen_by_idx by "
HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by "
"alphabetical order in decreasing order\n",
ATTRIBUTE_OPEN_TEST_ATTR_NAME3, 0);
PART_ERROR(H5Aopen_by_idx_name_order_decreasing);
@ -1736,10 +1742,6 @@ test_open_attribute(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aopen_by_idx_name_order_decreasing);
#endif
}
PART_END(H5Aopen_by_idx_name_order_decreasing);
}
@ -3193,8 +3195,8 @@ test_get_attribute_space_and_type(void)
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE)) {
SKIPPED();
HDprintf(
" API functions for basic file, group, or attribute aren't supported with this connector\n");
HDprintf(" API functions for basic file, group, attribute, or attribute aren't supported with "
"this connector\n");
return 0;
}
@ -3941,10 +3943,9 @@ test_get_attribute_name(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE)) {
SKIPPED();
HDprintf(" API functions for basic file, group, attribute, or creation order aren't supported "
HDprintf(" API functions for basic file, group, or attribute aren't supported "
"with this connector\n");
return 0;
}
@ -3969,10 +3970,12 @@ test_get_attribute_name(void)
goto error;
}
if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
}
}
if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
@ -4116,6 +4119,12 @@ test_get_attribute_name(void)
{
TESTING_2("H5Aget_name_by_idx by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aget_name_by_idx_crt_order_increasing);
}
*name_buf = '\0';
if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
H5_ITER_INC, 0, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
@ -4178,6 +4187,12 @@ test_get_attribute_name(void)
{
TESTING_2("H5Aget_name_by_idx by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aget_name_by_idx_crt_order_decreasing);
}
*name_buf = '\0';
if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
H5_ITER_DEC, 2, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
@ -4301,12 +4316,12 @@ test_get_attribute_name(void)
PART_BEGIN(H5Aget_name_by_idx_name_order_decreasing)
{
TESTING_2("H5Aget_name_by_idx by alphabetical order in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
*name_buf = '\0';
if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_NAME,
H5_ITER_DEC, 2, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
H5_FAILED();
HDprintf(" couldn't retrieve name of attribute at index %lld using H5Aget_name_by_index "
HDprintf(" couldn't retrieve name of attribute at index %d using H5Aget_name_by_index "
"by alphabetical order in decreasing order\n",
2);
PART_ERROR(H5Aget_name_by_idx_name_order_decreasing);
@ -4324,7 +4339,7 @@ test_get_attribute_name(void)
if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_NAME,
H5_ITER_DEC, 1, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
H5_FAILED();
HDprintf(" couldn't retrieve name of attribute at index %lld using H5Aget_name_by_index "
HDprintf(" couldn't retrieve name of attribute at index %d using H5Aget_name_by_index "
"by alphabetical order in decreasing order\n",
1);
PART_ERROR(H5Aget_name_by_idx_name_order_decreasing);
@ -4342,7 +4357,7 @@ test_get_attribute_name(void)
if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_NAME,
H5_ITER_DEC, 0, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
H5_FAILED();
HDprintf(" couldn't retrieve name of attribute at index %lld using H5Aget_name_by_index "
HDprintf(" couldn't retrieve name of attribute at index %d using H5Aget_name_by_index "
"by alphabetical order in decreasing order\n",
0);
PART_ERROR(H5Aget_name_by_idx_name_order_decreasing);
@ -4357,10 +4372,6 @@ test_get_attribute_name(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aget_name_by_idx_name_order_decreasing);
#endif
}
PART_END(H5Aget_name_by_idx_name_order_decreasing);
}
@ -4799,10 +4810,9 @@ test_get_attribute_info(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE)) {
SKIPPED();
HDprintf(" API functions for basic file, group, attribute, or creation order aren't supported "
HDprintf(" API functions for basic file, group, or attribute aren't supported "
"with this connector\n");
return 0;
}
@ -4827,10 +4837,12 @@ test_get_attribute_info(void)
goto error;
}
if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
}
}
if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_GET_INFO_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
@ -5050,6 +5062,12 @@ test_get_attribute_info(void)
{
TESTING_2("H5Aget_info_by_idx by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aget_info_by_idx_crt_order_increasing);
}
HDmemset(&attr_info, 0, sizeof(attr_info));
if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, &attr_info,
H5P_DEFAULT) < 0) {
@ -5133,6 +5151,12 @@ test_get_attribute_info(void)
{
TESTING_2("H5Aget_info_by_idx by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aget_info_by_idx_crt_order_decreasing);
}
HDmemset(&attr_info, 0, sizeof(attr_info));
if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, &attr_info,
H5P_DEFAULT) < 0) {
@ -5298,12 +5322,12 @@ test_get_attribute_info(void)
PART_BEGIN(H5Aget_info_by_idx_name_order_decreasing)
{
TESTING_2("H5Aget_info_by_idx by alphabetical order in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
HDmemset(&attr_info, 0, sizeof(attr_info));
if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, &attr_info, H5P_DEFAULT) <
0) {
H5_FAILED();
HDprintf(" couldn't get info for attribute at index %lld using H5Aget_info_by_idx by "
HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
"alphabetical order in decreasing order\n",
2);
PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
@ -5328,7 +5352,7 @@ test_get_attribute_info(void)
if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, &attr_info, H5P_DEFAULT) <
0) {
H5_FAILED();
HDprintf(" couldn't get info for attribute at index %lld using H5Aget_info_by_idx by "
HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
"alphabetical order in decreasing order\n",
1);
PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
@ -5353,7 +5377,7 @@ test_get_attribute_info(void)
if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, &attr_info, H5P_DEFAULT) <
0) {
H5_FAILED();
HDprintf(" couldn't get info for attribute at index %lld using H5Aget_info_by_idx by "
HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
"alphabetical order in decreasing order\n",
0);
PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
@ -5375,10 +5399,6 @@ test_get_attribute_info(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aget_info_by_idx_name_order_decreasing);
#endif
}
PART_END(H5Aget_info_by_idx_name_order_decreasing);
}
@ -6493,10 +6513,9 @@ test_attribute_iterate_group(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE)) {
SKIPPED();
HDprintf(" API functions for basic file, group, attribute, iterate, or creation order aren't "
HDprintf(" API functions for basic file, group, attribute, or iterate aren't "
"supported with this connector\n");
return 0;
}
@ -6521,10 +6540,12 @@ test_attribute_iterate_group(void)
goto error;
}
if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
}
}
if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_ITERATE_TEST_GRP_SUBGROUP_NAME, H5P_DEFAULT,
@ -6615,7 +6636,6 @@ test_attribute_iterate_group(void)
PART_BEGIN(H5Aiterate2_name_decreasing)
{
TESTING_2("H5Aiterate by attribute name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Reset the counter to the appropriate value for the next test */
link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -6635,10 +6655,6 @@ test_attribute_iterate_group(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aiterate2_name_decreasing);
#endif
}
PART_END(H5Aiterate2_name_decreasing);
@ -6646,6 +6662,12 @@ test_attribute_iterate_group(void)
{
TESTING_2("H5Aiterate by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate2_creation_increasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -6672,6 +6694,12 @@ test_attribute_iterate_group(void)
{
TESTING_2("H5Aiterate by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate2_creation_decreasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -6724,7 +6752,7 @@ test_attribute_iterate_group(void)
PART_BEGIN(H5Aiterate_by_name_name_decreasing)
{
TESTING_2("H5Aiterate_by_name by attribute name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Reset the counter to the appropriate value for the next test */
link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -6745,10 +6773,6 @@ test_attribute_iterate_group(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aiterate_by_name_name_decreasing);
#endif
}
PART_END(H5Aiterate_by_name_name_decreasing);
@ -6756,6 +6780,12 @@ test_attribute_iterate_group(void)
{
TESTING_2("H5Aiterate_by_name by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate_by_name_creation_increasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -6784,6 +6814,12 @@ test_attribute_iterate_group(void)
{
TESTING_2("H5Aiterate_by_name by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate_by_name_creation_decreasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -6873,10 +6909,9 @@ test_attribute_iterate_dataset(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
SKIPPED();
HDprintf(" API functions for basic file, group, dataset, attribute, iterate, or creation order "
HDprintf(" API functions for basic file, group, dataset, attribute, or iterate "
"aren't supported with this connector\n");
return 0;
}
@ -6908,10 +6943,12 @@ test_attribute_iterate_dataset(void)
goto error;
}
if (H5Pset_attr_creation_order(dcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_attr_creation_order(dcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
}
}
if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
@ -7008,7 +7045,6 @@ test_attribute_iterate_dataset(void)
PART_BEGIN(H5Aiterate2_name_decreasing)
{
TESTING_2("H5Aiterate by attribute name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Reset the counter to the appropriate value for the next test */
link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7028,10 +7064,6 @@ test_attribute_iterate_dataset(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aiterate2_name_decreasing);
#endif
}
PART_END(H5Aiterate2_name_decreasing);
@ -7039,6 +7071,12 @@ test_attribute_iterate_dataset(void)
{
TESTING_2("H5Aiterate by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate2_creation_increasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7065,6 +7103,12 @@ test_attribute_iterate_dataset(void)
{
TESTING_2("H5Aiterate by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate2_creation_decreasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7119,7 +7163,7 @@ test_attribute_iterate_dataset(void)
PART_BEGIN(H5Aiterate_by_name_name_decreasing)
{
TESTING_2("H5Aiterate_by_name by attribute name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Reset the counter to the appropriate value for the next test */
link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7142,10 +7186,6 @@ test_attribute_iterate_dataset(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aiterate_by_name_name_decreasing);
#endif
}
PART_END(H5Aiterate_by_name_name_decreasing);
@ -7153,6 +7193,12 @@ test_attribute_iterate_dataset(void)
{
TESTING_2("H5Aiterate_by_name by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate_by_name_creation_increasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7182,6 +7228,12 @@ test_attribute_iterate_dataset(void)
{
TESTING_2("H5Aiterate_by_name by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate_by_name_creation_decreasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7279,11 +7331,10 @@ test_attribute_iterate_datatype(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
SKIPPED();
HDprintf(" API functions for basic file, group, stored datatype, attribute, iterate, or creation "
"order aren't supported with this connector\n");
HDprintf(" API functions for basic file, group, stored datatype, attribute, or iterate "
"aren't supported with this connector\n");
return 0;
}
@ -7314,10 +7365,12 @@ test_attribute_iterate_datatype(void)
goto error;
}
if (H5Pset_attr_creation_order(tcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_attr_creation_order(tcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
}
}
if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
@ -7411,7 +7464,6 @@ test_attribute_iterate_datatype(void)
PART_BEGIN(H5Aiterate2_name_decreasing)
{
TESTING_2("H5Aiterate by attribute name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Reset the counter to the appropriate value for the next test */
link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7431,10 +7483,6 @@ test_attribute_iterate_datatype(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aiterate2_name_decreasing);
#endif
}
PART_END(H5Aiterate2_name_decreasing);
@ -7442,6 +7490,12 @@ test_attribute_iterate_datatype(void)
{
TESTING_2("H5Aiterate by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate2_creation_increasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7468,6 +7522,12 @@ test_attribute_iterate_datatype(void)
{
TESTING_2("H5Aiterate by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate2_creation_decreasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7522,7 +7582,7 @@ test_attribute_iterate_datatype(void)
PART_BEGIN(H5Aiterate_by_name_name_decreasing)
{
TESTING_2("H5Aiterate_by_name by attribute name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Reset the counter to the appropriate value for the next test */
link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7545,10 +7605,6 @@ test_attribute_iterate_datatype(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aiterate_by_name_name_decreasing);
#endif
}
PART_END(H5Aiterate_by_name_name_decreasing);
@ -7556,6 +7612,12 @@ test_attribute_iterate_datatype(void)
{
TESTING_2("H5Aiterate_by_name by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate_by_name_creation_increasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -7586,6 +7648,12 @@ test_attribute_iterate_datatype(void)
{
TESTING_2("H5Aiterate_by_name by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Aiterate_by_name_creation_decreasing);
}
/* Reset the counter to the appropriate value for the next test */
link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
@ -8112,8 +8180,8 @@ test_attribute_iterate_0_attributes(void)
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
SKIPPED();
HDprintf(" API functions for basic file, group, dataset, attribute, or iterate aren't supported "
"with this connector\n");
HDprintf(" API functions for basic file, group, dataset, attribute, or iterate "
"aren't supported with this connector\n");
return 0;
}
@ -8187,7 +8255,7 @@ test_attribute_iterate_0_attributes(void)
PART_BEGIN(H5Aiterate_0_attributes_dec)
{
TESTING_2("H5Aiterate (decreasing order)");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
if (H5Aiterate2(dset_id, H5_INDEX_NAME, H5_ITER_DEC, NULL, attr_iter_callback2, NULL) < 0) {
H5_FAILED();
HDprintf(" H5Aiterate2 on object with 0 attributes failed\n");
@ -8195,10 +8263,6 @@ test_attribute_iterate_0_attributes(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aiterate_0_attributes_dec);
#endif
}
PART_END(H5Aiterate_0_attributes_dec);
@ -8235,7 +8299,6 @@ test_attribute_iterate_0_attributes(void)
PART_BEGIN(H5Aiterate_by_name_0_attributes_dec)
{
TESTING_2("H5Aiterate_by_name (decreasing order)");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
if (H5Aiterate_by_name(group_id, ATTRIBUTE_ITERATE_TEST_0_ATTRIBUTES_DSET_NAME, H5_INDEX_NAME,
H5_ITER_DEC, NULL, attr_iter_callback2, NULL, H5P_DEFAULT) < 0) {
H5_FAILED();
@ -8244,10 +8307,6 @@ test_attribute_iterate_0_attributes(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Aiterate_by_name_0_attributes_dec);
#endif
}
PART_END(H5Aiterate_by_name_0_attributes_dec);
}
@ -8307,9 +8366,9 @@ test_delete_attribute(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
SKIPPED();
HDprintf(" API functions for basic file, group, attribute, or creation order aren't supported "
HDprintf(" API functions for basic file, group, or attribute aren't supported "
"with this connector\n");
return 0;
}
@ -8334,10 +8393,12 @@ test_delete_attribute(void)
goto error;
}
if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
H5_FAILED();
HDprintf(" couldn't set attribute creation order tracking\n");
goto error;
}
}
if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
@ -8492,6 +8553,12 @@ test_delete_attribute(void)
{
TESTING_2("H5Adelete_by_idx by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Adelete_by_idx_crt_order_increasing);
}
/* Create several attributes */
if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME, attr_dtype, space_id,
H5P_DEFAULT, H5P_DEFAULT)) < 0) {
@ -8737,6 +8804,12 @@ test_delete_attribute(void)
{
TESTING_2("H5Adelete_by_idx by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Adelete_by_idx_crt_order_decreasing);
}
/* Create several attributes */
if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME, attr_dtype, space_id,
H5P_DEFAULT, H5P_DEFAULT)) < 0) {
@ -9226,7 +9299,7 @@ test_delete_attribute(void)
PART_BEGIN(H5Adelete_by_idx_name_order_decreasing)
{
TESTING_2("H5Adelete_by_idx by alphabetical order in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Create several attributes */
if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME, attr_dtype, space_id,
H5P_DEFAULT, H5P_DEFAULT)) < 0) {
@ -9458,10 +9531,6 @@ test_delete_attribute(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Adelete_by_idx_name_order_decreasing);
#endif
}
PART_END(H5Adelete_by_idx_name_order_decreasing);
@ -10758,7 +10827,6 @@ error:
static int
test_attr_shared_dtype(void)
{
#ifndef NO_SHARED_DATATYPES
H5O_info2_t obj_info;
htri_t attr_exists;
hid_t file_id = H5I_INVALID_HID;
@ -10768,11 +10836,9 @@ test_attr_shared_dtype(void)
hid_t attr_dtype = H5I_INVALID_HID;
hid_t space_id = H5I_INVALID_HID;
hid_t dset_id = H5I_INVALID_HID;
#endif
TESTING("shared datatype for attributes");
#ifndef NO_SHARED_DATATYPES
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) ||
@ -10910,10 +10976,6 @@ error:
H5E_END_TRY;
return 1;
#else
SKIPPED();
return 0;
#endif
}
static herr_t

View File

@ -50,20 +50,10 @@ static int test_read_dataset_small_all(void);
static int test_read_dataset_small_hyperslab(void);
static int test_read_dataset_small_point_selection(void);
static int test_dataset_io_point_selections(void);
#ifndef NO_LARGE_TESTS
static int test_read_dataset_large_all(void);
static int test_read_dataset_large_hyperslab(void);
static int test_read_dataset_large_point_selection(void);
#endif
static int test_read_dataset_invalid_params(void);
static int test_write_dataset_small_all(void);
static int test_write_dataset_small_hyperslab(void);
static int test_write_dataset_small_point_selection(void);
#ifndef NO_LARGE_TESTS
static int test_write_dataset_large_all(void);
static int test_write_dataset_large_hyperslab(void);
static int test_write_dataset_large_point_selection(void);
#endif
static int test_write_dataset_data_verification(void);
static int test_write_dataset_invalid_params(void);
static int test_dataset_builtin_type_conversion(void);
@ -132,20 +122,10 @@ static int (*dataset_tests[])(void) = {
test_read_dataset_small_hyperslab,
test_read_dataset_small_point_selection,
test_dataset_io_point_selections,
#ifndef NO_LARGE_TESTS
test_read_dataset_large_all,
test_read_dataset_large_hyperslab,
test_read_dataset_large_point_selection,
#endif
test_read_dataset_invalid_params,
test_write_dataset_small_all,
test_write_dataset_small_hyperslab,
test_write_dataset_small_point_selection,
#ifndef NO_LARGE_TESTS
test_write_dataset_large_all,
test_write_dataset_large_hyperslab,
test_write_dataset_large_point_selection,
#endif
test_write_dataset_data_verification,
test_write_dataset_invalid_params,
test_dataset_builtin_type_conversion,
@ -1780,7 +1760,7 @@ test_create_dataset_enum_types(void)
for (i = 0; i < DATASET_ENUM_TYPE_TEST_NUM_MEMBERS; i++) {
char val_name[15];
HDsprintf(val_name, "%s%zu", DATASET_ENUM_TYPE_TEST_VAL_BASE_NAME, i);
HDsnprintf(val_name, 15, "%s%zu", DATASET_ENUM_TYPE_TEST_VAL_BASE_NAME, i);
if (H5Tenum_insert(enum_non_native, val_name, &i) < 0)
TEST_ERROR;
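
The HDsprintf to HDsnprintf change above is the usual bounded-formatting fix; a tiny self-contained illustration with plain snprintf and made-up names:

#include <stdio.h>

/* The destination size travels with the call, so an over-long value name is
 * truncated instead of overflowing the 15-byte stack buffer. */
static void
format_member_names(void)
{
    char val_name[15];

    for (size_t i = 0; i < 16; i++) {
        snprintf(val_name, sizeof(val_name), "enum_val%zu", i);
        printf("member name: %s\n", val_name);
    }
}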
@ -2080,13 +2060,11 @@ test_create_dataset_creation_properties(void)
TESTING_MULTIPART("dataset creation properties");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILTERS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) || !(vol_cap_flags_g & H5VL_CAP_FLAG_TRACK_TIMES) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_FILTERS)) {
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
SKIPPED();
HDprintf(" API functions for basic file, group, dataset, creation order, track time, or filter "
"pipeline aren't supported with this connector\n");
HDprintf(" API functions for basic file, group, or dataset "
"aren't supported with this connector\n");
return 0;
}
@ -2209,6 +2187,12 @@ test_create_dataset_creation_properties(void)
TESTING_2("attribute creation order property for DCPL");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking is not supported by this VOL connector\n");
PART_EMPTY(DCPL_attr_crt_order_test);
}
if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
H5_FAILED();
HDprintf(" couldn't create DCPL\n");
@ -2411,6 +2395,12 @@ test_create_dataset_creation_properties(void)
{
TESTING_2("dataset filters");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILTERS)) {
SKIPPED();
HDprintf(" dataset filters are not supported by this VOL connector\n");
PART_EMPTY(DCPL_filters_test);
}
if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
H5_FAILED();
HDprintf(" couldn't create DCPL\n");
@ -2591,6 +2581,12 @@ test_create_dataset_creation_properties(void)
{
TESTING_2("object time tracking property for DCPL");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_TRACK_TIMES)) {
SKIPPED();
HDprintf(" object time tracking is not supported by this VOL connector\n");
PART_EMPTY(DCPL_track_obj_times_test);
}
if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
H5_FAILED();
HDprintf(" couldn't create DCPL\n");
@ -4933,372 +4929,6 @@ error:
return 1;
} /* end test_dataset_io_point_selections() */
#ifndef NO_LARGE_TESTS
/*
* A test to check that a large amount of data can be
* read back from a dataset using an H5S_ALL selection.
*/
static int
test_read_dataset_large_all(void)
{
hsize_t dims[DATASET_LARGE_READ_TEST_ALL_DSET_SPACE_RANK] = {600, 600, 600};
size_t i, data_size;
hid_t file_id = H5I_INVALID_HID;
hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
hid_t dset_id = H5I_INVALID_HID;
hid_t fspace_id = H5I_INVALID_HID;
void *read_buf = NULL;
TESTING("large read from dataset with H5S_ALL");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
SKIPPED();
HDprintf(
" API functions for basic file, group, or dataset aren't supported with this connector\n");
return 0;
}
if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
goto error;
}
if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
goto error;
}
if ((group_id = H5Gcreate2(container_group, DATASET_LARGE_READ_TEST_ALL_GROUP_NAME, H5P_DEFAULT,
H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't create container sub-group '%s'\n", DATASET_LARGE_READ_TEST_ALL_GROUP_NAME);
goto error;
}
if ((fspace_id = H5Screate_simple(DATASET_LARGE_READ_TEST_ALL_DSET_SPACE_RANK, dims, NULL)) < 0)
TEST_ERROR;
if ((dset_id = H5Dcreate2(group_id, DATASET_LARGE_READ_TEST_ALL_DSET_NAME,
DATASET_LARGE_READ_TEST_ALL_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't create dataset '%s'\n", DATASET_LARGE_READ_TEST_ALL_DSET_NAME);
goto error;
}
for (i = 0, data_size = 1; i < DATASET_LARGE_READ_TEST_ALL_DSET_SPACE_RANK; i++)
data_size *= dims[i];
data_size *= DATASET_LARGE_READ_TEST_ALL_DSET_DTYPESIZE;
if (NULL == (read_buf = HDmalloc(data_size)))
TEST_ERROR;
if (H5Dread(dset_id, DATASET_LARGE_READ_TEST_ALL_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_buf) <
0) {
H5_FAILED();
HDprintf(" couldn't read from dataset '%s'\n", DATASET_LARGE_READ_TEST_ALL_DSET_NAME);
goto error;
}
if (read_buf) {
HDfree(read_buf);
read_buf = NULL;
}
if (H5Sclose(fspace_id) < 0)
TEST_ERROR;
if (H5Dclose(dset_id) < 0)
TEST_ERROR;
if (H5Gclose(group_id) < 0)
TEST_ERROR;
if (H5Gclose(container_group) < 0)
TEST_ERROR;
if (H5Fclose(file_id) < 0)
TEST_ERROR;
PASSED();
return 0;
error:
H5E_BEGIN_TRY
{
if (read_buf)
HDfree(read_buf);
H5Sclose(fspace_id);
H5Dclose(dset_id);
H5Gclose(group_id);
H5Gclose(container_group);
H5Fclose(file_id);
}
H5E_END_TRY;
return 1;
}
/*
* A test to check that a large amount of data can be
* read back from a dataset using a hyperslab selection.
*/
static int
test_read_dataset_large_hyperslab(void)
{
hsize_t start[DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
hsize_t stride[DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
hsize_t count[DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
hsize_t block[DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
hsize_t dims[DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK] = {600, 600, 600};
size_t i, data_size;
hid_t file_id = H5I_INVALID_HID;
hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
hid_t dset_id = H5I_INVALID_HID;
hid_t mspace_id = H5I_INVALID_HID, fspace_id = H5I_INVALID_HID;
void *read_buf = NULL;
TESTING("large read from dataset with a hyperslab selection");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
SKIPPED();
HDprintf(
" API functions for basic file, group, or dataset aren't supported with this connector\n");
return 0;
}
if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
goto error;
}
if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
goto error;
}
if ((group_id = H5Gcreate2(container_group, DATASET_LARGE_READ_TEST_HYPERSLAB_GROUP_NAME, H5P_DEFAULT,
H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't create container sub-group '%s'\n",
DATASET_LARGE_READ_TEST_HYPERSLAB_GROUP_NAME);
goto error;
}
if ((fspace_id = H5Screate_simple(DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK, dims, NULL)) < 0)
TEST_ERROR;
if ((mspace_id = H5Screate_simple(DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK, dims, NULL)) < 0)
TEST_ERROR;
if ((dset_id = H5Dcreate2(group_id, DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_NAME,
DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_DTYPE, fspace_id, H5P_DEFAULT,
H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't create dataset '%s'\n", DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_NAME);
goto error;
}
for (i = 0; i < DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK; i++) {
start[i] = 0;
stride[i] = 1;
count[i] = dims[i];
block[i] = 1;
}
if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0)
TEST_ERROR;
for (i = 0, data_size = 1; i < DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK; i++)
data_size *= dims[i];
data_size *= DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_DTYPESIZE;
if (NULL == (read_buf = HDmalloc(data_size)))
TEST_ERROR;
if (H5Dread(dset_id, DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
read_buf) < 0) {
H5_FAILED();
HDprintf(" couldn't read from dataset '%s'\n", DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_NAME);
goto error;
}
if (read_buf) {
HDfree(read_buf);
read_buf = NULL;
}
if (H5Sclose(mspace_id) < 0)
TEST_ERROR;
if (H5Sclose(fspace_id) < 0)
TEST_ERROR;
if (H5Dclose(dset_id) < 0)
TEST_ERROR;
if (H5Gclose(group_id) < 0)
TEST_ERROR;
if (H5Gclose(container_group) < 0)
TEST_ERROR;
if (H5Fclose(file_id) < 0)
TEST_ERROR;
PASSED();
return 0;
error:
H5E_BEGIN_TRY
{
H5Sclose(mspace_id);
H5Sclose(fspace_id);
H5Dclose(dset_id);
H5Gclose(group_id);
H5Gclose(container_group);
H5Fclose(file_id);
}
H5E_END_TRY;
return 1;
}
/*
* A test to check that a large amount of data can be
* read back from a dataset using a large point selection.
*
* XXX: Test takes up significant amounts of memory.
*/
static int
test_read_dataset_large_point_selection(void)
{
hsize_t *points = NULL;
hsize_t dims[DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK] = {225000000};
size_t i, data_size;
hid_t file_id = H5I_INVALID_HID;
hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
hid_t dset_id = H5I_INVALID_HID;
hid_t fspace_id = H5I_INVALID_HID;
void *data = NULL;
TESTING("large read from dataset with a point selection");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
SKIPPED();
HDprintf(
" API functions for basic file, group, or dataset aren't supported with this connector\n");
return 0;
}
if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
goto error;
}
if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
goto error;
}
if ((group_id = H5Gcreate2(container_group, DATASET_LARGE_READ_TEST_POINT_SELECTION_GROUP_NAME,
H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't create container sub-group '%s'\n",
DATASET_LARGE_READ_TEST_POINT_SELECTION_GROUP_NAME);
goto error;
}
if ((fspace_id = H5Screate_simple(DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK, dims, NULL)) <
0)
TEST_ERROR;
if ((dset_id = H5Dcreate2(group_id, DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_NAME,
DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPE, fspace_id, H5P_DEFAULT,
H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't create dataset '%s'\n", DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_NAME);
goto error;
}
for (i = 0, data_size = 1; i < DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK; i++)
data_size *= dims[i];
data_size *= DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE;
if (NULL == (data = HDmalloc(data_size)))
TEST_ERROR;
if (NULL ==
(points = HDmalloc((data_size / DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE) *
((DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK) * (sizeof(hsize_t))))))
TEST_ERROR;
/* Select the entire dataspace */
for (i = 0; i < data_size / DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE; i++) {
points[i] = i;
}
if (H5Sselect_elements(fspace_id, H5S_SELECT_SET,
data_size / DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE, points) < 0) {
H5_FAILED();
HDprintf(" couldn't select points\n");
goto error;
}
if (H5Dread(dset_id, DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPE, H5S_ALL, fspace_id, H5P_DEFAULT,
data) < 0) {
H5_FAILED();
HDprintf(" couldn't read from dataset '%s'\n", DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_NAME);
goto error;
}
if (data) {
HDfree(data);
data = NULL;
}
if (points) {
HDfree(points);
points = NULL;
}
if (H5Sclose(fspace_id) < 0)
TEST_ERROR;
if (H5Dclose(dset_id) < 0)
TEST_ERROR;
if (H5Gclose(group_id) < 0)
TEST_ERROR;
if (H5Gclose(container_group) < 0)
TEST_ERROR;
if (H5Fclose(file_id) < 0)
TEST_ERROR;
PASSED();
return 0;
error:
H5E_BEGIN_TRY
{
if (data)
HDfree(data);
if (points)
HDfree(points);
H5Sclose(fspace_id);
H5Dclose(dset_id);
H5Gclose(group_id);
H5Gclose(container_group);
H5Fclose(file_id);
}
H5E_END_TRY;
return 1;
}
#endif
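
For scale, the removed tests above sized their buffers as the product of the dataspace dimensions and the element size; a sketch of that arithmetic with illustrative names, assuming 4-byte ints:

#include <stdlib.h>

/* 600 x 600 x 600 elements of native int: 216,000,000 elements, or roughly
 * 824 MiB at 4 bytes per int, per buffer - hence the "large" in the test names. */
static void *
alloc_large_io_buf(void)
{
    const size_t dims[3]   = {600, 600, 600};
    size_t       data_size = sizeof(int);

    for (int i = 0; i < 3; i++)
        data_size *= dims[i];

    return malloc(data_size); /* caller must check for NULL and free() */
}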
/*
* A test to check that data can't be read from a
* dataset when H5Dread is passed invalid parameters.
@ -5929,281 +5559,6 @@ error:
return 1;
}
#ifndef NO_LARGE_TESTS
/*
* A test to check that a large write can be made
* to a dataset using an H5S_ALL selection.
*/
static int
test_write_dataset_large_all(void)
{
hssize_t space_npoints;
hsize_t dims[DATASET_LARGE_WRITE_TEST_ALL_DSET_SPACE_RANK] = {600, 600, 600};
size_t i, data_size;
hid_t file_id = H5I_INVALID_HID;
hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
hid_t dset_id = H5I_INVALID_HID;
hid_t fspace_id = H5I_INVALID_HID;
void *data = NULL;
TESTING("large write to dataset with H5S_ALL");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
SKIPPED();
HDprintf(" API functions for basic file, group, basic or more dataset aren't supported with this "
"connector\n");
return 0;
}
if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
goto error;
}
if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
goto error;
}
if ((group_id = H5Gcreate2(container_group, DATASET_LARGE_WRITE_TEST_ALL_GROUP_NAME, H5P_DEFAULT,
H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't create container sub-group '%s'\n", DATASET_LARGE_WRITE_TEST_ALL_GROUP_NAME);
goto error;
}
if ((fspace_id = H5Screate_simple(DATASET_LARGE_WRITE_TEST_ALL_DSET_SPACE_RANK, dims, NULL)) < 0)
TEST_ERROR;
if ((dset_id = H5Dcreate2(group_id, DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME,
DATASET_LARGE_WRITE_TEST_ALL_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't create dataset '%s'\n", DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME);
goto error;
}
/* Close the dataset and dataspace to ensure that retrieval of file space ID is working */
if (H5Dclose(dset_id) < 0)
TEST_ERROR;
if (H5Sclose(fspace_id) < 0)
TEST_ERROR;
if ((dset_id = H5Dopen2(group_id, DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open dataset '%s'\n", DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME);
goto error;
}
if ((fspace_id = H5Dget_space(dset_id)) < 0) {
H5_FAILED();
HDprintf(" couldn't get dataset dataspace\n");
goto error;
}
if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
H5_FAILED();
HDprintf(" couldn't get dataspace num points\n");
goto error;
}
if (NULL == (data = HDmalloc((hsize_t)space_npoints * DATASET_LARGE_WRITE_TEST_ALL_DSET_DTYPESIZE)))
TEST_ERROR;
for (i = 0; i < (hsize_t)space_npoints; i++)
((int *)data)[i] = (int)i;
if (H5Dwrite(dset_id, DATASET_LARGE_WRITE_TEST_ALL_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT, data) < 0) {
H5_FAILED();
HDprintf(" couldn't write to dataset '%s'\n", DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME);
goto error;
}
if (data) {
HDfree(data);
data = NULL;
}
if (H5Sclose(fspace_id) < 0)
TEST_ERROR;
if (H5Dclose(dset_id) < 0)
TEST_ERROR;
if (H5Gclose(group_id) < 0)
TEST_ERROR;
if (H5Gclose(container_group) < 0)
TEST_ERROR;
if (H5Fclose(file_id) < 0)
TEST_ERROR;
PASSED();
return 0;
error:
H5E_BEGIN_TRY
{
H5Sclose(fspace_id);
H5Dclose(dset_id);
H5Gclose(group_id);
H5Gclose(container_group);
H5Fclose(file_id);
}
H5E_END_TRY;
return 1;
}
/*
* A test to check that a large write can be made
* to a dataset using a hyperslab selection.
*/
static int
test_write_dataset_large_hyperslab(void)
{
hsize_t start[DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
hsize_t stride[DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
hsize_t count[DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
hsize_t block[DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
hsize_t dims[DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK] = {600, 600, 600};
size_t i, data_size;
hid_t file_id = H5I_INVALID_HID;
hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
hid_t dset_id = H5I_INVALID_HID;
hid_t mspace_id = H5I_INVALID_HID, fspace_id = H5I_INVALID_HID;
void *data = NULL;
TESTING("large write to dataset with a hyperslab selection");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
SKIPPED();
HDprintf(
" API functions for basic file, group, or dataset aren't supported with this connector\n");
return 0;
}
if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
goto error;
}
if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
goto error;
}
if ((group_id = H5Gcreate2(container_group, DATASET_LARGE_WRITE_TEST_HYPERSLAB_GROUP_NAME, H5P_DEFAULT,
H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't create container sub-group '%s'\n",
DATASET_LARGE_WRITE_TEST_HYPERSLAB_GROUP_NAME);
goto error;
}
if ((fspace_id = H5Screate_simple(DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK, dims, NULL)) < 0)
TEST_ERROR;
if ((mspace_id = H5Screate_simple(DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK, dims, NULL)) < 0)
TEST_ERROR;
if ((dset_id = H5Dcreate2(group_id, DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_NAME,
DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPE, fspace_id, H5P_DEFAULT,
H5P_DEFAULT, H5P_DEFAULT)) < 0) {
H5_FAILED();
HDprintf(" couldn't create dataset '%s'\n", DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_NAME);
goto error;
}
for (i = 0, data_size = 1; i < DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK; i++)
data_size *= dims[i];
data_size *= DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPESIZE;
if (NULL == (data = HDmalloc(data_size)))
TEST_ERROR;
for (i = 0; i < data_size / DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPESIZE; i++)
((int *)data)[i] = (int)i;
for (i = 0; i < DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK; i++) {
start[i] = 0;
stride[i] = 1;
count[i] = dims[i];
block[i] = 1;
}
if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0)
TEST_ERROR;
if (H5Dwrite(dset_id, DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
data) < 0) {
H5_FAILED();
HDprintf(" couldn't write to dataset '%s'\n", DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_NAME);
goto error;
}
if (data) {
HDfree(data);
data = NULL;
}
if (H5Sclose(mspace_id) < 0)
TEST_ERROR;
if (H5Sclose(fspace_id) < 0)
TEST_ERROR;
if (H5Dclose(dset_id) < 0)
TEST_ERROR;
if (H5Gclose(group_id) < 0)
TEST_ERROR;
if (H5Gclose(container_group) < 0)
TEST_ERROR;
if (H5Fclose(file_id) < 0)
TEST_ERROR;
PASSED();
return 0;
error:
H5E_BEGIN_TRY
{
if (data)
HDfree(data);
H5Sclose(mspace_id);
H5Sclose(fspace_id);
H5Dclose(dset_id);
H5Gclose(group_id);
H5Gclose(container_group);
H5Fclose(file_id);
}
H5E_END_TRY;
return 1;
}
/*
* A test to check that a large write can be made
* to a dataset using a point selection.
*/
static int
test_write_dataset_large_point_selection(void)
{
TESTING("large write to dataset with a point selection");
SKIPPED();
return 0;
error:
return 1;
}
#endif
/*
* A test to ensure that data is read back correctly from
* a dataset after it has been written.
@ -8298,10 +7653,8 @@ test_dataset_set_extent_data(void)
hid_t dcpl_id = H5I_INVALID_HID;
hid_t fspace_id = H5I_INVALID_HID, dset_space_id = H5I_INVALID_HID;
int buf_origin[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM][DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM];
#ifndef NO_CLEAR_ON_SHRINK
int buf_expand2[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM][DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM];
#endif
int buf_expand[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM * 2 - 1]
int buf_expand2[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM][DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM];
int buf_expand[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM * 2 - 1]
[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM * 2 - 1];
int buf_shrink[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM / 2 + 1]
[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM / 2 + 1];
@ -8468,7 +7821,7 @@ test_dataset_set_extent_data(void)
PART_BEGIN(H5Dset_extent_data_expand_to_origin)
{
TESTING_2("H5Dset_extent for data back to the original size");
#ifndef NO_CLEAR_ON_SHRINK
/* Expand the dataset back to the original size. The data should look like this:
* X X X X X 0 0 0
* X X X X X 0 0 0
@ -8508,10 +7861,6 @@ test_dataset_set_extent_data(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Dset_extent_data_expand_to_origin);
#endif
}
PART_END(H5Dset_extent_data_expand_to_origin);
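
A condensed sketch of the behavior this part exercises, assuming the 8x8 extent and 5x5 shrink implied by the diagrams above; dset_id would come from the surrounding test, the dataset must use chunked layout for H5Dset_extent, and error handling is reduced to early returns:

#include "hdf5.h"

/* Shrinking discards the trimmed elements, so growing the dataset back and
 * re-reading yields the fill value (0) outside the surviving 5x5 block. */
static int
shrink_then_expand(hid_t dset_id)
{
    hsize_t shrink_dims[2] = {5, 5};
    hsize_t full_dims[2]   = {8, 8};
    int     buf[8][8];

    if (H5Dset_extent(dset_id, shrink_dims) < 0)   /* data outside 5x5 is discarded */
        return -1;
    if (H5Dset_extent(dset_id, full_dims) < 0)     /* back to the original 8x8 extent */
        return -1;
    if (H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
        return -1;

    return 0; /* buf[i][j] is 0 wherever i >= 5 or j >= 5 */
}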
@ -8552,7 +7901,7 @@ test_dataset_set_extent_data(void)
PART_BEGIN(H5Dset_extent_data_expand_to_origin_again)
{
TESTING_2("H5Dset_extent for data expansion back to the original again");
#ifndef NO_CLEAR_ON_SHRINK
/* Expand the dataset back to the original size. The data should look like this:
* 0 0 0 0 0 0 0 0
* 0 0 0 0 0 0 0 0
@ -8581,10 +7930,6 @@ test_dataset_set_extent_data(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Dset_extent_data_expand_to_origin_again);
#endif
}
PART_END(H5Dset_extent_data_expand_to_origin_again);
}
@ -8633,7 +7978,6 @@ error:
static int
test_dataset_set_extent_double_handles(void)
{
#ifndef NO_DOUBLE_OBJECT_OPENS
hsize_t dims_origin[DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK] = {
DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_DIM, DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_DIM};
hsize_t dims_expand[DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK] = {
@ -8650,12 +7994,9 @@ test_dataset_set_extent_double_handles(void)
hid_t dcpl_id = H5I_INVALID_HID;
hid_t fspace_id = H5I_INVALID_HID, dset_space_id = H5I_INVALID_HID;
int i;
#endif
TESTING("H5Dset_extent on double dataset handles");
#ifndef NO_DOUBLE_OBJECT_OPENS
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
@ -8730,7 +8071,8 @@ test_dataset_set_extent_double_handles(void)
for (i = 0; i < DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK; i++)
if (dims_out[i] != dims_expand[i]) {
H5_FAILED();
HDprintf(" dims_out[%d] = %d. It should be %d.\n", i, dims_out[i], dims_expand[i]);
HDprintf(" dims_out[%d] = %" PRIuHSIZE ". It should be %" PRIuHSIZE ".\n", i, dims_out[i],
dims_expand[i]);
goto error;
}
@ -8768,10 +8110,6 @@ error:
H5E_END_TRY;
return 1;
#else
SKIPPED();
return 0;
#endif
} /* test_dataset_set_extent_double_handles */
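
The dims_out print fix above uses HDF5's own format macro for hsize_t; a small self-contained example:

#include "hdf5.h"
#include <stdio.h>

/* The underlying integer type of hsize_t can differ between platforms and
 * builds, so H5public.h provides PRIuHSIZE instead of casts or guessed formats. */
static void
print_dims(void)
{
    hsize_t dims_out[2] = {1024, 2048};

    for (int i = 0; i < 2; i++)
        printf("dims_out[%d] = %" PRIuHSIZE "\n", i, dims_out[i]);
}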
/*
@ -11551,7 +10889,7 @@ test_get_vlen_buf_size(void)
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
SKIPPED();
HDprintf(" API functions for basic file, group, dataset, or more aren't supported with this "
HDprintf(" API functions for basic file, group, or dataset aren't supported with this "
"connector\n");
return 0;
}

View File

@ -149,26 +149,6 @@ int H5_api_dataset_test(void);
#define DATASET_IO_POINT_DSET_NAME_NOCHUNK "dataset_io_point_selection_dset_nochunk"
#define DATASET_IO_POINT_DSET_NAME_CHUNK "dataset_io_point_selection_dset_chunk"
#ifndef NO_LARGE_TESTS
#define DATASET_LARGE_READ_TEST_ALL_DSET_SPACE_RANK 3
#define DATASET_LARGE_READ_TEST_ALL_DSET_DTYPESIZE sizeof(int)
#define DATASET_LARGE_READ_TEST_ALL_DSET_DTYPE H5T_NATIVE_INT
#define DATASET_LARGE_READ_TEST_ALL_GROUP_NAME "dataset_large_read_all_test"
#define DATASET_LARGE_READ_TEST_ALL_DSET_NAME "dataset_large_read_all_dset"
#define DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK 3
#define DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_DTYPESIZE sizeof(int)
#define DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_DTYPE H5T_NATIVE_INT
#define DATASET_LARGE_READ_TEST_HYPERSLAB_GROUP_NAME "dataset_large_read_hyperslab_test"
#define DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_NAME "dataset_large_read_hyperslab_dset"
#define DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK 1
#define DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE sizeof(int)
#define DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPE H5T_NATIVE_INT
#define DATASET_LARGE_READ_TEST_POINT_SELECTION_GROUP_NAME "dataset_large_read_point_selection_test"
#define DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_NAME "dataset_large_read_point_selection_dset"
#endif
#define DATASET_READ_INVALID_PARAMS_TEST_DSET_SPACE_RANK 3
#define DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPESIZE sizeof(int)
#define DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPE H5T_NATIVE_INT
@ -194,26 +174,6 @@ int H5_api_dataset_test(void);
#define DATASET_SMALL_WRITE_TEST_POINT_SELECTION_GROUP_NAME "dataset_small_write_point_selection_test"
#define DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_NAME "dataset_small_write_point_selection_dset"
#ifndef NO_LARGE_TESTS
#define DATASET_LARGE_WRITE_TEST_ALL_DSET_SPACE_RANK 3
#define DATASET_LARGE_WRITE_TEST_ALL_DSET_DTYPESIZE sizeof(int)
#define DATASET_LARGE_WRITE_TEST_ALL_DSET_DTYPE H5T_NATIVE_INT
#define DATASET_LARGE_WRITE_TEST_ALL_GROUP_NAME "dataset_large_write_all_test"
#define DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME "dataset_large_write_all_dset"
#define DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK 3
#define DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPESIZE sizeof(int)
#define DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPE H5T_NATIVE_INT
#define DATASET_LARGE_WRITE_TEST_HYPERSLAB_GROUP_NAME "dataset_large_write_hyperslab_test"
#define DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_NAME "dataset_large_write_hyperslab_dset"
#define DATASET_LARGE_WRITE_TEST_POINT_SELECTION_DSET_SPACE_RANK 3
#define DATASET_LARGE_WRITE_TEST_POINT_SELECTION_DSET_DTYPESIZE sizeof(int)
#define DATASET_LARGE_WRITE_TEST_POINT_SELECTION_DSET_DTYPE H5T_NATIVE_INT
#define DATASET_LARGE_WRITE_TEST_POINT_SELECTION_GROUP_NAME "dataset_large_write_point_selection_test"
#define DATASET_LARGE_WRITE_TEST_POINT_SELECTION_DSET_NAME "dataset_large_write_point_selection_dset"
#endif
#define DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK 3
#define DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE sizeof(int)
#define DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE H5T_NATIVE_INT

View File

@ -2300,16 +2300,13 @@ error:
static int
test_resurrect_datatype(void)
{
#ifndef NO_ID_PREVENTS_OBJ_DELETE
hid_t file_id = H5I_INVALID_HID;
hid_t container_group = H5I_INVALID_HID;
hid_t group_id = H5I_INVALID_HID;
hid_t type_id = H5I_INVALID_HID;
#endif /* NO_ID_PREVENTS_OBJ_DELETE */
TESTING("resurrecting datatype after deletion");
#ifndef NO_ID_PREVENTS_OBJ_DELETE
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
@ -2359,14 +2356,13 @@ test_resurrect_datatype(void)
HDprintf(" failed to delete datatype\n");
goto error;
}
#ifndef NO_OBJECT_GET_NAME
/* Check that datatype name is NULL */
if (H5Iget_name(type_id, NULL, (size_t)0) != 0) {
H5_FAILED();
HDprintf(" deleted datatype name was not NULL!\n");
goto error;
}
#endif
/* Re-link the datatype to the group hierarchy (shouldn't get deleted now) */
if (H5Lcreate_hard(type_id, ".", group_id, DATATYPE_RESURRECT_TEST_DTYPE_NAME2, H5P_DEFAULT,
@ -2422,13 +2418,9 @@ test_resurrect_datatype(void)
TEST_ERROR;
PASSED();
#else /* NO_ID_PREVENTS_OBJ_DELETE */
SKIPPED();
#endif /* NO_ID_PREVENTS_OBJ_DELETE */
return 0;
#ifndef NO_ID_PREVENTS_OBJ_DELETE
error:
H5E_BEGIN_TRY
{
@ -2440,7 +2432,6 @@ error:
H5E_END_TRY;
return 1;
#endif /* NO_ID_PREVENTS_OBJ_DELETE */
}
static int

View File

@ -602,7 +602,7 @@ test_file_permission(void)
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
SKIPPED();
HDprintf(" API functions for basic file, group, dataset, attribute, stored datatype aren't "
HDprintf(" API functions for basic file, group, dataset, attribute, or stored datatype aren't "
"supported with this connector\n");
return 0;
}
@ -1597,8 +1597,9 @@ test_get_file_obj_count(void)
!(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
SKIPPED();
HDprintf(" API functions for basic or more file, basic dataset, group, datatype, or attribute "
"aren't supported with this connector\n");
HDprintf(
" API functions for basic or more file, basic dataset, group, stored datatypes, or attribute "
"aren't supported with this connector\n");
return 0;
}
@ -1742,7 +1743,7 @@ test_get_file_obj_count(void)
PART_BEGIN(H5Fget_obj_count_types)
{
TESTING_2("H5Fget_obj_count for datatypes");
#ifndef WRONG_DATATYPE_OBJ_COUNT
/* Get the number of named datatype in two opened files */
if ((obj_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_DATATYPE)) < 0) {
H5_FAILED();
@ -1758,10 +1759,6 @@ test_get_file_obj_count(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Fget_obj_count_types);
#endif
}
PART_END(H5Fget_obj_count_types);
@ -1833,7 +1830,7 @@ test_get_file_obj_count(void)
PART_BEGIN(H5Fget_obj_count_all)
{
TESTING_2("H5Fget_obj_count for all object types");
#ifndef WRONG_DATATYPE_OBJ_COUNT
/* Get the number of all open objects */
if ((obj_count = H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL)) < 0) {
H5_FAILED();
@ -1848,10 +1845,6 @@ test_get_file_obj_count(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Fget_obj_count_all);
#endif
}
PART_END(H5Fget_obj_count_all);
@ -1930,7 +1923,6 @@ error:
static int
test_file_open_overlap(void)
{
#ifndef NO_DOUBLE_OBJECT_OPENS
ssize_t obj_count;
hid_t file_id = H5I_INVALID_HID;
hid_t file_id2 = H5I_INVALID_HID;
@ -1938,7 +1930,6 @@ test_file_open_overlap(void)
hid_t dspace_id = H5I_INVALID_HID;
hid_t dset_id = H5I_INVALID_HID;
char *prefixed_filename = NULL;
#endif
TESTING("overlapping file opens");
@ -1951,7 +1942,6 @@ test_file_open_overlap(void)
return 0;
}
#ifndef NO_DOUBLE_OBJECT_OPENS
if (prefix_filename(test_path_prefix, OVERLAPPING_FILENAME, &prefixed_filename) < 0) {
H5_FAILED();
HDprintf(" couldn't prefix filename\n");
@ -2062,10 +2052,6 @@ error:
HDfree(prefixed_filename);
return 1;
#else
SKIPPED();
return 0;
#endif
}
/*
@ -2075,12 +2061,10 @@ error:
static int
test_file_mounts(void)
{
#ifndef NO_FILE_MOUNTS
hid_t file_id = H5I_INVALID_HID;
hid_t child_fid = H5I_INVALID_HID;
hid_t group_id = H5I_INVALID_HID;
char *prefixed_filename = NULL;
#endif
TESTING("file mounting/unmounting");
@ -2093,7 +2077,6 @@ test_file_mounts(void)
return 0;
}
#ifndef NO_FILE_MOUNTS
if (prefix_filename(test_path_prefix, FILE_MOUNT_TEST_FILENAME, &prefixed_filename) < 0) {
H5_FAILED();
HDprintf(" couldn't prefix filename\n");
@ -2158,10 +2141,6 @@ error:
HDfree(prefixed_filename);
return 1;
#else
SKIPPED();
return 0;
#endif
}
/*
@ -2188,8 +2167,9 @@ test_get_file_name(void)
!(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
SKIPPED();
HDprintf(" API functions for basic or more file, basic dataset, group, datatype, or attribute "
"aren't supported with this connector\n");
HDprintf(
" API functions for basic or more file, basic dataset, group, stored datatypes, or attribute "
"aren't supported with this connector\n");
return 0;
}
@ -2523,9 +2503,7 @@ cleanup_files(void)
/* The below file should not get created */
/* remove_test_file(test_path_prefix, FILE_CREATE_INVALID_PARAMS_FILE_NAME); */
#ifndef NO_DOUBLE_OBJECT_OPENS
remove_test_file(test_path_prefix, OVERLAPPING_FILENAME);
#endif
remove_test_file(test_path_prefix, FILE_PERMISSION_TEST_FILENAME);
remove_test_file(test_path_prefix, FILE_FLUSH_TEST_FILENAME);
remove_test_file(test_path_prefix, FILE_PROPERTY_LIST_TEST_FNAME1);
@ -2533,9 +2511,7 @@ cleanup_files(void)
remove_test_file(test_path_prefix, FILE_INTENT_TEST_FILENAME);
remove_test_file(test_path_prefix, GET_OBJ_COUNT_TEST_FILENAME1);
remove_test_file(test_path_prefix, GET_OBJ_COUNT_TEST_FILENAME2);
#ifndef NO_FILE_MOUNTS
remove_test_file(test_path_prefix, FILE_MOUNT_TEST_FILENAME);
#endif
remove_test_file(test_path_prefix, GET_FILE_NAME_TEST_FNAME);
}

View File

@ -1123,7 +1123,7 @@ test_group_property_lists(void)
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" API functions for basic file, group, property list, creation order aren't supported "
HDprintf(" API functions for basic file, group, property list, or creation order aren't supported "
"with this connector\n");
return 0;
}
@ -1393,10 +1393,9 @@ test_get_group_info(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE)) {
SKIPPED();
HDprintf(" API functions for basic file or group, creation order aren't supported with this "
"connector\n");
HDprintf(" API functions for basic file or group aren't supported with this connector\n");
return 0;
}
@ -1420,10 +1419,12 @@ test_get_group_info(void)
goto error;
}
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
H5_FAILED();
HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
H5_FAILED();
HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
goto error;
}
}
if ((parent_group_id = H5Gcreate2(container_group, GROUP_GET_INFO_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
@ -1562,6 +1563,12 @@ test_get_group_info(void)
{
TESTING_2("H5Gget_info_by_idx by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Gget_info_by_idx_crt_order_increasing);
}
for (i = 0; i < GROUP_GET_INFO_TEST_GROUP_NUMB; i++) {
memset(&group_info, 0, sizeof(group_info));
@ -1613,6 +1620,12 @@ test_get_group_info(void)
{
TESTING_2("H5Gget_info_by_idx by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Gget_info_by_idx_crt_order_decreasing);
}
for (i = 0; i < GROUP_GET_INFO_TEST_GROUP_NUMB; i++) {
memset(&group_info, 0, sizeof(group_info));
@ -1714,7 +1727,7 @@ test_get_group_info(void)
PART_BEGIN(H5Gget_info_by_idx_name_order_decreasing)
{
TESTING_2("H5Gget_info_by_idx by alphabetical order in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
for (i = 0; i < GROUP_GET_INFO_TEST_GROUP_NUMB; i++) {
memset(&group_info, 0, sizeof(group_info));
@ -1728,14 +1741,15 @@ test_get_group_info(void)
if (group_info.nlinks != 0) {
H5_FAILED();
HDprintf(" group's number of links '%lld' doesn't match expected value '%lld'\n",
HDprintf(" group's number of links '%" PRIuHSIZE
"' doesn't match expected value '%d'\n",
group_info.nlinks, 0);
PART_ERROR(H5Gget_info_by_idx_name_order_decreasing);
}
if (group_info.max_corder != 0) {
H5_FAILED();
HDprintf(" group's max creation order '%lld' doesn't match expected value '%lld'\n",
HDprintf(" group's max creation order '%lld' doesn't match expected value '%d'\n",
(long long)group_info.max_corder, 0);
PART_ERROR(H5Gget_info_by_idx_name_order_decreasing);
}
@ -1759,10 +1773,6 @@ test_get_group_info(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Gget_info_by_idx_name_order_decreasing);
#endif
}
PART_END(H5Gget_info_by_idx_name_order_decreasing);
}
@ -1811,10 +1821,9 @@ test_get_group_info_invalid_params(void)
TESTING_MULTIPART("retrieval of group info with invalid parameters");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE)) {
SKIPPED();
HDprintf(" API functions for basic file, more group, creation order aren't supported with this "
HDprintf(" API functions for basic file, or more group aren't supported with this "
"connector\n");
return 0;
}
@ -2172,7 +2181,7 @@ test_flush_group(void)
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
SKIPPED();
HDprintf(" API functions for basic file, more group, creation order aren't supported with this "
HDprintf(" API functions for basic file, group, or flush refresh aren't supported with this "
"connector\n");
return 0;
}
@ -2280,8 +2289,8 @@ test_refresh_group(void)
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
SKIPPED();
HDprintf(
" API functions for basic file, group, or refresh aren't supported with this connector\n");
HDprintf(" API functions for basic file, group, or flush refresh aren't supported with this "
"connector\n");
return 0;
}

File diff suppressed because it is too large

View File

@ -432,10 +432,9 @@ test_open_object_invalid_params(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC)) {
SKIPPED();
HDprintf(" API functions for basic file, group, object, or creation order aren't supported with "
HDprintf(" API functions for basic file, group, or object aren't supported with "
"this connector\n");
return 0;
}
@ -460,10 +459,12 @@ test_open_object_invalid_params(void)
goto error;
}
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
H5_FAILED();
HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
H5_FAILED();
HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
goto error;
}
}
if ((group_id = H5Gcreate2(container_group, OBJECT_OPEN_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
@ -1550,7 +1551,7 @@ test_link_object_invalid_params(void)
PART_BEGIN(H5Olink_invalid_lapl)
{
TESTING_2("H5Olink with an invalid LAPL");
#ifndef NO_INVALID_PROPERTY_LIST_TESTS
H5E_BEGIN_TRY
{
status =
@ -1565,10 +1566,6 @@ test_link_object_invalid_params(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Olink_invalid_lapl);
#endif
}
PART_END(H5Olink_invalid_lapl);
}
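
A condensed sketch of the negative-test shape used in the H5Olink part above; the link name and the choice of H5I_INVALID_HID as the bad LAPL are illustrative, and the handles would come from the enclosing test:

#include "hdf5.h"

/* H5E_BEGIN_TRY/H5E_END_TRY (H5Epublic.h) keep the expected error messages off
 * the terminal while H5Olink is handed a bogus LAPL; the check only fails if
 * the call unexpectedly succeeds. */
static int
check_olink_rejects_bad_lapl(hid_t obj_id, hid_t group_id)
{
    herr_t status;

    H5E_BEGIN_TRY
    {
        status = H5Olink(obj_id, group_id, "hypothetical_link", H5P_DEFAULT, H5I_INVALID_HID);
    }
    H5E_END_TRY;

    return (status < 0) ? 0 : -1; /* success of the call means the test fails */
}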
@ -1624,7 +1621,7 @@ test_incr_decr_object_refcount(void)
!(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
SKIPPED();
HDprintf(" API functions for basic file, group, dataset, stored datatype, basic or more object "
HDprintf(" API functions for basic file, group, dataset, stored datatype, or object "
"aren't supported with this connector\n");
return 0;
}
@ -3844,7 +3841,8 @@ test_object_copy_group_with_soft_links(void)
!(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
SKIPPED();
HDprintf(" API functions for basic file, group, object, link, or soft link aren't supported with "
HDprintf(" API functions for basic file, group, object, link, iterate, or soft link aren't "
"supported with "
"this connector\n");
return 0;
}
@ -5075,12 +5073,10 @@ test_object_visit(void)
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
SKIPPED();
HDprintf(" API functions for basic file, group, object, dataset, attribute, stored datatype, "
"iterate, or creation order aren't supported with this connector\n");
HDprintf(" API functions for basic file, group, object, dataset, attribute, stored datatype, or "
"iterate aren't supported with this connector\n");
return 0;
}
@ -5104,10 +5100,12 @@ test_object_visit(void)
goto error;
}
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
H5_FAILED();
HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
H5_FAILED();
HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
goto error;
}
}
if ((group_id = H5Gcreate2(container_group, OBJECT_VISIT_TEST_SUBGROUP_NAME, H5P_DEFAULT, gcpl_id,
@ -5187,7 +5185,7 @@ test_object_visit(void)
PART_BEGIN(H5Ovisit_obj_name_decreasing)
{
TESTING_2("H5Ovisit by object name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Reset the counter to the appropriate value for the next test */
i = OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
@ -5205,10 +5203,6 @@ test_object_visit(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Ovisit_obj_name_decreasing);
#endif
}
PART_END(H5Ovisit_obj_name_decreasing);
@ -5216,6 +5210,12 @@ test_object_visit(void)
{
TESTING_2("H5Ovisit by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Ovisit_create_order_increasing);
}
/* Reset the counter to the appropriate value for the next test */
i = 2 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
@ -5240,6 +5240,12 @@ test_object_visit(void)
{
TESTING_2("H5Ovisit by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Ovisit_create_order_decreasing);
}
/* Reset the counter to the appropriate value for the next test */
i = 3 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
@ -5347,7 +5353,7 @@ test_object_visit(void)
PART_BEGIN(H5Ovisit_by_name_obj_name_decreasing)
{
TESTING_2("H5Ovisit_by_name by object name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Reset the counter to the appropriate value for the next test */
i = OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
@ -5382,10 +5388,6 @@ test_object_visit(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Ovisit_by_name_obj_name_decreasing);
#endif
}
PART_END(H5Ovisit_by_name_obj_name_decreasing);
@ -5393,6 +5395,12 @@ test_object_visit(void)
{
TESTING_2("H5Ovisit_by_name by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Ovisit_by_name_create_order_increasing);
}
/* Reset the counter to the appropriate value for the next test */
i = 2 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
@ -5434,6 +5442,12 @@ test_object_visit(void)
{
TESTING_2("H5Ovisit_by_name by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Ovisit_by_name_create_order_decreasing);
}
/* Reset the counter to the appropriate value for the next test */
i = 3 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
@ -5583,9 +5597,9 @@ test_object_visit_soft_link(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
SKIPPED();
HDprintf(" API functions for basic file, group, object, soft link, iterate, or creation order "
HDprintf(" API functions for basic file, group, object, soft link, or iterate "
"aren't supported with this connector\n");
return 0;
}
@ -5610,10 +5624,12 @@ test_object_visit_soft_link(void)
goto error;
}
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
H5_FAILED();
HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
goto error;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
H5_FAILED();
HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
goto error;
}
}
if ((group_id = H5Gcreate2(container_group, OBJECT_VISIT_SOFT_LINK_TEST_SUBGROUP_NAME, H5P_DEFAULT,
@ -5747,7 +5763,7 @@ test_object_visit_soft_link(void)
PART_BEGIN(H5Ovisit_obj_name_decreasing)
{
TESTING_2("H5Ovisit by object name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Reset the counter to the appropriate value for the next test */
i = OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
@ -5765,10 +5781,6 @@ test_object_visit_soft_link(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Ovisit_obj_name_decreasing);
#endif
}
PART_END(H5Ovisit_obj_name_decreasing);
@ -5776,6 +5788,12 @@ test_object_visit_soft_link(void)
{
TESTING_2("H5Ovisit by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Ovisit_create_order_increasing);
}
/* Reset the counter to the appropriate value for the next test */
i = 2 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
@ -5800,6 +5818,12 @@ test_object_visit_soft_link(void)
{
TESTING_2("H5Ovisit by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Ovisit_create_order_decreasing);
}
/* Reset the counter to the appropriate value for the next test */
i = 3 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
@ -5866,7 +5890,7 @@ test_object_visit_soft_link(void)
PART_BEGIN(H5Ovisit_by_name_obj_name_decreasing)
{
TESTING_2("H5Ovisit_by_name by object name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Reset the counter to the appropriate value for the next test */
i = OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
@ -5903,10 +5927,6 @@ test_object_visit_soft_link(void)
}
PASSED();
#else
SKIPPED();
PART_EMPTY(H5Ovisit_by_name_obj_name_decreasing);
#endif
}
PART_END(H5Ovisit_by_name_obj_name_decreasing);
@ -5914,6 +5934,12 @@ test_object_visit_soft_link(void)
{
TESTING_2("H5Ovisit_by_name by creation order in increasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Ovisit_by_name_create_order_increasing);
}
/* Reset the counter to the appropriate value for the next test */
i = 2 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
@ -5957,6 +5983,12 @@ test_object_visit_soft_link(void)
{
TESTING_2("H5Ovisit_by_name by creation order in decreasing order");
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
SKIPPED();
HDprintf(" creation order tracking isn't supported with this VOL connector\n");
PART_EMPTY(H5Ovisit_by_name_create_order_decreasing);
}
/* Reset the counter to the appropriate value for the next test */
i = 3 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
@ -6624,7 +6656,7 @@ test_close_invalid_objects(void)
if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC)) {
SKIPPED();
-       HDprintf("    API functions for basic file, group, object, dataset, attribute, or stored datatype "
+       HDprintf("    API functions for basic file, group, or object "
"aren't supported with this connector\n");
return 0;
}


@ -116,10 +116,15 @@ H5_api_test_run(void)
int
main(int argc, char **argv)
{
+   const char *vol_connector_string;
    const char *vol_connector_name;
    unsigned    seed;
-   hid_t       fapl_id = H5I_INVALID_HID;
-   hbool_t     err_occurred = FALSE;
+   hid_t       fapl_id = H5I_INVALID_HID;
+   hid_t       default_con_id = H5I_INVALID_HID;
+   hid_t       registered_con_id = H5I_INVALID_HID;
+   char       *vol_connector_string_copy = NULL;
+   char       *vol_connector_info = NULL;
+   hbool_t     err_occurred = FALSE;
/* Simple argument checking, TODO can improve that later */
if (argc > 1) {
@ -140,7 +145,7 @@ main(int argc, char **argv)
MPI_Init(&argc, &argv);
#endif
/* h5_reset(); */
H5open();
n_tests_run_g = 0;
n_tests_passed_g = 0;
@ -156,26 +161,97 @@ main(int argc, char **argv)
HDsnprintf(H5_api_test_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s%s", test_path_prefix,
TEST_FILE_NAME);
-   if (NULL == (vol_connector_name = HDgetenv(HDF5_VOL_CONNECTOR))) {
+   if (NULL == (vol_connector_string = HDgetenv(HDF5_VOL_CONNECTOR))) {
HDprintf("No VOL connector selected; using native VOL connector\n");
vol_connector_name = "native";
vol_connector_info = NULL;
}
else {
char *token;
if (NULL == (vol_connector_string_copy = HDstrdup(vol_connector_string))) {
HDfprintf(stderr, "Unable to copy VOL connector string\n");
err_occurred = TRUE;
goto done;
}
if (NULL == (token = HDstrtok(vol_connector_string_copy, " "))) {
HDfprintf(stderr, "Error while parsing VOL connector string\n");
err_occurred = TRUE;
goto done;
}
vol_connector_name = token;
if (NULL != (token = HDstrtok(NULL, " "))) {
vol_connector_info = token;
}
}
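The HDF5_VOL_CONNECTOR value is treated here as a connector name optionally followed by an info string, split on the first space. A standalone sketch of the same parsing, using plain libc calls instead of the HD wrappers (assumes a POSIX strdup; no HDF5 calls involved):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int
main(void)
{
    const char *env  = getenv("HDF5_VOL_CONNECTOR");
    char       *copy = NULL, *name = NULL, *info = NULL;

    if (NULL == env) {
        printf("No VOL connector selected; using native VOL connector\n");
        return 0;
    }

    if (NULL == (copy = strdup(env)))
        return 1;

    name = strtok(copy, " "); /* first token: connector name */
    info = strtok(NULL, " "); /* optional second token: connector info string */

    if (NULL == name) {
        free(copy);
        return 1;
    }

    printf("connector '%s', info '%s'\n", name, info ? info : "");

    free(copy);
    return 0;
}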
-   HDprintf("Running API tests with VOL connector '%s'\n\n", vol_connector_name);
+   HDprintf("Running API tests with VOL connector '%s' and info string '%s'\n\n", vol_connector_name,
+            vol_connector_info ? vol_connector_info : "");
HDprintf("Test parameters:\n");
HDprintf(" - Test file name: '%s'\n", H5_api_test_filename);
HDprintf(" - Test seed: %u\n", seed);
HDprintf("\n\n");
/* Retrieve the VOL cap flags - work around an HDF5
* library issue by creating a FAPL
*/
if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) {
HDfprintf(stderr, "Unable to create FAPL\n");
err_occurred = TRUE;
goto done;
}
/*
* If using a VOL connector other than the native
* connector, check whether the VOL connector was
* successfully registered before running the tests.
* Otherwise, HDF5 will default to running the tests
* with the native connector, which could be misleading.
*/
if (0 != HDstrcmp(vol_connector_name, "native")) {
htri_t is_registered;
if ((is_registered = H5VLis_connector_registered_by_name(vol_connector_name)) < 0) {
HDfprintf(stderr, "Unable to determine if VOL connector is registered\n");
err_occurred = TRUE;
goto done;
}
if (!is_registered) {
HDfprintf(stderr, "Specified VOL connector '%s' wasn't correctly registered!\n",
vol_connector_name);
err_occurred = TRUE;
goto done;
}
else {
/*
* If the connector was successfully registered, check that
* the connector ID set on the default FAPL matches the ID
* for the registered connector before running the tests.
*/
if (H5Pget_vol_id(fapl_id, &default_con_id) < 0) {
HDfprintf(stderr, "Couldn't retrieve ID of VOL connector set on default FAPL\n");
err_occurred = TRUE;
goto done;
}
if ((registered_con_id = H5VLget_connector_id_by_name(vol_connector_name)) < 0) {
HDfprintf(stderr, "Couldn't retrieve ID of registered VOL connector\n");
err_occurred = TRUE;
goto done;
}
if (default_con_id != registered_con_id) {
HDfprintf(stderr, "VOL connector set on default FAPL didn't match specified VOL connector\n");
err_occurred = TRUE;
goto done;
}
}
}
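A condensed sketch of the registration check above, written as a standalone helper (hypothetical function name, error reporting omitted): it confirms that the named connector actually registered and that the default FAPL resolves to that connector rather than silently falling back to another one.

#include "hdf5.h"

static int
check_vol_connector(const char *name)
{
    hid_t  fapl_id = H5I_INVALID_HID, def_id = H5I_INVALID_HID, reg_id = H5I_INVALID_HID;
    htri_t registered;
    int    ret = -1;

    /* Did the connector named in the environment register at all? */
    if ((registered = H5VLis_connector_registered_by_name(name)) <= 0)
        return -1;

    if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        return -1;
    if (H5Pget_vol_id(fapl_id, &def_id) < 0)
        goto done;
    if ((reg_id = H5VLget_connector_id_by_name(name)) < 0)
        goto done;

    /* The connector on the default FAPL must be the requested one */
    ret = (def_id == reg_id) ? 0 : -1;

done:
    if (reg_id >= 0)
        H5VLclose(reg_id);
    if (def_id >= 0)
        H5VLclose(def_id);
    if (fapl_id >= 0)
        H5Pclose(fapl_id);
    return ret;
}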
/* Retrieve the VOL cap flags - work around an HDF5
* library issue by creating a FAPL
*/
vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0) {
HDfprintf(stderr, "Unable to retrieve VOL connector capability flags\n");
@ -212,6 +288,18 @@ main(int argc, char **argv)
}
done:
HDfree(vol_connector_string_copy);
if (default_con_id >= 0 && H5VLclose(default_con_id) < 0) {
HDfprintf(stderr, "Unable to close VOL connector ID\n");
err_occurred = TRUE;
}
if (registered_con_id >= 0 && H5VLclose(registered_con_id) < 0) {
HDfprintf(stderr, "Unable to close VOL connector ID\n");
err_occurred = TRUE;
}
if (fapl_id >= 0 && H5Pclose(fapl_id) < 0) {
HDfprintf(stderr, "Unable to close FAPL\n");
err_occurred = TRUE;


@ -20,7 +20,6 @@
#include "H5_api_test_config.h"
#include "H5_api_test_util.h"
#include "H5_api_tests_disabled.h"
/* Define H5VL_VERSION if not already defined */
#ifndef H5VL_VERSION

File diff suppressed because it is too large.


@ -1,46 +0,0 @@
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
#ifndef H5_API_TESTS_DISABLED_H
#define H5_API_TESTS_DISABLED_H
#include "H5_api_test_config.h"
/* Contains #defines to temporarily disable API tests based
* on problematic or unsupported functionality */
#define NO_LARGE_TESTS
#define NO_ATTR_FILL_VALUE_SUPPORT
#define NO_DECREASING_ALPHA_ITER_ORDER
#define NO_USER_DEFINED_LINKS
#define NO_EXTERNAL_LINKS
#define NO_ITERATION_RESTART
#define NO_FILE_MOUNTS
#define NO_CLEAR_ON_SHRINK
#define NO_DOUBLE_OBJECT_OPENS
#define NO_OBJECT_GET_NAME
#define WRONG_DATATYPE_OBJ_COUNT
#define NO_SHARED_DATATYPES
#define NO_INVALID_PROPERTY_LIST_TESTS
#define NO_MAX_LINK_CRT_ORDER_RESET
#define NO_PREVENT_HARD_LINKS_ACROSS_FILES
#define NO_SOFT_LINK_MANY_DANGLING
#define NO_ID_PREVENTS_OBJ_DELETE
#define NO_WRITE_SAME_ELEMENT_TWICE
#define NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
#define NO_DELETE_NONEXISTENT_ATTRIBUTE
#define NO_TRUNCATE_OPEN_FILE
#define NO_CHECK_SELECTION_BOUNDS
#define NO_VALIDATE_DATASPACE
#define NO_REFERENCE_TO_DELETED
#endif /* H5_API_TESTS_DISABLED_H */
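Each of the macros in the deleted header above used to disable a test at compile time; the replacement is a run-time query of the connector's capability flags. The mapping below is illustrative only (it pairs a few of the removed guards with the flags that now gate the corresponding tests), not an official table from the commit:

#include "hdf5.h"
#include <stdio.h>

int
main(void)
{
    hid_t    fapl_id = H5Pcreate(H5P_FILE_ACCESS);
    uint64_t caps    = H5VL_CAP_FLAG_NONE;

    if (fapl_id < 0)
        return 1;
    if (H5Pget_vol_cap_flags(fapl_id, &caps) < 0) {
        H5Pclose(fapl_id);
        return 1;
    }

    /* NO_DECREASING_ALPHA_ITER_ORDER / NO_ITERATION_RESTART -> iteration support */
    printf("iteration:        %s\n", (caps & H5VL_CAP_FLAG_ITERATE) ? "yes" : "no");
    /* NO_SHARED_DATATYPES -> committed (stored) datatype support */
    printf("stored datatypes: %s\n", (caps & H5VL_CAP_FLAG_STORED_DATATYPES) ? "yes" : "no");
    /* NO_EXTERNAL_LINKS / NO_USER_DEFINED_LINKS -> link-type support */
    printf("external links:   %s\n", (caps & H5VL_CAP_FLAG_EXTERNAL_LINKS) ? "yes" : "no");
    printf("UD links:         %s\n", (caps & H5VL_CAP_FLAG_UD_LINKS) ? "yes" : "no");
    /* NO_ATTR_FILL_VALUE_SUPPORT -> fill value support */
    printf("fill values:      %s\n", (caps & H5VL_CAP_FLAG_FILL_VALUES) ? "yes" : "no");

    H5Pclose(fapl_id);
    return 0;
}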


@ -201,10 +201,8 @@ test_attr_basic_write(hid_t fapl)
hsize_t dims3[] = {ATTR2_DIM1, ATTR2_DIM2};
int read_data1[ATTR1_DIM1] = {0}; /* Buffer for reading 1st attribute */
int i;
-#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
-   hid_t ret_id; /* Generic hid_t return value */
-#endif
-   herr_t ret;   /* Generic return value */
+   hid_t  ret_id; /* Generic hid_t return value */
+   herr_t ret;    /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Scalar Attribute Writing Functions\n"));
@ -252,15 +250,17 @@ test_attr_basic_write(hid_t fapl)
/* Create an attribute for the dataset */
attr = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
CHECK(attr, FAIL, "H5Acreate2");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) {
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
#endif
/* Write attribute information */
ret = H5Awrite(attr, H5T_NATIVE_INT, attr_data1);
CHECK(ret, FAIL, "H5Awrite");
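This hunk and several later ones in tattr.c share one pattern: the "expected failure" call is attempted only when the connector reports the relevant capability, and H5E_BEGIN_TRY/H5E_END_TRY suppresses the error stack that the expected failure produces. A reduced sketch of that pattern (hypothetical attribute name, plain printf instead of the test harness macros):

#include "hdf5.h"
#include <stdio.h>

extern uint64_t vol_cap_flags_g; /* filled in once from H5Pget_vol_cap_flags() */

static void
try_duplicate_attribute(hid_t dset_id, hid_t space_id)
{
    if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) {
        hid_t dup_id = H5I_INVALID_HID;

        /* Creating an attribute that already exists must fail */
        H5E_BEGIN_TRY
        {
            dup_id = H5Acreate2(dset_id, "existing_attr", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT);
        }
        H5E_END_TRY;

        if (dup_id >= 0) {
            printf("unexpected success creating duplicate attribute\n");
            H5Aclose(dup_id);
        }
    }
    else
        printf("check skipped: connector lacks basic attribute support\n");
}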
@ -398,15 +398,17 @@ test_attr_basic_write(hid_t fapl)
attr_size = H5Aget_storage_size(attr);
VERIFY(attr_size, (ATTR2_DIM1 * ATTR2_DIM2 * sizeof(int)), "H5Aget_storage_size");
#endif
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(group, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) {
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(group, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
#endif
/* Write attribute information */
ret = H5Awrite(attr, H5T_NATIVE_INT, attr_data2);
CHECK(ret, FAIL, "H5Awrite");
@ -555,24 +557,28 @@ test_attr_flush(hid_t fapl)
att = H5Acreate2(set, ATTR1_NAME, H5T_NATIVE_DOUBLE, spc, H5P_DEFAULT, H5P_DEFAULT);
CHECK(att, FAIL, "H5Acreate2");
-#ifndef NO_ATTR_FILL_VALUE_SUPPORT
-   ret = H5Aread(att, H5T_NATIVE_DOUBLE, &rdata);
-   CHECK(ret, FAIL, "H5Aread");
-   if (!H5_DBL_ABS_EQUAL(rdata, 0.0))
-       TestErrPrintf("attribute value wrong: rdata=%f, should be %f\n", rdata, 0.0);
+   if ((vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) && (vol_cap_flags_g & H5VL_CAP_FLAG_FILL_VALUES) &&
+       (vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE)) {
+       ret = H5Aread(att, H5T_NATIVE_DOUBLE, &rdata);
+       CHECK(ret, FAIL, "H5Aread");
-   ret = H5Fflush(fil, H5F_SCOPE_GLOBAL);
-   CHECK(ret, FAIL, "H5Fflush");
+       if (!H5_DBL_ABS_EQUAL(rdata, 0.0))
+           TestErrPrintf("attribute value wrong: rdata=%f, should be %f\n", rdata, 0.0);
-   ret = H5Aread(att, H5T_NATIVE_DOUBLE, &rdata);
-   CHECK(ret, FAIL, "H5Awrite");
+       ret = H5Fflush(fil, H5F_SCOPE_GLOBAL);
+       CHECK(ret, FAIL, "H5Fflush");
+       ret = H5Aread(att, H5T_NATIVE_DOUBLE, &rdata);
+       CHECK(ret, FAIL, "H5Awrite");
+       if (!H5_DBL_ABS_EQUAL(rdata, 0.0))
+           TestErrPrintf("attribute value wrong: rdata=%f, should be %f\n", rdata, 0.0);
+   }
+   else {
+       HDprintf("** SKIPPED attribute pre-read due to fill values not being supported **\n");
+   }
-   if (!H5_DBL_ABS_EQUAL(rdata, 0.0))
-       TestErrPrintf("attribute value wrong: rdata=%f, should be %f\n", rdata, 0.0);
-#else
-   HDprintf("** SKIPPED attribute pre-read temporarily until attribute fill values supported **\n");
-#endif
ret = H5Awrite(att, H5T_NATIVE_DOUBLE, &wdata);
CHECK(ret, FAIL, "H5Awrite");
@ -731,10 +737,8 @@ test_attr_compound_write(hid_t fapl)
hid_t attr; /* Attribute ID */
hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
hsize_t dims2[] = {ATTR4_DIM1, ATTR4_DIM2};
-#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
-   hid_t ret_id; /* Generic hid_t return value */
-#endif
-   herr_t ret;   /* Generic return value */
+   hid_t  ret_id; /* Generic hid_t return value */
+   herr_t ret;    /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Multiple Attribute Functions\n"));
@ -775,15 +779,17 @@ test_attr_compound_write(hid_t fapl)
/* Create complex attribute for the dataset */
attr = H5Acreate2(dataset, ATTR4_NAME, tid1, sid2, H5P_DEFAULT, H5P_DEFAULT);
CHECK(attr, FAIL, "H5Acreate2");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR4_NAME, tid1, sid2, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) {
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR4_NAME, tid1, sid2, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
#endif
/* Write complex attribute data */
ret = H5Awrite(attr, tid1, attr_data4);
CHECK(ret, FAIL, "H5Awrite");
@ -975,10 +981,8 @@ test_attr_scalar_write(hid_t fapl)
hid_t sid1, sid2; /* Dataspace ID */
hid_t attr; /* Attribute ID */
hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
-#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
-   hid_t ret_id; /* Generic hid_t return value */
-#endif
-   herr_t ret;   /* Generic return value */
+   hid_t  ret_id; /* Generic hid_t return value */
+   herr_t ret;    /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Attribute Functions\n"));
@ -1002,15 +1006,17 @@ test_attr_scalar_write(hid_t fapl)
/* Create an attribute for the dataset */
attr = H5Acreate2(dataset, ATTR5_NAME, H5T_NATIVE_FLOAT, sid2, H5P_DEFAULT, H5P_DEFAULT);
CHECK(attr, FAIL, "H5Acreate2");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR5_NAME, H5T_NATIVE_FLOAT, sid2, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) {
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR5_NAME, H5T_NATIVE_FLOAT, sid2, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
#endif
/* Write attribute information */
ret = H5Awrite(attr, H5T_NATIVE_FLOAT, &attr_data5);
CHECK(ret, FAIL, "H5Awrite");
@ -1120,10 +1126,8 @@ test_attr_mult_write(hid_t fapl)
hsize_t dims2[] = {ATTR1_DIM1};
hsize_t dims3[] = {ATTR2_DIM1, ATTR2_DIM2};
hsize_t dims4[] = {ATTR3_DIM1, ATTR3_DIM2, ATTR3_DIM3};
-#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
-   hid_t ret_id; /* Generic hid_t return value */
-#endif
-   herr_t ret;   /* Generic return value */
+   hid_t  ret_id; /* Generic hid_t return value */
+   herr_t ret;    /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Multiple Attribute Functions\n"));
@ -1151,15 +1155,17 @@ test_attr_mult_write(hid_t fapl)
/* Create 1st attribute for the dataset */
attr = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
CHECK(attr, FAIL, "H5Acreate2");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) {
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
#endif
/* Write 1st attribute data */
ret = H5Awrite(attr, H5T_NATIVE_INT, attr_data1);
CHECK(ret, FAIL, "H5Awrite");
@ -1179,15 +1185,17 @@ test_attr_mult_write(hid_t fapl)
/* Create 2nd attribute for the dataset */
attr = H5Acreate2(dataset, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
CHECK(attr, FAIL, "H5Acreate2");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) {
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
#endif
/* Write 2nd attribute information */
ret = H5Awrite(attr, H5T_NATIVE_INT, attr_data2);
CHECK(ret, FAIL, "H5Awrite");
@ -1207,15 +1215,17 @@ test_attr_mult_write(hid_t fapl)
/* Create 3rd attribute for the dataset */
attr = H5Acreate2(dataset, ATTR3_NAME, H5T_NATIVE_DOUBLE, sid2, H5P_DEFAULT, H5P_DEFAULT);
CHECK(attr, FAIL, "H5Acreate2");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR3_NAME, H5T_NATIVE_DOUBLE, sid2, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) {
/* Try to create the same attribute again (should fail) */
H5E_BEGIN_TRY
{
ret_id = H5Acreate2(dataset, ATTR3_NAME, H5T_NATIVE_DOUBLE, sid2, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
}
H5E_END_TRY;
VERIFY(ret_id, FAIL, "H5Acreate2");
#endif
/* Write 3rd attribute information */
ret = H5Awrite(attr, H5T_NATIVE_DOUBLE, attr_data3);
CHECK(ret, FAIL, "H5Awrite");
@ -1605,15 +1615,17 @@ test_attr_delete(hid_t fapl)
ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
CHECK(ret, FAIL, "H5Oget_info3");
VERIFY(oinfo.num_attrs, 3, "H5Oget_info3");
#ifndef NO_DELETE_NONEXISTENT_ATTRIBUTE
/* Try to delete bogus attribute */
H5E_BEGIN_TRY
{
ret = H5Adelete(dataset, "Bogus");
if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) {
/* Try to delete bogus attribute */
H5E_BEGIN_TRY
{
ret = H5Adelete(dataset, "Bogus");
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Adelete");
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Adelete");
#endif
/* Verify the correct number of attributes */
ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
CHECK(ret, FAIL, "H5Oget_info3");
@ -1709,7 +1721,6 @@ test_attr_delete(hid_t fapl)
static void
test_attr_dtype_shared(hid_t fapl)
{
#ifndef NO_SHARED_DATATYPES
hid_t file_id; /* File ID */
hid_t dset_id; /* Dataset ID */
hid_t space_id; /* Dataspace ID for dataset & attribute */
@ -1723,164 +1734,162 @@ test_attr_dtype_shared(hid_t fapl)
h5_stat_size_t filesize; /* Size of file after modifications */
#endif
herr_t ret; /* Generic return value */
#else
(void)fapl;
#endif
/* Output message about test being performed */
MESSAGE(5, ("Testing Shared Datatypes with Attributes - SKIPPED for now due to no support for shared "
"datatypes\n"));
#ifndef NO_SHARED_DATATYPES
/* Create a file */
file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
CHECK(file_id, FAIL, "H5Fopen");
MESSAGE(5, ("Testing Shared Datatypes with Attributes\n"));
/* Close file */
ret = H5Fclose(file_id);
CHECK(ret, FAIL, "H5Fclose");
if ((vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) && (vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
/* Create a file */
file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
CHECK(file_id, FAIL, "H5Fopen");
/* Close file */
ret = H5Fclose(file_id);
CHECK(ret, FAIL, "H5Fclose");
#if 0
/* Get size of file */
empty_filesize = h5_get_file_size(FILENAME, fapl);
if (empty_filesize < 0)
TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
/* Get size of file */
empty_filesize = h5_get_file_size(FILENAME, fapl);
if (empty_filesize < 0)
TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
#endif
/* Re-open file */
file_id = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
CHECK(file_id, FAIL, "H5Fopen");
/* Re-open file */
file_id = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
CHECK(file_id, FAIL, "H5Fopen");
/* Create a datatype to commit and use */
type_id = H5Tcopy(H5T_NATIVE_INT);
CHECK(type_id, FAIL, "H5Tcopy");
/* Create a datatype to commit and use */
type_id = H5Tcopy(H5T_NATIVE_INT);
CHECK(type_id, FAIL, "H5Tcopy");
/* Commit datatype to file */
ret = H5Tcommit2(file_id, TYPE1_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Tcommit2");
/* Commit datatype to file */
ret = H5Tcommit2(file_id, TYPE1_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Tcommit2");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 1, "H5Oget_info_by_name3");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 1, "H5Oget_info_by_name3");
/* Create dataspace for dataset */
space_id = H5Screate(H5S_SCALAR);
CHECK(space_id, FAIL, "H5Screate");
/* Create dataspace for dataset */
space_id = H5Screate(H5S_SCALAR);
CHECK(space_id, FAIL, "H5Screate");
/* Create dataset */
dset_id = H5Dcreate2(file_id, DSET1_NAME, type_id, space_id, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
CHECK(dset_id, FAIL, "H5Dcreate2");
/* Create dataset */
dset_id = H5Dcreate2(file_id, DSET1_NAME, type_id, space_id, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
CHECK(dset_id, FAIL, "H5Dcreate2");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 2, "H5Oget_info_by_name3");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 2, "H5Oget_info_by_name3");
/* Create attribute on dataset */
attr_id = H5Acreate2(dset_id, ATTR1_NAME, type_id, space_id, H5P_DEFAULT, H5P_DEFAULT);
CHECK(attr_id, FAIL, "H5Acreate2");
/* Create attribute on dataset */
attr_id = H5Acreate2(dset_id, ATTR1_NAME, type_id, space_id, H5P_DEFAULT, H5P_DEFAULT);
CHECK(attr_id, FAIL, "H5Acreate2");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 3, "H5Oget_info_by_name3");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 3, "H5Oget_info_by_name3");
/* Close attribute */
ret = H5Aclose(attr_id);
CHECK(ret, FAIL, "H5Aclose");
/* Close attribute */
ret = H5Aclose(attr_id);
CHECK(ret, FAIL, "H5Aclose");
/* Delete attribute */
ret = H5Adelete(dset_id, ATTR1_NAME);
CHECK(ret, FAIL, "H5Adelete");
/* Delete attribute */
ret = H5Adelete(dset_id, ATTR1_NAME);
CHECK(ret, FAIL, "H5Adelete");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 2, "H5Oget_info_by_name3");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 2, "H5Oget_info_by_name3");
/* Create attribute on dataset */
attr_id = H5Acreate2(dset_id, ATTR1_NAME, type_id, space_id, H5P_DEFAULT, H5P_DEFAULT);
CHECK(attr_id, FAIL, "H5Acreate2");
/* Create attribute on dataset */
attr_id = H5Acreate2(dset_id, ATTR1_NAME, type_id, space_id, H5P_DEFAULT, H5P_DEFAULT);
CHECK(attr_id, FAIL, "H5Acreate2");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 3, "H5Oget_info_by_name3");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 3, "H5Oget_info_by_name3");
/* Write data into the attribute */
ret = H5Awrite(attr_id, H5T_NATIVE_INT, &data);
CHECK(ret, FAIL, "H5Awrite");
/* Write data into the attribute */
ret = H5Awrite(attr_id, H5T_NATIVE_INT, &data);
CHECK(ret, FAIL, "H5Awrite");
/* Close attribute */
ret = H5Aclose(attr_id);
CHECK(ret, FAIL, "H5Aclose");
/* Close attribute */
ret = H5Aclose(attr_id);
CHECK(ret, FAIL, "H5Aclose");
/* Close dataset */
ret = H5Dclose(dset_id);
CHECK(ret, FAIL, "H5Dclose");
/* Close dataset */
ret = H5Dclose(dset_id);
CHECK(ret, FAIL, "H5Dclose");
/* Close dataspace */
ret = H5Sclose(space_id);
CHECK(ret, FAIL, "H5Sclose");
/* Close dataspace */
ret = H5Sclose(space_id);
CHECK(ret, FAIL, "H5Sclose");
/* Close datatype */
ret = H5Tclose(type_id);
CHECK(ret, FAIL, "H5Tclose");
/* Close datatype */
ret = H5Tclose(type_id);
CHECK(ret, FAIL, "H5Tclose");
/* Close file */
ret = H5Fclose(file_id);
CHECK(ret, FAIL, "H5Fclose");
/* Close file */
ret = H5Fclose(file_id);
CHECK(ret, FAIL, "H5Fclose");
/* Re-open file */
file_id = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
CHECK(file_id, FAIL, "H5Fopen");
/* Re-open file */
file_id = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
CHECK(file_id, FAIL, "H5Fopen");
/* Open dataset */
dset_id = H5Dopen2(file_id, DSET1_NAME, H5P_DEFAULT);
CHECK(dset_id, FAIL, "H5Dopen2");
/* Open dataset */
dset_id = H5Dopen2(file_id, DSET1_NAME, H5P_DEFAULT);
CHECK(dset_id, FAIL, "H5Dopen2");
/* Open attribute */
attr_id = H5Aopen(dset_id, ATTR1_NAME, H5P_DEFAULT);
CHECK(attr_id, FAIL, "H5Aopen");
/* Open attribute */
attr_id = H5Aopen(dset_id, ATTR1_NAME, H5P_DEFAULT);
CHECK(attr_id, FAIL, "H5Aopen");
/* Read data from the attribute */
ret = H5Aread(attr_id, H5T_NATIVE_INT, &rdata);
CHECK(ret, FAIL, "H5Aread");
VERIFY(data, rdata, "H5Aread");
/* Read data from the attribute */
ret = H5Aread(attr_id, H5T_NATIVE_INT, &rdata);
CHECK(ret, FAIL, "H5Aread");
VERIFY(data, rdata, "H5Aread");
/* Close attribute */
ret = H5Aclose(attr_id);
CHECK(ret, FAIL, "H5Aclose");
/* Close attribute */
ret = H5Aclose(attr_id);
CHECK(ret, FAIL, "H5Aclose");
/* Close dataset */
ret = H5Dclose(dset_id);
CHECK(ret, FAIL, "H5Dclose");
/* Close dataset */
ret = H5Dclose(dset_id);
CHECK(ret, FAIL, "H5Dclose");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 3, "H5Oget_info_by_name3");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 3, "H5Oget_info_by_name3");
/* Unlink the dataset */
ret = H5Ldelete(file_id, DSET1_NAME, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Ldelete");
/* Unlink the dataset */
ret = H5Ldelete(file_id, DSET1_NAME, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Ldelete");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 1, "H5Oget_info_by_name3");
/* Check reference count on named datatype */
ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name3");
VERIFY(oinfo.rc, 1, "H5Oget_info_by_name3");
/* Unlink the named datatype */
ret = H5Ldelete(file_id, TYPE1_NAME, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Ldelete");
/* Unlink the named datatype */
ret = H5Ldelete(file_id, TYPE1_NAME, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Ldelete");
/* Close file */
ret = H5Fclose(file_id);
CHECK(ret, FAIL, "H5Fclose");
/* Close file */
ret = H5Fclose(file_id);
CHECK(ret, FAIL, "H5Fclose");
#if 0
/* Check size of file */
filesize = h5_get_file_size(FILENAME, fapl);
VERIFY(filesize, empty_filesize, "h5_get_file_size");
#endif
/* Check size of file */
filesize = h5_get_file_size(FILENAME, fapl);
VERIFY(filesize, empty_filesize, "h5_get_file_size");
#endif
}
} /* test_attr_dtype_shared() */
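For readers skimming the converted function above, the core flow it exercises, stripped of the reference-count checks, is roughly the following. This is a sketch under the assumption that the connector reports H5VL_CAP_FLAG_STORED_DATATYPES; the object names are placeholders.

#include "hdf5.h"

static herr_t
attr_on_committed_type(hid_t file_id)
{
    hid_t  type_id  = H5Tcopy(H5T_NATIVE_INT);
    hid_t  space_id = H5Screate(H5S_SCALAR);
    hid_t  dset_id  = H5I_INVALID_HID, attr_id = H5I_INVALID_HID;
    herr_t ret      = -1;

    if (type_id < 0 || space_id < 0)
        goto done;

    /* Commit the datatype, then use it for both a dataset and an attribute */
    if (H5Tcommit2(file_id, "shared_type", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0)
        goto done;
    if ((dset_id = H5Dcreate2(file_id, "dset", type_id, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto done;
    if ((attr_id = H5Acreate2(dset_id, "attr", type_id, space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto done;

    ret = 0;

done:
    if (attr_id >= 0)
        H5Aclose(attr_id);
    if (dset_id >= 0)
        H5Dclose(dset_id);
    if (space_id >= 0)
        H5Sclose(space_id);
    if (type_id >= 0)
        H5Tclose(type_id);
    return ret;
}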
/****************************************************************
@ -2313,15 +2322,17 @@ test_attr_dense_create(hid_t fcpl, hid_t fapl)
/* Close attribute */
ret = H5Aclose(attr);
CHECK(ret, FAIL, "H5Aclose");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
/* Attempt to add attribute again, which should fail */
H5E_BEGIN_TRY
{
attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) {
/* Attempt to add attribute again, which should fail */
H5E_BEGIN_TRY
{
attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(attr, FAIL, "H5Acreate2");
}
H5E_END_TRY;
VERIFY(attr, FAIL, "H5Acreate2");
#endif
/* Close dataspace */
ret = H5Sclose(sid);
CHECK(ret, FAIL, "H5Sclose");
@ -5911,11 +5922,10 @@ attr_info_by_idx_check(hid_t obj_id, const char *attrname, hsize_t n, hbool_t us
if (HDstrcmp(attrname, tmpname) != 0)
TestErrPrintf("Line %d: attribute name size wrong!\n", __LINE__);
/* Don't test "native" order queries on link name order, since there's not
* a good way to easily predict the order of the links in the name index.
*/
/* Don't test "native" order queries on link name order, since there's not
* a good way to easily predict the order of the links in the name index.
*/
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
/* Verify the information for first attribute, in decreasing name order */
HDmemset(&ainfo, 0, sizeof(ainfo));
ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_NAME, H5_ITER_DEC, n, &ainfo, H5P_DEFAULT);
@ -5935,7 +5945,7 @@ attr_info_by_idx_check(hid_t obj_id, const char *attrname, hsize_t n, hbool_t us
CHECK(ret, FAIL, "H5Aget_name_by_idx");
if (HDstrcmp(attrname, tmpname) != 0)
TestErrPrintf("Line %d: attribute name size wrong!\n", __LINE__);
#endif
/* Retrieve current # of errors */
if (old_nerrs == nerrors)
return (0);
@ -6463,6 +6473,11 @@ test_attr_delete_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
MESSAGE(5, ("Testing Deleting Attribute By Index\n"))
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
MESSAGE(5, (" SKIPPED\n"))
return;
}
/* Create dataspace for dataset & attributes */
sid = H5Screate(H5S_SCALAR);
CHECK(sid, FAIL, "H5Screate");
@ -6516,18 +6531,14 @@ test_attr_delete_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
"Creation Order Index\n"))
} /* end if */
else {
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
if (use_index)
MESSAGE(5, ("Testing Deleting Attribute By Name Index in Decreasing Order "
"w/Creation Order Index\n"))
else
MESSAGE(5, ("Testing Deleting Attribute By Name Index in Decreasing Order w/o "
"Creation Order Index\n"))
#else
continue;
#endif
} /* end else */
} /* end else */
}
} /* end else */
/* Create file */
fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
@ -7096,7 +7107,6 @@ attr_iterate1_cb(hid_t loc_id, const char *attr_name, void *_op_data)
#endif /* H5_NO_DEPRECATED_SYMBOLS */
#endif
#ifndef NO_ITERATION_RESTART
/*-------------------------------------------------------------------------
* Function: attr_iterate2_fail_cb
*
@ -7148,6 +7158,13 @@ attr_iterate_check(hid_t fid, const char *dsetname, hid_t obj_id, H5_index_t idx
/* Retrieve the current # of reported errors */
old_nerrs = nerrors;
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE)) {
SKIPPED();
HDprintf(" API functions for iterate aren't "
"supported with this connector\n");
return 1;
}
/* Iterate over attributes on object */
iter_info->nskipped = (unsigned)(skip = 0);
iter_info->order = order;
@ -7425,7 +7442,6 @@ attr_iterate_check(hid_t fid, const char *dsetname, hid_t obj_id, H5_index_t idx
else
return (-1);
} /* end attr_iterate_check() */
#endif
/****************************************************************
**
@ -7451,19 +7467,21 @@ test_attr_iterate2(hbool_t new_format, hid_t fcpl, hid_t fapl)
hsize_t name_count; /* # of records in name index */
hsize_t corder_count; /* # of records in creation order index */
#endif
H5_index_t idx_type; /* Type of index to operate on */
H5_iter_order_t order; /* Order within in the index */
attr_iter_info_t iter_info; /* Iterator info */
hbool_t *visited = NULL; /* Array of flags for visiting links */
#ifndef NO_ITERATION_RESTART
hsize_t idx; /* Start index for iteration */
#endif
unsigned use_index; /* Use index on creation order values */
const char *dsetname; /* Name of dataset for attributes */
char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
herr_t ret; /* Generic return value */
H5_index_t idx_type; /* Type of index to operate on */
H5_iter_order_t order; /* Order within in the index */
attr_iter_info_t iter_info; /* Iterator info */
hbool_t *visited = NULL; /* Array of flags for visiting links */
hsize_t idx; /* Start index for iteration */
unsigned use_index; /* Use index on creation order values */
const char *dsetname; /* Name of dataset for attributes */
char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
herr_t ret; /* Generic return value */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
return;
}
/* Create dataspace for dataset & attributes */
sid = H5Screate(H5S_SCALAR);
@ -7524,16 +7542,12 @@ test_attr_iterate2(hbool_t new_format, hid_t fcpl, hid_t fapl)
"w/o Creation Order Index\n"))
} /* end if */
else {
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
if (use_index)
MESSAGE(5, ("Testing Iterating over Attributes By Name Index in Decreasing Order "
"w/Creation Order Index\n"))
else
MESSAGE(5, ("Testing Iterating over Attributes By Name Index in Decreasing Order "
"w/o Creation Order Index\n"))
#else
continue;
#endif
} /* end else */
} /* end else */
@ -7626,38 +7640,39 @@ test_attr_iterate2(hbool_t new_format, hid_t fcpl, hid_t fapl)
is_dense = H5O__is_attr_dense_test(my_dataset);
VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
#endif
#ifndef NO_ITERATION_RESTART
/* Check for out of bound iteration */
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate2(my_dataset, idx_type, order, &idx, attr_iterate2_cb, NULL);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate2");
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, &idx, attr_iterate2_cb, NULL,
H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate_by_name");
if (vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) {
/* Check for out of bound iteration */
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate2(my_dataset, idx_type, order, &idx, attr_iterate2_cb, NULL);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate2");
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate_by_name(my_dataset, ".", idx_type, order, &idx, attr_iterate2_cb,
NULL, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate_by_name");
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, &idx, attr_iterate2_cb,
NULL, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate_by_name");
/* Test iteration over attributes stored compactly */
ret = attr_iterate_check(fid, dsetname, my_dataset, idx_type, order, u, &iter_info);
CHECK(ret, FAIL, "attr_iterate_check");
#endif
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate_by_name(my_dataset, ".", idx_type, order, &idx, attr_iterate2_cb,
NULL, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate_by_name");
/* Test iteration over attributes stored compactly */
ret = attr_iterate_check(fid, dsetname, my_dataset, idx_type, order, u, &iter_info);
CHECK(ret, FAIL, "attr_iterate_check");
}
} /* end for */
/* Work on all the datasets */
@ -7725,41 +7740,42 @@ test_attr_iterate2(hbool_t new_format, hid_t fcpl, hid_t fapl)
if (use_index)
VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
VERIFY(name_count, (max_compact * 2), "H5O__attr_dense_info_test");
} /* end if */
}
#endif
#ifndef NO_ITERATION_RESTART
/* Check for out of bound iteration */
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate2(my_dataset, idx_type, order, &idx, attr_iterate2_cb, NULL);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate2");
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, &idx, attr_iterate2_cb, NULL,
H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate_by_name");
if (vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) {
/* Check for out of bound iteration */
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate2(my_dataset, idx_type, order, &idx, attr_iterate2_cb, NULL);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate2");
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate_by_name(my_dataset, ".", idx_type, order, &idx, attr_iterate2_cb,
NULL, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate_by_name");
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, &idx, attr_iterate2_cb,
NULL, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate_by_name");
/* Test iteration over attributes stored densely */
ret = attr_iterate_check(fid, dsetname, my_dataset, idx_type, order, u, &iter_info);
CHECK(ret, FAIL, "attr_iterate_check");
#endif
} /* end for */
idx = u;
H5E_BEGIN_TRY
{
ret = H5Aiterate_by_name(my_dataset, ".", idx_type, order, &idx, attr_iterate2_cb,
NULL, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Aiterate_by_name");
/* Test iteration over attributes stored densely */
ret = attr_iterate_check(fid, dsetname, my_dataset, idx_type, order, u, &iter_info);
CHECK(ret, FAIL, "attr_iterate_check");
}
}
/* Close Datasets */
ret = H5Dclose(dset1);
@ -7879,6 +7895,10 @@ test_attr_open_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
hid_t ret_id; /* Generic hid_t return value */
herr_t ret; /* Generic return value */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
return;
}
/* Create dataspace for dataset & attributes */
sid = H5Screate(H5S_SCALAR);
CHECK(sid, FAIL, "H5Screate");
@ -7932,16 +7952,12 @@ test_attr_open_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
"Creation Order Index\n"))
} /* end if */
else {
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
if (use_index)
MESSAGE(5, ("Testing Opening Attributes By Name Index in Decreasing Order "
"w/Creation Order Index\n"))
else
MESSAGE(5, ("Testing Opening Attributes By Name Index in Decreasing Order w/o "
"Creation Order Index\n"))
#else
continue;
#endif
} /* end else */
} /* end else */


@ -41,6 +41,8 @@ char *paraprefix = NULL; /* for command line option para-prefix */
/* Length of multi-file VFD filename buffers */
#define H5TEST_MULTI_FILENAME_LEN 1024
uint64_t vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
/*
* This routine is designed to provide equivalent functionality to 'printf'
* and allow easy replacement for environments which don't have stdin/stdout


@ -21,7 +21,6 @@
/* #include "h5test.h" */
#include "hdf5.h"
#include "H5private.h"
#include "H5_api_tests_disabled.h"
#define VERBO_NONE 0 /* None */
#define VERBO_DEF 3 /* Default */
@ -343,6 +342,9 @@ void cleanup_sohm(void);
void cleanup_misc(void);
void cleanup_unicode(void);
/* Extern global variables */
extern uint64_t vol_cap_flags_g;
#ifdef __cplusplus
}
#endif


@ -258,18 +258,20 @@ test_file_create(void)
/* Create with H5F_ACC_EXCL */
fid1 = H5Fcreate(FILE1, H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT);
CHECK(fid1, FAIL, "H5Fcreate");
#ifndef NO_TRUNCATE_OPEN_FILE
/*
* try to create the same file with H5F_ACC_TRUNC. This should fail
* because fid1 is the same file and is currently open.
*/
H5E_BEGIN_TRY
{
fid2 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) {
/*
* try to create the same file with H5F_ACC_TRUNC. This should fail
* because fid1 is the same file and is currently open.
*/
H5E_BEGIN_TRY
{
fid2 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(fid2, FAIL, "H5Fcreate");
}
H5E_END_TRY;
VERIFY(fid2, FAIL, "H5Fcreate");
#endif
/* Close all files */
ret = H5Fclose(fid1);
CHECK(ret, FAIL, "H5Fclose");
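The guarded check above boils down to one expectation: creating a file with H5F_ACC_TRUNC while the same file is still open through another ID must fail. A compact sketch, with a placeholder file name:

#include "hdf5.h"

static int
check_trunc_of_open_file(void)
{
    hid_t fid1, fid2;
    int   ret = -1;

    if ((fid1 = H5Fcreate("already_open.h5", H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        return -1;

    /* Truncating the file while fid1 is open should fail */
    H5E_BEGIN_TRY
    {
        fid2 = H5Fcreate("already_open.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    }
    H5E_END_TRY;

    if (fid2 < 0)
        ret = 0; /* expected failure */
    else
        H5Fclose(fid2);

    H5Fclose(fid1);
    return ret;
}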
@ -295,18 +297,20 @@ test_file_create(void)
/* Test create with H5F_ACC_TRUNC. This will truncate the existing file. */
fid1 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
CHECK(fid1, FAIL, "H5Fcreate");
#ifndef NO_TRUNCATE_OPEN_FILE
/*
* Try to truncate first file again. This should fail because fid1 is the
* same file and is currently open.
*/
H5E_BEGIN_TRY
{
fid2 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) {
/*
* Try to truncate first file again. This should fail because fid1 is the
* same file and is currently open.
*/
H5E_BEGIN_TRY
{
fid2 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY;
VERIFY(fid2, FAIL, "H5Fcreate");
}
H5E_END_TRY;
VERIFY(fid2, FAIL, "H5Fcreate");
#endif
/*
* Try with H5F_ACC_EXCL. This should fail too because the file already
* exists.
@ -1283,30 +1287,31 @@ test_get_obj_ids(void)
/* Close the file first */
H5Fclose(fid);
#ifndef WRONG_DATATYPE_OBJ_COUNT
/* Get the number of all opened objects */
oid_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_ALL);
CHECK(oid_count, FAIL, "H5Fget_obj_count");
VERIFY(oid_count, NDSETS, "H5Fget_obj_count");
oid_list = (hid_t *)HDcalloc((size_t)oid_count, sizeof(hid_t));
CHECK_PTR(oid_list, "HDcalloc");
if (vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE) {
/* Get the number of all opened objects */
oid_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_ALL);
CHECK(oid_count, FAIL, "H5Fget_obj_count");
VERIFY(oid_count, NDSETS, "H5Fget_obj_count");
/* Get the list of all opened objects */
ret_count = H5Fget_obj_ids((hid_t)H5F_OBJ_ALL, H5F_OBJ_ALL, (size_t)oid_count, oid_list);
CHECK(ret_count, FAIL, "H5Fget_obj_ids");
VERIFY(ret_count, NDSETS, "H5Fget_obj_ids");
oid_list = (hid_t *)HDcalloc((size_t)oid_count, sizeof(hid_t));
CHECK_PTR(oid_list, "HDcalloc");
H5E_BEGIN_TRY
{
/* Close all open objects with H5Oclose */
for (n = 0; n < oid_count; n++)
H5Oclose(oid_list[n]);
/* Get the list of all opened objects */
ret_count = H5Fget_obj_ids((hid_t)H5F_OBJ_ALL, H5F_OBJ_ALL, (size_t)oid_count, oid_list);
CHECK(ret_count, FAIL, "H5Fget_obj_ids");
VERIFY(ret_count, NDSETS, "H5Fget_obj_ids");
H5E_BEGIN_TRY
{
/* Close all open objects with H5Oclose */
for (n = 0; n < oid_count; n++)
H5Oclose(oid_list[n]);
}
H5E_END_TRY;
HDfree(oid_list);
}
H5E_END_TRY;
HDfree(oid_list);
#endif
}
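H5Fget_obj_count/H5Fget_obj_ids, now exercised only when the connector reports H5VL_CAP_FLAG_FILE_MORE, can be used on their own as sketched below (assumes fid is an open file ID; purely for illustration):

#include "hdf5.h"
#include <stdio.h>
#include <stdlib.h>

static void
report_open_objects(hid_t fid)
{
    ssize_t count = H5Fget_obj_count(fid, H5F_OBJ_ALL);

    if (count > 0) {
        hid_t *ids = (hid_t *)calloc((size_t)count, sizeof(hid_t));

        if (ids && H5Fget_obj_ids(fid, H5F_OBJ_ALL, (size_t)count, ids) == count) {
            for (ssize_t i = 0; i < count; i++)
                printf("open object with ID %lld\n", (long long)ids[i]);
        }
        free(ids);
    }
}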
/****************************************************************
@ -2342,11 +2347,13 @@ test_file_open_overlap(void)
/* Create dataset in group w/first file ID */
did1 = H5Dcreate2(gid, DSET1, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(did1, FAIL, "H5Dcreate2");
#ifndef WRONG_DATATYPE_OBJ_COUNT
/* Check number of objects opened in first file */
nobjs = H5Fget_obj_count(fid1, H5F_OBJ_LOCAL | H5F_OBJ_ALL);
VERIFY(nobjs, 3, "H5Fget_obj_count"); /* 3 == file, dataset & group */
#endif
if (vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE) {
/* Check number of objects opened in first file */
nobjs = H5Fget_obj_count(fid1, H5F_OBJ_LOCAL | H5F_OBJ_ALL);
VERIFY(nobjs, 3, "H5Fget_obj_count"); /* 3 == file, dataset & group */
}
/* Close dataset */
ret = H5Dclose(did1);
CHECK(ret, FAIL, "H5Dclose");


@ -115,13 +115,11 @@ struct space4_struct {
static void
test_h5s_basic(void)
{
hid_t fid1; /* HDF5 File IDs */
hid_t sid1, sid2; /* Dataspace ID */
hid_t dset1; /* Dataset ID */
#ifndef NO_VALIDATE_DATASPACE
hid_t aid1; /* Attribute ID */
#endif
int rank; /* Logical rank of dataspace */
hid_t fid1; /* HDF5 File IDs */
hid_t sid1, sid2; /* Dataspace ID */
hid_t dset1; /* Dataset ID */
hid_t aid1; /* Attribute ID */
int rank; /* Logical rank of dataspace */
hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
hsize_t dims2[] = {SPACE2_DIM1, SPACE2_DIM2, SPACE2_DIM3, SPACE2_DIM4};
hsize_t dims3[H5S_MAX_RANK + 1];
@ -254,15 +252,18 @@ test_h5s_basic(void)
CHECK(sid1, FAIL, "H5Screate");
sid2 = H5Screate_simple(1, dims1, dims1);
CHECK(sid2, FAIL, "H5Screate");
#ifndef NO_VALIDATE_DATASPACE
/* This dataset's space has no extent; it should not be created */
H5E_BEGIN_TRY
{
dset1 = H5Dcreate2(fid1, BASICDATASET, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
if (vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) {
/* This dataset's space has no extent; it should not be created */
H5E_BEGIN_TRY
{
dset1 =
H5Dcreate2(fid1, BASICDATASET, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY
VERIFY(dset1, FAIL, "H5Dcreate2");
}
H5E_END_TRY
VERIFY(dset1, FAIL, "H5Dcreate2");
#endif
dset1 = H5Dcreate2(fid1, BASICDATASET2, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dset1, FAIL, "H5Dcreate2");
@ -273,21 +274,23 @@ test_h5s_basic(void)
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dwrite");
#ifndef NO_VALIDATE_DATASPACE
H5E_BEGIN_TRY
{
ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, &n);
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dwrite");
H5E_BEGIN_TRY
{
ret = H5Dwrite(dset1, H5T_NATIVE_INT, sid1, sid1, H5P_DEFAULT, &n);
if (vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) {
H5E_BEGIN_TRY
{
ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, &n);
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dwrite");
H5E_BEGIN_TRY
{
ret = H5Dwrite(dset1, H5T_NATIVE_INT, sid1, sid1, H5P_DEFAULT, &n);
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dwrite");
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dwrite");
#endif
/* Try to iterate using the bad dataspace */
H5E_BEGIN_TRY
{
@ -303,15 +306,17 @@ test_h5s_basic(void)
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dfill");
#ifndef NO_VALIDATE_DATASPACE
/* Now use the bad dataspace as the space for an attribute */
H5E_BEGIN_TRY
{
aid1 = H5Acreate2(dset1, BASICATTR, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT);
if ((vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) && (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
/* Now use the bad dataspace as the space for an attribute */
H5E_BEGIN_TRY
{
aid1 = H5Acreate2(dset1, BASICATTR, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT);
}
H5E_END_TRY
VERIFY(aid1, FAIL, "H5Acreate2");
}
H5E_END_TRY
VERIFY(aid1, FAIL, "H5Acreate2");
#endif
/* Make sure that dataspace reads using the bad dataspace fail */
H5E_BEGIN_TRY
{
@ -319,21 +324,23 @@ test_h5s_basic(void)
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dread");
#ifndef NO_VALIDATE_DATASPACE
H5E_BEGIN_TRY
{
ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, &n);
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dread");
H5E_BEGIN_TRY
{
ret = H5Dread(dset1, H5T_NATIVE_INT, sid1, sid1, H5P_DEFAULT, &n);
if (vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) {
H5E_BEGIN_TRY
{
ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, &n);
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dread");
H5E_BEGIN_TRY
{
ret = H5Dread(dset1, H5T_NATIVE_INT, sid1, sid1, H5P_DEFAULT, &n);
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dread");
}
H5E_END_TRY
VERIFY(ret, FAIL, "H5Dread");
#endif
/* Clean up */
ret = H5Dclose(dset1);
CHECK(ret, FAIL, "H5Dclose");
@ -577,35 +584,33 @@ test_h5s_null(void)
static void
test_h5s_zero_dim(void)
{
hid_t fid1; /* HDF5 File IDs */
hid_t sid1, attr_sid; /* Dataspace ID */
hid_t sid_chunk; /* Dataspace ID for chunked dataset */
hid_t dset1; /* Dataset ID */
hid_t plist_id; /* Dataset creation property list */
hid_t attr; /* Attribute ID */
int rank; /* Logical rank of dataspace */
hsize_t dims1[] = {0, SPACE1_DIM2, SPACE1_DIM3};
hsize_t max_dims[] = {SPACE1_DIM1 + 1, SPACE1_DIM2, SPACE1_DIM3};
hsize_t extend_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
hsize_t chunk_dims[] = {SPACE1_DIM1, SPACE1_DIM2 / 3, SPACE1_DIM3};
hsize_t tdims[SPACE1_RANK]; /* Dimension array to test with */
int wdata[SPACE1_DIM2][SPACE1_DIM3];
int rdata[SPACE1_DIM2][SPACE1_DIM3];
short wdata_short[SPACE1_DIM2][SPACE1_DIM3];
short rdata_short[SPACE1_DIM2][SPACE1_DIM3];
int wdata_real[SPACE1_DIM1][SPACE1_DIM2][SPACE1_DIM3];
int rdata_real[SPACE1_DIM1][SPACE1_DIM2][SPACE1_DIM3];
#ifndef NO_CHECK_SELECTION_BOUNDS
int val = 3;
hsize_t start[] = {0, 0, 0};
hsize_t count[] = {3, 15, 13};
hsize_t coord[1][3]; /* Coordinates for point selection */
#endif
hssize_t nelem; /* Number of elements */
H5S_sel_type sel_type; /* Type of selection currently */
H5S_class_t stype; /* dataspace type */
H5D_alloc_time_t alloc_time; /* Space allocation time */
herr_t ret; /* Generic return value */
hid_t fid1; /* HDF5 File IDs */
hid_t sid1, attr_sid; /* Dataspace ID */
hid_t sid_chunk; /* Dataspace ID for chunked dataset */
hid_t dset1; /* Dataset ID */
hid_t plist_id; /* Dataset creation property list */
hid_t attr; /* Attribute ID */
int rank; /* Logical rank of dataspace */
hsize_t dims1[] = {0, SPACE1_DIM2, SPACE1_DIM3};
hsize_t max_dims[] = {SPACE1_DIM1 + 1, SPACE1_DIM2, SPACE1_DIM3};
hsize_t extend_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
hsize_t chunk_dims[] = {SPACE1_DIM1, SPACE1_DIM2 / 3, SPACE1_DIM3};
hsize_t tdims[SPACE1_RANK]; /* Dimension array to test with */
int wdata[SPACE1_DIM2][SPACE1_DIM3];
int rdata[SPACE1_DIM2][SPACE1_DIM3];
short wdata_short[SPACE1_DIM2][SPACE1_DIM3];
short rdata_short[SPACE1_DIM2][SPACE1_DIM3];
int wdata_real[SPACE1_DIM1][SPACE1_DIM2][SPACE1_DIM3];
int rdata_real[SPACE1_DIM1][SPACE1_DIM2][SPACE1_DIM3];
int val = 3;
hsize_t start[] = {0, 0, 0};
hsize_t count[] = {3, 15, 13};
hsize_t coord[1][3]; /* Coordinates for point selection */
hssize_t nelem; /* Number of elements */
H5S_sel_type sel_type; /* Type of selection currently */
H5S_class_t stype; /* dataspace type */
H5D_alloc_time_t alloc_time; /* Space allocation time */
herr_t ret; /* Generic return value */
unsigned int i, j, k;
/* Output message about test being performed */
@ -743,38 +748,42 @@ test_h5s_zero_dim(void)
}
}
}
#ifndef NO_CHECK_SELECTION_BOUNDS
/* Select a hyperslab beyond its current dimension sizes, then try to write
* the data. It should fail. */
ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
CHECK(ret, FAIL, "H5Sselect_hyperslab");
H5E_BEGIN_TRY
{
ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, wdata);
if (vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) {
/* Select a hyperslab beyond its current dimension sizes, then try to write
* the data. It should fail. */
ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
CHECK(ret, FAIL, "H5Sselect_hyperslab");
H5E_BEGIN_TRY
{
ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, wdata);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Dwrite");
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Dwrite");
#endif
/* Change to "none" selection */
ret = H5Sselect_none(sid1);
CHECK(ret, FAIL, "H5Sselect_none");
#ifndef NO_CHECK_SELECTION_BOUNDS
/* Select a point beyond the dimension size, then try to write the data.
* It should fail. */
coord[0][0] = 2;
coord[0][1] = 5;
coord[0][2] = 3;
ret = H5Sselect_elements(sid1, H5S_SELECT_SET, (size_t)1, (const hsize_t *)coord);
CHECK(ret, FAIL, "H5Sselect_elements");
H5E_BEGIN_TRY
{
ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, &val);
if (vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) {
/* Select a point beyond the dimension size, then try to write the data.
* It should fail. */
coord[0][0] = 2;
coord[0][1] = 5;
coord[0][2] = 3;
ret = H5Sselect_elements(sid1, H5S_SELECT_SET, (size_t)1, (const hsize_t *)coord);
CHECK(ret, FAIL, "H5Sselect_elements");
H5E_BEGIN_TRY
{
ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, &val);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Dwrite");
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Dwrite");
#endif
/* Restore the selection to all */
ret = H5Sselect_all(sid1);
CHECK(ret, FAIL, "H5Sselect_all");
@ -864,17 +873,18 @@ test_h5s_zero_dim(void)
HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata[i][j]);
}
}
#ifndef NO_CHECK_SELECTION_BOUNDS
/* Now extend the first dimension size of the dataset to SPACE1_DIM1*3 past the maximal size.
* It is supposed to fail. */
extend_dims[0] = SPACE1_DIM1 * 3;
H5E_BEGIN_TRY
{
ret = H5Dset_extent(dset1, extend_dims);
if ((vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) &&
(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
/* Now extend the first dimension size of the dataset to SPACE1_DIM1*3 past the maximal size.
* It is supposed to fail. */
extend_dims[0] = SPACE1_DIM1 * 3;
H5E_BEGIN_TRY
{
ret = H5Dset_extent(dset1, extend_dims);
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Dset_extent");
}
H5E_END_TRY;
VERIFY(ret, FAIL, "H5Dset_extent");
#endif
ret = H5Pclose(plist_id);
CHECK(ret, FAIL, "H5Pclose");

File diff suppressed because it is too large.


@ -4175,15 +4175,11 @@ test_misc23(void)
hsize_t dims[] = {10};
hid_t file_id = 0, group_id = 0, type_id = 0, space_id = 0, tmp_id = 0, create_id = H5P_DEFAULT,
access_id = H5P_DEFAULT;
#ifndef NO_OBJECT_GET_NAME
char objname[MISC23_NAME_BUF_SIZE]; /* Name of object */
#endif
char objname[MISC23_NAME_BUF_SIZE]; /* Name of object */
H5O_info2_t oinfo;
htri_t tri_status;
#ifndef NO_OBJECT_GET_NAME
ssize_t namelen;
#endif
herr_t status;
ssize_t namelen;
herr_t status;
/* Output message about test being performed */
MESSAGE(5, ("Testing intermediate group creation\n"));
@ -4269,12 +4265,12 @@ test_misc23(void)
tmp_id = H5Gcreate2(file_id, "/A/B01/grp", create_id, H5P_DEFAULT, access_id);
CHECK(tmp_id, FAIL, "H5Gcreate2");
#ifndef NO_OBJECT_GET_NAME
/* Query that the name of the new group is correct */
namelen = H5Iget_name(tmp_id, objname, (size_t)MISC23_NAME_BUF_SIZE);
CHECK(namelen, FAIL, "H5Iget_name");
VERIFY_STR(objname, "/A/B01/grp", "H5Iget_name");
#endif
status = H5Gclose(tmp_id);
CHECK(status, FAIL, "H5Gclose");
@ -4484,24 +4480,29 @@ test_misc23(void)
/**********************************************************************
* test H5Lcreate_external()
**********************************************************************/
#ifndef NO_EXTERNAL_LINKS
status = H5Lcreate_external("fake_filename", "fake_path", file_id, "/A/B20/grp", create_id, access_id);
CHECK(status, FAIL, "H5Lcreate_external");
tri_status = H5Lexists(file_id, "/A/B20/grp", access_id);
VERIFY(tri_status, TRUE, "H5Lexists");
#endif
if (vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) {
status =
H5Lcreate_external("fake_filename", "fake_path", file_id, "/A/B20/grp", create_id, access_id);
CHECK(status, FAIL, "H5Lcreate_external");
tri_status = H5Lexists(file_id, "/A/B20/grp", access_id);
VERIFY(tri_status, TRUE, "H5Lexists");
}
/**********************************************************************
* test H5Lcreate_ud()
**********************************************************************/
#ifndef NO_USER_DEFINED_LINKS
status =
H5Lcreate_ud(file_id, "/A/B21/grp", H5L_TYPE_EXTERNAL, "file\0obj", (size_t)9, create_id, access_id);
CHECK(status, FAIL, "H5Lcreate_ud");
tri_status = H5Lexists(file_id, "/A/B21/grp", access_id);
VERIFY(tri_status, TRUE, "H5Lexists");
#endif
if (vol_cap_flags_g & H5VL_CAP_FLAG_UD_LINKS) {
status = H5Lcreate_ud(file_id, "/A/B21/grp", H5L_TYPE_EXTERNAL, "file\0obj", (size_t)9, create_id,
access_id);
CHECK(status, FAIL, "H5Lcreate_ud");
tri_status = H5Lexists(file_id, "/A/B21/grp", access_id);
VERIFY(tri_status, TRUE, "H5Lexists");
}
/**********************************************************************
* close
**********************************************************************/
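The same guard style applies to link types: external (and user-defined) links are only attempted when the corresponding capability bit is set. A sketch with placeholder target names (the target file and object do not need to exist for the link to be created):

#include "hdf5.h"

static herr_t
maybe_create_external_link(hid_t file_id, uint64_t caps)
{
    if (!(caps & H5VL_CAP_FLAG_EXTERNAL_LINKS))
        return 0; /* nothing to do; connector cannot store external links */

    return H5Lcreate_external("target_file.h5", "/target/object", file_id, "ext_link", H5P_DEFAULT,
                              H5P_DEFAULT);
}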


@ -2018,7 +2018,6 @@ test_reference_region_1D(H5F_libver_t libver_low, H5F_libver_t libver_high)
static void
test_reference_obj_deleted(void)
{
#ifndef NO_REFERENCE_TO_DELETED
hid_t fid1; /* HDF5 File IDs */
hid_t dataset, /* Dataset ID */
dset2; /* Dereferenced dataset ID */
@ -2026,88 +2025,90 @@ test_reference_obj_deleted(void)
H5R_ref_t oref; /* Object Reference to test */
H5O_type_t obj_type; /* Object type */
herr_t ret; /* Generic return value */
#endif
MESSAGE(5, ("Testing References to Deleted Objects - SKIPPED for now due to no support\n"));
#ifndef NO_REFERENCE_TO_DELETED
/* Create file */
fid1 = H5Fcreate(FILE_REF_OBJ_DEL, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
/* Create scalar dataspace for datasets */
sid1 = H5Screate_simple(0, NULL, NULL);
CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
MESSAGE(5, ("Testing References to Deleted Objects\n"));
/* Create a dataset to reference (deleted later) */
dataset = H5Dcreate2(fid1, "Dataset1", H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
if ((vol_cap_flags_g & H5VL_CAP_FLAG_REF_BASIC) && (vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) &&
(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) && (vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC)) {
/* Create file */
fid1 = H5Fcreate(FILE_REF_OBJ_DEL, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
/* Close Dataset */
ret = H5Dclose(dataset);
CHECK(ret, FAIL, "H5Dclose");
/* Create scalar dataspace for datasets */
sid1 = H5Screate_simple(0, NULL, NULL);
CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
/* Create a dataset */
dataset = H5Dcreate2(fid1, "Dataset2", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
/* Create a dataset to reference (deleted later) */
dataset = H5Dcreate2(fid1, "Dataset1", H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
/* Create reference to dataset */
ret = H5Rcreate_object(fid1, "/Dataset1", H5P_DEFAULT, &oref);
CHECK(ret, FAIL, "H5Rcreate_object");
ret = H5Rget_obj_type3(&oref, H5P_DEFAULT, &obj_type);
CHECK(ret, FAIL, "H5Rget_obj_type3");
VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
/* Close Dataset */
ret = H5Dclose(dataset);
CHECK(ret, FAIL, "H5Dclose");
/* Write selection to disk */
ret = H5Dwrite(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, &oref);
CHECK(ret, FAIL, "H5Dwrite");
/* Create a dataset */
dataset = H5Dcreate2(fid1, "Dataset2", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
/* Close Dataset */
ret = H5Dclose(dataset);
CHECK(ret, FAIL, "H5Dclose");
/* Create reference to dataset */
ret = H5Rcreate_object(fid1, "/Dataset1", H5P_DEFAULT, &oref);
CHECK(ret, FAIL, "H5Rcreate_object");
ret = H5Rget_obj_type3(&oref, H5P_DEFAULT, &obj_type);
CHECK(ret, FAIL, "H5Rget_obj_type3");
VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
/* Delete referenced dataset */
ret = H5Ldelete(fid1, "/Dataset1", H5P_DEFAULT);
CHECK(ret, FAIL, "H5Ldelete");
/* Write selection to disk */
ret = H5Dwrite(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, &oref);
CHECK(ret, FAIL, "H5Dwrite");
/* Close disk dataspace */
ret = H5Sclose(sid1);
CHECK(ret, FAIL, "H5Sclose");
/* Close Dataset */
ret = H5Dclose(dataset);
CHECK(ret, FAIL, "H5Dclose");
/* Close file */
ret = H5Fclose(fid1);
CHECK(ret, FAIL, "H5Fclose");
/* Delete referenced dataset */
ret = H5Ldelete(fid1, "/Dataset1", H5P_DEFAULT);
CHECK(ret, FAIL, "H5Ldelete");
/* Destroy reference */
ret = H5Rdestroy(&oref);
CHECK(ret, FAIL, "H5Rdestroy");
/* Close disk dataspace */
ret = H5Sclose(sid1);
CHECK(ret, FAIL, "H5Sclose");
/* Re-open the file */
fid1 = H5Fopen(FILE_REF_OBJ_DEL, H5F_ACC_RDWR, H5P_DEFAULT);
CHECK(fid1, H5I_INVALID_HID, "H5Fopen");
/* Close file */
ret = H5Fclose(fid1);
CHECK(ret, FAIL, "H5Fclose");
/* Open the dataset */
dataset = H5Dopen2(fid1, "/Dataset2", H5P_DEFAULT);
CHECK(ret, H5I_INVALID_HID, "H5Dopen2");
/* Destroy reference */
ret = H5Rdestroy(&oref);
CHECK(ret, FAIL, "H5Rdestroy");
/* Read selection from disk */
ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, &oref);
CHECK(ret, FAIL, "H5Dread");
/* Re-open the file */
fid1 = H5Fopen(FILE_REF_OBJ_DEL, H5F_ACC_RDWR, H5P_DEFAULT);
CHECK(fid1, H5I_INVALID_HID, "H5Fopen");
/* Open deleted dataset object */
dset2 = H5Ropen_object(&oref, H5P_DEFAULT, H5P_DEFAULT);
VERIFY(dset2, H5I_INVALID_HID, "H5Ropen_object");
/* Open the dataset */
dataset = H5Dopen2(fid1, "/Dataset2", H5P_DEFAULT);
CHECK(ret, H5I_INVALID_HID, "H5Dopen2");
/* Close Dataset */
ret = H5Dclose(dataset);
CHECK(ret, FAIL, "H5Dclose");
/* Read selection from disk */
ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, &oref);
CHECK(ret, FAIL, "H5Dread");
/* Close file */
ret = H5Fclose(fid1);
CHECK(ret, FAIL, "H5Fclose");
/* Open deleted dataset object */
dset2 = H5Ropen_object(&oref, H5P_DEFAULT, H5P_DEFAULT);
VERIFY(dset2, H5I_INVALID_HID, "H5Ropen_object");
/* Destroy reference */
ret = H5Rdestroy(&oref);
CHECK(ret, FAIL, "H5Rdestroy");
#endif
/* Close Dataset */
ret = H5Dclose(dataset);
CHECK(ret, FAIL, "H5Dclose");
/* Close file */
ret = H5Fclose(fid1);
CHECK(ret, FAIL, "H5Fclose");
/* Destroy reference */
ret = H5Rdestroy(&oref);
CHECK(ret, FAIL, "H5Rdestroy");
}
} /* test_reference_obj_deleted() */
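The hunk above is representative of the pattern this commit applies across the API tests: compile-time "#ifndef NO_..." guards are replaced by runtime checks against vol_cap_flags_g, so a test body only runs when the active VOL connector reports the capabilities it needs. Below is a minimal sketch of that gating pattern; test_example, the file name, and the particular capability flags are illustrative only, while MESSAGE, CHECK, and vol_cap_flags_g are the framework pieces visible in the diff.

static void
test_example(void)
{
    hid_t  fid = H5I_INVALID_HID; /* hypothetical file ID used only by this sketch */
    herr_t ret;

    MESSAGE(5, ("Testing example feature\n"));

    /* Run the body only when the connector advertises the needed capabilities,
     * instead of compiling it out with a NO_... macro guard */
    if ((vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) && (vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
        fid = H5Fcreate("example_file.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(fid, H5I_INVALID_HID, "H5Fcreate");

        /* ... dataset/reference operations under test would go here ... */

        ret = H5Fclose(fid);
        CHECK(ret, FAIL, "H5Fclose");
    }
} /* test_example() */

When the capabilities are missing, the test still reports via MESSAGE and simply falls through, matching the approach taken in test_reference_obj_deleted above.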
/****************************************************************


@ -871,7 +871,6 @@ test_vl_rewrite(void)
static void
test_write_same_element(void)
{
#ifndef NO_WRITE_SAME_ELEMENT_TWICE
hid_t file1, dataset1;
hid_t mspace, fspace, dtype;
hsize_t fdim[] = {SPACE1_DIM1};
@ -880,86 +879,84 @@ test_write_same_element(void)
hsize_t marray[] = {NUMP};
hsize_t coord[SPACE1_RANK][NUMP];
herr_t ret;
#endif
MESSAGE(
5,
("Testing writing to same element of VL string dataset twice - SKIPPED for now due to no support\n"));
#ifndef NO_WRITE_SAME_ELEMENT_TWICE
file1 = H5Fcreate(DATAFILE3, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
CHECK(file1, FAIL, "H5Fcreate");
MESSAGE(5, ("Testing writing to same element of VL string dataset twice\n"));
dtype = H5Tcopy(H5T_C_S1);
CHECK(dtype, FAIL, "H5Tcopy");
if ((vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) && (vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
file1 = H5Fcreate(DATAFILE3, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
CHECK(file1, FAIL, "H5Fcreate");
ret = H5Tset_size(dtype, H5T_VARIABLE);
CHECK(ret, FAIL, "H5Tset_size");
dtype = H5Tcopy(H5T_C_S1);
CHECK(dtype, FAIL, "H5Tcopy");
fspace = H5Screate_simple(SPACE1_RANK, fdim, NULL);
CHECK(fspace, FAIL, "H5Screate_simple");
ret = H5Tset_size(dtype, H5T_VARIABLE);
CHECK(ret, FAIL, "H5Tset_size");
dataset1 = H5Dcreate2(file1, DATASET, dtype, fspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dataset1, FAIL, "H5Dcreate");
fspace = H5Screate_simple(SPACE1_RANK, fdim, NULL);
CHECK(fspace, FAIL, "H5Screate_simple");
ret = H5Dwrite(dataset1, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
CHECK(ret, FAIL, "H5Dwrite");
dataset1 = H5Dcreate2(file1, DATASET, dtype, fspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dataset1, FAIL, "H5Dcreate");
ret = H5Dclose(dataset1);
CHECK(ret, FAIL, "H5Dclose");
ret = H5Dwrite(dataset1, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
CHECK(ret, FAIL, "H5Dwrite");
ret = H5Tclose(dtype);
CHECK(ret, FAIL, "H5Tclose");
ret = H5Dclose(dataset1);
CHECK(ret, FAIL, "H5Dclose");
ret = H5Sclose(fspace);
CHECK(ret, FAIL, "H5Sclose");
ret = H5Tclose(dtype);
CHECK(ret, FAIL, "H5Tclose");
ret = H5Fclose(file1);
CHECK(ret, FAIL, "H5Fclose");
ret = H5Sclose(fspace);
CHECK(ret, FAIL, "H5Sclose");
/*
* Open the file. Select the same points, write values to those point locations.
*/
file1 = H5Fopen(DATAFILE3, H5F_ACC_RDWR, H5P_DEFAULT);
CHECK(file1, FAIL, "H5Fopen");
ret = H5Fclose(file1);
CHECK(ret, FAIL, "H5Fclose");
dataset1 = H5Dopen2(file1, DATASET, H5P_DEFAULT);
CHECK(dataset1, FAIL, "H5Dopen");
/*
* Open the file. Select the same points, write values to those point locations.
*/
file1 = H5Fopen(DATAFILE3, H5F_ACC_RDWR, H5P_DEFAULT);
CHECK(file1, FAIL, "H5Fopen");
fspace = H5Dget_space(dataset1);
CHECK(fspace, FAIL, "H5Dget_space");
dataset1 = H5Dopen2(file1, DATASET, H5P_DEFAULT);
CHECK(dataset1, FAIL, "H5Dopen");
dtype = H5Dget_type(dataset1);
CHECK(dtype, FAIL, "H5Dget_type");
fspace = H5Dget_space(dataset1);
CHECK(fspace, FAIL, "H5Dget_space");
mspace = H5Screate_simple(1, marray, NULL);
CHECK(mspace, FAIL, "H5Screate_simple");
dtype = H5Dget_type(dataset1);
CHECK(dtype, FAIL, "H5Dget_type");
coord[0][0] = 0;
coord[0][1] = 2;
coord[0][2] = 2;
coord[0][3] = 0;
mspace = H5Screate_simple(1, marray, NULL);
CHECK(mspace, FAIL, "H5Screate_simple");
ret = H5Sselect_elements(fspace, H5S_SELECT_SET, NUMP, (const hsize_t *)&coord);
CHECK(ret, FAIL, "H5Sselect_elements");
coord[0][0] = 0;
coord[0][1] = 2;
coord[0][2] = 2;
coord[0][3] = 0;
ret = H5Dwrite(dataset1, dtype, mspace, fspace, H5P_DEFAULT, val);
CHECK(ret, FAIL, "H5Dwrite");
ret = H5Sselect_elements(fspace, H5S_SELECT_SET, NUMP, (const hsize_t *)&coord);
CHECK(ret, FAIL, "H5Sselect_elements");
ret = H5Tclose(dtype);
CHECK(ret, FAIL, "H5Tclose");
ret = H5Dwrite(dataset1, dtype, mspace, fspace, H5P_DEFAULT, val);
CHECK(ret, FAIL, "H5Dwrite");
ret = H5Dclose(dataset1);
CHECK(ret, FAIL, "H5Dclose");
ret = H5Tclose(dtype);
CHECK(ret, FAIL, "H5Tclose");
ret = H5Sclose(fspace);
CHECK(ret, FAIL, "H5Dclose");
ret = H5Dclose(dataset1);
CHECK(ret, FAIL, "H5Dclose");
ret = H5Sclose(mspace);
CHECK(ret, FAIL, "H5Sclose");
ret = H5Sclose(fspace);
CHECK(ret, FAIL, "H5Dclose");
ret = H5Fclose(file1);
CHECK(ret, FAIL, "H5Fclose");
#endif
ret = H5Sclose(mspace);
CHECK(ret, FAIL, "H5Sclose");
ret = H5Fclose(file1);
CHECK(ret, FAIL, "H5Fclose");
}
} /* test_write_same_element */
/****************************************************************


@ -49,6 +49,22 @@ if (BUILD_STATIC_LIBS)
endif ()
H5_SET_LIB_OPTIONS (${HDF5_TEST_LIB_TARGET} ${HDF5_TEST_LIB_NAME} STATIC 0)
set_target_properties (${HDF5_TEST_LIB_TARGET} PROPERTIES FOLDER libraries/test)
if (HDF5_EXPORTED_TARGETS AND HDF5_TEST_API_INSTALL)
INSTALL_TARGET_PDB (${HDF5_TEST_LIB_TARGET} ${HDF5_INSTALL_LIB_DIR} libraries)
install (
TARGETS
${HDF5_TEST_LIB_TARGET}
EXPORT
${HDF5_EXPORTED_TARGETS}
LIBRARY DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries
ARCHIVE DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries
RUNTIME DESTINATION ${HDF5_INSTALL_BIN_DIR} COMPONENT libraries
FRAMEWORK DESTINATION ${HDF5_INSTALL_FWRK_DIR} COMPONENT libraries
INCLUDES DESTINATION include
)
endif ()
endif ()
if (BUILD_SHARED_LIBS)
@ -75,6 +91,22 @@ if (BUILD_SHARED_LIBS)
endif ()
H5_SET_LIB_OPTIONS (${HDF5_TEST_LIBSH_TARGET} ${HDF5_TEST_LIB_NAME} SHARED "LIB")
set_target_properties (${HDF5_TEST_LIBSH_TARGET} PROPERTIES FOLDER libraries/test)
if (HDF5_EXPORTED_TARGETS AND HDF5_TEST_API_INSTALL)
INSTALL_TARGET_PDB (${HDF5_TEST_LIBSH_TARGET} ${HDF5_INSTALL_BIN_DIR} libraries)
install (
TARGETS
${HDF5_TEST_LIBSH_TARGET}
EXPORT
${HDF5_EXPORTED_TARGETS}
LIBRARY DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries
ARCHIVE DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries
RUNTIME DESTINATION ${HDF5_INSTALL_BIN_DIR} COMPONENT libraries
FRAMEWORK DESTINATION ${HDF5_INSTALL_FWRK_DIR} COMPONENT libraries
INCLUDES DESTINATION include
)
endif ()
endif ()
#-----------------------------------------------------------------------------


@ -125,6 +125,21 @@ if (HDF5_ENABLE_FORMATTERS)
clang_format (HDF5_TEST_h5_api_test_parallel_FORMAT h5_api_test_parallel)
endif ()
if (HDF5_TEST_API_INSTALL)
install (
TARGETS
h5_api_test_parallel
EXPORT
${HDF5_EXPORTED_TARGETS}
DESTINATION
${HDF5_INSTALL_BIN_DIR}
PERMISSIONS
OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
COMPONENT
tests
)
endif ()
#-----------------------------------------------------------------------------
# Build the ported HDF5 test executables
#-----------------------------------------------------------------------------
@ -203,6 +218,21 @@ foreach (api_test_extra ${HDF5_API_PAR_TESTS_EXTRA})
if (HDF5_ENABLE_FORMATTERS)
clang_format (HDF5_TEST_h5_api_test_parallel_${api_test_extra}_FORMAT h5_api_test_parallel_${api_test_extra})
endif ()
if (HDF5_TEST_API_INSTALL)
install (
TARGETS
h5_api_test_parallel_${api_test_extra}
EXPORT
${HDF5_EXPORTED_TARGETS}
DESTINATION
${HDF5_INSTALL_BIN_DIR}
PERMISSIONS
OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
COMPONENT
tests
)
endif ()
endforeach ()
#-----------------------------------------------------------------------------
@ -229,7 +259,7 @@ if (HDF5_TEST_PARALLEL)
)
endif ()
set(last_api_test "")
set (last_api_test "")
foreach (api_test ${HDF5_API_TESTS})
add_test (
NAME "h5_api_test_parallel_${api_test}"
@ -240,9 +270,9 @@ if (HDF5_TEST_PARALLEL)
${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
)
set_tests_properties("h5_api_test_parallel_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
set_tests_properties ("h5_api_test_parallel_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
set(last_api_test "h5_api_test_parallel_${api_test}")
set (last_api_test "h5_api_test_parallel_${api_test}")
endforeach ()
foreach (hdf5_test ${HDF5_API_PAR_TESTS_EXTRA})
@ -267,8 +297,99 @@ if (HDF5_TEST_PARALLEL)
${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
)
endforeach ()
# Add tests for each external VOL connector that was built
foreach (external_vol_tgt ${HDF5_EXTERNAL_VOL_TARGETS})
# Determine whether the connector should be tested with parallel tests
get_target_property (vol_test_parallel "${external_vol_tgt}" HDF5_VOL_TEST_PARALLEL)
if (${vol_test_parallel})
# Determine environment variables that need to be set for testing
set (vol_test_env "")
set (vol_plugin_paths "${CMAKE_BINARY_DIR}/${HDF5_INSTALL_BIN_DIR}")
get_target_property (vol_test_string "${external_vol_tgt}" HDF5_VOL_NAME)
list (APPEND vol_test_env "HDF5_VOL_CONNECTOR=${vol_test_string}")
get_target_property (vol_lib_targets "${external_vol_tgt}" HDF5_VOL_TARGETS)
foreach (lib_target ${vol_lib_targets})
get_target_property (lib_target_output_dir "${lib_target}" LIBRARY_OUTPUT_DIRECTORY)
if (NOT "${lib_target_output_dir}" STREQUAL "lib_target_output_dir-NOTFOUND"
AND NOT "${lib_target_output_dir}" STREQUAL ""
AND NOT "${lib_target_output_dir}" STREQUAL "${CMAKE_BINARY_DIR}/${HDF5_INSTALL_BIN_DIR}")
set (vol_plugin_paths "${vol_plugin_paths}${CMAKE_SEP}${lib_target_output_dir}")
endif ()
endforeach ()
list (APPEND vol_test_env "HDF5_PLUGIN_PATH=${vol_plugin_paths}")
# Add main API tests
set (last_api_test "")
foreach (api_test ${HDF5_API_TESTS})
add_test (
NAME "${external_vol_tgt}-h5_api_test_parallel_${api_test}"
COMMAND $<TARGET_FILE:h5_api_test_driver>
--server ${HDF5_TEST_API_SERVER}
--client $<TARGET_FILE:h5_api_test_parallel> "${api_test}"
--serial
${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
)
set_tests_properties (
"${external_vol_tgt}-h5_api_test_parallel_${api_test}"
PROPERTIES
ENVIRONMENT
"${vol_test_env}"
WORKING_DIRECTORY
"${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}"
DEPENDS
"${last_api_test}"
)
set (last_api_test "${external_vol_tgt}-h5_api_test_parallel_${api_test}")
endforeach ()
# Add any extra HDF5 tests
foreach (hdf5_test ${HDF5_API_PAR_TESTS_EXTRA})
add_test (
NAME "${external_vol_tgt}-h5_api_test_parallel_${hdf5_test}"
COMMAND $<TARGET_FILE:h5_api_test_driver>
--server ${HDF5_TEST_API_SERVER}
--client $<TARGET_FILE:h5_api_test_parallel_${hdf5_test}>
--serial
${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
)
set_tests_properties (
"${external_vol_tgt}-h5_api_test_parallel_${hdf5_test}"
PROPERTIES
ENVIRONMENT
"${vol_test_env}"
WORKING_DIRECTORY
"${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}"
)
endforeach ()
# Hook external tests into the same test suite
foreach (ext_api_test ${HDF5_API_EXT_PARALLEL_TESTS})
add_test (
NAME "${external_vol_tgt}-h5_api_ext_test_parallel_${ext_api_test}"
COMMAND $<TARGET_FILE:h5_api_test_driver>
--server ${HDF5_TEST_API_SERVER}
--client $<TARGET_FILE:${ext_api_test}>
--serial
${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
)
set_tests_properties (
"${external_vol_tgt}-h5_api_ext_test_parallel_${ext_api_test}"
PROPERTIES
ENVIRONMENT
"${vol_test_env}"
WORKING_DIRECTORY
"${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}"
)
endforeach ()
endif ()
endforeach ()
else ()
set(last_api_test "")
set (last_api_test "")
foreach (api_test ${HDF5_API_TESTS})
add_test (
NAME "h5_api_test_parallel_${api_test}"
@ -277,9 +398,9 @@ if (HDF5_TEST_PARALLEL)
${MPIEXEC_POSTFLAGS}
)
set_tests_properties("h5_api_test_parallel_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
set_tests_properties ("h5_api_test_parallel_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
set(last_api_test "h5_api_test_parallel_${api_test}")
set (last_api_test "h5_api_test_parallel_${api_test}")
endforeach ()
foreach (hdf5_test ${HDF5_API_PAR_TESTS_EXTRA})
@ -290,5 +411,72 @@ if (HDF5_TEST_PARALLEL)
${MPIEXEC_POSTFLAGS}
)
endforeach ()
# Add tests for each external VOL connector that was built
foreach (external_vol_tgt ${HDF5_EXTERNAL_VOL_TARGETS})
# Determine whether the connector should be tested with parallel tests
get_target_property (vol_test_parallel "${external_vol_tgt}" HDF5_VOL_TEST_PARALLEL)
if (${vol_test_parallel})
# Determine environment variables that need to be set for testing
set (vol_test_env "")
set (vol_plugin_paths "${CMAKE_BINARY_DIR}/${HDF5_INSTALL_BIN_DIR}")
get_target_property (vol_test_string "${external_vol_tgt}" HDF5_VOL_NAME)
list (APPEND vol_test_env "HDF5_VOL_CONNECTOR=${vol_test_string}")
get_target_property (vol_lib_targets "${external_vol_tgt}" HDF5_VOL_TARGETS)
foreach (lib_target ${vol_lib_targets})
get_target_property (lib_target_output_dir "${lib_target}" LIBRARY_OUTPUT_DIRECTORY)
if (NOT "${lib_target_output_dir}" STREQUAL "lib_target_output_dir-NOTFOUND"
AND NOT "${lib_target_output_dir}" STREQUAL ""
AND NOT "${lib_target_output_dir}" STREQUAL "${CMAKE_BINARY_DIR}/${HDF5_INSTALL_BIN_DIR}")
set (vol_plugin_paths "${vol_plugin_paths}${CMAKE_SEP}${lib_target_output_dir}")
endif ()
endforeach ()
list (APPEND vol_test_env "HDF5_PLUGIN_PATH=${vol_plugin_paths}")
# Add main API tests
set (last_api_test "")
foreach (api_test ${HDF5_API_TESTS})
add_test (
NAME "${external_vol_tgt}-h5_api_test_parallel_${api_test}"
COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} ${MPIEXEC_MAX_NUMPROCS}
${MPIEXEC_PREFLAGS} $<TARGET_FILE:h5_api_test_parallel> "${api_test}"
${MPIEXEC_POSTFLAGS}
)
set_tests_properties (
"${external_vol_tgt}-h5_api_test_parallel_${api_test}"
PROPERTIES
ENVIRONMENT
"${vol_test_env}"
WORKING_DIRECTORY
"${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}"
DEPENDS
"${last_api_test}"
)
set (last_api_test "${external_vol_tgt}-h5_api_test_parallel_${api_test}")
endforeach ()
# Add any extra HDF5 tests
foreach (hdf5_test ${HDF5_API_PAR_TESTS_EXTRA})
add_test (
NAME "${external_vol_tgt}-h5_api_test_parallel_${hdf5_test}"
COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} ${MPIEXEC_MAX_NUMPROCS}
${MPIEXEC_PREFLAGS} $<TARGET_FILE:h5_api_test_parallel_${hdf5_test}>
${MPIEXEC_POSTFLAGS}
)
set_tests_properties (
"${external_vol_tgt}-h5_api_test_parallel_${hdf5_test}"
PROPERTIES
ENVIRONMENT
"${vol_test_env}"
WORKING_DIRECTORY
"${HDF5_TEST_BINARY_DIR}/${external_vol_tgt}"
)
endforeach ()
endif ()
endforeach ()
endif ()
endif ()


@ -77,7 +77,7 @@ test_one_dataset_io(void)
int *write_buf = NULL;
int *read_buf = NULL;
TESTING_MULTIPART("single dataset I/O")
TESTING_MULTIPART("single dataset I/O");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
@ -442,7 +442,7 @@ test_multi_dataset_io(void)
int *write_buf = NULL;
int *read_buf = NULL;
TESTING_MULTIPART("multi dataset I/O")
TESTING_MULTIPART("multi dataset I/O");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
@ -768,7 +768,7 @@ test_multi_file_dataset_io(void)
int *write_buf = NULL;
int *read_buf = NULL;
TESTING_MULTIPART("multi file dataset I/O")
TESTING_MULTIPART("multi file dataset I/O");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
@ -1191,7 +1191,7 @@ test_multi_file_grp_dset_io(void)
int *write_buf = NULL;
int *read_buf = NULL;
TESTING_MULTIPART("multi file dataset I/O with groups")
TESTING_MULTIPART("multi file dataset I/O with groups");
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
@ -2024,7 +2024,7 @@ test_attribute_exists(void)
if (exists1)
FAIL_PUTS_ERROR(" H5Aexists returned TRUE for an attribute that should not exist")
if (!exists2)
FAIL_PUTS_ERROR(" H5Aexists returned FALSE for an attribute that should exist")
FAIL_PUTS_ERROR(" H5Aexists returned FALSE for an attribute that should exist");
/* Close */
if (H5Aclose_async(attr_id, es_id) < 0)
@ -2913,11 +2913,10 @@ test_group(void)
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
!(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
if (MAINPROCESS) {
SKIPPED();
HDprintf(" API functions for basic file, group, group more, creation order, or flush aren't "
HDprintf(" API functions for basic file, group, group more or flush aren't "
"supported with this connector\n");
}
@ -2931,9 +2930,11 @@ test_group(void)
if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0)
TEST_ERROR;
/* Track creation order */
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0)
TEST_ERROR;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
/* Track creation order */
if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0)
TEST_ERROR;
}
/* Create event stack */
if ((es_id = H5EScreate()) < 0)
@ -2997,10 +2998,12 @@ test_group(void)
if (H5Gget_info_async(group_id, &info1, es_id) < 0)
TEST_ERROR;
/* Test H5Gget_info_by_idx_async */
if (H5Gget_info_by_idx_async(parent_group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &info2,
H5P_DEFAULT, es_id) < 0)
TEST_ERROR;
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
/* Test H5Gget_info_by_idx_async */
if (H5Gget_info_by_idx_async(parent_group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &info2,
H5P_DEFAULT, es_id) < 0)
TEST_ERROR;
}
/* Test H5Gget_info_by_name_async */
if (H5Gget_info_by_name_async(parent_group_id, "group3", &info3, H5P_DEFAULT, es_id) < 0)
@ -3014,11 +3017,13 @@ test_group(void)
/* Verify group infos */
if (info1.nlinks != 0)
FAIL_PUTS_ERROR(" incorrect number of links")
if (info2.nlinks != 1)
FAIL_PUTS_ERROR(" incorrect number of links")
FAIL_PUTS_ERROR(" incorrect number of links");
if (vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) {
if (info2.nlinks != 1)
FAIL_PUTS_ERROR(" incorrect number of links");
}
if (info3.nlinks != 2)
FAIL_PUTS_ERROR(" incorrect number of links")
FAIL_PUTS_ERROR(" incorrect number of links");
/* Close */
if (H5Gclose_async(group_id, es_id) < 0)
@ -3271,17 +3276,17 @@ test_link(void)
/* Check if existence returns were correct */
if (!existsh1)
FAIL_PUTS_ERROR(" link exists returned FALSE for link that should exist")
FAIL_PUTS_ERROR(" link exists returned FALSE for link that should exist");
if (!existss1)
FAIL_PUTS_ERROR(" link exists returned FALSE for link that should exist")
FAIL_PUTS_ERROR(" link exists returned FALSE for link that should exist");
if (!existsh2)
FAIL_PUTS_ERROR(" link exists returned FALSE for link that should exist")
FAIL_PUTS_ERROR(" link exists returned FALSE for link that should exist");
if (existss2)
FAIL_PUTS_ERROR(" link exists returned TRUE for link that should not exist")
FAIL_PUTS_ERROR(" link exists returned TRUE for link that should not exist");
if (existsh3)
FAIL_PUTS_ERROR(" link exists returned TRUE for link that should not exist")
FAIL_PUTS_ERROR(" link exists returned TRUE for link that should not exist");
if (existsh3)
FAIL_PUTS_ERROR(" link exists returned TRUE for link that should not exist")
FAIL_PUTS_ERROR(" link exists returned TRUE for link that should not exist");
/* Close */
if (H5Gclose_async(parent_group_id, es_id) < 0)


@ -172,14 +172,34 @@ error:
int
main(int argc, char **argv)
{
const char *vol_connector_string;
const char *vol_connector_name;
unsigned seed;
hid_t fapl_id = H5I_INVALID_HID;
hid_t fapl_id = H5I_INVALID_HID;
hid_t default_con_id = H5I_INVALID_HID;
hid_t registered_con_id = H5I_INVALID_HID;
char *vol_connector_string_copy = NULL;
char *vol_connector_info = NULL;
int required = MPI_THREAD_MULTIPLE;
int provided;
/*
* Attempt to initialize with MPI_THREAD_MULTIPLE for VOL connectors
* that require that level of threading support in MPI
*/
if (MPI_SUCCESS != MPI_Init_thread(&argc, &argv, required, &provided)) {
HDfprintf(stderr, "MPI_Init_thread failed\n");
HDexit(EXIT_FAILURE);
}
MPI_Init(&argc, &argv);
MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
if (provided < required) {
if (MAINPROCESS)
HDprintf("** INFO: couldn't initialize with MPI_THREAD_MULTIPLE threading support **\n");
}
/* Simple argument checking, TODO can improve that later */
if (argc > 1) {
enum H5_api_test_type i = H5_api_test_name_to_type(argv[1]);
@ -209,7 +229,7 @@ main(int argc, char **argv)
if (mpi_size > 1) {
if (MPI_SUCCESS != MPI_Bcast(&seed, 1, MPI_UNSIGNED, 0, MPI_COMM_WORLD)) {
if (MAINPROCESS)
HDprintf("Couldn't broadcast test seed\n");
HDfprintf(stderr, "Couldn't broadcast test seed\n");
goto error;
}
}
@ -222,14 +242,45 @@ main(int argc, char **argv)
HDsnprintf(H5_api_test_parallel_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s%s", test_path_prefix,
PARALLEL_TEST_FILE_NAME);
if (NULL == (vol_connector_name = HDgetenv(HDF5_VOL_CONNECTOR))) {
if (NULL == (vol_connector_string = HDgetenv(HDF5_VOL_CONNECTOR))) {
if (MAINPROCESS)
HDprintf("No VOL connector selected; using native VOL connector\n");
vol_connector_name = "native";
vol_connector_info = NULL;
}
else {
char *token = NULL;
BEGIN_INDEPENDENT_OP(copy_connector_string)
{
if (NULL == (vol_connector_string_copy = HDstrdup(vol_connector_string))) {
if (MAINPROCESS)
HDfprintf(stderr, "Unable to copy VOL connector string\n");
INDEPENDENT_OP_ERROR(copy_connector_string);
}
}
END_INDEPENDENT_OP(copy_connector_string);
BEGIN_INDEPENDENT_OP(get_connector_name)
{
if (NULL == (token = HDstrtok(vol_connector_string_copy, " "))) {
if (MAINPROCESS)
HDfprintf(stderr, "Error while parsing VOL connector string\n");
INDEPENDENT_OP_ERROR(get_connector_name);
}
}
END_INDEPENDENT_OP(get_connector_name);
vol_connector_name = token;
if (NULL != (token = HDstrtok(NULL, " "))) {
vol_connector_info = token;
}
}
if (MAINPROCESS) {
HDprintf("Running parallel API tests with VOL connector '%s'\n\n", vol_connector_name);
HDprintf("Running parallel API tests with VOL connector '%s' and info string '%s'\n\n",
vol_connector_name, vol_connector_info ? vol_connector_info : "");
HDprintf("Test parameters:\n");
HDprintf(" - Test file name: '%s'\n", H5_api_test_parallel_filename);
HDprintf(" - Number of MPI ranks: %d\n", mpi_size);
@ -237,17 +288,74 @@ main(int argc, char **argv)
HDprintf("\n\n");
}
BEGIN_INDEPENDENT_OP(create_fapl)
{
if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, FALSE)) < 0) {
if (MAINPROCESS)
HDfprintf(stderr, "Unable to create FAPL\n");
INDEPENDENT_OP_ERROR(create_fapl);
}
}
END_INDEPENDENT_OP(create_fapl);
BEGIN_INDEPENDENT_OP(check_vol_register)
{
/*
* If using a VOL connector other than the native
* connector, check whether the VOL connector was
* successfully registered before running the tests.
* Otherwise, HDF5 will default to running the tests
* with the native connector, which could be misleading.
*/
if (0 != HDstrcmp(vol_connector_name, "native")) {
htri_t is_registered;
if ((is_registered = H5VLis_connector_registered_by_name(vol_connector_name)) < 0) {
if (MAINPROCESS)
HDfprintf(stderr, "Unable to determine if VOL connector is registered\n");
INDEPENDENT_OP_ERROR(check_vol_register);
}
if (!is_registered) {
if (MAINPROCESS)
HDfprintf(stderr, "Specified VOL connector '%s' wasn't correctly registered!\n",
vol_connector_name);
INDEPENDENT_OP_ERROR(check_vol_register);
}
else {
/*
* If the connector was successfully registered, check that
* the connector ID set on the default FAPL matches the ID
* for the registered connector before running the tests.
*/
if (H5Pget_vol_id(fapl_id, &default_con_id) < 0) {
if (MAINPROCESS)
HDfprintf(stderr, "Couldn't retrieve ID of VOL connector set on default FAPL\n");
INDEPENDENT_OP_ERROR(check_vol_register);
}
if ((registered_con_id = H5VLget_connector_id_by_name(vol_connector_name)) < 0) {
if (MAINPROCESS)
HDfprintf(stderr, "Couldn't retrieve ID of registered VOL connector\n");
INDEPENDENT_OP_ERROR(check_vol_register);
}
if (default_con_id != registered_con_id) {
if (MAINPROCESS)
HDfprintf(stderr,
"VOL connector set on default FAPL didn't match specified VOL connector\n");
INDEPENDENT_OP_ERROR(check_vol_register);
}
}
}
}
END_INDEPENDENT_OP(check_vol_register);
/* Retrieve the VOL cap flags - work around an HDF5
* library issue by creating a FAPL
*/
BEGIN_INDEPENDENT_OP(get_capability_flags)
{
if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, FALSE)) < 0) {
if (MAINPROCESS)
HDfprintf(stderr, "Unable to create FAPL\n");
INDEPENDENT_OP_ERROR(get_capability_flags);
}
vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0) {
if (MAINPROCESS)
@ -265,7 +373,8 @@ main(int argc, char **argv)
{
if (MAINPROCESS) {
if (create_test_container(H5_api_test_parallel_filename, vol_cap_flags_g) < 0) {
HDprintf(" failed to create testing container file '%s'\n", H5_api_test_parallel_filename);
HDfprintf(stderr, " failed to create testing container file '%s'\n",
H5_api_test_parallel_filename);
INDEPENDENT_OP_ERROR(create_test_container);
}
}
@ -314,9 +423,19 @@ main(int argc, char **argv)
}
}
if (default_con_id >= 0 && H5VLclose(default_con_id) < 0) {
if (MAINPROCESS)
HDfprintf(stderr, " failed to close VOL connector ID\n");
}
if (registered_con_id >= 0 && H5VLclose(registered_con_id) < 0) {
if (MAINPROCESS)
HDfprintf(stderr, " failed to close VOL connector ID\n");
}
if (fapl_id >= 0 && H5Pclose(fapl_id) < 0) {
if (MAINPROCESS)
HDprintf(" failed to close MPI FAPL\n");
HDfprintf(stderr, " failed to close MPI FAPL\n");
}
H5close();
@ -326,8 +445,12 @@ main(int argc, char **argv)
HDexit(EXIT_SUCCESS);
error:
HDfree(vol_connector_string_copy);
H5E_BEGIN_TRY
{
H5VLclose(default_con_id);
H5VLclose(registered_con_id);
H5Pclose(fapl_id);
}
H5E_END_TRY;
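For reference, the driver above expects the HDF5_VOL_CONNECTOR environment variable to hold a connector name optionally followed by an info token, and it splits the value with HDstrtok. A standalone sketch of that split is shown below; the connector value is made up, and plain strtok/strdup stand in for the HD-prefixed wrappers.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical sketch: split an HDF5_VOL_CONNECTOR-style value of the
 * assumed form "<connector name> <info>" into its two parts, mirroring
 * the HDstrtok() calls in the parallel test driver. */
int
main(void)
{
    const char *connector_string = "my_connector {\"comm\":\"world\"}"; /* made-up example value */
    char       *copy             = strdup(connector_string);
    const char *name             = strtok(copy, " ");
    const char *info             = strtok(NULL, " ");

    printf("connector name: '%s'\n", name);
    printf("connector info: '%s'\n", info ? info : "(none)");

    free(copy);
    return 0;
}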


@ -3623,6 +3623,19 @@ test_no_collective_cause_mode(int selection_mode)
MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
if (MAINPROCESS) {
puts("SKIPPED");
printf(" API functions for basic file, dataset, or dataset more aren't supported with this "
"connector\n");
fflush(stdout);
}
return;
}
MPI_Barrier(MPI_COMM_WORLD);
HDassert(mpi_size >= 1);


@ -994,6 +994,18 @@ test_delete(void)
MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE)) {
if (MAINPROCESS) {
puts("SKIPPED");
printf(" API functions for basic file or file more aren't supported with this "
"connector\n");
fflush(stdout);
}
return;
}
/* setup file access plist */
fapl_id = H5Pcreate(H5P_FILE_ACCESS);
VRFY((fapl_id != H5I_INVALID_HID), "H5Pcreate");


@ -85,6 +85,20 @@ file_image_daisy_chain_test(void)
MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
if (MAINPROCESS) {
puts("SKIPPED");
printf(" API functions for basic file, dataset, or dataset more aren't supported with this "
"connector\n");
fflush(stdout);
}
return;
}
/* setup file name */
HDsnprintf(file_name, 1024, "file_image_daisy_chain_test_%05d.h5", (int)mpi_rank);


@ -1037,6 +1037,19 @@ independent_group_read(void)
MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
/* Make sure the connector supports the API functions being tested */
if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
!(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
if (MAINPROCESS) {
puts("SKIPPED");
printf(
" API functions for basic file, group, or dataset aren't supported with this connector\n");
fflush(stdout);
}
return;
}
plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
H5Pset_all_coll_metadata_ops(plist, FALSE);


@ -17,7 +17,6 @@
#include "H5private.h"
#include "testpar.h"
#include "H5_api_tests_disabled.h"
/*
* Define parameters for various tests since we do not have access to