Mirror of https://github.com/HDFGroup/hdf5.git (synced 2025-02-17 16:10:24 +08:00)
spelling fixes (#1561)
This commit is contained in:
parent 870ee8feee
commit e8ea850004
.github/workflows/codespell.yml (vendored): 2 changed lines
@@ -12,4 +12,4 @@ jobs:
 - uses: codespell-project/actions-codespell@master
   with:
     skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c
-    ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,minnum
+    ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,minnum,ro,oce
@@ -156,7 +156,7 @@ FileAccPropList::getDriver() const
 // Function: FileAccPropList::setDriver
 ///\brief Set file driver for this property list.
 ///\param new_driver_id - IN: File driver
-///\param new_driver_info - IN: Struct containing the driver-specific properites
+///\param new_driver_info - IN: Struct containing the driver-specific properties
 ///\exception H5::PropListIException
 ///\par Description
 /// For information, please refer to the H5Pset_driver API in
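For context: a minimal C sketch (not part of this commit) of how H5Pset_driver is typically used on a file access property list, as the doc comment above describes. The choice of the sec2 driver and the NULL driver-info argument, and the file name, are illustrative assumptions.

#include "hdf5.h"

int main(void)
{
    /* Create a file access property list and attach a driver to it.
     * H5FD_SEC2 (the default POSIX driver) takes no driver-specific
     * info struct, so NULL is passed for the driver info argument. */
    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
    if (fapl < 0)
        return 1;

    if (H5Pset_driver(fapl, H5FD_SEC2, NULL) < 0) {
        H5Pclose(fapl);
        return 1;
    }

    /* Use the property list when creating a file */
    hid_t fid = H5Fcreate("driver_example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    if (fid >= 0)
        H5Fclose(fid);

    H5Pclose(fapl);
    return 0;
}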
@@ -732,7 +732,7 @@ Elena> "Free-space object"
 only does the level indicate whether child pointers
 point to sub-trees or to data, but it can also be used
 to help file consistency checking utilities reconstruct
-damanged trees.</td>
+damaged trees.</td>
 </tr>

 <tr valign=top>
@@ -1059,7 +1059,7 @@ TABLE.list TD { border:none; }
 only does the level indicate whether child pointers
 point to sub-trees or to data, but it can also be used
 to help file consistency checking utilities reconstruct
-damanged trees.
+damaged trees.
 </P>
 </td>
 </tr>
@@ -541,7 +541,7 @@ SUBROUTINE test_chunk_cache(cleanup, total_error)
 CALL H5Dclose_f(dsid, error)
 CALL H5Oopen_f(fid, "dset", dsid, error, dapl1)

-! Retrieve dapl from dataset, verfiy cache values are the same as on dapl1
+! Retrieve dapl from dataset, verify cache values are the same as on dapl1
 !
 ! Note we rely on the knowledge that H5Pget_chunk_cache retrieves these
 ! values directly from the dataset structure, and not from a copy of the
@@ -563,7 +563,7 @@ SUBROUTINE test_chunk_cache(cleanup, total_error)
 CALL H5Oopen_f(fid, "dset", dsid, error)
 CALL check("H5Oopen_f", error, total_error)

-! Retrieve dapl from dataset, verfiy cache values are the same as on fapl_local
+! Retrieve dapl from dataset, verify cache values are the same as on fapl_local

 CALL H5Dget_access_plist_f(dsid, dapl2, error)
 CALL check("H5Dget_access_plist_f", error, total_error)
@@ -1245,7 +1245,7 @@ Known Problems
 causes failures in several HDF5 library tests.
 * For HPUX 11.23 many tools tests failed for 64-bit version when linked to the
 shared libraries (tested for 1.8.0-beta2)
-* For SNL, Red Storm: only paralle HDF5 is supported. The serial tests pass
+* For SNL, Red Storm: only parallel HDF5 is supported. The serial tests pass
 and the parallel tests also pass with lots of non-fatal error messages.
 * For LLNL, uP: both serial and parallel pass. Zeus: serial passes but
 parallel fails with a known proglem in MPI. ubgl: serial passes but
@@ -1581,7 +1581,7 @@ Known Problems
 causes failures in several HDF5 library tests.
 * For HPUX 11.23 many tools tests failed for 64-bit version when linked to the
 shared libraries (tested for 1.8.0-beta2)
-* For SNL, Red Storm: only paralle HDF5 is supported. The serial tests pass
+* For SNL, Red Storm: only parallel HDF5 is supported. The serial tests pass
 and the parallel tests also pass with lots of non-fatal error messages.
 * on SUN 5.10 C++ test fails in the "Testing Shared Datatypes with Attributes" test
 * configuring with --enable-debug=all produces compiler errors on most
@@ -17,7 +17,7 @@
 * Dec 01 2016
 * Quincey Koziol
 *
-* Purpose: Routines for managing v2 B-tree internal ndoes.
+* Purpose: Routines for managing v2 B-tree internal nodes.
 *
 *-------------------------------------------------------------------------
 */
@@ -17,7 +17,7 @@
 * Dec 01 2016
 * Quincey Koziol
 *
-* Purpose: Routines for managing v2 B-tree leaf ndoes.
+* Purpose: Routines for managing v2 B-tree leaf nodes.
 *
 *-------------------------------------------------------------------------
 */
@@ -1185,7 +1185,7 @@ done:
 *
 * Purpose: Set the file driver (DRIVER_ID) for a file access
 * property list (PLIST_ID) and supply an optional
-* struct containing the driver-specific properites
+* struct containing the driver-specific properties
 * (DRIVER_INFO). The driver properties will be copied into the
 * property list and the reference count on the driver will be
 * incremented, allowing the caller to close the driver ID but
@@ -1060,7 +1060,7 @@ H5S__point_get_version_enc_size(const H5S_t *space, uint32_t *version, uint8_t *
 hsize_t bounds_start[H5S_MAX_RANK]; /* Starting coordinate of bounding box */
 hsize_t bounds_end[H5S_MAX_RANK]; /* Opposite coordinate of bounding box */
 hsize_t max_size = 0; /* Maximum selection size */
-unsigned u; /* Local index veriable */
+unsigned u; /* Local index variable */
 herr_t ret_value = SUCCEED; /* Return value */

 FUNC_ENTER_STATIC
@@ -11,7 +11,7 @@ atomic_reader.c: is the "read" part of the test.

 Building the Tests
 ------------------
-The two test parts are automically built during configure and make process.
+The two test parts are automatically built during configure and make process.
 But to build them individually, you can do in test/ directory:
 $ gcc atomic_writer
 $ gcc atomic_reader
@@ -8845,7 +8845,7 @@ test_chunk_cache(hid_t fapl)
 if ((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl1)) < 0)
 FAIL_STACK_ERROR

-/* Retrieve dapl from dataset, verfiy cache values are the same as on fapl_local */
+/* Retrieve dapl from dataset, verify cache values are the same as on fapl_local */
 if ((dapl2 = H5Dget_access_plist(dsid)) < 0)
 FAIL_STACK_ERROR
 if (H5Pget_chunk_cache(dapl2, &nslots_4, &nbytes_4, &w0_4) < 0)
@@ -8869,7 +8869,7 @@ test_chunk_cache(hid_t fapl)
 if ((dsid = H5Oopen(fid, "dset", dapl1)) < 0)
 FAIL_STACK_ERROR

-/* Retrieve dapl from dataset, verfiy cache values are the same as on dapl1 */
+/* Retrieve dapl from dataset, verify cache values are the same as on dapl1 */
 /* Note we rely on the knowledge that H5Pget_chunk_cache retrieves these
 * values directly from the dataset structure, and not from a copy of the
 * dapl used to open the dataset (which is not preserved).
@@ -8889,7 +8889,7 @@ test_chunk_cache(hid_t fapl)
 if ((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0)
 FAIL_STACK_ERROR

-/* Retrieve dapl from dataset, verfiy cache values are the same on fapl_local */
+/* Retrieve dapl from dataset, verify cache values are the same on fapl_local */
 if ((dapl2 = H5Dget_access_plist(dsid)) < 0)
 FAIL_STACK_ERROR
 if (H5Pget_chunk_cache(dapl2, &nslots_4, &nbytes_4, &w0_4) < 0)
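A minimal C sketch (not part of this commit) of the pattern these hunks exercise: set a chunk cache on a dataset access property list, create a chunked dataset with it, then verify that the values read back via H5Dget_access_plist and H5Pget_chunk_cache match. File/dataset names and cache sizes are illustrative assumptions.

#include "hdf5.h"
#include <stdio.h>

int main(void)
{
    hsize_t dims[1]  = {100};
    hsize_t chunk[1] = {10};
    size_t  nslots   = 521;            /* illustrative cache settings */
    size_t  nbytes   = 1024 * 1024;
    double  w0       = 0.75;

    hid_t fid  = H5Fcreate("chunk_cache_example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t sid  = H5Screate_simple(1, dims, NULL);
    hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
    hid_t dapl = H5Pcreate(H5P_DATASET_ACCESS);

    /* Chunked layout plus an explicit chunk cache on the access plist */
    H5Pset_chunk(dcpl, 1, chunk);
    H5Pset_chunk_cache(dapl, nslots, nbytes, w0);

    hid_t dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl);

    /* Retrieve dapl from the dataset and verify the cache values round-trip */
    hid_t  dapl2 = H5Dget_access_plist(dsid);
    size_t nslots_out;
    size_t nbytes_out;
    double w0_out;
    H5Pget_chunk_cache(dapl2, &nslots_out, &nbytes_out, &w0_out);
    if (nslots_out != nslots || nbytes_out != nbytes || w0_out != w0)
        fprintf(stderr, "chunk cache settings do not match\n");

    H5Pclose(dapl2);
    H5Dclose(dsid);
    H5Pclose(dapl);
    H5Pclose(dcpl);
    H5Sclose(sid);
    H5Fclose(fid);
    return 0;
}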
@@ -370,7 +370,7 @@ error:
 * 1) verifying that API errors are caught.
 *
 * 2) verifying that the page buffer behaves more or less
-* as advertized.
+* as advertised.
 *
 * Any data mis-matches or unexpected failures or successes
 * reported by the HDF5 library result in test failure.
@@ -275,7 +275,7 @@ read_records(const char *filename, hbool_t verbose, FILE *verbose_file, unsigned
 if ((fapl = h5_fileaccess()) < 0)
 return -1;

-/* Log I/O when verbose output it enbabled */
+/* Log I/O when verbose output it enabled */
 if (use_log_vfd) {
 char verbose_name[1024];

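A minimal C sketch (not part of this commit) of the log-VFD branch the comment refers to: routing file I/O through the log driver so each operation is recorded. The log file name, flag choice, and data file name are illustrative assumptions.

#include "hdf5.h"

int main(void)
{
    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
    if (fapl < 0)
        return 1;

    /* Record the location of every I/O operation in a text log */
    if (H5Pset_fapl_log(fapl, "reader.log", H5FD_LOG_LOC_IO, 0) < 0) {
        H5Pclose(fapl);
        return 1;
    }

    hid_t fid = H5Fopen("swmr_data.h5", H5F_ACC_RDONLY, fapl);
    if (fid >= 0)
        H5Fclose(fid);

    H5Pclose(fapl);
    return 0;
}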
@@ -3644,7 +3644,7 @@ test_actual_io_mode(int selection_mode)
 /* Set the threshold number of processes per chunk to twice mpi_size.
 * This will prevent the threshold from ever being met, thus forcing
 * multi chunk io instead of link chunk io.
-* This is via deault.
+* This is via default.
 */
 if (multi_chunk_io) {
 /* force multi-chunk-io by threshold */
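For context, a short C sketch (not part of this commit, and requiring a parallel HDF5 build) showing that multi-chunk collective I/O can also be requested explicitly on the transfer property list, rather than by inflating the process-per-chunk threshold as this test does.

#include "hdf5.h"

#ifdef H5_HAVE_PARALLEL
/* Build a dataset transfer property list that asks for collective I/O
 * but forces the multi-chunk code path instead of link-chunk I/O. */
hid_t
make_multi_chunk_dxpl(void)
{
    hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
    if (dxpl < 0)
        return -1;

    H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
    H5Pset_dxpl_mpio_chunk_opt(dxpl, H5FD_MPIO_CHUNK_MULTI_IO);

    return dxpl;
}
#endif /* H5_HAVE_PARALLEL */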
@@ -6724,7 +6724,7 @@ smoke_check_6(int metadata_write_strategy)
 if (FALSE != entry_ptr->header.coll_access) {
 nerrors++;
 if (verbose) {
-HDfprintf(stdout, "%d:%s: Entry inserted indepedently marked as collective.\n",
+HDfprintf(stdout, "%d:%s: Entry inserted independently marked as collective.\n",
 world_mpi_rank, __func__);
 }
 }
@@ -6780,7 +6780,7 @@ smoke_check_6(int metadata_write_strategy)
 if (FALSE != entry_ptr->header.coll_access) {
 nerrors++;
 if (verbose) {
-HDfprintf(stdout, "%d:%s: Entry inserted indepedently marked as collective.\n",
+HDfprintf(stdout, "%d:%s: Entry inserted independently marked as collective.\n",
 world_mpi_rank, __func__);
 }
 }
@@ -3202,7 +3202,7 @@ test_actual_io_mode(int selection_mode)
 /* Set the threshold number of processes per chunk to twice mpi_size.
 * This will prevent the threshold from ever being met, thus forcing
 * multi chunk io instead of link chunk io.
-* This is via deault.
+* This is via default.
 */
 if (multi_chunk_io) {
 /* force multi-chunk-io by threshold */
@@ -2522,7 +2522,7 @@ done:
 * were borrowed from the GNU less(1).
 *
 * Return: Success: Number of columns.
-* Failure: Some default number of columms.
+* Failure: Some default number of columns.
 *-------------------------------------------------------------------------
 */
 static int
@@ -1736,7 +1736,7 @@ main(int argc, char *argv[])
 warn_msg("Unable to retrieve file size\n");
 HDassert(iter.filesize != 0);

-/* Get storge info for file-level structures */
+/* Get storage info for file-level structures */
 if (H5Fget_info2(fid, &finfo) < 0)
 warn_msg("Unable to retrieve file info\n");
 else {
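A minimal C sketch (not part of this commit) of the storage-info retrieval the comment describes: h5stat calls H5Fget_info2 and reports file-level metadata sizes. The file name is an illustrative assumption.

#include "hdf5.h"
#include <stdio.h>

int main(void)
{
    H5F_info2_t finfo;
    hid_t fid = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    if (fid < 0)
        return 1;

    /* Get storage info for file-level structures */
    if (H5Fget_info2(fid, &finfo) < 0)
        fprintf(stderr, "Unable to retrieve file info\n");
    else {
        printf("superblock size:           %llu\n", (unsigned long long)finfo.super.super_size);
        printf("superblock extension size: %llu\n", (unsigned long long)finfo.super.super_ext_size);
        printf("free space (tracked):      %llu\n", (unsigned long long)finfo.free.tot_space);
        printf("SOHM header size:          %llu\n", (unsigned long long)finfo.sohm.hdr_size);
    }

    H5Fclose(fid);
    return 0;
}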
@@ -923,7 +923,7 @@ out:
 /*-------------------------------------------------------------------------
 * Function: Test_Extlink_Copy
 *
-* Purpose: gerenate external link files
+* Purpose: generate external link files
 *
 *------------------------------------------------------------------------*/
 static void
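A minimal C sketch (not part of this commit) of how external-link files like those produced by a routine such as Test_Extlink_Copy are generated, using H5Lcreate_external. File, group, and link names are illustrative assumptions.

#include "hdf5.h"

int main(void)
{
    /* Target file containing the object the link will point to */
    hid_t tgt = H5Fcreate("extlink_target.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t grp = H5Gcreate2(tgt, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    H5Gclose(grp);
    H5Fclose(tgt);

    /* Source file that holds the external link to /group in the target file */
    hid_t src = H5Fcreate("extlink_source.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    H5Lcreate_external("extlink_target.h5", "/group", src, "ext_link",
                       H5P_DEFAULT, H5P_DEFAULT);
    H5Fclose(src);
    return 0;
}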