mirror of https://github.com/HDFGroup/hdf5.git
Fix codespell issues (#5256)
commit d84337ad8c
parent 09186b6c61
@@ -1,6 +1,6 @@
 # Ref: https://github.com/codespell-project/codespell#using-a-config-file
 [codespell]
-skip = .git,*.svg,.codespellrc,./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./config/sanitizer/sanitizers.cmake,./tools/test/h5import/testfiles/*.conf,./tools/test/h5repack/testfiles/*.dat,./test/API/driver,./configure,./bin/ltmain.sh,./bin/depcomp,./bin/config.guess,./bin/config.sub,./autom4te.cache,./m4/libtool.m4,./c++/src/*.html,./HDF5Examples/depcomp
+skip = .git,*.svg,.codespellrc,./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./config/sanitizer/sanitizers.cmake,./tools/test/h5import/testfiles/*.conf,./tools/test/h5repack/testfiles/*.dat,./test/API/driver,./configure,./bin/ltmain.sh,./bin/depcomp,./bin/config.guess,./bin/config.sub,./autom4te.cache,./m4/libtool.m4,./c++/src/*.html,./HDF5Examples/depcomp,./release_docs/HISTORY-*.txt
 check-hidden = true
 # ignore-regex =
-ignore-words-list = ot,isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,ake,gord,numer,ro,oce,msdos,TEXTIN,FLE
+ignore-words-list = ot,isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,ake,gord,numer,ro,oce,msdos,TEXTIN,indx,FLE
@@ -430,7 +430,7 @@ and blue components that make up each color.
 <table>
 <tr>
 <td>
-\image html Palettes.fm.anc.gif
+\image html Palettes_fm_anc.gif
 </td>
 </tr>
 </table>

Image file diff: Before Size: 4.6 KiB | After Size: 4.6 KiB
@@ -33,7 +33,7 @@
 #define DSET_SCALAR "DSET_SCALAR"

 /* Selected compound field members for testing */
-#define VALID_FIELDS1 "field1,field2.a,field3,field4" /* TEMPORORAY */
+#define VALID_FIELDS1 "field1,field2.a,field3,field4" /* TEMPORARY */
 #define VALID_FIELDS2 "field2.b.a,field2.c,field4.b"

 #define INVALID_FIELDS1 "field2.k.a,field2.c,field4.k"
@@ -599,7 +599,7 @@ H5B2__get_node_depth_test(H5B2_t *bt2, void *udata)
     /* Check arguments. */
     assert(bt2);

-    /* Get information abou the node */
+    /* Get information about the node */
     if (H5B2__get_node_info_test(bt2, udata, &ninfo) < 0)
         HGOTO_ERROR(H5E_BTREE, H5E_NOTFOUND, (-1), "error looking up node info");

@@ -466,7 +466,7 @@ H5D__chunk_disjoint(unsigned n, const hsize_t *scaled1, const hsize_t *scaled2)
     assert(scaled1);
     assert(scaled2);

-    /* Loop over two chunks, detecting disjointness and getting out quickly */
+    /* Loop over two chunks, detecting disjointedness and getting out quickly */
     for (u = 0; u < n; u++)
         if ((scaled1[u] + 1) <= scaled2[u] || (scaled2[u] + 1) <= scaled1[u])
             HGOTO_DONE(true);
@@ -5029,7 +5029,7 @@ H5D__chunk_allocate(const H5D_t *dset, bool full_overwrite, const hsize_t old_di
     fill_buf = &fb_info.fill_buf;

     /* Check if there are filters which need to be applied to the chunk */
-    /* (only do this in advance when the chunk info can be re-used (i.e.
+    /* (only do this in advance when the chunk info can be reused (i.e.
      * it doesn't contain any non-default VL datatype fill values)
      */
     if (!fb_info.has_vlen_fill_type && pline->nused > 0) {
@@ -437,7 +437,7 @@ done:
 *              checksum value will change.
 *
 *              Entry data is copied into separate memory region; user pointer
-*              can be safley re-used or discarded after operation.
+*              can be safley reused or discarded after operation.
 *
 * Return:      SUCCEED/FAIL
 *-----------------------------------------------------------------------------
@@ -861,7 +861,7 @@ H5FD__s3comms_s3r_configure_aws(s3r_t *handle, const H5FD_ros3_fapl_t *fa, const
     if (H5FD__s3comms_make_iso_8661_string(time(NULL), iso8601) < 0)
         HGOTO_ERROR(H5E_VFL, H5E_BADVALUE, FAIL, "could not construct ISO-8601 string");

-    /* Compute signing key (part of AWS/S3 REST API). Can be re-used by
+    /* Compute signing key (part of AWS/S3 REST API). Can be reused by
     * user/key for 7 days after creation.
     */
    if (H5FD__s3comms_make_aws_signing_key(signing_key, (const char *)fa->secret_key,
@@ -320,7 +320,7 @@ typedef struct {
 *
 * Instantiated through `H5FD__s3comms_s3r_open()`, copies data into self.
 *
-* Intended to be re-used for operations on a remote object.
+* Intended to be reused for operations on a remote object.
 *
 * Cleaned up through `H5FD__s3comms_s3r_close()`.
 *
@@ -373,7 +373,7 @@ typedef struct {
 * key, generated via
 * `HMAC-SHA256(HMAC-SHA256(HMAC-SHA256(HMAC-SHA256("AWS4<secret_key>",
 * "<yyyyMMDD"), "<aws-region>"), "<aws-service>"), "aws4_request")`
-* which may be re-used for several (up to seven (7)) days from creation
+* which may be reused for several (up to seven (7)) days from creation
 *
 * Computed once upon file open from the secret key string in the fapl
 *
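The comment in this hunk describes the AWS Signature Version 4 key-derivation chain. As a hedged illustration only (this is not HDF5's internal H5FD__s3comms_make_aws_signing_key; the function and variable names below are made up), the nested HMAC-SHA256 steps could be written with OpenSSL like this:

#include <stdio.h>
#include <string.h>
#include <openssl/evp.h>
#include <openssl/hmac.h>

/* One HMAC-SHA256 step over a NUL-terminated message */
static void
hmac_sha256(const unsigned char *key, size_t key_len, const char *msg, unsigned char out[32])
{
    unsigned int out_len = 0;
    HMAC(EVP_sha256(), key, (int)key_len, (const unsigned char *)msg, strlen(msg), out, &out_len);
}

/* Derive the SigV4 signing key; date is "yyyyMMdd", e.g. "20250412" */
static void
derive_signing_key(const char *secret, const char *date, const char *region,
                   const char *service, unsigned char key_out[32])
{
    unsigned char k_date[32], k_region[32], k_service[32];
    char          seed[256];

    snprintf(seed, sizeof(seed), "AWS4%s", secret);               /* "AWS4<secret_key>" */
    hmac_sha256((const unsigned char *)seed, strlen(seed), date, k_date);
    hmac_sha256(k_date, sizeof(k_date), region, k_region);        /* e.g. "us-east-1" */
    hmac_sha256(k_region, sizeof(k_region), service, k_service);  /* e.g. "s3" */
    hmac_sha256(k_service, sizeof(k_service), "aws4_request", key_out);
}

The 32-byte result can then sign requests for up to seven days, matching the lifetime the comment gives.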
@@ -396,7 +396,7 @@ H5FD__ioc_async_completion(MPI_Request *mpi_reqs, size_t num_reqs)

    H5_CHECK_OVERFLOW(num_reqs, size_t, int);

-    /* Have to supppress gcc warnings regarding MPI_STATUSES_IGNORE
+    /* Have to suppress gcc warnings regarding MPI_STATUSES_IGNORE
     * with MPICH (https://github.com/pmodels/mpich/issues/5687)
     */
    H5_GCC_DIAG_OFF("stringop-overflow")
@@ -2156,7 +2156,7 @@
 * image> \endcode
 *
 * If we can further arrange matters so that only the contents of the datasets in the HDF5 file image change,
-* but not the structure of the file itself, we can optimize still further by re-using the image and changing
+* but not the structure of the file itself, we can optimize still further by reusing the image and changing
 * only the contents of the datasets after the initial write to the buffer. The following pseudo code shows
 * how this might be done. Note that the code assumes that buf already contains the image of the HDF5 file
 * whose dataset contents are to be overwritten. Again, much error checking is omitted for clarity. Also,
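To make the optimization this documentation describes concrete, here is a minimal sketch (my own assumption, not the pseudo code the documentation goes on to show) that reuses an in-memory image through the high-level H5LTopen_file_image() routine, overwrites one dataset's contents, and closes again; "/dset", the int element type, and the buffer names are placeholders:

#include "hdf5.h"
#include "hdf5_hl.h"

/* buf/buf_len already hold a valid HDF5 file image; new_vals supplies one int per
 * element of the existing 1-D dataset "/dset". Assumes the rewrite does not grow
 * the image, so the user buffer is updated in place. Error checking is minimal. */
static herr_t
overwrite_dataset_in_image(void *buf, size_t buf_len, const int *new_vals)
{
    hid_t file_id = H5LTopen_file_image(buf, buf_len,
                                        H5LT_FILE_IMAGE_OPEN_RW | H5LT_FILE_IMAGE_DONT_COPY |
                                            H5LT_FILE_IMAGE_DONT_RELEASE);
    if (file_id < 0)
        return -1;

    hid_t dset_id = H5Dopen2(file_id, "/dset", H5P_DEFAULT);
    if (dset_id < 0 ||
        H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, new_vals) < 0) {
        H5Fclose(file_id);
        return -1;
    }

    H5Dclose(dset_id);
    return H5Fclose(file_id); /* buf now holds the updated image */
}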
@@ -2388,7 +2388,7 @@ done:
 *          probably) use space in the file heap. If garbage collection
 *          is on and the user passes in an uninitialized value in a
 *          reference structure, the heap might get corrupted. When
-*          garbage collection is off however and the user re-uses a
+*          garbage collection is off however and the user reuses a
 *          reference, the previous heap block will be orphaned and not
 *          returned to the free heap space. When garbage collection is
 *          on, the user must initialize the reference structures to 0 or
@@ -4910,7 +4910,7 @@ H5_DLL herr_t H5Pset_file_locking(hid_t fapl_id, hbool_t use_file_locking, hbool
 * HDF5 file's global heap. If garbage collection is on and the user
 * passes in an uninitialized value in a reference structure, the heap
 * might get corrupted. When garbage collection is off, however, and
-* the user re-uses a reference, the previous heap block will be
+* the user reuses a reference, the previous heap block will be
 * orphaned and not returned to the free heap space.
 *
 * When garbage collection is on, the user must initialize the
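As a small, hedged illustration of the initialization requirement these two doc hunks describe (my own example, not part of this commit; the dataset path is a placeholder), a deprecated-style region reference can be zeroed before each reuse so no stale global-heap ID is carried along:

#include <string.h>
#include "hdf5.h"

/* Create (or re-create) a region reference to "/dset", zeroing the structure first
 * so a previously used reference cannot leave a stale global-heap ID behind. */
static herr_t
make_region_ref(hid_t file_id, hid_t space_id, hdset_reg_ref_t *ref)
{
    memset(ref, 0, sizeof(*ref));
    return H5Rcreate(ref, file_id, "/dset", H5R_DATASET_REGION, space_id);
}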
@@ -191,7 +191,7 @@ H5R__encode_token_region_compat(H5F_t *f, const H5O_token_t *obj_token, size_t t
     * file libver bounds, this is later retrieved in H5S hyper encode */
    H5CX_set_libver_bounds(f);

-    /* Zero the heap ID out, may leak heap space if user is re-using
+    /* Zero the heap ID out, may leak heap space if user is reusing
     * reference and doesn't have garbage collection turned on
     */
    memset(buf, 0, buf_size);
@@ -2575,11 +2575,11 @@ H5S_select_project_intersection(H5S_t *src_space, H5S_t *dst_space, H5S_t *src_i
         /* Advance iterators */
         if (H5S_SELECT_ITER_NEXT(ss_iter, 1) < 0)
             HGOTO_ERROR(H5E_DATASPACE, H5E_CANTNEXT, FAIL,
-                        "can't advacne source selection iterator");
+                        "can't advance source selection iterator");
         ss_iter->elmt_left--;
         if (H5S_SELECT_ITER_NEXT(ds_iter, 1) < 0)
             HGOTO_ERROR(H5E_DATASPACE, H5E_CANTNEXT, FAIL,
-                        "can't advacne destination selection iterator");
+                        "can't advance destination selection iterator");
         ds_iter->elmt_left--;
     } while (ss_iter->elmt_left > 0);
     assert(H5S_SELECT_ITER_NELMTS(ds_iter) == 0);
@@ -57,7 +57,7 @@
 *
 * Purpose: Wait at a barrier.
 *
-* Note: Similar to pthread_barrier_wait, a barrier may be re-used
+* Note: Similar to pthread_barrier_wait, a barrier may be reused
 *       multiple times without intervening calls to H5TS_barrier_init.
 *
 * Return: Non-negative on success / Negative on failure
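Since the note compares the behavior to pthread_barrier_wait, here is a hedged plain-pthreads sketch (not the internal H5TS API, and not code from this commit) of a single barrier being reused across two phases without re-initialization:

#include <pthread.h>
#include <stdio.h>

#define NTHREADS 4

static pthread_barrier_t barrier;

static void *
worker(void *arg)
{
    long id = (long)arg;

    printf("thread %ld: phase 1\n", id);
    pthread_barrier_wait(&barrier); /* first use */
    printf("thread %ld: phase 2\n", id);
    pthread_barrier_wait(&barrier); /* reused, no pthread_barrier_init in between */
    return NULL;
}

int
main(void)
{
    pthread_t threads[NTHREADS];

    pthread_barrier_init(&barrier, NULL, NTHREADS);
    for (long i = 0; i < NTHREADS; i++)
        pthread_create(&threads[i], NULL, worker, (void *)i);
    for (int i = 0; i < NTHREADS; i++)
        pthread_join(threads[i], NULL);
    pthread_barrier_destroy(&barrier);
    return 0;
}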
@@ -2445,7 +2445,7 @@ main(int argc, char **argv)
     /* TESTS */

     if (nerrors == 0) {
-        H5FD_mirror_xmit_t xmit_mock; /* Re-used header in various xmit tests */
+        H5FD_mirror_xmit_t xmit_mock; /* Reused header in various xmit tests */

         /* Set bogus values matching expected; encoding doesn't care
          * Use sequential values to easily generate the expected buffer with a
@@ -5625,7 +5625,7 @@ static int
 test_file_lock_swmr_concur(hid_t H5_ATTR_UNUSED in_fapl)
 {
     /* Output message about test being performed */
-    TESTING("File open with different combintations of flags + SWMR flags--concurrent access");
+    TESTING("File open with different combinations of flags + SWMR flags--concurrent access");
     SKIPPED();
     puts(" Test skipped due to fork or waitpid not defined.");
     return 0;
@@ -5647,7 +5647,7 @@ test_file_lock_swmr_concur(hid_t in_fapl)
     int notify = 0;

     /* Output message about test being performed */
-    TESTING("File open with different combintations of flags + SWMR flags--concurrent access");
+    TESTING("File open with different combinations of flags + SWMR flags--concurrent access");

     /* Set locking in the fapl */
     if ((fapl = H5Pcopy(in_fapl)) < 0)
@@ -354,7 +354,7 @@ typedef struct {
 /****************************************************************
 **
 **  test_misc1(): test unlinking a dataset from a group and immediately
-**      re-using the dataset name
+**      reusing the dataset name
 **
 ****************************************************************/
 static void
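For context, a hedged sketch of the pattern test_misc1() exercises (my own example with placeholder names and omitted error checking, not the test's code): create a dataset, unlink it with H5Ldelete(), and immediately create a new dataset under the same name:

#include "hdf5.h"

/* Create "dset" in file_id, unlink it, and immediately reuse the name. */
static herr_t
unlink_and_reuse_name(hid_t file_id)
{
    hsize_t dims[1]  = {10};
    hid_t   space_id = H5Screate_simple(1, dims, NULL);
    hid_t   dset_id  = H5Dcreate2(file_id, "dset", H5T_NATIVE_INT, space_id,
                                  H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    H5Dclose(dset_id);
    H5Ldelete(file_id, "dset", H5P_DEFAULT); /* unlink the original dataset */

    dset_id = H5Dcreate2(file_id, "dset", H5T_NATIVE_INT, space_id, /* same name, new object */
                         H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    H5Dclose(dset_id);
    H5Sclose(space_id);
    return 0;
}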
@@ -7132,7 +7132,7 @@ test_misc(void H5_ATTR_UNUSED *params)
     /* Output message about test being performed */
     MESSAGE(5, ("Testing Miscellaneous Routines\n"));

-    test_misc1(); /* Test unlinking a dataset & immediately re-using name */
+    test_misc1(); /* Test unlinking a dataset & immediately reusing name */
     test_misc2(); /* Test storing a VL-derived datatype in two different files */
     test_misc3(); /* Test reading from chunked dataset with non-zero fill value */
     test_misc4(); /* Test retrieving the fileno for various objects with H5Oget_info() */
@@ -62,7 +62,7 @@ thread_main(void H5_ATTR_UNUSED *arg)
         goto pre_barrier_error;
     }

-    /* Verify that the thread ID hasn't been re-used */
+    /* Verify that the thread ID hasn't been reused */
     if (used[tid - 2]) {
         TestErrPrintf("reused tid %" PRIu64 " FAIL\n", tid);
         H5TS_mutex_unlock(&used_lock);