Fixes a stack overflow in the h5dump test generator (#1903)

* Fixes a stack overflow in the h5dump test generator

The opaque type size was declared to be the size of the entire buffer of two
opaque elements (200 bytes) rather than the size of a single 100-byte element,
causing a memcpy call inside H5Dwrite to read past the end of the buffer and
dump garbage into the file (a minimal sketch of the mismatch follows the change
summary below).

Note that this only affected the test generator and not h5dump itself.

* Committing clang-format changes

Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
Dana Robinson 2022-07-18 07:35:43 -07:00 committed by GitHub
parent 08b797de73
commit 079df9d216
3 changed files with 31 additions and 30 deletions
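
To make the sizing mismatch concrete, here is a hedged sketch of why the old
H5Tcreate call over-read the buffer. The function and variable names are
illustrative and not part of the commit; the point is only that, with both
selections set to H5S_ALL and no type conversion, H5Dwrite copies
(number of dataspace elements) x (opaque datatype size) bytes out of the
application buffer.

#include "hdf5.h"

/* Illustrative sketch, not code from the commit */
static void
opaque_size_sketch(void)
{
    char    buf[100][2]; /* 200-byte stack buffer: two 100-byte opaque elements */
    hsize_t dim   = 2;   /* the dataspace describes two elements */
    hid_t   space = H5Screate_simple(1, &dim, NULL);

    /* Buggy sizing: the opaque type spans the whole buffer, so a write would
     * copy 2 x 200 = 400 bytes from a 200-byte buffer and read past its end. */
    hid_t bad_type = H5Tcreate(H5T_OPAQUE, sizeof(buf));

    /* Fixed sizing: the opaque type spans a single element, so a write copies
     * 2 x 100 = 200 bytes, exactly the size of the buffer. */
    hid_t good_type = H5Tcreate(H5T_OPAQUE, sizeof(buf) / 2);

    H5Tclose(bad_type);
    H5Tclose(good_type);
    H5Sclose(space);
}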

@@ -2570,41 +2570,42 @@ gent_nestcomp(void)
 static void
 gent_opaque(void)
 {
-    hid_t   file, type, dataset, space;
-    char    test[100][2];
-    int     x;
-    hsize_t dim = 2;
+    hid_t file    = H5I_INVALID_HID;
+    hid_t type    = H5I_INVALID_HID;
+    hid_t dataset = H5I_INVALID_HID;
+    hid_t space   = H5I_INVALID_HID;

-    for (x = 0; x < 100; x++) {
-        test[x][0] = (char)x;
-        test[x][1] = (char)(99 - x);
-    }
+    const uint8_t OPAQUE_NBYTES = 100;
+    const int     N_ELEMENTS    = 2;

-    /*
-     * Create the data space.
-     */
-    space = H5Screate_simple(1, &dim, NULL);
+    /* The dataset contains N_ELEMENTS elements of OPAQUE_NBYTES bytes */
+    uint8_t data[OPAQUE_NBYTES][N_ELEMENTS];
+    hsize_t dim = N_ELEMENTS;

     /*
      * Create the file.
      */
     file = H5Fcreate(FILE19, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

-    /*
-     * Create the memory datatype.
-     */
-    type = H5Tcreate(H5T_OPAQUE, sizeof(char) * 100 * 2);
+    /* The opaque datatype is OPAQUE_NBYTES bytes in size */
+    type = H5Tcreate(H5T_OPAQUE, sizeof(uint8_t) * OPAQUE_NBYTES);
     H5Tset_tag(type, "test opaque type");

     /*
      * Create the dataset.
      */
+    space   = H5Screate_simple(1, &dim, NULL);
     dataset = H5Dcreate2(file, "opaque test", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

-    /*
-     * Write data to the dataset;
+    /* Given the data fill algorithm, make sure that the number of bytes
+     * in the opaque type isn't so big that i or (OPAQUE_NBYTES - 1) - i
+     * don't fit in a uint8_t value.
      */
-    H5Dwrite(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, test);
+    HDcompile_assert(OPAQUE_NBYTES < UINT8_MAX);
+
+    /* Write out two opaque data elements with predictable data to
+     * the file.
+     */
+    for (uint8_t i = 0; i < OPAQUE_NBYTES; i++) {
+        data[i][0] = i;
+        data[i][1] = (OPAQUE_NBYTES - 1) - i;
+    }
+
+    H5Dwrite(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);

     H5Tclose(type);
     H5Sclose(space);

Binary file not shown.

@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <hdf5:HDF5-File xmlns:hdf5="http://hdfgroup.org/HDF5/XML/schema/HDF5-File.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://hdfgroup.org/HDF5/XML/schema/HDF5-File http://www.hdfgroup.org/HDF5/XML/schema/HDF5-File.xsd">
-<hdf5:RootGroup OBJ-XID="xid_696" H5Path="/">
-   <hdf5:Dataset Name="opaque test" OBJ-XID="xid_744" H5Path= "/opaque test" Parents="xid_696" H5ParentPaths="/">
+<hdf5:RootGroup OBJ-XID="xid_96" H5Path="/">
+   <hdf5:Dataset Name="opaque test" OBJ-XID="xid_800" H5Path= "/opaque test" Parents="xid_96" H5ParentPaths="/">
       <hdf5:StorageLayout>
          <hdf5:ContiguousLayout/>
       </hdf5:StorageLayout>
@@ -17,13 +17,13 @@
       </hdf5:Dataspace>
       <hdf5:DataType>
          <hdf5:AtomicType>
-            <hdf5:OpaqueType Tag="test opaque type" Size="200"/>
+            <hdf5:OpaqueType Tag="test opaque type" Size="100"/>
          </hdf5:AtomicType>
       </hdf5:DataType>
       <hdf5:Data>
          <hdf5:DataFromFile>
-            00:63:01:62:02:61:03:60:04:5f:05:5e:06:5d:07:5c:08:5b:09:5a:0a:59:0b:58:0c:57:0d:56:0e:55:0f:54:10:53:11:52:12:51:13:50:14:4f:15:4e:16:4d:17:4c:18:4b:19:4a:1a:49:1b:48:1c:47:1d:46:1e:45:1f:44:20:43:21:42:22:41:23:40:24:3f:25:3e:26:3d:27:3c:28:3b:29:3a:2a:39:2b:38:2c:37:2d:36:2e:35:2f:34:30:33:31:32:32:31:33:30:34:2f:35:2e:36:2d:37:2c:38:2b:39:2a:3a:29:3b:28:3c:27:3d:26:3e:25:3f:24:40:23:41:22:42:21:43:20:44:1f:45:1e:46:1d:47:1c:48:1b:49:1a:4a:19:4b:18:4c:17:4d:16:4e:15:4f:14:50:13:51:12:52:11:53:10:54:0f:55:0e:56:0d:57:0c:58:0b:59:0a:5a:09:5b:08:5c:07:5d:06:5e:05:5f:04:60:03:61:02:62:01:63:00
-            38:00:00:2c:3c:00:00:27:34:00:01:ea:04:00:00:12:00:00:00:00:00:08:a4:64:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:ef:7e:c9:c0:00:00:00:01:ef:ff:fa:84:ef:ff:fa:8c:00:03:fc:00:00:00:00:00:00:00:00:00:ef:ff:fa:20:00:01:2e:84:ef:ff:fa:20:00:01:2e:58:00:00:00:03:ef:ff:fa:84:00:00:00:04:ef:ff:fa:8c:00:00:00:05:ef:ff:fa:f0:00:00:00:00:00:00:00:00:00:00:00:01:ef:ff:fa:84:ef:ff:fa:8c:00:03:fc:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
+            00:63:01:62:02:61:03:60:04:5f:05:5e:06:5d:07:5c:08:5b:09:5a:0a:59:0b:58:0c:57:0d:56:0e:55:0f:54:10:53:11:52:12:51:13:50:14:4f:15:4e:16:4d:17:4c:18:4b:19:4a:1a:49:1b:48:1c:47:1d:46:1e:45:1f:44:20:43:21:42:22:41:23:40:24:3f:25:3e:26:3d:27:3c:28:3b:29:3a:2a:39:2b:38:2c:37:2d:36:2e:35:2f:34:30:33:31:32
+            32:31:33:30:34:2f:35:2e:36:2d:37:2c:38:2b:39:2a:3a:29:3b:28:3c:27:3d:26:3e:25:3f:24:40:23:41:22:42:21:43:20:44:1f:45:1e:46:1d:47:1c:48:1b:49:1a:4a:19:4b:18:4c:17:4d:16:4e:15:4f:14:50:13:51:12:52:11:53:10:54:0f:55:0e:56:0d:57:0c:58:0b:59:0a:5a:09:5b:08:5c:07:5d:06:5e:05:5f:04:60:03:61:02:62:01:63:00
          </hdf5:DataFromFile>
       </hdf5:Data>
    </hdf5:Dataset>
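
For cross-checking, the interleaved i / (99 - i) pattern shown in the expected
XML above can be read back from the regenerated file with a small sketch like
the one below. This is not part of the commit: the file name "topaque.h5" is
an assumption about what FILE19 expands to, and the checks are illustrative.

#include <assert.h>
#include <stdint.h>
#include "hdf5.h"

/* Hedged verification sketch: read back the two 100-byte opaque elements
 * written by gent_opaque() and confirm the predictable fill pattern. */
int
main(void)
{
    uint8_t data[100][2];
    hid_t   file    = H5Fopen("topaque.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    hid_t   dataset = H5Dopen2(file, "opaque test", H5P_DEFAULT);
    hid_t   type    = H5Dget_type(dataset);

    /* The on-disk opaque type is now 100 bytes, matching Size="100" above */
    assert(H5Tget_size(type) == 100);

    /* Two 100-byte elements fill the 200-byte buffer exactly */
    H5Dread(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);

    for (int i = 0; i < 100; i++) {
        assert(data[i][0] == (uint8_t)i);
        assert(data[i][1] == (uint8_t)(99 - i));
    }

    H5Tclose(type);
    H5Dclose(dataset);
    H5Fclose(file);
    return 0;
}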