mirror of https://github.com/Unidata/netcdf-c.git (synced 2024-11-21 03:13:42 +08:00)
(1) improve INI parser (2) Fix make distcheck
parent 0f93766599
commit dc2ecc74ac

@@ -32,8 +32,10 @@ endif
EXTRA_DIST = CMakeLists.txt XGetopt.h netcdf_meta.h.in netcdf_dispatch.h.in
DISTCLEANFILES = netcdf_json.h
BUILT_SOURCES = netcdf_json.h
netcdf_json.h: Makefile ncjson.h ${srcdir}/../libdispatch/ncjson.c
sed -e 's/NCJSON_H/NETCDF_JSON_H/' -e '/ncjson.h/d' <ncjson.h > $@
netcdf_json.h: Makefile ${srcdir}/ncjson.h ${srcdir}/../libdispatch/ncjson.c
sed -e 's/NCJSON_H/NETCDF_JSON_H/' -e '/ncjson.h/d' <${srcdir}/ncjson.h > $@
sed -e '/ncjson.h/d' < ${srcdir}/../libdispatch/ncjson.c >> $@

@@ -27,6 +27,14 @@ See COPYRIGHT for license information.
#undef LEXDEBUG
#undef PARSEDEBUG
#undef AWSDEBUG
#undef CATCH

#if defined(CATCH)
#define THROW(e) ((e) == NC_NOERR ? (e) : ncbreakpoint(e))
#else
#define THROW(e) (e)
#endif

#define RTAG ']'
#define LTAG '['

@@ -726,24 +734,37 @@ clearS3credentials(struct S3credentials* creds)
}

/**
Parser for aws credentials.
The .aws/config and .aws/credentials files
are in INI format (https://en.wikipedia.org/wiki/INI_file).
This format is not well defined, so the grammar used
here is restrictive. Here, the term "profile" is the same
as the INI term "section".

The grammar used is as follows:

Grammar:

credsfile: profilelist ;
inifile: profilelist ;
profilelist: profile | profilelist profile ;
profile: '[' profilename ']'
entries ;
profile: '[' profilename ']' EOL entries ;
entries: empty | entries entry ;
entry: WORD = WORD ;
entry: WORD = WORD EOL ;
profilename: WORD ;
Lexical:
WORD sequence of printable characters - [ \[\]=]+
EOL '\n' | ';'

Note:
1. The semicolon at beginning of a line signals a comment.
2. # comments are not allowed
3. Duplicate profiles or keys are ignored.
4. Escape characters are not supported.
*/
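/* For illustration, a minimal file accepted by this grammar, taken from the
   .aws/config fragment written by the test script later in this commit:

       [unidata]
       aws_access_key_id=ACCESSKEYUNIDATAXXXX

       aws_secret_access_key=UNIDATA/ef0ghijklmnopqr/unidataxxxxxxxxx
       ; comment1
       aws_region=us-west-1
       ;comment2

   The blank line is skipped, both ';' lines are treated as comments, and the
   result is a single profile "unidata" with three key=value entries. */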

#define AWS_EOF (-1)
#define AWS_ERR (0)
#define AWS_WORD (1)
#define AWS_WORD (0x10001)
#define AWS_EOL (0x10002)
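/* AWS_WORD and AWS_EOL are placed above 0xff, presumably so they cannot
   collide with the single-character tokens ('[', ']', '=', ';') that
   awslex() returns as themselves. */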

#ifdef LEXDEBUG
static const char*

@@ -766,6 +787,7 @@ typedef struct AWSparser {
size_t yylen; /* |yytext| */
NCbytes* yytext;
int token; /* last token found */
int pushback; /* allow 1-token pushback */
} AWSparser;

static int

@@ -776,19 +798,41 @@ awslex(AWSparser* parser)
char* start;
size_t count;

ncbytesclear(parser->yytext);
parser->token = AWS_ERR;
ncbytesclear(parser->yytext);
ncbytesnull(parser->yytext);

if(parser->pushback != AWS_ERR) {
token = parser->pushback;
parser->pushback = AWS_ERR;
goto done;
}

while(token == 0) { /* avoid need to goto when retrying */
c = *parser->pos;
if(c == '\0') {
token = AWS_EOF;
} else if(c == '\n') {
parser->pos++;
token = AWS_EOL;
} else if(c <= ' ' || c == '\177') {
parser->pos++;
continue; /* ignore whitespace */
} else if(c == ';') {
char* p = parser->pos - 1;
if(*p == '\n') {
/* Skip comment */
do {p++;} while(*p != '\n' && *p != '\0');
parser->pos = p;
token = (*p == '\n'?AWS_EOL:AWS_EOF);
} else {
token = ';';
ncbytesappend(parser->yytext,';');
parser->pos++;
}
} else if(c == '[' || c == ']' || c == '=') {
ncbytesclear(parser->yytext);
ncbytesappend(parser->yytext,c);
ncbytesnull(parser->yytext);
token = c;
parser->pos++;
} else { /*Assume a word*/

@@ -809,6 +853,7 @@ fprintf(stderr,"%s(%d): |%s|\n",tokenname(token),token,ncbytescontents(parser->y
#endif
} /*for(;;)*/

done:
parser->token = token;
return token;
}

@@ -838,54 +883,69 @@ awsparse(const char* text, NClist* profiles)
if(parser == NULL)
{stat = (NC_ENOMEM); goto done;}
len = strlen(text);
parser->text = (char*)malloc(len+1+1);
parser->text = (char*)malloc(len+1+1+1); /* double nul term plus leading EOL */
if(parser->text == NULL)
{stat = (NC_EINVAL); goto done;}
strcpy(parser->text,text);
{stat = (THROW(NC_EINVAL)); goto done;}
parser->pos = parser->text;
parser->pos[0] = '\n'; /* So we can test for comment unconditionally */
parser->pos++;
strcpy(parser->text+1,text);
parser->pos += len;
/* Double nul terminate */
parser->text[len] = '\0';
parser->text[len+1] = '\0';
parser->pos = &parser->text[0];
parser->pos[0] = '\0';
parser->pos[1] = '\0';
parser->pos = &parser->text[0]; /* reset */
parser->yytext = ncbytesnew();
parser->pushback = AWS_ERR;
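/* A sketch of the buffer layout the setup above produces for an input of
   length len (inferred from the code):
       text[0]        '\n'   sentinel so a ';' on the very first line is
                             recognized as a line-start comment by awslex()
       text[1..len]   copy of the input text
       text[len+1]    '\0'   terminator
       text[len+2]    '\0'   second NUL, presumably so one-character
                             lookahead past the end stays safe
   parser->pos is reset to &text[0], so the first token is an AWS_EOL from
   the sentinel, which the parse loop below simply skips. */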

/* Do not need recursion, use simple loops */
token = awslex(parser); /* make token always be defined */
for(;;) {
token = awslex(parser); /* make token always be defined */
if(token == AWS_EOF) break; /* finished */
if(token != LBR) {stat = NC_EINVAL; goto done;}
if(token == AWS_EOL) {continue;} /* blank line */
if(token != LBR) {stat = THROW(NC_EINVAL); goto done;}
/* parse [profile name] */
token = awslex(parser);
if(token != AWS_WORD) {stat = NC_EINVAL; goto done;}
if(token != AWS_WORD) {stat = THROW(NC_EINVAL); goto done;}
assert(profile == NULL);
if((profile = (struct AWSprofile*)calloc(1,sizeof(struct AWSprofile)))==NULL)
{stat = NC_ENOMEM; goto done;}
profile->name = ncbytesextract(parser->yytext);
profile->entries = nclistnew();
token = awslex(parser);
if(token != RBR) {stat = NC_EINVAL; goto done;}
if(token != RBR) {stat = THROW(NC_EINVAL); goto done;}
#ifdef PARSEDEBUG
fprintf(stderr,">>> parse: profile=%s\n",profile->name);
#endif
/* The fields can be in any order */
for(;;) {
struct AWSentry* entry = NULL;
token = awslex(parser); /* prime parser */
if(token == AWS_EOF || token == LBR)
break;
if(token != AWS_WORD) {stat = NC_EINVAL; goto done;}
key = ncbytesextract(parser->yytext);
token = awslex(parser);
if(token != '=') {stat = NC_EINVAL; goto done;}
token = awslex(parser);
if(token != AWS_WORD) {stat = NC_EINVAL; goto done;}
value = ncbytesextract(parser->yytext);
if((entry = (struct AWSentry*)calloc(1,sizeof(struct AWSentry)))==NULL)
{stat = NC_ENOMEM; goto done;}
entry->key = key; key = NULL;
entry->value = value; value = NULL;
if(token == AWS_EOL) {
continue; /* ignore empty lines */
} else if(token == AWS_EOF) {
break;
} else if(token == LBR) {/* start of next profile */
parser->pushback = token;
break;
} else if(token == AWS_WORD) {
key = ncbytesextract(parser->yytext);
token = awslex(parser);
if(token != '=') {stat = THROW(NC_EINVAL); goto done;}
token = awslex(parser);
if(token != AWS_EOL && token != AWS_WORD) {stat = THROW(NC_EINVAL); goto done;}
value = ncbytesextract(parser->yytext);
if((entry = (struct AWSentry*)calloc(1,sizeof(struct AWSentry)))==NULL)
{stat = NC_ENOMEM; goto done;}
entry->key = key; key = NULL;
entry->value = value; value = NULL;
#ifdef PARSEDEBUG
fprintf(stderr,">>> parse: entry=(%s,%s)\n",entry->key,entry->value);
#endif
nclistpush(profile->entries,entry); entry = NULL;
nclistpush(profile->entries,entry); entry = NULL;
if(token == AWS_WORD) token = awslex(parser); /* finish the line */
} else
{stat = THROW(NC_EINVAL); goto done;}
}

/* If this profile already exists, then ignore new one */

@@ -116,7 +116,7 @@ perftest.txt bigmeta.nc bigvars.nc *.gz MSGCPP_*.nc \
floats*.nc floats*.cdl shorts*.nc shorts*.cdl ints*.nc ints*.cdl \
testfilter_reg.nc filterrepeat.txt tmp_fillonly.nc \
testfilter_order.nc crfilterorder.txt rdfilterorder.txt 1 \
tmp_*.txt tmp_*.nc
tmp_*.txt tmp_*.nc tmp*.dump tmp*.cdl tmp*.txt tmp*.tmp tmp_bzip2.c bzip2.nc

DISTCLEANFILES = findplugin.sh run_par_test.sh

@@ -181,7 +181,7 @@ fi

if test "x$UNK" = x1 ; then
echo "*** Testing access to filter info when filter dll is not available"
rm -f bzip2.nc ./tst_filter.txt
rm -f bzip2.nc ./tmp_filter.txt
# xfail build bzip2.nc
hidebzip2
if ${NCGEN} -lb -4 -o bzip2.nc ${srcdir}/bzip2.cdl ; then

@@ -194,9 +194,9 @@ unhidebzip2
${NCGEN} -lb -4 -o bzip2.nc ${srcdir}/bzip2.cdl
# Now hide the filter code
hidebzip2
rm -f ./tst_filter.txt
rm -f ./tmp_filter.txt
# This will xfail
if ${NCDUMP} -s bzip2.nc > ./tst_filter.txt ; then
if ${NCDUMP} -s bzip2.nc > ./tmp_filter.txt ; then
echo "*** FAIL: ncdump -hs bzip2.nc"
else
echo "*** XFAIL: ncdump -hs bzip2.nc"

@@ -204,8 +204,8 @@ fi
# Restore the filter code
unhidebzip2
# Verify we can see filter when using -h
rm -f ./tst_filter.txt
${NCDUMP} -hs bzip2.nc > ./tst_filter.txt
rm -f ./tmp_filter.txt
${NCDUMP} -hs bzip2.nc > ./tmp_filter.txt
echo "*** Pass: unknown filter"
fi

@@ -264,6 +264,8 @@ diff -b -w ${srcdir}/ref_filter_order_read.txt tmp_rdfilterorder.txt

fi

# Cleanup

echo "*** Pass: all selected tests passed"

exit 0

@@ -151,12 +151,11 @@ ref_groups_regular.cdl ref_byte.cdl ref_byte_fill_value_null.cdl
# Interoperability files
EXTRA_DIST += ref_power_901_constants.zip ref_power_901_constants.cdl ref_quotes.zip ref_quotes.cdl

CLEANFILES = ut_*.txt ut*.cdl tmp*.nc tmp*.cdl tmp*.txt tmp*.dmp tmp*.zip tmp*.nc
CLEANFILES = ut_*.txt ut*.cdl tmp*.nc tmp*.cdl tmp*.txt tmp*.dmp tmp*.zip tmp*.nc tmp*.dump tmp*.tmp tmp_ngc.c

# Remove directories
clean-local:
rm -fr tmp*.file results.file results.s3 results.zip
rm -fr power_901_constants.file
rm -fr rcmiscdir

DISTCLEANFILES = findplugin.sh

@@ -61,4 +61,9 @@ testallcases file
if test "x$FEATURE_NCZARR_ZIP" = xyes ; then testallcases zip; fi
#No examples yet: if test "x$FEATURE_S3TESTS" = xyes ; then testallcases s3; fi

# Cleanup
rm -fr ${execdir}/ref_power_901_constants.file
rm -f ${execdir}/ref_power_901_constants.zip
rm -f ${execdir}/ref_quotes.zip

exit 0

@@ -12,15 +12,15 @@ BZIP2SRC = blocksort.c huffman.c crctable.c randtable.c compress.c decompress.c

EXTRA_DIST = CMakeLists.txt \
H5Ztemplate.c H5Zmisc.c H5Zutil.c H5Znoop.c h5noop.h NCZmisc.c \
NCZdefault.c \
H5Zbzip2.c H5Zbzip2.h H5Zblosc.c H5Zblosc.h \
H5Zshuffle.c H5Zfletcher32.h H5Zdeflate.c H5Zszip.c H5Zszip.h \
NCZdefaults.c \
H5Zbzip2.c h5bzip2.h H5Zblosc.c H5Zblosc.h \
H5Zshuffle.c H5Zdeflate.c H5Zszip.c H5Zszip.h \
${BZIP2SRC} H5checksum.c

if ENABLE_FILTER_TESTING

lib_LTLIBRARIES = libh5bzip2.la
libh5bzip2_la_SOURCES = H5Zbzip2.c H5Zbzip2.h ${BZIP2SRC}
libh5bzip2_la_SOURCES = H5Zbzip2.c h5bzip2.h ${BZIP2SRC}

noinst_LTLIBRARIES = libh5misc.la libh5noop.la libh5noop1.la libnczmisc.la libnczdefaults.la \
libh5shuffle.la libh5fletcher32.la libh5deflate.la

@@ -18,10 +18,14 @@ cat >.aws/config <<EOF
aws_secret_access_key=NCAR/ef0ghijklmnopqr/ncarxxxxxxxxxxxxxxx
[unidata]
aws_access_key_id=ACCESSKEYUNIDATAXXXX

aws_secret_access_key=UNIDATA/ef0ghijklmnopqr/unidataxxxxxxxxx
; comment1
aws_region=us-west-1
;comment2
EOF
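# Note: the blank line and the ';' comment lines in the config above exercise
# the INI parser's new blank-line and comment handling.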

${execdir}/test_aws

rm -fr .aws