http2: upgrade tests and add fix for non-existing stream
- check in h2 filter recv that stream actually exists and return error if not
- add test for parallel, extreme h2 upgrades that fail if connections get
  reused before fully switched
- add h2 upgrade upload test just for completeness

Closes #11563
This commit is contained in: parent db70846e2e, commit 944e219f10
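The scenario the fix and the new stress test revolve around is curl's cleartext HTTP/2 upgrade: a transfer on an http:// URL that asks for HTTP/2 starts out as HTTP/1.1 with an "Upgrade: h2c" header and only becomes HTTP/2 once the server answers 101. A minimal single-transfer sketch of that setup follows; the URL and port are illustrative stand-ins for the local test server, not values from this commit. The new h2-upgrade-extreme client further down drives roughly 200 such transfers concurrently on a multi handle to provoke reuse of a connection whose protocol switch has not finished yet.

#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *easy = curl_easy_init();
  CURLcode rc;

  if(!easy)
    return 1;
  /* an http:// URL combined with a request for HTTP/2 makes libcurl send an
   * HTTP/1.1 request carrying "Upgrade: h2c" and switch the connection to
   * HTTP/2 if the server answers 101 Switching Protocols.
   * URL and port below are illustrative only. */
  curl_easy_setopt(easy, CURLOPT_URL, "http://localhost:8080/data-100k");
  curl_easy_setopt(easy, CURLOPT_HTTP_VERSION, (long)CURL_HTTP_VERSION_2_0);
  rc = curl_easy_perform(easy);
  if(rc)
    fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(rc));
  curl_easy_cleanup(easy);
  return rc ? 1 : 0;
}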
lib/http2.c (14 changed lines)
@@ -1826,7 +1826,19 @@ static ssize_t cf_h2_recv(struct Curl_cfilter *cf, struct Curl_easy *data,
  ssize_t nread = -1;
  CURLcode result;
  struct cf_call_data save;
  DEBUGASSERT(stream);

  if(!stream) {
    /* Abnormal call sequence: either this transfer has never opened a stream
     * (unlikely) or the transfer has been done, cleaned up its resources, but
     * a read() is called anyway. It is not clear what the calling sequence
     * is for such a case. */
    failf(data, "[%zd-%zd], http/2 recv on a transfer never opened "
          "or already cleared", (ssize_t)data->id,
          (ssize_t)cf->conn->connection_id);
    *err = CURLE_HTTP2;
    return -1;
  }

  CF_DATA_SAVE(save, cf, data);

  nread = stream_recv(cf, data, buf, len, err);
tests/http/clients/.gitignore (vendored, 3 changed lines)
@@ -5,4 +5,5 @@
h2-serverpush
h2-download
ws-data
ws-pingpong
ws-pingpong
h2-upgrade-extreme
tests/http/clients/Makefile.inc
@@ -27,4 +27,5 @@ check_PROGRAMS = \
  h2-serverpush \
  h2-download \
  ws-data \
  ws-pingpong
  ws-pingpong \
  h2-upgrade-extreme
tests/http/clients/h2-upgrade-extreme.c (new file, 249 lines)
@@ -0,0 +1,249 @@
/***************************************************************************
 *                                  _   _ ____  _
 *  Project                     ___| | | |  _ \| |
 *                             / __| | | | |_) | |
 *                            | (__| |_| |  _ <| |___
 *                             \___|\___/|_| \_\_____|
 *
 * Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
 *
 * This software is licensed as described in the file COPYING, which
 * you should have received as part of this distribution. The terms
 * are also available at https://curl.se/docs/copyright.html.
 *
 * You may opt to use, copy, modify, merge, publish, distribute and/or sell
 * copies of the Software, and permit persons to whom the Software is
 * furnished to do so, under the terms of the COPYING file.
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
 * KIND, either express or implied.
 *
 * SPDX-License-Identifier: curl
 *
 ***************************************************************************/
/* <DESC>
 * HTTP/2 Upgrade test
 * </DESC>
 */
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <inttypes.h>
/* #include <error.h> */
#include <errno.h>
#include <curl/curl.h>
#include <curl/mprintf.h>


static void log_line_start(FILE *log, const char *idsbuf, curl_infotype type)
{
  /*
   * This is the trace look that is similar to what libcurl makes on its
   * own.
   */
  static const char * const s_infotype[] = {
    "* ", "< ", "> ", "{ ", "} ", "{ ", "} "
  };
  if(idsbuf && *idsbuf)
    fprintf(log, "%s%s", idsbuf, s_infotype[type]);
  else
    fputs(s_infotype[type], log);
}

#define TRC_IDS_FORMAT_IDS_1 "[%" CURL_FORMAT_CURL_OFF_T "-x] "
#define TRC_IDS_FORMAT_IDS_2 "[%" CURL_FORMAT_CURL_OFF_T "-%" \
                             CURL_FORMAT_CURL_OFF_T "] "
/*
** callback for CURLOPT_DEBUGFUNCTION
*/
static int debug_cb(CURL *handle, curl_infotype type,
                    char *data, size_t size,
                    void *userdata)
{
  FILE *output = stderr;
  static int newl = 0;
  static int traced_data = 0;
  char idsbuf[60];
  curl_off_t xfer_id, conn_id;

  (void)handle; /* not used */
  (void)userdata;

  if(!curl_easy_getinfo(handle, CURLINFO_XFER_ID, &xfer_id) && xfer_id >= 0) {
    if(!curl_easy_getinfo(handle, CURLINFO_CONN_ID, &conn_id) &&
       conn_id >= 0) {
      curl_msnprintf(idsbuf, sizeof(idsbuf), TRC_IDS_FORMAT_IDS_2,
                     xfer_id, conn_id);
    }
    else {
      curl_msnprintf(idsbuf, sizeof(idsbuf), TRC_IDS_FORMAT_IDS_1, xfer_id);
    }
  }
  else
    idsbuf[0] = 0;

  switch(type) {
  case CURLINFO_HEADER_OUT:
    if(size > 0) {
      size_t st = 0;
      size_t i;
      for(i = 0; i < size - 1; i++) {
        if(data[i] == '\n') { /* LF */
          if(!newl) {
            log_line_start(output, idsbuf, type);
          }
          (void)fwrite(data + st, i - st + 1, 1, output);
          st = i + 1;
          newl = 0;
        }
      }
      if(!newl)
        log_line_start(output, idsbuf, type);
      (void)fwrite(data + st, i - st + 1, 1, output);
    }
    newl = (size && (data[size - 1] != '\n')) ? 1 : 0;
    traced_data = 0;
    break;
  case CURLINFO_TEXT:
  case CURLINFO_HEADER_IN:
    if(!newl)
      log_line_start(output, idsbuf, type);
    (void)fwrite(data, size, 1, output);
    newl = (size && (data[size - 1] != '\n')) ? 1 : 0;
    traced_data = 0;
    break;
  case CURLINFO_DATA_OUT:
  case CURLINFO_DATA_IN:
  case CURLINFO_SSL_DATA_IN:
  case CURLINFO_SSL_DATA_OUT:
    if(!traced_data) {
      if(!newl)
        log_line_start(output, idsbuf, type);
      fprintf(output, "[%ld bytes data]\n", (long)size);
      newl = 0;
      traced_data = 1;
    }
    break;
  default: /* nada */
    newl = 0;
    traced_data = 1;
    break;
  }

  return 0;
}

static size_t write_cb(char *ptr, size_t size, size_t nmemb, void *opaque)
{
  (void)ptr;
  (void)opaque;
  return size * nmemb;
}

int main(int argc, char *argv[])
{
  const char *url;
  CURLM *multi;
  CURL *easy;
  CURLMcode mc;
  int running_handles = 0, start_count, numfds;
  CURLMsg *msg;
  int msgs_in_queue;
  char range[128];

  if(argc != 2) {
    fprintf(stderr, "%s URL\n", argv[0]);
    exit(2);
  }

  url = argv[1];
  multi = curl_multi_init();
  if(!multi) {
    fprintf(stderr, "curl_multi_init failed\n");
    exit(1);
  }

  start_count = 200;
  do {
    if(start_count) {
      easy = curl_easy_init();
      if(!easy) {
        fprintf(stderr, "curl_easy_init failed\n");
        exit(1);
      }
      curl_easy_setopt(easy, CURLOPT_VERBOSE, 1L);
      curl_easy_setopt(easy, CURLOPT_DEBUGFUNCTION, debug_cb);
      curl_easy_setopt(easy, CURLOPT_URL, url);
      curl_easy_setopt(easy, CURLOPT_NOSIGNAL, 1L);
      curl_easy_setopt(easy, CURLOPT_AUTOREFERER, 1L);
      curl_easy_setopt(easy, CURLOPT_FAILONERROR, 1L);
      curl_easy_setopt(easy, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2_0);
      curl_easy_setopt(easy, CURLOPT_WRITEFUNCTION, write_cb);
      curl_easy_setopt(easy, CURLOPT_WRITEDATA, NULL);
      curl_easy_setopt(easy, CURLOPT_HTTPGET, 1L);
      curl_msnprintf(range, sizeof(range), "%" PRIu64 "-%" PRIu64,
                     UINT64_C(0), UINT64_C(16384));
      curl_easy_setopt(easy, CURLOPT_RANGE, range);

      mc = curl_multi_add_handle(multi, easy);
      if(mc != CURLM_OK) {
        fprintf(stderr, "curl_multi_add_handle: %s\n",
                curl_multi_strerror(mc));
        exit(1);
      }
      --start_count;
    }

    mc = curl_multi_perform(multi, &running_handles);
    if(mc != CURLM_OK) {
      fprintf(stderr, "curl_multi_perform: %s\n",
              curl_multi_strerror(mc));
      exit(1);
    }

    if(running_handles) {
      mc = curl_multi_poll(multi, NULL, 0, 1000000, &numfds);
      if(mc != CURLM_OK) {
        fprintf(stderr, "curl_multi_poll: %s\n",
                curl_multi_strerror(mc));
        exit(1);
      }
    }

    /* Check for finished handles and remove. */
    while((msg = curl_multi_info_read(multi, &msgs_in_queue))) {
      if(msg->msg == CURLMSG_DONE) {
        long status = 0;
        curl_off_t xfer_id;
        curl_easy_getinfo(msg->easy_handle, CURLINFO_XFER_ID, &xfer_id);
        curl_easy_getinfo(msg->easy_handle, CURLINFO_RESPONSE_CODE, &status);
        if(msg->data.result == CURLE_SEND_ERROR ||
           msg->data.result == CURLE_RECV_ERROR) {
          /* We get these if the server had a GOAWAY in transit on
           * re-using a connection */
        }
        else if(msg->data.result) {
          fprintf(stderr, "transfer #%" CURL_FORMAT_CURL_OFF_T
                  ": failed with %d\n", xfer_id, msg->data.result);
          exit(1);
        }
        else if(status != 206) {
          fprintf(stderr, "transfer #%" CURL_FORMAT_CURL_OFF_T
                  ": wrong http status %ld (expected 206)\n", xfer_id, status);
          exit(1);
        }
        curl_multi_remove_handle(multi, msg->easy_handle);
        curl_easy_cleanup(msg->easy_handle);
        fprintf(stderr, "transfer #%" CURL_FORMAT_CURL_OFF_T " retiring "
                "(%d now running)\n", xfer_id, running_handles);
      }
    }

    fprintf(stderr, "running_handles=%d, yet_to_start=%d\n",
            running_handles, start_count);

  } while(running_handles > 0 || start_count);

  fprintf(stderr, "exiting\n");
  exit(EXIT_SUCCESS);
}
tests/http/test_02_download.py
@@ -350,6 +350,22 @@ class TestDownload:
        assert r.duration > timedelta(seconds=4), \
            f'rate limited transfer should take more than 4s, not {r.duration}'

    # make extreme parallel h2 upgrades, check invalid conn reuse
    # before protocol switch has happened
    def test_02_25_h2_upgrade_x(self, env: Env, httpd, repeat):
        # not locally reproducible timeouts with certain SSL libs.
        # Since this test is about connection reuse handling, we skip
        # it on these builds. Although we would certainly like to understand
        # why this happens.
        if env.curl_uses_lib('bearssl'):
            pytest.skip('CI workflows timeout on bearssl build')
        url = f'http://localhost:{env.http_port}/data-100k'
        client = LocalClient(name='h2-upgrade-extreme', env=env, timeout=15)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[url])
        assert r.exit_code == 0, f'{client.dump_logs()}'

    def check_downloads(self, client, srcfile: str, count: int,
                        complete: bool = True):
        for i in range(count):
tests/http/test_07_upload.py
@@ -305,6 +305,21 @@ class TestUpload:
        assert r.exit_code == 0, r.dump_logs()
        r.check_stats(1, 200)

    # upload large data on an h1 to h2 upgrade
    def test_07_35_h1_h2_upgrade_upload(self, env: Env, httpd, nghttpx, repeat):
        fdata = os.path.join(env.gen_dir, 'data-100k')
        curl = CurlClient(env=env)
        url = f'http://{env.domain1}:{env.http_port}/curltest/echo?id=[0-0]'
        r = curl.http_upload(urls=[url], data=f'@{fdata}', extra_args=[
            '--http2'
        ])
        r.check_response(count=1, http_status=200)
        # apache does not Upgrade on request with a body
        assert r.stats[0]['http_version'] == '1.1', f'{r}'
        indata = open(fdata).readlines()
        respdata = open(curl.response_file(0)).readlines()
        assert respdata == indata

    def check_download(self, count, srcfile, curl):
        for i in range(count):
            dfile = curl.download_file(i)
tests/http/testenv/client.py
@@ -61,6 +61,10 @@ class LocalClient:
    def run_dir(self) -> str:
        return self._run_dir

    @property
    def stderr_file(self) -> str:
        return self._stderrfile

    def exists(self) -> bool:
        return os.path.exists(self.path)

@@ -103,3 +107,12 @@ class LocalClient:
        return ExecResult(args=myargs, exit_code=exitcode, exception=exception,
                          stdout=coutput, stderr=cerrput,
                          duration=datetime.now() - start)

    def dump_logs(self):
        lines = []
        lines.append('>>--stdout ----------------------------------------------\n')
        lines.extend(open(self._stdoutfile).readlines())
        lines.append('>>--stderr ----------------------------------------------\n')
        lines.extend(open(self._stderrfile).readlines())
        lines.append('<<-------------------------------------------------------\n')
        return ''.join(lines)
tests/http/testenv/httpd.py
@@ -242,8 +242,9 @@ class Httpd:
            f'PidFile httpd.pid',
            f'ErrorLog {self._error_log}',
            f'LogLevel {self._get_log_level()}',
            f'StartServers 4',
            f'H2MinWorkers 16',
            f'H2MaxWorkers 128',
            f'H2MaxWorkers 256',
            f'H2Direct on',
            f'Listen {self.env.http_port}',
            f'Listen {self.env.https_port}',