pytest: check overlarge response headers

Add test_01_11, test_01_12 and test_01_13 to check various very long
response headers (accumulated and single) with http/1.1 and h2.

Closes #16541
This commit is contained in:
Stefan Eissing 2025-03-03 12:14:20 +01:00 committed by Daniel Stenberg
parent cee9cefa76
commit 35cfb081d2
No known key found for this signature in database
GPG Key ID: 5CC908FDB71E12C2
2 changed files with 87 additions and 2 deletions

View File

@ -37,9 +37,11 @@ log = logging.getLogger(__name__)
class TestBasic:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, nghttpx):
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
httpd.clear_extra_configs()
httpd.reload()
# simple http: GET
def test_01_01_http_get(self, env: Env, httpd):
@ -149,3 +151,55 @@ class TestBasic:
assert len(r.responses) == 1, f'{r.responses}'
assert r.responses[0]['status'] == 200, f'{r.responses[1]}'
assert r.responses[0]['protocol'] == 'HTTP/2', f'{r.responses[1]}'
# http: GET with large accumulated response headers
# Ask the test server to emit 48KB+ of response headers (several 16KB
# headers) and verify curl handles them correctly. Staying below 64KB
# matters: larger header blocks expose an Apache HTTP/2 bug where the
# stream is not RST correctly once internal limits are exceeded.
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_01_11_large_resp_headers(self, env: Env, httpd, proto):
    if proto == 'h3' and not env.have_h3():
        pytest.skip("h3 not supported")
    client = CurlClient(env=env)
    hd_total = 48 * 1024  # total size of extra response headers
    url = f'https://{env.authority_for(env.domain1, proto)}' \
        f'/curltest/tweak?x-hd={hd_total}'
    r = client.http_get(url=url, alpn_proto=proto, extra_args=[])
    r.check_exit_code(0)
    assert len(r.responses) == 1, f'{r.responses}'
    assert r.responses[0]['status'] == 200, f'{r.responses}'
# http: accumulated response headers larger than what curl buffers for
# Raise Apache's h2 header block limit so the server will actually send
# a 128KB header block; curl is still expected to succeed.
@pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.64'),
                    reason='httpd must be at least 2.4.64')
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_01_12_xlarge_resp_headers(self, env: Env, httpd, proto):
    httpd.set_extra_config('base', [
        f'H2MaxHeaderBlockLen {130 * 1024}',
    ])
    httpd.reload()
    client = CurlClient(env=env)
    hd_total = 128 * 1024  # total size of extra response headers
    url = f'https://{env.authority_for(env.domain1, proto)}' \
        f'/curltest/tweak?x-hd={hd_total}'
    r = client.http_get(url=url, alpn_proto=proto, extra_args=[])
    r.check_exit_code(0)
    assert len(r.responses) == 1, f'{r.responses}'
    assert r.responses[0]['status'] == 200, f'{r.responses}'
# http: a SINGLE response header larger than what curl buffers for
# (x-hd1 makes the server emit one 128KB header). curl must fail,
# with a protocol-dependent error code.
@pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.64'),
                    reason='httpd must be at least 2.4.64')
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_01_13_megalarge_resp_headers(self, env: Env, httpd, proto):
    httpd.set_extra_config('base', [
        'LogLevel http2:trace2',
        f'H2MaxHeaderBlockLen {130 * 1024}',
    ])
    httpd.reload()
    client = CurlClient(env=env)
    hd_total = 128 * 1024  # size of the single oversized header
    url = f'https://{env.authority_for(env.domain1, proto)}' \
        f'/curltest/tweak?x-hd1={hd_total}'
    r = client.http_get(url=url, alpn_proto=proto, extra_args=[])
    # 16 == CURLE_HTTP2, 100 == CURLE_TOO_LARGE
    expected_rc = 16 if proto == 'h2' else 100
    r.check_exit_code(expected_rc)

View File

@ -335,6 +335,7 @@ static int curltest_tweak_handler(request_rec *r)
int http_status = 200;
apr_status_t error = APR_SUCCESS, body_error = APR_SUCCESS;
int close_conn = 0, with_cl = 0;
int x_hd_len = 0, x_hd1_len = 0;
if(strcmp(r->handler, "curltest-tweak")) {
return DECLINED;
@ -418,6 +419,14 @@ static int curltest_tweak_handler(request_rec *r)
continue;
}
}
else if(!strcmp("x-hd", arg)) {
x_hd_len = (int)apr_atoi64(val);
continue;
}
else if(!strcmp("x-hd1", arg)) {
x_hd1_len = (int)apr_atoi64(val);
continue;
}
}
else if(!strcmp("close", arg)) {
/* we are asked to close the connection */
@ -450,9 +459,31 @@ static int curltest_tweak_handler(request_rec *r)
apr_table_unset(r->headers_out, "Content-Length");
/* Discourage content-encodings */
apr_table_unset(r->headers_out, "Content-Encoding");
if(x_hd_len > 0) {
int i, hd_len = (16 * 1024);
int n = (x_hd_len / hd_len);
char *hd_val = apr_palloc(r->pool, x_hd_len);
memset(hd_val, 'X', hd_len);
hd_val[hd_len - 1] = 0;
for(i = 0; i < n; ++i) {
apr_table_setn(r->headers_out,
apr_psprintf(r->pool, "X-Header-%d", i), hd_val);
}
if(x_hd_len % hd_len) {
hd_val[(x_hd_len % hd_len)] = 0;
apr_table_setn(r->headers_out,
apr_psprintf(r->pool, "X-Header-%d", i), hd_val);
}
}
if(x_hd1_len > 0) {
char *hd_val = apr_palloc(r->pool, x_hd1_len);
memset(hd_val, 'Y', x_hd1_len);
hd_val[x_hd1_len - 1] = 0;
apr_table_setn(r->headers_out, "X-Mega-Header", hd_val);
}
apr_table_setn(r->subprocess_env, "no-brotli", "1");
apr_table_setn(r->subprocess_env, "no-gzip", "1");
ap_set_content_type(r, "application/octet-stream");
bb = apr_brigade_create(r->pool, c->bucket_alloc);