transfer: fix upload rate limiting, add test cases
- add test cases for rate limiting uploads for all HTTP versions
- fix transfer loop handling of limits. Signal a re-receive attempt
  only on exhausting maxloops without an EAGAIN
- fix `data->state.select_bits` forcing re-receive to also set
  re-sending when the transfer is doing this

Reported-by: Karthikdasari0423 on github
Fixes #12559
Closes #12586
commit e492c7c524 (parent 8b1d229835)
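For context, the rate limiting exercised by this commit is what curl's --limit-rate option enables; in libcurl terms that is CURLOPT_MAX_SEND_SPEED_LARGE (and CURLOPT_MAX_RECV_SPEED_LARGE for downloads). Below is a minimal sketch of an upload with the same 20 KiB/s cap the new tests use; the file name and URL are placeholders and not part of this commit.

#include <stdio.h>
#include <curl/curl.h>

/* Sketch: upload a local file with a 20 KiB/s send cap, the same limit the
 * new test cases pass via --limit-rate. File name and URL are placeholders. */
int main(void)
{
  CURLcode rc = CURLE_FAILED_INIT;
  FILE *src;
  CURL *curl;

  curl_global_init(CURL_GLOBAL_DEFAULT);
  curl = curl_easy_init();
  src = fopen("data-100k", "rb");          /* hypothetical 100 KiB input */
  if(curl && src) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/curltest/put");
    curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L);      /* PUT the data */
    curl_easy_setopt(curl, CURLOPT_READDATA, src);   /* read via default fread callback */
    /* what --limit-rate 20K sets for the sending direction */
    curl_easy_setopt(curl, CURLOPT_MAX_SEND_SPEED_LARGE, (curl_off_t)20480);
    rc = curl_easy_perform(curl);
    fprintf(stderr, "transfer finished: %s\n", curl_easy_strerror(rc));
  }
  if(src)
    fclose(src);
  curl_easy_cleanup(curl);
  curl_global_cleanup();
  return (int)rc;
}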
lib/transfer.c
@@ -428,7 +428,7 @@ static CURLcode readwrite_data(struct Curl_easy *data,
   size_t blen;
   size_t consumed;
   int maxloops = 10;
-  curl_off_t max_recv = data->set.max_recv_speed ? 0 : CURL_OFF_T_MAX;
+  curl_off_t total_received = 0;
   bool data_eof_handled = FALSE;
 
   DEBUGASSERT(data->state.buffer);
@@ -439,6 +439,7 @@ static CURLcode readwrite_data(struct Curl_easy *data,
   do {
     bool is_empty_data = FALSE;
     size_t bytestoread = data->set.buffer_size;
+
     /* For HTTP/2 and HTTP/3, read data without caring about the content
        length. This is safe because body in HTTP/2 is always segmented
        thanks to its framing layer. Meanwhile, we have to call Curl_read
@@ -447,6 +448,15 @@ static CURLcode readwrite_data(struct Curl_easy *data,
     bool is_http3 = Curl_conn_is_http3(data, conn, FIRSTSOCKET);
     data_eof_handled = is_http3 || Curl_conn_is_http2(data, conn, FIRSTSOCKET);
 
+    if(data->set.max_recv_speed) {
+      /* Limit the amount we read here, break on reaching it */
+      curl_off_t net_limit = data->set.max_recv_speed - total_received;
+      if(net_limit <= 0)
+        break;
+      if((size_t)net_limit < bytestoread)
+        bytestoread = (size_t)net_limit;
+    }
+
     /* Each loop iteration starts with a fresh buffer and handles
      * all data read into it. */
     buf = data->state.buffer;
@@ -654,7 +664,7 @@ static CURLcode readwrite_data(struct Curl_easy *data,
     }
 #endif /* CURL_DISABLE_HTTP */
 
-    max_recv -= blen;
+    total_received += blen;
 
     if(!k->chunk && (blen || k->badheader || is_empty_data)) {
       /* If this is chunky transfer, it was already written */
@@ -712,11 +722,13 @@ static CURLcode readwrite_data(struct Curl_easy *data,
       break;
     }
 
-  } while((max_recv > 0) && data_pending(data) && maxloops--);
+  } while(maxloops-- && data_pending(data));
 
-  if(maxloops <= 0 || max_recv <= 0) {
-    /* we mark it as read-again-please */
+  if(maxloops <= 0) {
+    /* did not read until EAGAIN, mark read-again-please */
     data->state.select_bits = CURL_CSELECT_IN;
+    if((k->keepon & KEEP_SENDBITS) == KEEP_SEND)
+      data->state.select_bits |= CURL_CSELECT_OUT;
   }
 
   if(((k->keepon & (KEEP_RECV|KEEP_SEND)) == KEEP_SEND) &&
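The transfer.c change boils down to a per-call receive budget: readwrite_data() accumulates total_received across loop iterations and trims bytestoread so one invocation never pulls in more than max_recv_speed bytes, leaving the actual pacing between calls to the regular rate limiter. A standalone sketch of that arithmetic follows, with simplified types and a hypothetical cap_read() helper; this is an illustration, not curl code.

#include <stdio.h>
#include <stdint.h>

/* Standalone illustration of the new cap in readwrite_data(): shrink the
 * next read so one invocation never exceeds max_recv_speed bytes total.
 * Returns 0 when the budget is exhausted (the loop would break). */
static size_t cap_read(int64_t max_recv_speed, int64_t total_received,
                       size_t bytestoread)
{
  if(max_recv_speed) {
    int64_t net_limit = max_recv_speed - total_received;
    if(net_limit <= 0)
      return 0;                      /* budget used up, stop reading */
    if((size_t)net_limit < bytestoread)
      bytestoread = (size_t)net_limit;
  }
  return bytestoread;
}

int main(void)
{
  /* 20 KiB budget, 16 KiB buffer: the second read gets capped to 4 KiB,
   * the third is refused until the rate limiter lets the transfer resume. */
  printf("%zu\n", cap_read(20480, 0, 16384));      /* 16384 */
  printf("%zu\n", cap_read(20480, 16384, 16384));  /* 4096 */
  printf("%zu\n", cap_read(20480, 20480, 16384));  /* 0 */
  return 0;
}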
tests/http/test_07_upload.py
@@ -461,3 +461,42 @@ class TestUpload:
                                                 tofile=dfile,
                                                 n=1))
             assert False, f'download {dfile} differs:\n{diff}'
+
+    # speed limited on put handler
+    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+    def test_07_50_put_speed_limit(self, env: Env, httpd, nghttpx, proto, repeat):
+        if proto == 'h3' and not env.have_h3():
+            pytest.skip("h3 not supported")
+        count = 1
+        fdata = os.path.join(env.gen_dir, 'data-100k')
+        up_len = 100 * 1024
+        speed_limit = 20 * 1024
+        curl = CurlClient(env=env)
+        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]'
+        r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
+                          with_headers=True, extra_args=[
+                              '--limit-rate', f'{speed_limit}'
+                          ])
+        r.check_response(count=count, http_status=200)
+        assert r.responses[0]['header']['received-length'] == f'{up_len}', f'{r.responses[0]}'
+        up_speed = r.stats[0]['speed_upload']
+        assert (speed_limit * 0.5) <= up_speed <= (speed_limit * 1.5), f'{r.stats[0]}'
+
+    # speed limited on echo handler
+    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+    def test_07_51_echo_speed_limit(self, env: Env, httpd, nghttpx, proto, repeat):
+        if proto == 'h3' and not env.have_h3():
+            pytest.skip("h3 not supported")
+        count = 1
+        fdata = os.path.join(env.gen_dir, 'data-100k')
+        speed_limit = 20 * 1024
+        curl = CurlClient(env=env)
+        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
+        r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
+                             with_headers=True, extra_args=[
+                                 '--limit-rate', f'{speed_limit}'
+                             ])
+        r.check_response(count=count, http_status=200)
+        up_speed = r.stats[0]['speed_upload']
+        assert (speed_limit * 0.5) <= up_speed <= (speed_limit * 1.5), f'{r.stats[0]}'
+
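Both new tests follow the same pattern: upload the generated 100 KiB file (data-100k) with --limit-rate 20K and require the reported speed_upload statistic to land within 0.5x and 1.5x of the limit, i.e. between 10240 and 30720 bytes/second; at exactly 20 KiB/s the 100 KiB upload takes roughly five seconds. test_07_50 additionally checks the received-length response header against the known upload length. These tests live under tests/http in the curl tree and are normally run with pytest (for example: pytest test_07_upload.py -k "07_50 or 07_51"), assuming the local HTTP test environment (httpd, nghttpx) is available.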
tests/http/testenv/mod_curltest/mod_curltest.c
@@ -423,6 +423,7 @@ static int curltest_put_handler(request_rec *r)
   char buffer[16*1024];
   const char *ct;
   apr_off_t rbody_len = 0;
+  const char *s_rbody_len;
   const char *request_id = "none";
   apr_time_t chunk_delay = 0;
   apr_array_header_t *args = NULL;
@@ -491,7 +492,9 @@ static int curltest_put_handler(request_rec *r)
     }
   }
   /* we are done */
-  rv = apr_brigade_printf(bb, NULL, NULL, "%"APR_OFF_T_FMT, rbody_len);
+  s_rbody_len = apr_psprintf(r->pool, "%"APR_OFF_T_FMT, rbody_len);
+  apr_table_setn(r->headers_out, "Received-Length", s_rbody_len);
+  rv = apr_brigade_puts(bb, NULL, NULL, s_rbody_len);
   if(APR_SUCCESS != rv) goto cleanup;
   b = apr_bucket_eos_create(c->bucket_alloc);
   APR_BRIGADE_INSERT_TAIL(bb, b);
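This handler change is what the new put test keys on: instead of only printing the received byte count into the response body, curltest_put_handler now formats it once with apr_psprintf(), exposes it as a Received-Length response header, and writes the same string to the body, so test_07_50 can compare the header value against the expected upload length.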