#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import difflib
import filecmp
import logging
import os
import pytest

from testenv import Env, CurlClient


log = logging.getLogger(__name__)


class TestUpload:
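
    # class-scoped setup: generate the upload test data files and make sure
    # the test servers run with their default configuration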
    @pytest.fixture(autouse=True, scope='class')
    def _class_scope(self, env, httpd, nghttpx):
        if env.have_h3():
            nghttpx.start_if_needed()
        env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024)
        env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
        httpd.clear_extra_configs()
        httpd.reload()

    # upload small data, check that this is what was echoed
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_01_upload_1_small(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 fails here")
        data = '0123456789'
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
        r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
        r.check_exit_code(0)
        r.check_stats(count=1, exp_status=200)
        respdata = open(curl.response_file(0)).readlines()
        assert respdata == [data]

    # upload large data, check that this is what was echoed
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_02_upload_1_large(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 fails here")
        fdata = os.path.join(env.gen_dir, 'data-100k')
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
        r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
        r.check_exit_code(0)
        r.check_stats(count=1, exp_status=200)
        indata = open(fdata).readlines()
        respdata = open(curl.response_file(0)).readlines()
        assert respdata == indata

    # upload data sequentially, check that they were echoed
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_10_upload_sequential(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        count = 50
        data = '0123456789'
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
        r.check_exit_code(0)
        r.check_stats(count=count, exp_status=200)
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == [data]

    # upload data parallel, check that they were echoed
    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_07_11_upload_parallel(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        # limit since we use a separate connection in h1
        count = 50
        data = '0123456789'
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
                             extra_args=['--parallel'])
        r.check_exit_code(0)
        r.check_stats(count=count, exp_status=200)
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == [data]

    # upload large data sequentially, check that this is what was echoed
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_20_upload_seq_large(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        fdata = os.path.join(env.gen_dir, 'data-100k')
        count = 50
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
        r.check_exit_code(0)
        r.check_stats(count=count, exp_status=200)
        indata = open(fdata).readlines()
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == indata

    # upload very large data sequentially, check that this is what was echoed
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_12_upload_seq_large(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        fdata = os.path.join(env.gen_dir, 'data-10m')
        count = 2
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
        r.check_exit_code(0)
        r.check_stats(count=count, exp_status=200)
        indata = open(fdata).readlines()
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == indata

    # upload data parallel, check that they were echoed
    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_07_20_upload_parallel(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        # limit since we use a separate connection in h1
        count = 50
        data = '0123456789'
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
                             extra_args=['--parallel'])
        r.check_exit_code(0)
        r.check_stats(count=count, exp_status=200)
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == [data]

    # upload large data parallel, check that this is what was echoed
    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_07_21_upload_parallel_large(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        fdata = os.path.join(env.gen_dir, 'data-100k')
        # limit since we use a separate connection in h1
        count = 50
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
                             extra_args=['--parallel'])
        r.check_exit_code(0)
        r.check_stats(count=count, exp_status=200)
        self.check_download(count, fdata, curl)

    # PUT 100k
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_30_put_100k(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 fails here")
        fdata = os.path.join(env.gen_dir, 'data-100k')
        count = 1
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]'
        r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
                          extra_args=['--parallel'])
        r.check_exit_code(0)
        r.check_stats(count=count, exp_status=200)
        # the test PUT handler responds with the number of bytes it received
        exp_data = [f'{os.path.getsize(fdata)}']
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == exp_data

    # PUT 10m
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_31_put_10m(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 fails here")
        fdata = os.path.join(env.gen_dir, 'data-10m')
        count = 1
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]&chunk_delay=10ms'
        r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
                          extra_args=['--parallel'])
        r.check_exit_code(0)
        r.check_stats(count=count, exp_status=200)
        exp_data = [f'{os.path.getsize(fdata)}']
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == exp_data

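    # verify each downloaded response file against the source file,
    # reporting a unified diff when they differ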
    def check_download(self, count, srcfile, curl):
        for i in range(count):
            dfile = curl.download_file(i)
            assert os.path.exists(dfile)
            if not filecmp.cmp(srcfile, dfile, shallow=False):
                diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
                                                    b=open(dfile).readlines(),
                                                    fromfile=srcfile,
                                                    tofile=dfile,
                                                    n=1))
                assert False, f'download {dfile} differs:\n{diff}'