curl/tests/http/test_07_upload.py
Stefan Eissing 4ae2d9f24d
proxy: http2 proxy tunnel implementation
- currently only in debug builds and when the env variable
  CURL_PROXY_TUNNEL_H2 is present (see the sketch after this list).
- will ALPN negotiate with the proxy server and switch the
  tunnel filter based on the negotiated protocol.
- http/1.1 tunnel code moved into cf-h1-proxy.[ch]
- http/2 tunnel code implemented in cf-h2-proxy.[ch]
- tunnel start and ALPN setup remain in http_proxy.c
- all haproxy related code moved into cf-haproxy.[ch]
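
A minimal sketch of exercising the new code path by hand, assuming a debug
build of curl and an h2-capable HTTPS proxy; the proxy URL and CA file below
are placeholders, while CURL_PROXY_TUNNEL_H2, --proxy and --proxy-cacert come
from curl itself:

    # hedged sketch: opt into the debug-only h2 proxy tunnel and tunnel
    # an https request through a (placeholder) HTTPS forward proxy
    import os
    import subprocess

    env = dict(os.environ, CURL_PROXY_TUNNEL_H2='1')
    subprocess.run(['curl', '-v',
                    '--proxy', 'https://proxy.example.net:3128/',  # placeholder proxy
                    '--proxy-cacert', 'proxy-ca.pem',              # placeholder trust file
                    'https://curl.se/'],
                   env=env, check=True)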

VTLS changes
- SSL filters rely solely on the "alpn" specification they
  are created with and no longer check conn->bits.tls_enable_alpn.
- checks on which ALPN specification to use (or none at all) are
  done in vtls.c when creating the filter.

Testing
- added an nghttpx forward proxy to the pytest setup that
  speaks HTTP/2 and forwards all requests to the Apache httpd
  forward proxy server.
- extended test coverage in the test_10 cases
- added proxy tests for direct/tunnel h1/h2 use of basic auth.
- added tests for http/1.1 and h2 proxy tunneling to pytest
  (roughly sketched below).
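
Roughly what one of the added tunnel cases looks like, following the
CurlClient pattern used in the test modules; the test name, target path,
proxy port and credentials are illustrative placeholders, and http_download
is assumed to work like http_upload does in test_07_upload.py below:

    from testenv import Env, CurlClient

    def test_10_xx_proxy_tunnel_h2(env: Env):
        proto = 'h2'
        proxy_port = 0  # placeholder: port of the new nghttpx forward proxy
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
        r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
            '--proxytunnel',                              # CONNECT through the proxy
            '--proxy', f'https://localhost:{proxy_port}/',
            '--proxy-insecure',                           # test proxy uses a test CA
            '--proxy-user', 'proxy:proxy',                # exercise basic auth at the proxy
        ])
        r.check_response(count=1, http_status=200)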

Closes #10780
2023-04-06 13:04:46 +02:00

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import difflib
import filecmp
import logging
import os
import pytest

from testenv import Env, CurlClient


log = logging.getLogger(__name__)


class TestUpload:

    # ensure the test servers run with a clean config and the upload data files exist
    @pytest.fixture(autouse=True, scope='class')
    def _class_scope(self, env, httpd, nghttpx):
        if env.have_h3():
            nghttpx.start_if_needed()
        env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024)
        env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
        httpd.clear_extra_configs()
        httpd.reload()

    # upload small data, check that this is what was echoed
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_01_upload_1_small(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 fails here")
        data = '0123456789'
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
        r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
        r.check_response(count=1, http_status=200)
        respdata = open(curl.response_file(0)).readlines()
        assert respdata == [data]

    # upload large data, check that this is what was echoed
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_02_upload_1_large(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 fails here")
        fdata = os.path.join(env.gen_dir, 'data-100k')
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
        r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
        r.check_response(count=1, http_status=200)
        indata = open(fdata).readlines()
        respdata = open(curl.response_file(0)).readlines()
        assert respdata == indata

    # upload data sequentially, check that they were echoed
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_10_upload_sequential(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        count = 50
        data = '0123456789'
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
        r.check_response(count=count, http_status=200)
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == [data]

    # upload data parallel, check that they were echoed
    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_07_11_upload_parallel(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        # limit since we use a separate connection in h1
        count = 50
        data = '0123456789'
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
                             extra_args=['--parallel'])
        r.check_response(count=count, http_status=200)
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == [data]

    # upload large data sequentially, check that this is what was echoed
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_20_upload_seq_large(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        fdata = os.path.join(env.gen_dir, 'data-100k')
        count = 50
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
        r.check_response(count=count, http_status=200)
        indata = open(fdata).readlines()
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == indata

    # upload very large data sequentially, check that this is what was echoed
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_12_upload_seq_large(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        fdata = os.path.join(env.gen_dir, 'data-10m')
        count = 2
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
        r.check_response(count=count, http_status=200)
        indata = open(fdata).readlines()
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == indata

    # upload data parallel, check that they were echoed
    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_07_20_upload_parallel(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        # limit since we use a separate connection in h1
        count = 50
        data = '0123456789'
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
                             extra_args=['--parallel'])
        r.check_response(count=count, http_status=200)
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == [data]

    # upload large data parallel, check that this is what was echoed
    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_07_21_upload_parallel_large(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        fdata = os.path.join(env.gen_dir, 'data-100k')
        # limit since we use a separate connection in h1
        count = 50
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
                             extra_args=['--parallel'])
        r.check_response(count=count, http_status=200)
        self.check_download(count, fdata, curl)

    # PUT 100k
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_30_put_100k(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 fails here")
        fdata = os.path.join(env.gen_dir, 'data-100k')
        count = 1
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]'
        r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
                          extra_args=['--parallel'])
        r.check_response(count=count, http_status=200)
        exp_data = [f'{os.path.getsize(fdata)}']
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == exp_data

    # PUT 10m
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_07_31_put_10m(self, env: Env, httpd, nghttpx, repeat, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 fails here")
        fdata = os.path.join(env.gen_dir, 'data-10m')
        count = 1
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]&chunk_delay=10ms'
        r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
                          extra_args=['--parallel'])
        r.check_response(count=count, http_status=200)
        exp_data = [f'{os.path.getsize(fdata)}']
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == exp_data

    def check_download(self, count, srcfile, curl):
        for i in range(count):
            dfile = curl.download_file(i)
            assert os.path.exists(dfile)
            if not filecmp.cmp(srcfile, dfile, shallow=False):
                diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
                                                    b=open(dfile).readlines(),
                                                    fromfile=srcfile,
                                                    tofile=dfile,
                                                    n=1))
                assert False, f'download {dfile} differs:\n{diff}'