CI: run with standard mod_http2
We used to build a special mod_h2 in our CI because it supports the H2MaxDataFrameLen directive needed by test_02_20. Since then, ubuntu-latest has picked up a more recent Apache httpd. Let's see if we can live without the special module. Closes #15353
parent c2e2636773
commit 943df95ae7
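The directive this commit revolves around, H2MaxDataFrameLen, caps how many response body bytes httpd's HTTP/2 module places into a single DATA frame, which is what test_02_20 needs to provoke its small-frame scenario. As a rough illustration only, and not part of curl's CI or test harness, a runner could check whether the stock Apache on the image is new enough before relying on the directive; the binary path and the 2.4.58 version cutoff below are assumptions for illustration, not values taken from this commit.

# Rough sketch, not part of curl's CI or test harness: check whether the
# runner's stock Apache httpd is recent enough to know H2MaxDataFrameLen.
# The binary path and the 2.4.58 cutoff are assumptions for illustration.
import re
import subprocess

ASSUMED_MIN_HTTPD = (2, 4, 58)  # assumed first release carrying H2MaxDataFrameLen

def httpd_version(httpd_bin: str = "/usr/sbin/apache2") -> tuple:
    """Parse 'Apache/x.y.z' from the `-v` output of the given httpd binary."""
    out = subprocess.run([httpd_bin, "-v"], capture_output=True,
                         text=True, check=True).stdout
    m = re.search(r"Apache/(\d+)\.(\d+)\.(\d+)", out)
    if not m:
        raise RuntimeError(f"cannot parse httpd version from: {out!r}")
    return tuple(int(x) for x in m.groups())

if __name__ == "__main__":
    ok = httpd_version() >= ASSUMED_MIN_HTTPD
    print("stock mod_http2 assumed to support H2MaxDataFrameLen:", ok)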
.github/workflows/http3-linux.yml (26 changes)
@@ -57,8 +57,6 @@ env:
   nghttp2-version: 1.62.1
   # renovate: datasource=github-tags depName=cloudflare/quiche versioning=semver registryUrl=https://github.com
   quiche-version: 0.22.0
-  # renovate: datasource=github-tags depName=icing/mod_h2 versioning=semver registryUrl=https://github.com
-  mod_h2-version: 2.0.29
 
 jobs:
   setup:
@@ -517,30 +515,6 @@ jobs:
       run: |
         sudo python3 -m pip install --break-system-packages -r tests/http/requirements.txt
 
-    - name: cache mod_h2
-      uses: actions/cache@3624ceb22c1c5a301c8db4169662070a689d9ea8 # v4
-      id: cache-mod_h2
-      env:
-        cache-name: cache-mod_h2
-      with:
-        path: /home/runner/mod_h2
-        key: ${{ runner.os }}-http3-build-${{ env.cache-name }}-${{ env.mod_h2-version }}
-
-    - name: 'build mod_h2'
-      if: steps.cache-mod_h2.outputs.cache-hit != 'true'
-      run: |
-        cd $HOME
-        git clone --quiet --depth=1 -b v${{ env.mod_h2-version }} https://github.com/icing/mod_h2
-        cd mod_h2
-        autoreconf -fi
-        ./configure
-        make
-
-    - name: 'install mod_h2'
-      run: |
-        cd $HOME/mod_h2
-        sudo make install
-
     - name: 'run pytest'
       env:
         TFLAGS: "${{ matrix.build.tflags }}"
.github/workflows/linux.yml (28 changes)
@@ -46,8 +46,6 @@ env:
   # renovate: datasource=github-tags depName=Mbed-TLS/mbedtls versioning=semver registryUrl=https://github.com
   mbedtls-version: 3.6.2
-  # renovate: datasource=github-tags depName=icing/mod_h2 versioning=semver registryUrl=https://github.com
-  mod_h2-version: 2.0.29
   # renovate: datasource=github-tags depName=nibanks/msh3 versioning=semver registryUrl=https://github.com
   msh3-version: 0.6.0
   # renovate: datasource=github-tags depName=awslabs/aws-lc versioning=semver registryUrl=https://github.com
   awslc-version: 1.37.0
@@ -605,32 +603,6 @@ jobs:
       run: |
         sudo python3 -m pip install --break-system-packages -r tests/http/requirements.txt
 
-    - name: cache mod_h2
-      if: contains(matrix.build.install_steps, 'pytest')
-      uses: actions/cache@3624ceb22c1c5a301c8db4169662070a689d9ea8 # v4
-      id: cache-mod_h2
-      env:
-        cache-name: cache-mod_h2
-      with:
-        path: /home/runner/mod_h2
-        key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.mod_h2-version }}
-
-    - name: 'build mod_h2'
-      if: contains(matrix.build.install_steps, 'pytest') && steps.cache-mod_h2.outputs.cache-hit != 'true'
-      run: |
-        cd $HOME
-        git clone --quiet --depth=1 -b v${{ env.mod_h2-version }} https://github.com/icing/mod_h2
-        cd mod_h2
-        autoreconf -fi
-        ./configure
-        make
-
-    - name: 'install mod_h2'
-      if: contains(matrix.build.install_steps, 'pytest')
-      run: |
-        cd $HOME/mod_h2
-        sudo make install
-
     - name: 'run pytest'
       if: contains(matrix.build.install_steps, 'pytest')
       env:
tests/http/test_02_download.py
@@ -294,7 +294,6 @@ class TestDownload:
                               remote_ip='127.0.0.1')
 
     @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
-    @pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
     def test_02_20_h2_small_frames(self, env: Env, httpd, repeat):
         # Test case to reproduce content corruption as observed in
         # https://github.com/curl/curl/issues/10525
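For context, test_02_20_h2_small_frames guards against the content corruption reported in https://github.com/curl/curl/issues/10525, where a response delivered in small HTTP/2 DATA frames could arrive damaged. The sketch below is not curl's test, just a minimal, self-contained illustration of the same kind of integrity check: fetch a file over HTTP/2 with the curl tool and compare checksums against the source. The URL and file paths are placeholders.

# Minimal illustration only (not curl's test_02_20): fetch a file over HTTP/2
# and verify it arrived byte-for-byte intact. The server side would be an
# httpd configured with a small H2MaxDataFrameLen to stress the framing path.
import hashlib
import subprocess
from pathlib import Path

def sha256(path: Path) -> str:
    h = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 16), b""):
            h.update(chunk)
    return h.hexdigest()

def download_and_verify(url: str, source: Path, dest: Path) -> None:
    # --http2 asks curl to negotiate HTTP/2; -sS stays quiet but still reports
    # errors; -o writes the response body to `dest`.
    subprocess.run(["curl", "--http2", "-sS", "-o", str(dest), url], check=True)
    if sha256(dest) != sha256(source):
        raise AssertionError("downloaded content does not match the source")

# Hypothetical usage (placeholder URL and paths):
# download_and_verify("https://localhost:5001/data-100k",
#                     Path("docroot/data-100k"), Path("dl-100k"))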