Mirror of https://github.com/curl/curl.git (synced 2024-12-09 06:30:06 +08:00)
Commit ee8016b3de
By properly keeping track of the last entry in the list of URLs/uploads to handle, curl now avoids many meaningless traverses of the list, which speeds up many-URL handling *MASSIVELY* (several orders of magnitude on 100K URLs).

Added test 1291 to verify that it doesn't take ages - but the test suite has no detection of a "too slow" command.

Reported-by: arainchik on github
Fixes #1959
Closes #2052
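The speedup the commit describes is the classic tail-pointer append: instead of walking the whole list to find its end before every insertion, remember the last entry and link new nodes directly after it. Below is a minimal C sketch of that pattern; it is not curl's actual code, and the struct and function names (node, list, append_slow, append_fast) are illustrative only.

#include <stdlib.h>

struct node {
  const char *url;
  struct node *next;
};

struct list {
  struct node *head;
  struct node *last;   /* remember the last entry so appends are O(1) */
};

/* Naive append: traverses the whole list on every call, O(n) per URL and
   O(n^2) for n URLs - the pattern the commit removes. */
static struct node *append_slow(struct list *l, const char *url)
{
  struct node *n = calloc(1, sizeof(*n));
  if(!n)
    return NULL;
  n->url = url;
  if(!l->head)
    l->head = n;
  else {
    struct node *p = l->head;
    while(p->next)            /* walk to the end of the list */
      p = p->next;
    p->next = n;
  }
  return n;
}

/* Tail-pointer append: links the new node after the stored last entry,
   O(1) per URL, so adding 100K URLs stays fast. */
static struct node *append_fast(struct list *l, const char *url)
{
  struct node *n = calloc(1, sizeof(*n));
  if(!n)
    return NULL;
  n->url = url;
  if(l->last)
    l->last->next = n;
  else
    l->head = n;
  l->last = n;                /* keep track of the last entry */
  return n;
}

The price of the tail pointer is the bookkeeping: it must be updated on every append and kept valid if the last node is ever removed, which is why the naive version is so often written first.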
52 lines
859 B
Plaintext
# This test case is primarily meant to verify that parsing and adding the 100K
# files is a swift operation.
#
<testcase>
<info>
<keywords>
HTTP
HTTP PUT
</keywords>
</info>

#
# Server-side
<reply>
<data>
</data>
</reply>

# Client-side
<client>
<server>
none
</server>
<name>
Attempt to upload 100K files but fail immediately
</name>
<command>
-K log/cmd1291 --fail-early
</command>
<file name="log/upload-this">
XXXXXXXx
</file>
# generate the config file
<precheck>
perl -e 'for(1 .. 100000) { printf("upload-file=log/upload-this\nurl=htttttp://non-existing-host.haxx.se/upload/1291\n", $_);}' > log/cmd1291;
</precheck>
</client>

# Verify data after the test has been "shot"
<verify>
<errorcode>
1
</errorcode>

# we disable valgrind here since it takes 40+ seconds even on a fairly snappy
# machine
<valgrind>
disable
</valgrind>
</verify>
</testcase>
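For context on the command line in the test: -K log/cmd1291 tells curl to read its arguments from the generated config file, and the precheck's perl one-liner fills that file with 100,000 repetitions of the two lines sketched below (an excerpt reconstructed from the printf format above; the trailing "..." stands for the remaining repetitions). The misspelled "htttttp://" scheme makes the very first transfer fail with exit code 1 (CURLE_UNSUPPORTED_PROTOCOL), matching the <errorcode> section, and --fail-early stops curl at that first failure, so the test mainly exercises how quickly the 100K entries are parsed and set up.

upload-file=log/upload-this
url=htttttp://non-existing-host.haxx.se/upload/1291
upload-file=log/upload-this
url=htttttp://non-existing-host.haxx.se/upload/1291
...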