Update networking layer with curl and Emscripten implementations

2025-11-08 01:50:36 +11:00
parent a17925904d
commit f6874dc55a
4105 changed files with 694617 additions and 179 deletions
tests/http/CMakeLists.txt  +62
@@ -0,0 +1,62 @@
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
find_program(CADDY "caddy") # /usr/bin/caddy
if(NOT CADDY)
set(CADDY "")
endif()
mark_as_advanced(CADDY)
find_program(VSFTPD "vsftpd") # /usr/sbin/vsftpd
if(NOT VSFTPD)
set(VSFTPD "")
endif()
mark_as_advanced(VSFTPD)
find_program(HTTPD NAMES "/usr/sbin/apache2" "httpd" "apache2")
if(NOT HTTPD)
set(HTTPD "")
endif()
mark_as_advanced(HTTPD)
find_program(APXS "apxs")
if(NOT APXS)
set(APXS "")
endif()
mark_as_advanced(APXS)
find_program(HTTPD_NGHTTPX "nghttpx" PATHS "/usr/bin" "/usr/local/bin")
if(NOT HTTPD_NGHTTPX)
set(HTTPD_NGHTTPX "")
endif()
mark_as_advanced(HTTPD_NGHTTPX)
find_program(DANTED "danted")
if(NOT DANTED)
set(DANTED "")
endif()
mark_as_advanced(DANTED)
# Consumed variables: APXS, CADDY, HTTPD, HTTPD_NGHTTPX, DANTED, VSFTPD
configure_file("config.ini.in" "${CMAKE_CURRENT_BINARY_DIR}/config.ini" @ONLY)
tests/http/Makefile.am  +78
@@ -0,0 +1,78 @@
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
TESTENV = \
testenv/__init__.py \
testenv/caddy.py \
testenv/certs.py \
testenv/client.py \
testenv/curl.py \
testenv/dante.py \
testenv/env.py \
testenv/httpd.py \
testenv/mod_curltest/mod_curltest.c \
testenv/nghttpx.py \
testenv/ports.py \
testenv/vsftpd.py \
testenv/ws_echo_server.py
EXTRA_DIST = \
CMakeLists.txt \
conftest.py \
requirements.txt \
scorecard.py \
test_01_basic.py \
test_02_download.py \
test_03_goaway.py \
test_04_stuttered.py \
test_05_errors.py \
test_06_eyeballs.py \
test_07_upload.py \
test_08_caddy.py \
test_09_push.py \
test_10_proxy.py \
test_11_unix.py \
test_12_reuse.py \
test_13_proxy_auth.py \
test_14_auth.py \
test_15_tracing.py \
test_16_info.py \
test_17_ssl_use.py \
test_18_methods.py \
test_19_shutdown.py \
test_20_websockets.py \
test_30_vsftpd.py \
test_31_vsftpds.py \
test_32_ftps_vsftpd.py \
test_40_socks.py \
$(TESTENV)
clean-local:
rm -rf *.pyc __pycache__
rm -rf gen
check: libtests
libtests:
@(cd ../libtests; $(MAKE) check)
tests/http/Makefile.in  +586
@@ -0,0 +1,586 @@
# Makefile.in generated by automake 1.16.5 from Makefile.am.
# @configure_input@
# Copyright (C) 1994-2021 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
@SET_MAKE@
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
VPATH = @srcdir@
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/@PACKAGE@
pkgincludedir = $(includedir)/@PACKAGE@
pkglibdir = $(libdir)/@PACKAGE@
pkglibexecdir = $(libexecdir)/@PACKAGE@
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
subdir = tests/http
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/curl-amissl.m4 \
$(top_srcdir)/m4/curl-apple-sectrust.m4 \
$(top_srcdir)/m4/curl-compilers.m4 \
$(top_srcdir)/m4/curl-confopts.m4 \
$(top_srcdir)/m4/curl-functions.m4 \
$(top_srcdir)/m4/curl-gnutls.m4 \
$(top_srcdir)/m4/curl-mbedtls.m4 \
$(top_srcdir)/m4/curl-openssl.m4 \
$(top_srcdir)/m4/curl-override.m4 \
$(top_srcdir)/m4/curl-reentrant.m4 \
$(top_srcdir)/m4/curl-rustls.m4 \
$(top_srcdir)/m4/curl-schannel.m4 \
$(top_srcdir)/m4/curl-sysconfig.m4 \
$(top_srcdir)/m4/curl-wolfssl.m4 $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/m4/xc-am-iface.m4 \
$(top_srcdir)/m4/xc-cc-check.m4 \
$(top_srcdir)/m4/xc-lt-iface.m4 \
$(top_srcdir)/m4/xc-val-flgs.m4 \
$(top_srcdir)/m4/zz40-xc-ovr.m4 \
$(top_srcdir)/m4/zz50-xc-ovr.m4 \
$(top_srcdir)/m4/zz60-xc-ovr.m4 $(top_srcdir)/acinclude.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_HEADER = $(top_builddir)/lib/curl_config.h
CONFIG_CLEAN_FILES = config.ini
CONFIG_CLEAN_VPATH_FILES =
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_@AM_V@)
am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
SOURCES =
DIST_SOURCES =
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/config.ini.in
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
APXS = @APXS@
AR = @AR@
AR_FLAGS = @AR_FLAGS@
AS = @AS@
AUTOCONF = @AUTOCONF@
AUTOHEADER = @AUTOHEADER@
AUTOMAKE = @AUTOMAKE@
AWK = @AWK@
BLANK_AT_MAKETIME = @BLANK_AT_MAKETIME@
CADDY = @CADDY@
CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CFLAG_CURL_SYMBOL_HIDING = @CFLAG_CURL_SYMBOL_HIDING@
CONFIGURE_OPTIONS = @CONFIGURE_OPTIONS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
CSCOPE = @CSCOPE@
CTAGS = @CTAGS@
CURLVERSION = @CURLVERSION@
CURL_CA_BUNDLE = @CURL_CA_BUNDLE@
CURL_CA_EMBED = @CURL_CA_EMBED@
CURL_CFLAG_EXTRAS = @CURL_CFLAG_EXTRAS@
CURL_CPP = @CURL_CPP@
CURL_LIBCURL_VERSIONED_SYMBOLS_PREFIX = @CURL_LIBCURL_VERSIONED_SYMBOLS_PREFIX@
CURL_LIBCURL_VERSIONED_SYMBOLS_SONAME = @CURL_LIBCURL_VERSIONED_SYMBOLS_SONAME@
CURL_NETWORK_AND_TIME_LIBS = @CURL_NETWORK_AND_TIME_LIBS@
CYGPATH_W = @CYGPATH_W@
DANTED = @DANTED@
DEFS = @DEFS@
DEPDIR = @DEPDIR@
DLLTOOL = @DLLTOOL@
DSYMUTIL = @DSYMUTIL@
DUMPBIN = @DUMPBIN@
ECHO_C = @ECHO_C@
ECHO_N = @ECHO_N@
ECHO_T = @ECHO_T@
EGREP = @EGREP@
ENABLE_SHARED = @ENABLE_SHARED@
ENABLE_STATIC = @ENABLE_STATIC@
ETAGS = @ETAGS@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
FILECMD = @FILECMD@
FISH_FUNCTIONS_DIR = @FISH_FUNCTIONS_DIR@
GCOV = @GCOV@
GREP = @GREP@
HAVE_LIBZ = @HAVE_LIBZ@
HTTPD = @HTTPD@
HTTPD_NGHTTPX = @HTTPD_NGHTTPX@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
LCOV = @LCOV@
LD = @LD@
LDFLAGS = @LDFLAGS@
LIBCURL_PC_CFLAGS = @LIBCURL_PC_CFLAGS@
LIBCURL_PC_CFLAGS_PRIVATE = @LIBCURL_PC_CFLAGS_PRIVATE@
LIBCURL_PC_LDFLAGS_PRIVATE = @LIBCURL_PC_LDFLAGS_PRIVATE@
LIBCURL_PC_LIBS = @LIBCURL_PC_LIBS@
LIBCURL_PC_LIBS_PRIVATE = @LIBCURL_PC_LIBS_PRIVATE@
LIBCURL_PC_REQUIRES = @LIBCURL_PC_REQUIRES@
LIBCURL_PC_REQUIRES_PRIVATE = @LIBCURL_PC_REQUIRES_PRIVATE@
LIBOBJS = @LIBOBJS@
LIBS = @LIBS@
LIBTOOL = @LIBTOOL@
LIPO = @LIPO@
LN_S = @LN_S@
LTLIBOBJS = @LTLIBOBJS@
LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
MAINT = @MAINT@
MAKEINFO = @MAKEINFO@
MANIFEST_TOOL = @MANIFEST_TOOL@
MKDIR_P = @MKDIR_P@
NM = @NM@
NMEDIT = @NMEDIT@
OBJDUMP = @OBJDUMP@
OBJEXT = @OBJEXT@
OTOOL = @OTOOL@
OTOOL64 = @OTOOL64@
PACKAGE = @PACKAGE@
PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_STRING = @PACKAGE_STRING@
PACKAGE_TARNAME = @PACKAGE_TARNAME@
PACKAGE_URL = @PACKAGE_URL@
PACKAGE_VERSION = @PACKAGE_VERSION@
PATH_SEPARATOR = @PATH_SEPARATOR@
PERL = @PERL@
PKGCONFIG = @PKGCONFIG@
RANLIB = @RANLIB@
RC = @RC@
SED = @SED@
SET_MAKE = @SET_MAKE@
SHELL = @SHELL@
SSL_BACKENDS = @SSL_BACKENDS@
STRIP = @STRIP@
SUPPORT_FEATURES = @SUPPORT_FEATURES@
SUPPORT_PROTOCOLS = @SUPPORT_PROTOCOLS@
TEST_NGHTTPX = @TEST_NGHTTPX@
VERSION = @VERSION@
VERSIONNUM = @VERSIONNUM@
VSFTPD = @VSFTPD@
ZLIB_LIBS = @ZLIB_LIBS@
ZSH_FUNCTIONS_DIR = @ZSH_FUNCTIONS_DIR@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
abs_top_srcdir = @abs_top_srcdir@
ac_ct_AR = @ac_ct_AR@
ac_ct_CC = @ac_ct_CC@
ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
am__include = @am__include@
am__leading_dot = @am__leading_dot@
am__quote = @am__quote@
am__tar = @am__tar@
am__untar = @am__untar@
bindir = @bindir@
build = @build@
build_alias = @build_alias@
build_cpu = @build_cpu@
build_os = @build_os@
build_vendor = @build_vendor@
builddir = @builddir@
datadir = @datadir@
datarootdir = @datarootdir@
docdir = @docdir@
dvidir = @dvidir@
exec_prefix = @exec_prefix@
host = @host@
host_alias = @host_alias@
host_cpu = @host_cpu@
host_os = @host_os@
host_vendor = @host_vendor@
htmldir = @htmldir@
includedir = @includedir@
infodir = @infodir@
install_sh = @install_sh@
libdir = @libdir@
libexecdir = @libexecdir@
libext = @libext@
localedir = @localedir@
localstatedir = @localstatedir@
mandir = @mandir@
mkdir_p = @mkdir_p@
oldincludedir = @oldincludedir@
pdfdir = @pdfdir@
prefix = @prefix@
program_transform_name = @program_transform_name@
psdir = @psdir@
runstatedir = @runstatedir@
sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
srcdir = @srcdir@
sysconfdir = @sysconfdir@
target_alias = @target_alias@
top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
TESTENV = \
testenv/__init__.py \
testenv/caddy.py \
testenv/certs.py \
testenv/client.py \
testenv/curl.py \
testenv/dante.py \
testenv/env.py \
testenv/httpd.py \
testenv/mod_curltest/mod_curltest.c \
testenv/nghttpx.py \
testenv/ports.py \
testenv/vsftpd.py \
testenv/ws_echo_server.py
EXTRA_DIST = \
CMakeLists.txt \
conftest.py \
requirements.txt \
scorecard.py \
test_01_basic.py \
test_02_download.py \
test_03_goaway.py \
test_04_stuttered.py \
test_05_errors.py \
test_06_eyeballs.py \
test_07_upload.py \
test_08_caddy.py \
test_09_push.py \
test_10_proxy.py \
test_11_unix.py \
test_12_reuse.py \
test_13_proxy_auth.py \
test_14_auth.py \
test_15_tracing.py \
test_16_info.py \
test_17_ssl_use.py \
test_18_methods.py \
test_19_shutdown.py \
test_20_websockets.py \
test_30_vsftpd.py \
test_31_vsftpds.py \
test_32_ftps_vsftpd.py \
test_40_socks.py \
$(TESTENV)
all: all-am
.SUFFIXES:
$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu tests/http/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --gnu tests/http/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
config.ini: $(top_builddir)/config.status $(srcdir)/config.ini.in
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
tags TAGS:
ctags CTAGS:
cscope cscopelist:
distdir: $(BUILT_SOURCES)
$(MAKE) $(AM_MAKEFLAGS) distdir-am
distdir-am: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile
installdirs:
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool clean-local mostlyclean-am
distclean: distclean-am
-rm -f Makefile
distclean-am: clean-am distclean-generic
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am:
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-generic mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am:
.MAKE: install-am install-strip
.PHONY: all all-am check check-am clean clean-generic clean-libtool \
clean-local cscopelist-am ctags-am distclean distclean-generic \
distclean-libtool distdir dvi dvi-am html html-am info info-am \
install install-am install-data install-data-am install-dvi \
install-dvi-am install-exec install-exec-am install-html \
install-html-am install-info install-info-am install-man \
install-pdf install-pdf-am install-ps install-ps-am \
install-strip installcheck installcheck-am installdirs \
maintainer-clean maintainer-clean-generic mostlyclean \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags-am uninstall uninstall-am
.PRECIOUS: Makefile
clean-local:
rm -rf *.pyc __pycache__
rm -rf gen
check: libtests
libtests:
@(cd ../libtests; $(MAKE) check)
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
tests/http/config.ini.in  +42
@@ -0,0 +1,42 @@
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
[global]
[httpd]
apxs = @APXS@
httpd = @HTTPD@
[nghttpx]
nghttpx = @HTTPD_NGHTTPX@
[caddy]
caddy = @CADDY@
[vsftpd]
vsftpd = @VSFTPD@
[danted]
danted = @DANTED@
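A minimal sketch of how the generated config.ini can be consumed on the Python side, assuming plain configparser; the real lookup lives in testenv/env.py and may differ:

import configparser

config = configparser.ConfigParser()
config.read('config.ini')              # file generated by CMake/configure above
httpd = config['httpd']['httpd']       # path to apache2/httpd, or '' when not found
apxs = config['httpd']['apxs']
caddy = config['caddy']['caddy']
vsftpd = config['vsftpd']['vsftpd']
print(httpd or 'httpd not available')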
tests/http/conftest.py  +157
@@ -0,0 +1,157 @@
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import sys
import platform
from typing import Generator, Union
import pytest
from testenv.env import EnvConfig
sys.path.append(os.path.join(os.path.dirname(__file__), '.'))
from testenv import Env, Nghttpx, Httpd, NghttpxQuic, NghttpxFwd
log = logging.getLogger(__name__)
def pytest_report_header(config):
# Env inits its base properties only once, we can report them here
env = Env()
report = [
f'Testing curl {env.curl_version()}',
f' platform: {platform.platform()}',
f' curl: Version: {env.curl_version_string()}',
f' curl: Features: {env.curl_features_string()}',
f' curl: Protocols: {env.curl_protocols_string()}',
f' httpd: {env.httpd_version()}',
f' httpd-proxy: {env.httpd_version()}'
]
if env.have_h3():
report.extend([
f' nghttpx: {env.nghttpx_version()}'
])
if env.has_caddy():
report.extend([
f' Caddy: {env.caddy_version()}'
])
if env.has_vsftpd():
report.extend([
f' VsFTPD: {env.vsftpd_version()}'
])
buildinfo_fn = os.path.join(env.build_dir, 'buildinfo.txt')
if os.path.exists(buildinfo_fn):
with open(buildinfo_fn, 'r') as file_in:
for line in file_in:
line = line.strip()
if line and not line.startswith('#'):
report.extend([line])
return '\n'.join(report)
@pytest.fixture(scope='session')
def env_config(pytestconfig, testrun_uid, worker_id) -> EnvConfig:
env_config = EnvConfig(pytestconfig=pytestconfig,
testrun_uid=testrun_uid,
worker_id=worker_id)
return env_config
@pytest.fixture(scope='session', autouse=True)
def env(pytestconfig, env_config) -> Env:
env = Env(pytestconfig=pytestconfig, env_config=env_config)
level = logging.DEBUG if env.verbose > 0 else logging.INFO
logging.getLogger('').setLevel(level=level)
if not env.curl_has_protocol('http'):
pytest.skip("curl built without HTTP support")
if not env.curl_has_protocol('https'):
pytest.skip("curl built without HTTPS support")
if env.setup_incomplete():
pytest.skip(env.incomplete_reason())
env.setup()
return env
@pytest.fixture(scope='session')
def httpd(env) -> Generator[Httpd, None, None]:
httpd = Httpd(env=env)
if not httpd.exists():
pytest.skip(f'httpd not found: {env.httpd}')
httpd.clear_logs()
assert httpd.initial_start()
yield httpd
httpd.stop()
@pytest.fixture(scope='session')
def nghttpx(env, httpd) -> Generator[Union[Nghttpx,bool], None, None]:
nghttpx = NghttpxQuic(env=env)
if nghttpx.exists():
if not nghttpx.supports_h3() and env.have_h3_curl():
log.warning('nghttpx does not support QUIC, but curl does')
nghttpx.clear_logs()
assert nghttpx.initial_start()
yield nghttpx
nghttpx.stop()
else:
yield False
@pytest.fixture(scope='session')
def nghttpx_fwd(env, httpd) -> Generator[Union[Nghttpx,bool], None, None]:
nghttpx = NghttpxFwd(env=env)
if nghttpx.exists():
nghttpx.clear_logs()
assert nghttpx.initial_start()
yield nghttpx
nghttpx.stop()
else:
yield False
@pytest.fixture(scope='session')
def configures_httpd(env, httpd) -> Generator[bool, None, None]:
# include this fixture as test parameter if the test configures httpd itself
yield True
@pytest.fixture(scope='session')
def configures_nghttpx(env, httpd) -> Generator[bool, None, None]:
# include this fixture as test parameter if the test configures nghttpx itself
yield True
@pytest.fixture(autouse=True, scope='function')
def server_reset(request, env, httpd, nghttpx):
# make sure httpd is in default configuration when a test starts
if 'configures_httpd' not in request.node._fixtureinfo.argnames:
httpd.reset_config()
httpd.reload_if_config_changed()
if env.have_h3() and \
'nghttpx' in request.node._fixtureinfo.argnames and \
'configures_nghttpx' not in request.node._fixtureinfo.argnames:
nghttpx.reset_config()
nghttpx.reload_if_config_changed()
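A minimal sketch of a test that configures httpd itself: by accepting the configures_httpd fixture it opts out of the automatic server_reset above. The calls mirror test_01_12 later in this commit; the class/test names and the H2MaxHeaderBlockLen value are purely illustrative.

from testenv import Env, CurlClient

class TestCustomHttpdConfig:
    def test_xx_01_custom_config(self, env: Env, httpd, configures_httpd):
        # taking 'configures_httpd' keeps server_reset() from resetting httpd
        httpd.set_extra_config('base', [f'H2MaxHeaderBlockLen {130 * 1024}'])
        httpd.reload_if_config_changed()
        curl = CurlClient(env=env)
        r = curl.http_get(url=f'https://{env.domain1}:{env.https_port}/data.json')
        r.check_response(http_status=200)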
tests/http/requirements.txt  +10
@@ -0,0 +1,10 @@
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# SPDX-License-Identifier: curl
cryptography==46.0.3
filelock==3.20.0
psutil==7.1.2
pytest==8.4.2
pytest-xdist==3.8.0
websockets==15.0.1
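These pinned packages are what the pytest suite expects; they are typically installed into a virtualenv with something like:

python3 -m pip install -r requirements.txt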
tests/http/scorecard.py  +935
@@ -0,0 +1,935 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import argparse
import datetime
import json
import logging
import os
import re
import sys
from statistics import mean
from typing import Dict, Any, Optional, List
from testenv import Env, Httpd, CurlClient, Caddy, ExecResult, NghttpxQuic, RunProfile, Dante
log = logging.getLogger(__name__)
class ScoreCardError(Exception):
pass
class Card:
@classmethod
def fmt_ms(cls, tval):
return f'{int(tval*1000)} ms' if tval >= 0 else '--'
@classmethod
def fmt_size(cls, val):
if val >= (1024*1024*1024):
return f'{val / (1024*1024*1024):0.000f}GB'
elif val >= (1024 * 1024):
return f'{val / (1024*1024):0.000f}MB'
elif val >= 1024:
return f'{val / 1024:0.000f}KB'
else:
return f'{val:0.000f}B'
@classmethod
def fmt_mbs(cls, val):
if val is None or val < 0:
return '--'
if val >= (1024*1024):
return f'{val/(1024*1024):0.000f} MB/s'
elif val >= 1024:
return f'{val / 1024:0.000f} KB/s'
else:
return f'{val:0.000f} B/s'
@classmethod
def fmt_reqs(cls, val):
return f'{val:0.000f} r/s' if val >= 0 else '--'
@classmethod
def mk_mbs_cell(cls, samples, profiles, errors):
val = mean(samples) if len(samples) else -1
cell = {
'val': val,
'sval': Card.fmt_mbs(val) if val >= 0 else '--',
}
if len(profiles):
cell['stats'] = RunProfile.AverageStats(profiles)
if len(errors):
cell['errors'] = errors
return cell
@classmethod
def mk_reqs_cell(cls, samples, profiles, errors):
val = mean(samples) if len(samples) else -1
cell = {
'val': val,
'sval': Card.fmt_reqs(val) if val >= 0 else '--',
}
if len(profiles):
cell['stats'] = RunProfile.AverageStats(profiles)
if len(errors):
cell['errors'] = errors
return cell
@classmethod
def parse_size(cls, s):
m = re.match(r'(\d+)(mb|kb|gb)?', s, re.IGNORECASE)
if m is None:
raise Exception(f'unrecognized size: {s}')
size = int(m.group(1))
if not m.group(2):
pass
elif m.group(2).lower() == 'kb':
size *= 1024
elif m.group(2).lower() == 'mb':
size *= 1024 * 1024
elif m.group(2).lower() == 'gb':
size *= 1024 * 1024 * 1024
return size
@classmethod
def print_score(cls, score):
print(f'Scorecard curl, protocol {score["meta"]["protocol"]} '
f'via {score["meta"]["implementation"]}/'
f'{score["meta"]["implementation_version"]}')
print(f'Date: {score["meta"]["date"]}')
if 'curl_V' in score["meta"]:
print(f'Version: {score["meta"]["curl_V"]}')
if 'curl_features' in score["meta"]:
print(f'Features: {score["meta"]["curl_features"]}')
if 'limit-rate' in score['meta']:
print(f'--limit-rate: {score["meta"]["limit-rate"]}')
print(f'Samples Size: {score["meta"]["samples"]}')
if 'handshakes' in score:
print(f'{"Handshakes":<24} {"ipv4":25} {"ipv6":28}')
print(f' {"Host":<17} {"Connect":>12} {"Handshake":>12} '
f'{"Connect":>12} {"Handshake":>12} {"Errors":<20}')
for key, val in score["handshakes"].items():
print(f' {key:<17} {Card.fmt_ms(val["ipv4-connect"]):>12} '
f'{Card.fmt_ms(val["ipv4-handshake"]):>12} '
f'{Card.fmt_ms(val["ipv6-connect"]):>12} '
f'{Card.fmt_ms(val["ipv6-handshake"]):>12} '
f'{"/".join(val["ipv4-errors"] + val["ipv6-errors"]):<20}'
)
for name in ['downloads', 'uploads', 'requests']:
if name in score:
Card.print_score_table(score[name])
@classmethod
def print_score_table(cls, score):
cols = score['cols']
rows = score['rows']
colw = []
statw = 13
errors = []
col_has_stats = []
for idx, col in enumerate(cols):
cellw = max([len(r[idx]["sval"]) for r in rows])
colw.append(max(cellw, len(col)))
col_has_stats.append(False)
for row in rows:
if 'stats' in row[idx]:
col_has_stats[idx] = True
break
if 'title' in score['meta']:
print(score['meta']['title'])
for idx, col in enumerate(cols):
if col_has_stats[idx]:
print(f' {col:>{colw[idx]}} {"[cpu/rss]":<{statw}}', end='')
else:
print(f' {col:>{colw[idx]}}', end='')
print('')
for row in rows:
for idx, cell in enumerate(row):
print(f' {cell["sval"]:>{colw[idx]}}', end='')
if col_has_stats[idx]:
if 'stats' in cell:
s = f'[{cell["stats"]["cpu"]:>.1f}%' \
f'/{Card.fmt_size(cell["stats"]["rss"])}]'
else:
s = ''
print(f' {s:<{statw}}', end='')
if 'errors' in cell:
errors.extend(cell['errors'])
print('')
if len(errors):
print(f'Errors: {errors}')
class ScoreRunner:
def __init__(self, env: Env,
protocol: str,
server_descr: str,
server_port: int,
verbose: int,
curl_verbose: int,
download_parallel: int = 0,
upload_parallel: int = 0,
server_addr: Optional[str] = None,
with_flame: bool = False,
socks_args: Optional[List[str]] = None,
limit_rate: Optional[str] = None):
self.verbose = verbose
self.env = env
self.protocol = protocol
self.server_descr = server_descr
self.server_addr = server_addr
self.server_port = server_port
self._silent_curl = not curl_verbose
self._download_parallel = download_parallel
self._upload_parallel = upload_parallel
self._with_flame = with_flame
self._socks_args = socks_args
self._limit_rate = limit_rate
def info(self, msg):
if self.verbose > 0:
sys.stderr.write(msg)
sys.stderr.flush()
def mk_curl_client(self):
return CurlClient(env=self.env, silent=self._silent_curl,
server_addr=self.server_addr,
with_flame=self._with_flame,
socks_args=self._socks_args)
def handshakes(self) -> Dict[str, Any]:
props = {}
sample_size = 5
self.info('TLS Handshake\n')
for authority in [
'curl.se', 'google.com', 'cloudflare.com', 'nghttp2.org'
]:
self.info(f' {authority}...')
props[authority] = {}
for ipv in ['ipv4', 'ipv6']:
self.info(f'{ipv}...')
c_samples = []
hs_samples = []
errors = []
for _ in range(sample_size):
curl = self.mk_curl_client()
args = [
'--http3-only' if self.protocol == 'h3' else '--http2',
f'--{ipv}', f'https://{authority}/'
]
r = curl.run_direct(args=args, with_stats=True)
if r.exit_code == 0 and len(r.stats) == 1:
c_samples.append(r.stats[0]['time_connect'])
hs_samples.append(r.stats[0]['time_appconnect'])
else:
errors.append(f'exit={r.exit_code}')
props[authority][f'{ipv}-connect'] = mean(c_samples) \
if len(c_samples) else -1
props[authority][f'{ipv}-handshake'] = mean(hs_samples) \
if len(hs_samples) else -1
props[authority][f'{ipv}-errors'] = errors
self.info('ok.\n')
return props
def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
data1k = 1024*'x'
flen = 0
with open(fpath, 'w') as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return fpath
def setup_resources(self, server_docs: str,
downloads: Optional[List[int]] = None):
if downloads is not None:
for fsize in downloads:
label = Card.fmt_size(fsize)
fname = f'score{label}.data'
self._make_docs_file(docs_dir=server_docs,
fname=fname, fsize=fsize)
self._make_docs_file(docs_dir=server_docs,
fname='reqs10.data', fsize=10*1024)
def _check_downloads(self, r: ExecResult, count: int):
error = ''
if r.exit_code != 0:
error += f'exit={r.exit_code} '
if r.exit_code != 0 or len(r.stats) != count:
error += f'stats={len(r.stats)}/{count} '
fails = [s for s in r.stats if s['response_code'] != 200]
if len(fails) > 0:
error += f'{len(fails)} failed'
return error if len(error) > 0 else None
def dl_single(self, url: str, nsamples: int = 1):
count = 1
samples = []
errors = []
profiles = []
self.info('single...')
for _ in range(nsamples):
curl = self.mk_curl_client()
r = curl.http_download(urls=[url], alpn_proto=self.protocol,
no_save=True, with_headers=False,
with_profile=True,
limit_rate=self._limit_rate)
err = self._check_downloads(r, count)
if err:
errors.append(err)
else:
total_size = sum([s['size_download'] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return Card.mk_mbs_cell(samples, profiles, errors)
def dl_serial(self, url: str, count: int, nsamples: int = 1):
samples = []
errors = []
profiles = []
url = f'{url}?[0-{count - 1}]'
self.info('serial...')
for _ in range(nsamples):
curl = self.mk_curl_client()
r = curl.http_download(urls=[url], alpn_proto=self.protocol,
no_save=True,
with_headers=False,
with_profile=True,
limit_rate=self._limit_rate)
err = self._check_downloads(r, count)
if err:
errors.append(err)
else:
total_size = sum([s['size_download'] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return Card.mk_mbs_cell(samples, profiles, errors)
def dl_parallel(self, url: str, count: int, nsamples: int = 1):
samples = []
errors = []
profiles = []
max_parallel = self._download_parallel if self._download_parallel > 0 else count
url = f'{url}?[0-{count - 1}]'
self.info('parallel...')
for _ in range(nsamples):
curl = self.mk_curl_client()
r = curl.http_download(urls=[url], alpn_proto=self.protocol,
no_save=True,
with_headers=False,
with_profile=True,
limit_rate=self._limit_rate,
extra_args=[
'--parallel',
'--parallel-max', str(max_parallel)
])
err = self._check_downloads(r, count)
if err:
errors.append(err)
else:
total_size = sum([s['size_download'] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return Card.mk_mbs_cell(samples, profiles, errors)
def downloads(self, count: int, fsizes: List[int], meta: Dict[str, Any]) -> Dict[str, Any]:
nsamples = meta['samples']
max_parallel = self._download_parallel if self._download_parallel > 0 else count
cols = ['size']
if not self._download_parallel:
cols.append('single')
if count > 1:
cols.append(f'serial({count})')
if count > 1:
cols.append(f'parallel({count}x{max_parallel})')
rows = []
for fsize in fsizes:
row = [{
'val': fsize,
'sval': Card.fmt_size(fsize)
}]
self.info(f'{row[0]["sval"]} downloads...')
url = f'https://{self.env.domain1}:{self.server_port}/score{row[0]["sval"]}.data'
if 'single' in cols:
row.append(self.dl_single(url=url, nsamples=nsamples))
if count > 1:
if 'single' in cols:
row.append(self.dl_serial(url=url, count=count, nsamples=nsamples))
row.append(self.dl_parallel(url=url, count=count, nsamples=nsamples))
rows.append(row)
self.info('done.\n')
title = f'Downloads from {meta["server"]}'
if self._socks_args:
title += f' via {self._socks_args}'
return {
'meta': {
'title': title,
'count': count,
'max-parallel': max_parallel,
},
'cols': cols,
'rows': rows,
}
def _check_uploads(self, r: ExecResult, count: int):
error = ''
if r.exit_code != 0:
error += f'exit={r.exit_code} '
if r.exit_code != 0 or len(r.stats) != count:
error += f'stats={len(r.stats)}/{count} '
fails = [s for s in r.stats if s['response_code'] != 200]
if len(fails) > 0:
error += f'{len(fails)} failed'
for f in fails:
error += f'[{f["response_code"]}]'
return error if len(error) > 0 else None
def ul_single(self, url: str, fpath: str, nsamples: int = 1):
samples = []
errors = []
profiles = []
self.info('single...')
for _ in range(nsamples):
curl = self.mk_curl_client()
r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=self.protocol,
with_headers=False, with_profile=True)
err = self._check_uploads(r, 1)
if err:
errors.append(err)
else:
total_size = sum([s['size_upload'] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return Card.mk_mbs_cell(samples, profiles, errors)
def ul_serial(self, url: str, fpath: str, count: int, nsamples: int = 1):
samples = []
errors = []
profiles = []
url = f'{url}?id=[0-{count - 1}]'
self.info('serial...')
for _ in range(nsamples):
curl = self.mk_curl_client()
r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=self.protocol,
with_headers=False, with_profile=True)
err = self._check_uploads(r, count)
if err:
errors.append(err)
else:
total_size = sum([s['size_upload'] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return Card.mk_mbs_cell(samples, profiles, errors)
def ul_parallel(self, url: str, fpath: str, count: int, nsamples: int = 1):
samples = []
errors = []
profiles = []
max_parallel = self._upload_parallel if self._upload_parallel > 0 else count
url = f'{url}?id=[0-{count - 1}]'
self.info('parallel...')
for _ in range(nsamples):
curl = self.mk_curl_client()
r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=self.protocol,
with_headers=False, with_profile=True,
extra_args=[
'--parallel',
'--parallel-max', str(max_parallel)
])
err = self._check_uploads(r, count)
if err:
errors.append(err)
else:
total_size = sum([s['size_upload'] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return Card.mk_mbs_cell(samples, profiles, errors)
def uploads(self, count: int, fsizes: List[int], meta: Dict[str, Any]) -> Dict[str, Any]:
nsamples = meta['samples']
max_parallel = self._upload_parallel if self._upload_parallel > 0 else count
cols = ['size']
if not self._upload_parallel:
cols.append('single')
if count > 1:
cols.append(f'serial({count})')
if count > 1:
cols.append(f'parallel({count}x{max_parallel})')
rows = []
for fsize in fsizes:
row = [{
'val': fsize,
'sval': Card.fmt_size(fsize)
}]
self.info(f'{row[0]["sval"]} uploads...')
url = f'https://{self.env.domain2}:{self.server_port}/curltest/put'
fname = f'upload{row[0]["sval"]}.data'
fpath = self._make_docs_file(docs_dir=self.env.gen_dir,
fname=fname, fsize=fsize)
if 'single' in cols:
row.append(self.ul_single(url=url, fpath=fpath, nsamples=nsamples))
if count > 1:
if 'single' in cols:
row.append(self.ul_serial(url=url, fpath=fpath, count=count, nsamples=nsamples))
row.append(self.ul_parallel(url=url, fpath=fpath, count=count, nsamples=nsamples))
rows.append(row)
self.info('done.\n')
title = f'Uploads to {meta["server"]}'
if self._socks_args:
title += f' via {self._socks_args}'
return {
'meta': {
'title': title,
'count': count,
'max-parallel': max_parallel,
},
'cols': cols,
'rows': rows,
}
def do_requests(self, url: str, count: int, max_parallel: int = 1, nsamples: int = 1):
samples = []
errors = []
profiles = []
url = f'{url}?[0-{count - 1}]'
extra_args = [
'-w', '%{response_code},\\n',
]
if max_parallel > 1:
extra_args.extend([
'--parallel', '--parallel-max', str(max_parallel)
])
self.info(f'{max_parallel}...')
for _ in range(nsamples):
curl = self.mk_curl_client()
r = curl.http_download(urls=[url], alpn_proto=self.protocol, no_save=True,
with_headers=False, with_profile=True,
with_stats=False, extra_args=extra_args)
if r.exit_code != 0:
errors.append(f'exit={r.exit_code}')
else:
samples.append(count / r.duration.total_seconds())
non_200s = 0
for line in r.stdout.splitlines():
if not line.startswith('200,'):
non_200s += 1
if non_200s > 0:
errors.append(f'responses != 200: {non_200s}')
profiles.append(r.profile)
return Card.mk_reqs_cell(samples, profiles, errors)
def requests(self, count: int, meta: Dict[str, Any]) -> Dict[str, Any]:
url = f'https://{self.env.domain1}:{self.server_port}/reqs10.data'
fsize = 10*1024
cols = ['size', 'total']
rows = []
mparallel = meta['request_parallels']
cols.extend([f'{mp} max' for mp in mparallel])
row = [{
'val': fsize,
'sval': Card.fmt_size(fsize)
},{
'val': count,
'sval': f'{count}',
}]
self.info('requests, max parallel...')
row.extend([self.do_requests(url=url, count=count,
max_parallel=mp, nsamples=meta["samples"])
for mp in mparallel])
rows.append(row)
self.info('done.\n')
title = f'Requests in parallel to {meta["server"]}'
if self._socks_args:
title += f' via {self._socks_args}'
return {
'meta': {
'title': title,
'count': count,
},
'cols': cols,
'rows': rows,
}
def score(self,
handshakes: bool = True,
downloads: Optional[List[int]] = None,
download_count: int = 50,
uploads: Optional[List[int]] = None,
upload_count: int = 50,
req_count=5000,
request_parallels=None,
nsamples: int = 1,
requests: bool = True):
self.info(f"scoring {self.protocol} against {self.server_descr}\n")
score = {
'meta': {
'curl_version': self.env.curl_version(),
'curl_V': self.env.curl_fullname(),
'curl_features': self.env.curl_features_string(),
'os': self.env.curl_os(),
'server': self.server_descr,
'samples': nsamples,
'date': f'{datetime.datetime.now(tz=datetime.timezone.utc).isoformat()}',
}
}
if self._limit_rate:
score['meta']['limit-rate'] = self._limit_rate
if self.protocol == 'h3':
score['meta']['protocol'] = 'h3'
if not self.env.have_h3_curl():
raise ScoreCardError('curl does not support HTTP/3')
for lib in ['ngtcp2', 'quiche', 'nghttp3']:
if self.env.curl_uses_lib(lib):
score['meta']['implementation'] = lib
break
elif self.protocol == 'h2':
score['meta']['protocol'] = 'h2'
if not self.env.have_h2_curl():
raise ScoreCardError('curl does not support HTTP/2')
for lib in ['nghttp2']:
if self.env.curl_uses_lib(lib):
score['meta']['implementation'] = lib
break
elif self.protocol == 'h1' or self.protocol == 'http/1.1':
score['meta']['protocol'] = 'http/1.1'
score['meta']['implementation'] = 'native'
else:
raise ScoreCardError(f"unknown protocol: {self.protocol}")
if 'implementation' not in score['meta']:
raise ScoreCardError('did not recognize protocol lib')
score['meta']['implementation_version'] = Env.curl_lib_version(score['meta']['implementation'])
if handshakes:
score['handshakes'] = self.handshakes()
if downloads and len(downloads) > 0:
score['downloads'] = self.downloads(count=download_count,
fsizes=downloads,
meta=score['meta'])
if uploads and len(uploads) > 0:
score['uploads'] = self.uploads(count=upload_count,
fsizes=uploads,
meta=score['meta'])
if requests:
if request_parallels is None:
request_parallels = [1, 6, 25, 50, 100, 300]
score['meta']['request_parallels'] = request_parallels
score['requests'] = self.requests(count=req_count, meta=score['meta'])
return score
def run_score(args, protocol):
if protocol not in ['http/1.1', 'h1', 'h2', 'h3']:
sys.stderr.write(f'ERROR: protocol "{protocol}" not known to scorecard\n')
sys.exit(1)
if protocol == 'h1':
protocol = 'http/1.1'
handshakes = True
downloads = [1024 * 1024, 10 * 1024 * 1024, 100 * 1024 * 1024]
if args.download_sizes is not None:
downloads = []
for x in args.download_sizes:
downloads.extend([Card.parse_size(s) for s in x.split(',')])
uploads = [1024 * 1024, 10 * 1024 * 1024, 100 * 1024 * 1024]
if args.upload_sizes is not None:
uploads = []
for x in args.upload_sizes:
uploads.extend([Card.parse_size(s) for s in x.split(',')])
requests = True
request_parallels = None
if args.request_parallels:
request_parallels = []
for x in args.request_parallels:
request_parallels.extend([int(s) for s in x.split(',')])
if args.downloads or args.uploads or args.requests or args.handshakes:
handshakes = args.handshakes
if not args.downloads:
downloads = None
if not args.uploads:
uploads = None
requests = args.requests
test_httpd = protocol != 'h3'
test_caddy = protocol == 'h3'
if args.caddy or args.httpd:
test_caddy = args.caddy
test_httpd = args.httpd
rv = 0
env = Env()
env.setup()
env.test_timeout = None
sockd = None
socks_args = None
if args.socks4 and args.socks5:
raise ScoreCardError('unable to run --socks4 and --socks5 together')
elif args.socks4 or args.socks5:
sockd = Dante(env=env)
if sockd:
assert sockd.initial_start()
socks_args = [
'--socks4' if args.socks4 else '--socks5',
f'127.0.0.1:{sockd.port}',
]
httpd = None
nghttpx = None
caddy = None
try:
cards = []
if args.remote:
m = re.match(r'^(.+):(\d+)$', args.remote)
if m is None:
raise ScoreCardError(f'unable to parse ip:port from --remote {args.remote}')
test_httpd = False
test_caddy = False
remote_addr = m.group(1)
remote_port = int(m.group(2))
card = ScoreRunner(env=env,
protocol=protocol,
server_descr=f'Server at {args.remote}',
server_addr=remote_addr,
server_port=remote_port,
verbose=args.verbose,
curl_verbose=args.curl_verbose,
download_parallel=args.download_parallel,
upload_parallel=args.upload_parallel,
with_flame=args.flame,
socks_args=socks_args,
limit_rate=args.limit_rate)
cards.append(card)
if test_httpd:
httpd = Httpd(env=env)
assert httpd.exists(), \
f'httpd not found: {env.httpd}'
httpd.clear_logs()
server_docs = httpd.docs_dir
assert httpd.initial_start()
if protocol == 'h3':
nghttpx = NghttpxQuic(env=env)
nghttpx.clear_logs()
assert nghttpx.initial_start()
server_descr = f'nghttpx: https:{env.h3_port} [backend httpd/{env.httpd_version()}]'
server_port = env.h3_port
else:
server_descr = f'httpd/{env.httpd_version()}'
server_port = env.https_port
card = ScoreRunner(env=env,
protocol=protocol,
server_descr=server_descr,
server_port=server_port,
verbose=args.verbose, curl_verbose=args.curl_verbose,
download_parallel=args.download_parallel,
upload_parallel=args.upload_parallel,
with_flame=args.flame,
socks_args=socks_args,
limit_rate=args.limit_rate)
card.setup_resources(server_docs, downloads)
cards.append(card)
if test_caddy and env.caddy:
backend = ''
if uploads and httpd is None:
backend = f' [backend httpd: {env.httpd_version()}]'
httpd = Httpd(env=env)
assert httpd.exists(), \
f'httpd not found: {env.httpd}'
httpd.clear_logs()
assert httpd.initial_start()
caddy = Caddy(env=env)
caddy.clear_logs()
assert caddy.initial_start()
server_descr = f'Caddy/{env.caddy_version()} {backend}'
server_port = caddy.port
server_docs = caddy.docs_dir
card = ScoreRunner(env=env,
protocol=protocol,
server_descr=server_descr,
server_port=server_port,
verbose=args.verbose, curl_verbose=args.curl_verbose,
download_parallel=args.download_parallel,
upload_parallel=args.upload_parallel,
with_flame=args.flame,
socks_args=socks_args,
limit_rate=args.limit_rate)
card.setup_resources(server_docs, downloads)
cards.append(card)
if args.start_only:
print('started servers:')
for card in cards:
print(f'{card.server_descr}')
sys.stderr.write('press [RETURN] to finish')
sys.stderr.flush()
sys.stdin.readline()
else:
for card in cards:
score = card.score(handshakes=handshakes,
downloads=downloads,
download_count=args.download_count,
uploads=uploads,
upload_count=args.upload_count,
req_count=args.request_count,
requests=requests,
request_parallels=request_parallels,
nsamples=args.samples)
if args.json:
print(json.JSONEncoder(indent=2).encode(score))
else:
Card.print_score(score)
except ScoreCardError as ex:
sys.stderr.write(f"ERROR: {ex}\n")
rv = 1
except KeyboardInterrupt:
log.warning("aborted")
rv = 1
finally:
if caddy:
caddy.stop()
if nghttpx:
nghttpx.stop(wait_dead=False)
if httpd:
httpd.stop()
if sockd:
sockd.stop()
return rv
def print_file(filename):
if not os.path.exists(filename):
sys.stderr.write(f"ERROR: file does not exist {filename}\n")
return 1
with open(filename) as file:
data = json.load(file)
Card.print_score(data)
return 0
def main():
parser = argparse.ArgumentParser(prog='scorecard', description="""
Run a range of tests to give a scorecard for an HTTP protocol
'h3' or 'h2' implementation in curl.
""")
parser.add_argument("-v", "--verbose", action='count', default=1,
help="log more output on stderr")
parser.add_argument("-j", "--json", action='store_true',
default=False, help="print json instead of text")
parser.add_argument("--samples", action='store', type=int, metavar='number',
default=1, help="how many sample runs to make")
parser.add_argument("--httpd", action='store_true', default=False,
help="evaluate httpd server only")
parser.add_argument("--caddy", action='store_true', default=False,
help="evaluate caddy server only")
parser.add_argument("--curl-verbose", action='store_true',
default=False, help="run curl with `-v`")
parser.add_argument("--print", type=str, default=None, metavar='filename',
help="print the results from a JSON file")
parser.add_argument("protocol", default=None, nargs='?',
help="Name of protocol to score")
parser.add_argument("--start-only", action='store_true', default=False,
help="only start the servers")
parser.add_argument("--remote", action='store', type=str,
default=None, help="score against the remote server at <ip>:<port>")
parser.add_argument("--flame", action='store_true',
default=False, help="produce a flame graph on curl")
parser.add_argument("--limit-rate", action='store', type=str,
default=None, help="use curl's --limit-rate")
parser.add_argument("-H", "--handshakes", action='store_true',
default=False, help="evaluate handshakes only")
parser.add_argument("-d", "--downloads", action='store_true',
default=False, help="evaluate downloads")
parser.add_argument("--download-sizes", action='append', type=str,
metavar='numberlist',
default=None, help="evaluate download size")
parser.add_argument("--download-count", action='store', type=int,
metavar='number',
default=50, help="perform that many downloads")
parser.add_argument("--download-parallel", action='store', type=int,
metavar='number', default=0,
help="perform that many downloads in parallel (default all)")
parser.add_argument("-u", "--uploads", action='store_true',
default=False, help="evaluate uploads")
parser.add_argument("--upload-sizes", action='append', type=str,
metavar='numberlist',
default=None, help="evaluate upload size")
parser.add_argument("--upload-count", action='store', type=int,
metavar='number', default=50,
help="perform that many uploads")
parser.add_argument("--upload-parallel", action='store', type=int,
metavar='number', default=0,
help="perform that many uploads in parallel (default all)")
parser.add_argument("-r", "--requests", action='store_true',
default=False, help="evaluate requests")
parser.add_argument("--request-count", action='store', type=int,
metavar='number',
default=5000, help="perform that many requests")
parser.add_argument("--request-parallels", action='append', type=str,
metavar='numberlist',
default=None, help="evaluate request with these max-parallel numbers")
parser.add_argument("--socks4", action='store_true',
default=False, help="test with SOCKS4 proxy")
parser.add_argument("--socks5", action='store_true',
default=False, help="test with SOCKS5 proxy")
args = parser.parse_args()
if args.verbose > 0:
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
logging.getLogger('').addHandler(console)
if args.print:
rv = print_file(args.print)
elif not args.protocol:
parser.print_usage()
rv = 1
else:
rv = run_score(args, args.protocol)
sys.exit(rv)
if __name__ == "__main__":
main()
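Illustrative invocations, based on the argparse options defined above (file names and working directory are assumptions):

python3 scorecard.py -d --samples 3 h2     # download scorecard against HTTP/2, 3 samples per measurement
python3 scorecard.py -u -r --json h3       # uploads and requests against HTTP/3, JSON output
python3 scorecard.py --print results.json  # pretty-print a previously saved JSON score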
tests/http/test_01_basic.py  +295
@@ -0,0 +1,295 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import pytest
from testenv import Env
from testenv import CurlClient
log = logging.getLogger(__name__)
class TestBasic:
# simple http: GET
def test_01_01_http_get(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url)
r.check_response(http_status=200)
assert r.json['server'] == env.domain1
# simple https: GET, any http version
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_01_02_https_get(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.https_port}/data.json'
r = curl.http_get(url=url)
r.check_response(http_status=200)
assert r.json['server'] == env.domain1
# simple https: GET, h2 wanted and got
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_01_03_h2_get(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.https_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http2'])
r.check_response(http_status=200, protocol='HTTP/2')
assert r.json['server'] == env.domain1
# simple https: GET, h2 unsupported, fallback to h1
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_01_04_h2_unsupported(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'https://{env.domain2}:{env.https_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http2'])
r.check_response(http_status=200, protocol='HTTP/1.1')
assert r.json['server'] == env.domain2
# simple h3: GET, want h3 and get it
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_01_05_h3_get(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.h3_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http3-only'])
r.check_response(http_status=200, protocol='HTTP/3')
assert r.json['server'] == env.domain1
# simple download, check connect/handshake timings
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_01_06_timings(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
r.check_stats(http_status=200, count=1,
remote_port=env.port_for(alpn_proto=proto),
remote_ip='127.0.0.1')
# there are cases where time_connect is reported as 0
assert r.stats[0]['time_connect'] >= 0, f'{r.stats[0]}'
assert r.stats[0]['time_appconnect'] > 0, f'{r.stats[0]}'
# simple https: HEAD
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_01_07_head(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
r = curl.http_download(urls=[url], with_stats=True, with_headers=True,
extra_args=['-I'])
r.check_stats(http_status=200, count=1, exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip='127.0.0.1')
# got the Content-Length: header, but did not download anything
assert r.responses[0]['header']['content-length'] == '30', f'{r.responses[0]}'
assert r.stats[0]['size_download'] == 0, f'{r.stats[0]}'
# http: GET for HTTP/2, see Upgrade:, 101 switch
def test_01_08_h2_upgrade(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http2'])
r.check_exit_code(0)
assert len(r.responses) == 2, f'{r.responses}'
assert r.responses[0]['status'] == 101, f'{r.responses[0]}'
assert r.responses[1]['status'] == 200, f'{r.responses[1]}'
assert r.responses[1]['protocol'] == 'HTTP/2', f'{r.responses[1]}'
assert r.json['server'] == env.domain1
# http: GET for HTTP/2 with prior knowledge
def test_01_09_h2_prior_knowledge(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http2-prior-knowledge'])
r.check_exit_code(0)
assert len(r.responses) == 1, f'{r.responses}'
assert r.response['status'] == 200, f'{r.response}'
assert r.response['protocol'] == 'HTTP/2', f'{r.response}'
assert r.json['server'] == env.domain1
# http: strip TE header in HTTP/2 requests
def test_01_10_te_strip(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
r = curl.http_get(url=url, extra_args=['--http2', '-H', 'TE: gzip'])
r.check_exit_code(0)
assert len(r.responses) == 1, f'{r.responses}'
assert r.responses[0]['status'] == 200, f'{r.responses[0]}'
assert r.responses[0]['protocol'] == 'HTTP/2', f'{r.responses[0]}'
# http: large response headers
# send 48KB+ of response headers to check that we handle them correctly.
# headers larger than 64KB expose a bug in Apache HTTP/2 where it does not
# RST the stream correctly when its internal limits are exceeded.
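# (x-hd=N makes the test handler emit roughly N bytes spread over several
# response headers; x-hd1=N, used further below, puts them into one single header)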
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_01_11_large_resp_headers(self, env: Env, httpd, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?x-hd={48 * 1024}'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[])
r.check_exit_code(0)
assert len(r.responses) == 1, f'{r.responses}'
assert r.responses[0]['status'] == 200, f'{r.responses}'
# http: response headers larger than what curl buffers for
@pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.64'),
reason='httpd must be at least 2.4.64')
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_01_12_xlarge_resp_headers(self, env: Env, httpd, configures_httpd, proto):
httpd.set_extra_config('base', [
f'H2MaxHeaderBlockLen {130 * 1024}',
])
httpd.reload_if_config_changed()
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?x-hd={128 * 1024}'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[])
r.check_exit_code(0)
assert len(r.responses) == 1, f'{r.responses}'
assert r.responses[0]['status'] == 200, f'{r.responses}'
# http: 1 response header larger than what curl buffers for
@pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.64'),
reason='httpd must be at least 2.4.64')
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_01_13_megalarge_resp_headers(self, env: Env, httpd, configures_httpd, proto):
httpd.set_extra_config('base', [
'LogLevel http2:trace2',
f'H2MaxHeaderBlockLen {130 * 1024}',
])
httpd.reload_if_config_changed()
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?x-hd1={128 * 1024}'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[])
if proto == 'h2':
r.check_exit_code(16) # CURLE_HTTP2
else:
r.check_exit_code(100) # CURLE_TOO_LARGE
# http: several response headers, together > 256 KB
# nghttp2 error -905: Too many CONTINUATION frames following a HEADER frame
@pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.64'),
reason='httpd must be at least 2.4.64')
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_01_14_gigalarge_resp_headers(self, env: Env, httpd, configures_httpd, proto):
httpd.set_extra_config('base', [
'LogLevel http2:trace2',
f'H2MaxHeaderBlockLen {1024 * 1024}',
])
httpd.reload_if_config_changed()
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?x-hd={256 * 1024}'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[])
if proto == 'h2':
r.check_exit_code(16) # CURLE_HTTP2
else:
r.check_exit_code(0) # 1.1 can do
# http: one response header > 256 KB
@pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.64'),
reason='httpd must be at least 2.4.64')
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_01_15_gigalarge_resp_headers(self, env: Env, httpd, configures_httpd, proto):
httpd.set_extra_config('base', [
'LogLevel http2:trace2',
f'H2MaxHeaderBlockLen {1024 * 1024}',
])
httpd.reload_if_config_changed()
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?x-hd1={256 * 1024}'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[])
if proto == 'h2':
r.check_exit_code(16) # CURLE_HTTP2
else:
r.check_exit_code(100) # CURLE_TOO_LARGE
# http: invalid request headers, GET, issue #16998
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_01_16_inv_req_get(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'-H', "a: a\x0ab"
])
# on h1, request is sent, h2/h3 reject
if proto == 'http/1.1':
r.check_exit_code(0)
else:
r.check_exit_code(43)
# http: special handling of TE request header
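# (HTTP/2 allows no transfer codings; the only permitted TE value is "trailers",
# so curl forwards that token and drops everything else from the header)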
@pytest.mark.parametrize("te_in, te_out", [
pytest.param('trailers', 'trailers', id='trailers'),
pytest.param('chunked', None, id='chunked'),
pytest.param('gzip, trailers', 'trailers', id='gzip+trailers'),
pytest.param('gzip ;q=0.2;x="y,x", trailers', 'trailers', id='gzip+q+x+trailers'),
pytest.param('gzip ;x="trailers", chunks', None, id='gzip+x+chunks'),
])
def test_01_17_TE(self, env: Env, httpd, te_in, te_out):
proto = 'h2'
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo'
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
with_headers=True,
extra_args=['-H', f'TE: {te_in}'])
r.check_response(200)
if te_out is not None:
assert r.responses[0]['header']['request-te'] == te_out, f'{r.responses[0]}'
else:
assert 'request-te' not in r.responses[0]['header'], f'{r.responses[0]}'
# check that an existing https: connection is not reused for http:
def test_01_18_tls_reuse(self, env: Env, httpd):
proto = 'h2'
curl = CurlClient(env=env)
url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json'
url2 = f'http://{env.authority_for(env.domain1, proto)}/data.json'
r = curl.http_download(urls=[url1, url2], alpn_proto=proto, with_stats=True)
assert len(r.stats) == 2
assert r.total_connects == 2, f'{r.dump_logs()}'
# check that an existing http: connection is not reused for https:
def test_01_19_plain_reuse(self, env: Env, httpd):
proto = 'h2'
curl = CurlClient(env=env)
url1 = f'http://{env.domain1}:{env.http_port}/data.json'
url2 = f'https://{env.domain1}:{env.http_port}/data.json'
r = curl.http_download(urls=[url1, url2], alpn_proto=proto, with_stats=True)
assert len(r.stats) == 2
assert r.total_connects == 2, f'{r.dump_logs()}'
+803
View File
@@ -0,0 +1,803 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import difflib
import filecmp
import logging
import math
import os
import re
import sys
from datetime import timedelta
import pytest
from testenv import Env, CurlClient, LocalClient
log = logging.getLogger(__name__)
class TestDownload:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, httpd):
indir = httpd.docs_dir
env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
env.make_data_file(indir=indir, fname="data-10m", fsize=10*1024*1024)
env.make_data_file(indir=indir, fname="data-50m", fsize=50*1024*1024)
env.make_data_gzipbomb(indir=indir, fname="bomb-100m.txt", fsize=100*1024*1024)
# download 1 file
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_01_download_1(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(http_status=200)
# download 2 files
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_02_download_2(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(http_status=200, count=2)
# download 10 files sequentially
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_03_download_sequential(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if (proto == 'http/1.1' or proto == 'h2') and env.curl_uses_lib('mbedtls') and \
sys.platform.startswith('darwin') and env.ci_run:
pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
count = 10
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(http_status=200, count=count, connect_count=1)
# download 10 files in parallel
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_04_download_parallel(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h2' and env.curl_uses_lib('mbedtls') and \
sys.platform.startswith('darwin') and env.ci_run:
pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
count = 10
max_parallel = 5
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel', '--parallel-max', f'{max_parallel}'
])
r.check_response(http_status=200, count=count)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
assert r.total_connects > 1, r.dump_logs()
else:
# http2 parallel transfers will use one connection (common limit is 100)
assert r.total_connects == 1, r.dump_logs()
# download 200 files sequentially
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_05_download_many_sequential(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h2' and env.curl_uses_lib('mbedtls') and \
sys.platform.startswith('darwin') and env.ci_run:
pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
count = 200
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(http_status=200, count=count)
if proto == 'http/1.1':
# 200 sequential requests exceed the per-connection request limit (100),
# so http/1.1 has to open more connections
assert r.total_connects > 1, r.dump_logs()
else:
# h2/h3 serve all requests over the single connection
assert r.total_connects == 1, r.dump_logs()
# download 200 files in parallel
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_02_06_download_many_parallel(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h2' and env.curl_uses_lib('mbedtls') and \
sys.platform.startswith('darwin') and env.ci_run:
pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
count = 200
max_parallel = 50
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[000-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel', '--parallel-max', f'{max_parallel}'
])
r.check_response(http_status=200, count=count, connect_count=1)
# download files parallel, check connection reuse/multiplex
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_02_07_download_reuse(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 200
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto,
with_stats=True, extra_args=[
'--parallel', '--parallel-max', '200'
])
r.check_response(http_status=200, count=count)
# should have used at most 2 connections (test servers allow 100 req/conn)
# it may be just 1 on slow systems where requests are answered faster than
# curl can exhaust the capacity, or when curl runs at address-sanitizer speed
assert r.total_connects <= 2, "h2 should use fewer connections here"
# download files parallel with http/1.1, check connection not reused
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_02_07b_download_reuse(self, env: Env, httpd, nghttpx, proto):
count = 6
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto,
with_stats=True, extra_args=[
'--parallel'
])
r.check_response(count=count, http_status=200)
# http/1.1 should have used count connections
assert r.total_connects == count, "http/1.1 should use this many connections"
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_08_1MB_serial(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 5
urln = f'https://{env.authority_for(env.domain1, proto)}/data-1m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200)
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_02_09_1MB_parallel(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 5
urln = f'https://{env.authority_for(env.domain1, proto)}/data-1m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
r.check_response(count=count, http_status=200)
@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_10_10MB_serial(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 3
urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200)
@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_02_11_10MB_parallel(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 3
urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
r.check_response(count=count, http_status=200)
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_02_12_head_serial_https(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 5
urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--head'
])
r.check_response(count=count, http_status=200)
@pytest.mark.parametrize("proto", ['h2'])
def test_02_13_head_serial_h2c(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 5
urln = f'http://{env.domain1}:{env.http_port}/data-10m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--head', '--http2-prior-knowledge', '--fail-early'
])
r.check_response(count=count, http_status=200)
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_02_14_not_found(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 5
urln = f'https://{env.authority_for(env.domain1, proto)}/not-found?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
r.check_stats(count=count, http_status=404, exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip='127.0.0.1')
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_02_15_fail_not_found(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 5
urln = f'https://{env.authority_for(env.domain1, proto)}/not-found?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--fail'
])
r.check_stats(count=count, http_status=404, exitcode=22,
remote_port=env.port_for(alpn_proto=proto),
remote_ip='127.0.0.1')
@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
def test_02_20_h2_small_frames(self, env: Env, httpd, configures_httpd):
# Test case to reproduce content corruption as observed in
# https://github.com/curl/curl/issues/10525
# To reliably reproduce, we need an Apache httpd that supports
# setting smaller frame sizes. This is not released yet, we
# test if it works and back out if not.
httpd.set_extra_config(env.domain1, lines=[
'H2MaxDataFrameLen 1024',
])
if not httpd.reload_if_config_changed():
pytest.skip('H2MaxDataFrameLen not supported')
# make several downloads with 2 running in parallel; they are
# expected to stumble into the issue when using `lib/http2.c`
# from curl 7.88.0
count = 5
urln = f'https://{env.authority_for(env.domain1, "h2")}/data-1m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto="h2", extra_args=[
'--parallel', '--parallel-max', '2'
])
r.check_response(count=count, http_status=200)
srcfile = os.path.join(httpd.docs_dir, 'data-1m')
self.check_downloads(curl, srcfile, count)
# download serial via lib client, pause/resume at different offsets
@pytest.mark.parametrize("pause_offset", [0, 10*1024, 100*1023, 640000])
@pytest.mark.parametrize("proto", ['http/1.1', 'h3'])
def test_02_21_lib_serial(self, env: Env, httpd, nghttpx, proto, pause_offset):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
docname = 'data-10m'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-P', f'{pause_offset}', '-V', proto, url
])
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
# h2 download via lib client, pause/resume at different offsets
# debug-override stream window size to reproduce #16955
@pytest.mark.parametrize("pause_offset", [0, 10*1024, 100*1023, 640000])
@pytest.mark.parametrize("swin_max", [0, 10*1024])
def test_02_21_h2_lib_serial(self, env: Env, httpd, pause_offset, swin_max):
proto = 'h2'
count = 2
docname = 'data-10m'
url = f'https://localhost:{env.https_port}/{docname}'
run_env = os.environ.copy()
run_env['CURL_DEBUG'] = 'multi,http/2'
if swin_max > 0:
run_env['CURL_H2_STREAM_WIN_MAX'] = f'{swin_max}'
client = LocalClient(name='cli_hx_download', env=env, run_env=run_env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-P', f'{pause_offset}', '-V', proto, url
])
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
# download via lib client, several at a time, pause/resume
@pytest.mark.parametrize("pause_offset", [100*1023])
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_22_lib_parallel_resume(self, env: Env, httpd, nghttpx, proto, pause_offset):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
max_parallel = 5
docname = 'data-10m'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-m', f'{max_parallel}',
'-P', f'{pause_offset}', '-V', proto, url
])
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
# download, several at a time, pause and abort paused
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_23a_lib_abort_paused(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_ossl_quic():
pytest.skip('OpenSSL QUIC fails here')
if proto == 'h3' and env.ci_run and env.curl_uses_lib('quiche'):
pytest.skip("fails in CI, but works locally for unknown reasons")
count = 10
max_parallel = 5
if proto in ['h2', 'h3']:
pause_offset = 64 * 1024
else:
pause_offset = 12 * 1024
docname = 'data-1m'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-m', f'{max_parallel}', '-a',
'-P', f'{pause_offset}', '-V', proto, url
])
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
# downloads should be there, but not necessarily complete
self.check_downloads(client, srcfile, count, complete=False)
# download, several at a time, abort after n bytes
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_23b_lib_abort_offset(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_ossl_quic():
pytest.skip('OpenSSL QUIC fails here')
if proto == 'h3' and env.ci_run and env.curl_uses_lib('quiche'):
pytest.skip("fails in CI, but works locally for unknown reasons")
count = 10
max_parallel = 5
if proto in ['h2', 'h3']:
abort_offset = 64 * 1024
else:
abort_offset = 12 * 1024
docname = 'data-1m'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-m', f'{max_parallel}', '-a',
'-A', f'{abort_offset}', '-V', proto, url
])
r.check_exit_code(42) # CURLE_ABORTED_BY_CALLBACK
srcfile = os.path.join(httpd.docs_dir, docname)
# downloads should be there, but not necessarily complete
self.check_downloads(client, srcfile, count, complete=False)
# download, several at a time, fail the write callback after n bytes
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_23c_lib_fail_offset(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_ossl_quic():
pytest.skip('OpenSSL QUIC fails here')
if proto == 'h3' and env.ci_run and env.curl_uses_lib('quiche'):
pytest.skip("fails in CI, but works locally for unknown reasons")
count = 10
max_parallel = 5
if proto in ['h2', 'h3']:
fail_offset = 64 * 1024
else:
fail_offset = 12 * 1024
docname = 'data-1m'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-m', f'{max_parallel}', '-a',
'-F', f'{fail_offset}', '-V', proto, url
])
r.check_exit_code(23) # CURLE_WRITE_ERROR
srcfile = os.path.join(httpd.docs_dir, docname)
# downloads should be there, but not necessarily complete
self.check_downloads(client, srcfile, count, complete=False)
# speed limited download
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_24_speed_limit(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
url = f'https://{env.authority_for(env.domain1, proto)}/data-1m'
curl = CurlClient(env=env)
speed_limit = 384 * 1024
min_duration = math.floor((1024 * 1024)/speed_limit)
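# e.g. a 1 MiB document at a 384 KiB/s limit: floor(1048576 / 393216) = 2 seconds minimum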
r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
'--limit-rate', f'{speed_limit}'
])
r.check_response(count=count, http_status=200)
assert r.duration > timedelta(seconds=min_duration), \
f'rate limited transfer should take more than {min_duration}s, '\
f'not {r.duration}'
# make extreme parallel h2 upgrades, check invalid conn reuse
# before protocol switch has happened
def test_02_25_h2_upgrade_x(self, env: Env, httpd):
url = f'http://localhost:{env.http_port}/data-100k'
client = LocalClient(name='cli_h2_upgrade_extreme', env=env, timeout=15)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[url])
assert r.exit_code == 0, f'{client.dump_logs()}'
# Special client that tests TLS session reuse in parallel transfers
# TODO: just uses a single connection for h2/h3. Not sure how to prevent that
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_26_session_shared_reuse(self, env: Env, proto, httpd, nghttpx):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
url = f'https://{env.authority_for(env.domain1, proto)}/data-100k'
client = LocalClient(name='cli_tls_session_reuse', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[url, proto])
r.check_exit_code(0)
# test on paused transfers, based on issue #11982
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_27a_paused_no_cl(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
url = f'https://{env.authority_for(env.domain1, proto)}' \
'/curltest/tweak/?&chunks=6&chunk_size=8000'
client = LocalClient(env=env, name='cli_h2_pausing')
r = client.run(args=['-V', proto, url])
r.check_exit_code(0)
# test on paused transfers, based on issue #11982
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_27b_paused_no_cl(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
url = f'https://{env.authority_for(env.domain1, proto)}' \
'/curltest/tweak/?error=502'
client = LocalClient(env=env, name='cli_h2_pausing')
r = client.run(args=['-V', proto, url])
r.check_exit_code(0)
# test on paused transfers, based on issue #11982
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_27c_paused_no_cl(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
url = f'https://{env.authority_for(env.domain1, proto)}' \
'/curltest/tweak/?status=200&chunks=1&chunk_size=100'
client = LocalClient(env=env, name='cli_h2_pausing')
r = client.run(args=['-V', proto, url])
r.check_exit_code(0)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_28_get_compressed(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
urln = f'https://{env.authority_for(env.domain1brotli, proto)}/data-100k?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--compressed'
])
r.check_exit_code(code=0)
r.check_response(count=count, http_status=200)
def check_downloads(self, client, srcfile: str, count: int,
complete: bool = True):
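# verify that downloaded files 0..count-1 exist; when `complete` is True
# they must also match `srcfile` byte for byte, otherwise show a unified diff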
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dfile).readlines(),
fromfile=srcfile,
tofile=dfile,
n=1))
assert False, f'download {dfile} differs:\n{diff}'
# download via lib client, 1 at a time, pause/resume at different offsets
@pytest.mark.parametrize("pause_offset", [0, 10*1024, 100*1023, 640000])
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_29_h2_lib_serial(self, env: Env, httpd, nghttpx, proto, pause_offset):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
docname = 'data-10m'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-P', f'{pause_offset}', '-V', proto, url
])
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
# download parallel with prior knowledge
def test_02_30_parallel_prior_knowledge(self, env: Env, httpd):
count = 3
curl = CurlClient(env=env)
urln = f'http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], extra_args=[
'--parallel', '--http2-prior-knowledge'
])
r.check_response(http_status=200, count=count)
assert r.total_connects == 1, r.dump_logs()
# download parallel with h2 "Upgrade:"
def test_02_31_parallel_upgrade(self, env: Env, httpd, nghttpx):
count = 3
curl = CurlClient(env=env)
urln = f'http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], extra_args=[
'--parallel', '--http2'
])
r.check_response(http_status=200, count=count)
# we see up to 3 connections, because Apache wants to serve only a single
# request via Upgrade: and then closes the connection. But if a new
# request comes in time, it might still get served.
assert r.total_connects <= 3, r.dump_logs()
# nghttpx is the only server we have that supports TLS early data
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_32_earlydata(self, env: Env, httpd, nghttpx, proto):
if not env.curl_can_early_data():
pytest.skip('TLS earlydata not implemented')
if proto == 'h3' and \
(not env.have_h3() or not env.curl_can_h3_early_data()):
pytest.skip("h3 not supported")
if proto != 'h3' and sys.platform.startswith('darwin') and env.ci_run:
pytest.skip('failing on macOS CI runners')
if proto == 'h3' and sys.platform.startswith('darwin') and env.curl_uses_lib('wolfssl'):
pytest.skip('h3 wolfssl early data failing on macOS')
if proto == 'h3' and sys.platform.startswith('darwin') and env.curl_uses_lib('gnutls'):
pytest.skip('h3 gnutls early data failing on macOS')
count = 2
docname = 'data-10k'
# we want this test to always connect to nghttpx, since it is
# the only server we have that supports TLS earlydata
port = env.port_for(proto)
if proto != 'h3':
port = env.nghttpx_https_port
url = f'https://{env.domain1}:{port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}',
'-e', # use TLS earlydata
'-f', # forbid reuse of connections
'-r', f'{env.domain1}:{port}:127.0.0.1',
'-V', proto, url
])
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
# check that TLS earlydata worked as expected
earlydata = {}
reused_session = False
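# trace lines matching "[t-<n>] EarlyData: <bytes>" are collected below, e.g.
# "[t-1] EarlyData: 127" records 127 early-data bytes for the second transfer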
for line in r.trace_lines:
m = re.match(r'^\[t-(\d+)] EarlyData: (-?\d+)', line)
if m:
earlydata[int(m.group(1))] = int(m.group(2))
continue
if re.match(r'\[1-1] \* SSL reusing session.*', line):
reused_session = True
assert reused_session, 'session was not reused for 2nd transfer'
assert earlydata[0] == 0, f'{earlydata}'
if proto == 'http/1.1':
assert earlydata[1] == 111, f'{earlydata}'
elif proto == 'h2':
assert earlydata[1] == 127, f'{earlydata}'
elif proto == 'h3':
assert earlydata[1] == 109, f'{earlydata}'
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.parametrize("max_host_conns", [0, 1, 5])
def test_02_33_max_host_conns(self, env: Env, httpd, nghttpx, proto, max_host_conns):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
if not env.curl_is_verbose():
pytest.skip('only works for curl with verbose strings')
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 50
max_parallel = 50
docname = 'data-10k'
port = env.port_for(proto)
url = f'https://{env.domain1}:{port}/{docname}'
run_env = os.environ.copy()
run_env['CURL_DEBUG'] = 'multi'
client = LocalClient(name='cli_hx_download', env=env, run_env=run_env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}',
'-m', f'{max_parallel}',
'-x', # always use a fresh connection
'-M', str(max_host_conns), # limit conns per host
'-r', f'{env.domain1}:{port}:127.0.0.1',
'-V', proto, url
])
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
if max_host_conns > 0:
matched_lines = 0
for line in r.trace_lines:
m = re.match(r'.*The cache now contains (\d+) members.*', line)
if m:
matched_lines += 1
n = int(m.group(1))
assert n <= max_host_conns
assert matched_lines > 0
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.parametrize("max_total_conns", [0, 1, 5])
def test_02_34_max_total_conns(self, env: Env, httpd, nghttpx, proto, max_total_conns):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
if not env.curl_is_verbose():
pytest.skip('only works for curl with verbose strings')
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 50
max_parallel = 50
docname = 'data-10k'
port = env.port_for(proto)
url = f'https://{env.domain1}:{port}/{docname}'
run_env = os.environ.copy()
run_env['CURL_DEBUG'] = 'multi'
client = LocalClient(name='cli_hx_download', env=env, run_env=run_env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}',
'-m', f'{max_parallel}',
'-x', # always use a fresh connection
'-T', str(max_total_conns), # limit total connections
'-r', f'{env.domain1}:{port}:127.0.0.1',
'-V', proto, url
])
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
if max_total_conns > 0:
matched_lines = 0
for line in r.trace_lines:
m = re.match(r'.*The cache now contains (\d+) members.*', line)
if m:
matched_lines += 1
n = int(m.group(1))
assert n <= max_total_conns
assert matched_lines > 0
# 2 parallel transfers, pause and resume. Load a 100 MB gzip bomb from
# the server with "Content-Encoding: gzip" that gets exploded during
# response writing to the client. The client pauses after 1MB of unzipped
# data and causes buffers to fill while the server sends more response
# data.
# * http/1.1: not much buffering is done since curl no longer
# serves the connections that are paused
# * h2/h3: the server continues sending what the stream window allows and,
# since the single connection also carries unpaused transfers, data keeps
# being received and has to be buffered.
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_35_pause_bomb(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
pause_offset = 1024 * 1024
docname = 'bomb-100m.txt.var'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-m', f'{count}',
'-P', f'{pause_offset}', '-V', proto, url
])
r.check_exit_code(0)
# download with looong urls
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("url_junk", [1024, 16*1024, 32*1024, 64*1024, 80*1024, 96*1024])
def test_02_36_looong_urls(self, env: Env, httpd, nghttpx, proto, url_junk):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_lib('quiche'):
pytest.skip("quiche fails from 16k onwards")
curl = CurlClient(env=env)
# append 'url_junk' bytes of junk to the query string
url = f'https://{env.authority_for(env.domain1, proto)}/data.json?{"x"*(url_junk)}'
r = curl.http_download(urls=[url], alpn_proto=proto)
if url_junk <= 1024:
r.check_exit_code(0)
r.check_response(http_status=200)
elif url_junk <= 16*1024:
r.check_exit_code(0)
# server replies with 414, Request URL too long
r.check_response(http_status=414)
elif url_junk <= 32*1024:
r.check_exit_code(0)
# server replies with 414, Request URL too long
r.check_response(http_status=414)
else:
# with urls larger than 64k, behaviour differs
if proto == 'http/1.1':
r.check_exit_code(0)
r.check_response(http_status=414)
elif proto == 'h2':
# h2 is unable to send such large headers (frame limits)
r.check_exit_code(55)
elif proto == 'h3':
if url_junk <= 64*1024:
r.check_exit_code(0)
# nghttpx reports 431 Request Header Fields Too Large
r.check_response(http_status=431)
else:
# nghttpx destroys the connection with internal error
# ERR_QPACK_HEADER_TOO_LARGE
r.check_exit_code(56)
+139
View File
@@ -0,0 +1,139 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import time
from datetime import timedelta
from threading import Thread
import pytest
from testenv import Env, CurlClient, ExecResult
log = logging.getLogger(__name__)
class TestGoAway:
# download files sequentially with delay, reload server for GOAWAY
def test_03_01_h2_goaway(self, env: Env, httpd, nghttpx):
proto = 'h2'
count = 3
self.r = None
def long_run():
curl = CurlClient(env=env)
# send 10 chunks of 1024 bytes in a response body with 100ms delay in between
urln = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?id=[0-{count - 1}]'\
'&chunks=10&chunk_size=1024&chunk_delay=100ms'
self.r = curl.http_download(urls=[urln], alpn_proto=proto)
t = Thread(target=long_run)
t.start()
# each request will take a second, reload the server in the middle
# of the first one.
time.sleep(1.5)
assert httpd.reload()
t.join()
r: ExecResult = self.r
r.check_response(count=count, http_status=200)
# reload will shut down the connection gracefully with GOAWAY
# we expect to see a second connection opened afterwards
assert r.total_connects == 2
for idx, s in enumerate(r.stats):
if s['num_connects'] > 0:
log.debug(f'request {idx} connected')
# this should take `count` seconds to retrieve
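# (10 chunks x 100ms delay ≈ 1s per response; 3 sequential responses ≈ 3s)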
assert r.duration >= timedelta(seconds=count)
# download files sequentially with delay, reload server for GOAWAY
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_03_02_h3_goaway(self, env: Env, httpd, nghttpx):
proto = 'h3'
if proto == 'h3' and env.curl_uses_ossl_quic():
pytest.skip('OpenSSL QUIC fails here')
count = 3
self.r = None
def long_run():
curl = CurlClient(env=env)
# send 10 chunks of 1024 bytes in a response body with 100ms delay in between
urln = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?id=[0-{count - 1}]'\
'&chunks=10&chunk_size=1024&chunk_delay=100ms'
self.r = curl.http_download(urls=[urln], alpn_proto=proto)
t = Thread(target=long_run)
t.start()
# each request will take a second, reload the server in the middle
# of the first one.
time.sleep(1.5)
assert nghttpx.reload(timeout=timedelta(seconds=Env.SERVER_TIMEOUT))
t.join()
r: ExecResult = self.r
# this should take `count` seconds to retrieve, maybe a little less
assert r.duration >= timedelta(seconds=count-1)
r.check_response(count=count, http_status=200, connect_count=2)
# reload will shut down the connection gracefully with GOAWAY
# we expect to see a second connection opened afterwards
for idx, s in enumerate(r.stats):
if s['num_connects'] > 0:
log.debug(f'request {idx} connected')
# download files sequentially with delay, reload server for GOAWAY
def test_03_03_h1_goaway(self, env: Env, httpd, nghttpx):
proto = 'http/1.1'
count = 3
self.r = None
def long_run():
curl = CurlClient(env=env)
# send 10 chunks of 1024 bytes in a response body with 100ms delay in between
# pause 2 seconds between requests
urln = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?id=[0-{count - 1}]'\
'&chunks=10&chunk_size=1024&chunk_delay=100ms'
self.r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--rate', '30/m',
])
t = Thread(target=long_run)
t.start()
# each request will take a second, reload the server in the middle
# of the first one.
time.sleep(1.5)
assert httpd.reload()
t.join()
r: ExecResult = self.r
r.check_response(count=count, http_status=200, connect_count=2)
# reload will shut down the connection gracefully
# we expect to see a second connection opened afterwards
for idx, s in enumerate(r.stats):
if s['num_connects'] > 0:
log.debug(f'request {idx} connected')
# this should take `count` seconds to retrieve
assert r.duration >= timedelta(seconds=count)
+135
View File
@@ -0,0 +1,135 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
from typing import Tuple, List, Dict
import pytest
from testenv import Env, CurlClient
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
class TestStuttered:
# download 1 file, check that delayed response works in general
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_04_01_download_1(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?id=[0-{count - 1}]'\
'&chunks=100&chunk_size=100&chunk_delay=10ms'
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=1, http_status=200)
# download 50 files, each in 100 chunks of 100 bytes with 10ms delay between chunks
# prepend 100 file requests to warm up connection processing limits
# (Apache2 increases # of parallel processed requests after successes)
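# (each stuttered response is 100 x 100 bytes = 10000 bytes, delivered over
# at least 100 x 10ms = 1s)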
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_04_02_100_100_10(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 50
warmups = 100
curl = CurlClient(env=env)
url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]'
urln = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?id=[0-{count-1}]'\
'&chunks=100&chunk_size=100&chunk_delay=10ms'
r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
extra_args=['--parallel'])
r.check_response(count=warmups+count, http_status=200)
assert r.total_connects == 1
t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
if t_max < (5 * t_min) and t_min < 2:
log.warning(f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]')
# download 50 files, each in 1000 chunks of 10 bytes with 100us delay between chunks
# prepend 100 file requests to warm up connection processing limits
# (Apache2 increases # of parallel processed requests after successes)
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_04_03_1000_10_1(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 50
warmups = 100
curl = CurlClient(env=env)
url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]'
urln = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?id=[0-{count - 1}]'\
'&chunks=1000&chunk_size=10&chunk_delay=100us'
r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
extra_args=['--parallel'])
r.check_response(count=warmups+count, http_status=200)
assert r.total_connects == 1
t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
if t_max < (5 * t_min):
log.warning(f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]')
# download 50 files, each in 10000 chunks of 1 byte with 50us delay between chunks
# prepend 100 file requests to warm up connection processing limits
# (Apache2 increases # of parallel processed requests after successes)
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_04_04_1000_10_1(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 50
warmups = 100
curl = CurlClient(env=env)
url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]'
urln = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?id=[0-{count - 1}]'\
'&chunks=10000&chunk_size=1&chunk_delay=50us'
r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
extra_args=['--parallel'])
r.check_response(count=warmups+count, http_status=200)
assert r.total_connects == 1
t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
if t_max < (5 * t_min):
log.warning(f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]')
def stats_spread(self, stats: List[Dict], key: str) -> Tuple[float, int, float, int, float]:
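# returns (average, index_of_min, min, index_of_max, max) of stats[key];
# e.g. for [{'t': 0.2}, {'t': 0.5}] and key 't' this gives (0.35, 0, 0.2, 1, 0.5)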
stotals = 0.0
s_min = 100.0
i_min = -1
s_max = 0.0
i_max = -1
for idx, s in enumerate(stats):
val = float(s[key])
stotals += val
if val > s_max:
s_max = val
i_max = idx
if val < s_min:
s_min = val
i_min = idx
return stotals/len(stats), i_min, s_min, i_max, s_max
+125
View File
@@ -0,0 +1,125 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import pytest
from testenv import Env, CurlClient
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.55'),
reason=f"httpd version too old for this: {Env.httpd_version()}")
class TestErrors:
# download 1 file, check that we get CURLE_PARTIAL_FILE
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_05_01_partial_1(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?id=[0-{count - 1}]'\
'&chunks=3&chunk_size=16000&body_error=reset'
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--retry', '0'
])
r.check_exit_code(False)
invalid_stats = []
for idx, s in enumerate(r.stats):
if 'exitcode' not in s or s['exitcode'] not in [18, 56, 92, 95]:
invalid_stats.append(f'request {idx} exit with {s.get("exitcode")}')
assert len(invalid_stats) == 0, f'failed: {invalid_stats}'
# download files, check that we get CURLE_PARTIAL_FILE for all
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_05_02_partial_20(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_ossl_quic():
pytest.skip("openssl-quic is flaky in yielding proper error codes")
count = 20
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}' \
f'/curltest/tweak?id=[0-{count - 1}]'\
'&chunks=5&chunk_size=16000&body_error=reset'
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--retry', '0', '--parallel',
])
r.check_exit_code(False)
assert len(r.stats) == count, f'did not get all stats: {r}'
invalid_stats = []
for idx, s in enumerate(r.stats):
if 'exitcode' not in s or s['exitcode'] not in [18, 55, 56, 92, 95]:
invalid_stats.append(f'request {idx} exit with {s.get("exitcode")}\n{s}')
assert len(invalid_stats) == 0, f'failed: {invalid_stats}'
# access a resource that, on h2, RSTs the stream with HTTP_1_1_REQUIRED
def test_05_03_required(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
proto = 'http/1.1'
urln = f'https://{env.authority_for(env.domain1, proto)}/curltest/1_1'
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_exit_code(0)
r.check_response(http_status=200, count=1)
proto = 'h2'
urln = f'https://{env.authority_for(env.domain1, proto)}/curltest/1_1'
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_exit_code(0)
r.check_response(http_status=200, count=1)
# check that we did a downgrade
assert r.stats[0]['http_version'] == '1.1', r.dump_logs()
# On the URL used here, Apache does an "unclean" TLS shutdown,
# meaning it sends no shutdown notice and just closes TCP.
# The HTTP response delivers a body without Content-Length. We expect:
# - http/1.0 to fail since it relies on a clean connection close to
# detect the end of the body
# - http/1.1 to work since it uses "chunked" transfer encoding
# and stops receiving when that signals the end
# - h2 to work since the stream signals the end of the response before
# the close, so the "unclean" shutdown never becomes an issue
@pytest.mark.parametrize("proto", ['http/1.0', 'http/1.1', 'h2'])
def test_05_04_unclean_tls_shutdown(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 10 if proto == 'h2' else 1
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}'\
f'/curltest/shutdown_unclean?id=[0-{count-1}]&chunks=4'
r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
'--parallel', '--trace-config', 'ssl'
])
if proto == 'http/1.0':
# whether we fail on the missing TLS shutdown is not fully consistent;
# openssl code ignores such errors intentionally in non-debug builds
r.check_exit_code(56)
else:
r.check_exit_code(0)
r.check_response(http_status=200, count=count)
+135
View File
@@ -0,0 +1,135 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import re
import pytest
from testenv import Env, CurlClient
log = logging.getLogger(__name__)
class TestEyeballs:
# download using only HTTP/3 on working server
@pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support")
def test_06_01_h3_only(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3-only'])
r.check_response(count=1, http_status=200)
assert r.stats[0]['http_version'] == '3'
# download using only HTTP/3 on missing server
@pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support")
def test_06_02_h3_only(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{env.https_only_tcp_port}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3-only'])
r.check_response(exitcode=7, http_status=None)
# download using HTTP/3 on missing server with fallback on h2
@pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support")
def test_06_03_h3_fallback_h2(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{env.https_only_tcp_port}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3'])
r.check_response(count=1, http_status=200)
assert r.stats[0]['http_version'] == '2'
# download using HTTP/3 on missing server with fallback on http/1.1
@pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support")
def test_06_04_h3_fallback_h1(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
urln = f'https://{env.domain2}:{env.https_only_tcp_port}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3'])
r.check_response(count=1, http_status=200)
assert r.stats[0]['http_version'] == '1.1'
# make a successful https: transfer and observe the timer stats
def test_06_10_stats_success(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
r = curl.http_download(urls=[urln])
r.check_response(count=1, http_status=200)
assert r.stats[0]['time_connect'] > 0.0
assert r.stats[0]['time_appconnect'] > 0.0
# make https: to a hostname that connects on TCP, but fails TLS verification
def test_06_11_stats_fail_verify(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
urln = f'https://not-valid.com:{env.https_port}/data.json'
r = curl.http_download(urls=[urln], extra_args=[
'--resolve', f'not-valid.com:{env.https_port}:127.0.0.1'
])
r.check_response(count=1, http_status=0, exitcode=False)
assert r.stats[0]['time_connect'] > 0.0 # was tcp connected
assert r.stats[0]['time_appconnect'] == 0 # but not SSL verified
# make https: to an invalid address
def test_06_12_stats_fail_tcp(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
urln = 'https://not-valid.com:1/data.json'
r = curl.http_download(urls=[urln], extra_args=[
'--resolve', f'not-valid.com:{1}:127.0.0.1'
])
r.check_response(count=1, http_status=None, exitcode=False)
assert r.stats[0]['time_connect'] == 0 # no one should have listened
assert r.stats[0]['time_appconnect'] == 0 # did not happen either
# check timers when trying 3 unresponsive addresses
@pytest.mark.skipif(condition=not Env.curl_has_feature('IPv6'),
reason='curl lacks ipv6 support')
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_06_13_timers(self, env: Env):
curl = CurlClient(env=env)
# ipv6 0100::/64 is supposed to go into the void (rfc6666)
r = curl.http_download(urls=['https://xxx.invalid/'], extra_args=[
'--resolve', 'xxx.invalid:443:0100::1,0100::2,0100::3',
'--connect-timeout', '1',
'--happy-eyeballs-timeout-ms', '123',
'--trace-config', 'timer,happy-eyeballs,tcp'
])
r.check_response(count=1, http_status=None, exitcode=False)
assert r.stats[0]['time_connect'] == 0 # no one connected
# check that we indeed started attempts on all 3 addresses
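# (note: curl's trace shows the normalized form '100::N' for the '0100::N'
# addresses given via --resolve, hence the pattern below)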
tcp_attempts = [line for line in r.trace_lines
if re.match(r'.*Trying \[100::[123]]:443', line)]
assert len(tcp_attempts) == 3, f'found: {"".join(tcp_attempts)}\n{r.dump_logs()}'
# if the 0100::/64 really goes into the void, we should see 2 HAPPY_EYEBALLS
# timeouts being set here
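# (the first address is attempted right away; each further attempt only starts
# once a happy-eyeballs timer fires, which is why 3 addresses yield 2 timers)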
failed_attempts = [line for line in r.trace_lines
if re.match(r'.*checked connect attempts: 0 ongoing', line)]
if len(failed_attempts):
# github CI fails right away with "Network is unreachable", slackers...
assert len(failed_attempts) == 3, f'found: {"".join(failed_attempts)}\n{r.dump_logs()}'
else:
# no immediately failed attempts, as it should be
he_timers_set = [line for line in r.trace_lines
if re.match(r'.*\[TIMER] \[HAPPY_EYEBALLS] set for', line)]
assert len(he_timers_set) == 2, f'found: {"".join(he_timers_set)}\n{r.dump_logs()}'
+761
View File
@@ -0,0 +1,761 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import difflib
import filecmp
import logging
import os
import re
import sys
import pytest
from typing import List, Union
from testenv import Env, CurlClient, LocalClient, ExecResult
log = logging.getLogger(__name__)
class TestUpload:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, httpd, nghttpx):
env.make_data_file(indir=env.gen_dir, fname="data-10k", fsize=10*1024)
env.make_data_file(indir=env.gen_dir, fname="data-63k", fsize=63*1024)
env.make_data_file(indir=env.gen_dir, fname="data-64k", fsize=64*1024)
env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024)
env.make_data_file(indir=env.gen_dir, fname="data-1m+", fsize=(1024*1024)+1)
env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
# upload small data, check that this is what was echoed
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_01_upload_1_small(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
data = '0123456789'
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
r.check_stats(count=1, http_status=200, exitcode=0)
respdata = open(curl.response_file(0)).readlines()
assert respdata == [data]
# upload large data, check that this is what was echoed
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_02_upload_1_large(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
fdata = os.path.join(env.gen_dir, 'data-100k')
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
r.check_stats(count=1, http_status=200, exitcode=0)
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
assert respdata == indata
# upload data sequentially, check that they were echoed
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_10_upload_sequential(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 20
data = '0123456789'
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [data]
# upload data in parallel, check that they were echoed
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_07_11_upload_parallel(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
# limit since we use a separate connection in h1
count = 20
data = '0123456789'
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
extra_args=['--parallel'])
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [data]
# upload large data sequentially, check that this is what was echoed
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_12_upload_seq_large(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
fdata = os.path.join(env.gen_dir, 'data-100k')
count = 10
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
r.check_response(count=count, http_status=200)
indata = open(fdata).readlines()
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == indata
# upload very large data sequentially, check that this is what was echoed
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_13_upload_seq_large(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
fdata = os.path.join(env.gen_dir, 'data-10m')
count = 2
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
r.check_stats(count=count, http_status=200, exitcode=0)
indata = open(fdata).readlines()
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == indata
# upload from stdin, issue #14870
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("indata", [
'', '1', '123\n456andsomething\n\n'
])
def test_07_14_upload_stdin(self, env: Env, httpd, nghttpx, proto, indata):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]'
r = curl.http_put(urls=[url], data=indata, alpn_proto=proto)
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [f'{len(indata)}']
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_15_hx_put(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
upload_size = 128*1024
url = f'https://localhost:{env.https_port}/curltest/put'
client = LocalClient(name='cli_hx_upload', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
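# cli_hx_upload arguments, as used here: -n = number of transfers,
# -S = upload size in bytes, -V = ALPN protocol to use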
r = client.run(args=[
'-n', f'{count}', '-S', f'{upload_size}', '-V', proto, url
])
r.check_exit_code(0)
self.check_downloads(client, r, [f"{upload_size}"], count)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_16_hx_put_reuse(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
upload_size = 128*1024
url = f'https://localhost:{env.https_port}/curltest/put'
client = LocalClient(name='cli_hx_upload', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-S', f'{upload_size}', '-R', '-V', proto, url
])
r.check_exit_code(0)
self.check_downloads(client, r, [f"{upload_size}"], count)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_17_hx_post_reuse(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
upload_size = 128*1024
url = f'https://localhost:{env.https_port}/curltest/echo'
client = LocalClient(name='cli_hx_upload', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-M', 'POST', '-S', f'{upload_size}', '-R', '-V', proto, url
])
r.check_exit_code(0)
self.check_downloads(client, r, ["x" * upload_size], count)
# upload data in parallel, check that they were echoed
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_07_20_upload_parallel(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
# limit since we use a separate connection in h1
count = 10
data = '0123456789'
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
extra_args=['--parallel'])
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [data]
# upload large data in parallel, check that this is what was echoed
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_07_21_upload_parallel_large(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
fdata = os.path.join(env.gen_dir, 'data-100k')
# limit since we use a separate connection in h1
count = 10
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
extra_args=['--parallel'])
r.check_response(count=count, http_status=200)
self.check_download(r, count, fdata, curl)
# upload large data in parallel to a URL that denies uploads
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_07_22_upload_parallel_fail(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
fdata = os.path.join(env.gen_dir, 'data-10m')
count = 20
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}'\
f'/curltest/tweak?status=400&delay=5ms&chunks=1&body_error=reset&id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
extra_args=['--parallel'])
# depending on timing and protocol, we might get CURLE_PARTIAL_FILE (18),
# CURLE_SEND_ERROR (55), CURLE_RECV_ERROR (56), CURLE_HTTP2_STREAM (92) or CURLE_HTTP3 (95)
r.check_stats(count=count, exitcode=[18, 55, 56, 92, 95])
# PUT 100k
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_30_put_100k(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
fdata = os.path.join(env.gen_dir, 'data-100k')
count = 1
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
extra_args=['--parallel'])
r.check_stats(count=count, http_status=200, exitcode=0)
exp_data = [f'{os.path.getsize(fdata)}']
r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == exp_data
# PUT 10m
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_31_put_10m(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
fdata = os.path.join(env.gen_dir, 'data-10m')
count = 1
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]&chunk_delay=2ms'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
extra_args=['--parallel'])
r.check_stats(count=count, http_status=200, exitcode=0)
exp_data = [f'{os.path.getsize(fdata)}']
r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == exp_data
# issue #10591
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_32_issue_10591(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
fdata = os.path.join(env.gen_dir, 'data-10m')
count = 1
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto)
r.check_stats(count=count, http_status=200, exitcode=0)
# issue #11157: an upload that is 404'ed by the server needs to terminate
# correctly and not time out on sending
def test_07_33_issue_11157a(self, env: Env, httpd, nghttpx):
proto = 'h2'
fdata = os.path.join(env.gen_dir, 'data-10m')
# send a POST to our PUT handler which will immediately send a 404 back
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put'
curl = CurlClient(env=env)
r = curl.run_direct(with_stats=True, args=[
'--resolve', f'{env.authority_for(env.domain1, proto)}:127.0.0.1',
'--cacert', env.ca.cert_file,
'--request', 'POST',
'--max-time', '5', '-v',
'--url', url,
'--form', 'idList=12345678',
'--form', 'pos=top',
'--form', 'name=mr_test',
'--form', f'fileSource=@{fdata};type=application/pdf',
])
assert r.exit_code == 0, f'{r}'
r.check_stats(1, 404)
# issue #11157, send upload that is slowly read in
def test_07_33_issue_11157b(self, env: Env, httpd, nghttpx):
proto = 'h2'
fdata = os.path.join(env.gen_dir, 'data-10m')
# tell our test PUT handler to read the upload more slowly, so
# that the send buffering and transfer loop needs to wait
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?chunk_delay=2ms'
curl = CurlClient(env=env)
r = curl.run_direct(with_stats=True, args=[
'--verbose', '--trace-config', 'ids,time',
'--resolve', f'{env.authority_for(env.domain1, proto)}:127.0.0.1',
'--cacert', env.ca.cert_file,
'--request', 'PUT',
'--max-time', '10', '-v',
'--url', url,
'--form', 'idList=12345678',
'--form', 'pos=top',
'--form', 'name=mr_test',
'--form', f'fileSource=@{fdata};type=application/pdf',
])
assert r.exit_code == 0, r.dump_logs()
r.check_stats(1, 200)
def test_07_34_issue_11194(self, env: Env, httpd, nghttpx):
proto = 'h2'
# tell our test PUT handler to read the upload more slowly, so
# that the send buffering and transfer loop needs to wait
fdata = os.path.join(env.gen_dir, 'data-100k')
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put'
curl = CurlClient(env=env)
r = curl.run_direct(with_stats=True, args=[
'--verbose', '--trace-config', 'ids,time',
'--resolve', f'{env.authority_for(env.domain1, proto)}:127.0.0.1',
'--cacert', env.ca.cert_file,
'--request', 'PUT',
'--digest', '--user', 'test:test',
'--data-binary', f'@{fdata}',
'--url', url,
])
assert r.exit_code == 0, r.dump_logs()
r.check_stats(1, 200)
# upload large data on a h1 to h2 upgrade
def test_07_35_h1_h2_upgrade_upload(self, env: Env, httpd, nghttpx):
fdata = os.path.join(env.gen_dir, 'data-100k')
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', extra_args=[
'--http2'
])
r.check_response(count=1, http_status=200)
# apache does not Upgrade on requests with a body
assert r.stats[0]['http_version'] == '1.1', f'{r}'
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
assert respdata == indata
# upload to a 301,302,303 response
@pytest.mark.parametrize("redir", ['301', '302', '303'])
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_36_upload_30x(self, env: Env, httpd, nghttpx, redir, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_ossl_quic():
pytest.skip("OpenSSL's own QUIC is flaky here")
data = '0123456789' * 10
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo{redir}?id=[0-0]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto, extra_args=[
'-L', '--trace-config', 'http/2,http/3'
])
r.check_response(count=1, http_status=200)
respdata = open(curl.response_file(0)).readlines()
assert respdata == [] # was transformed to a GET
# upload to a 307 response
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_37_upload_307(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_ossl_quic():
pytest.skip("OpenSSL's own QUIC is flaky here")
data = '0123456789' * 10
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo307?id=[0-0]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto, extra_args=[
'-L', '--trace-config', 'http/2,http/3'
])
r.check_response(count=1, http_status=200)
respdata = open(curl.response_file(0)).readlines()
assert respdata == [data] # was POST again
# POST form data, yet another code path in transfer
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_38_form_small(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_form(urls=[url], alpn_proto=proto, form={
'name1': 'value1',
})
r.check_stats(count=1, http_status=200, exitcode=0)
# POST data urlencoded, small enough to be sent with request headers
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_39_post_urlenc_small(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
fdata = os.path.join(env.gen_dir, 'data-63k')
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=[
'--trace-config', 'http/2,http/3'
])
r.check_stats(count=1, http_status=200, exitcode=0)
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
assert respdata == indata
# POST data urlencoded, large enough to be sent separate from request headers
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_40_post_urlenc_large(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
fdata = os.path.join(env.gen_dir, 'data-64k')
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=[
'--trace-config', 'http/2,http/3'
])
r.check_stats(count=1, http_status=200, exitcode=0)
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
assert respdata == indata
# POST data urlencoded, small enough to be sent with request headers
# and request headers are so large that the first send is larger
# than our default upload buffer length (64KB).
# Unfixed, this will fail when run with CURL_DBG_SOCK_WBLOCK=80 most
# of the time
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_41_post_urlenc_small(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_lib('quiche'):
pytest.skip("quiche has CWND issues with large requests")
fdata = os.path.join(env.gen_dir, 'data-63k')
curl = CurlClient(env=env)
extra_args = ['--trace-config', 'http/2,http/3']
# add enough headers so that the first send chunk is > 64KB
for i in range(63):
extra_args.extend(['-H', f'x{i:02d}: {"y"*1019}'])
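# each -H adds roughly 1KB ('xNN: ' plus 1019 bytes of value), so 63 headers
# together with the 63k urlencoded body push the first send past the 64KB
# upload buffer mentioned above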
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=extra_args)
r.check_stats(count=1, http_status=200, exitcode=0)
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
assert respdata == indata
def check_download(self, r: ExecResult, count: int, srcfile: Union[str, os.PathLike], curl: CurlClient):
for i in range(count):
dfile = curl.download_file(i)
assert os.path.exists(dfile), f'download {dfile} missing\n{r.dump_logs()}'
if not filecmp.cmp(srcfile, dfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dfile).readlines(),
fromfile=srcfile,
tofile=dfile,
n=1))
assert False, f'download {dfile} differs:\n{diff}\n{r.dump_logs()}'
# upload data, pause, let connection die with an incomplete response
# issues #11769 #13260
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_42a_upload_disconnect(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
client = LocalClient(name='cli_upload_pausing', env=env, timeout=60)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]&die_after=0'
r = client.run(['-V', proto, url])
if r.exit_code == 18: # PARTIAL_FILE is always ok
pass
elif proto == 'h2':
# CURLE_HTTP2, CURLE_HTTP2_STREAM
assert r.exit_code in [16, 92], f'unexpected exit code\n{r.dump_logs()}'
elif proto == 'h3':
r.check_exit_code(95) # CURLE_HTTP3 also ok
else:
r.check_exit_code(18) # will fail as it should
# upload data, pause, let connection die without any response at all
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_42b_upload_disconnect(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
client = LocalClient(name='cli_upload_pausing', env=env, timeout=60)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=0&just_die=1'
r = client.run(['-V', proto, url])
exp_code = 52 # GOT_NOTHING
if proto == 'h2' or proto == 'h3':
exp_code = 0 # we get a 500 from the server
r.check_exit_code(exp_code) # GOT_NOTHING
# upload data, pause, let connection die after 100 continue
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_42c_upload_disconnect(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
client = LocalClient(name='cli_upload_pausing', env=env, timeout=60)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=0&die_after_100=1'
r = client.run(['-V', proto, url])
exp_code = 52 # GOT_NOTHING
if proto == 'h2' or proto == 'h3':
exp_code = 0 # we get a 500 from the server
r.check_exit_code(exp_code) # GOT_NOTHING
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_43_upload_denied(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_ossl_quic():
pytest.skip("openssl-quic is flaky in filed PUTs")
fdata = os.path.join(env.gen_dir, 'data-10m')
count = 1
max_upload = 128 * 1024
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?'\
f'id=[0-{count-1}]&max_upload={max_upload}'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
extra_args=['--trace-config', 'all'])
r.check_stats(count=count, http_status=413, exitcode=0)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("httpcode", [301, 302, 307, 308])
def test_07_44_put_redir(self, env: Env, httpd, nghttpx, proto, httpcode):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
upload_size = 128*1024
url = f'https://localhost:{env.https_port}/curltest/put-redir-{httpcode}'
client = LocalClient(name='cli_hx_upload', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-l', '-S', f'{upload_size}', '-V', proto, url
])
r.check_exit_code(0)
results = [int(m.group(1)) for line in r.trace_lines
if (m := re.match(r'.* FINISHED, result=(\d+), response=(\d+)', line))]
httpcodes = [int(m.group(2)) for line in r.trace_lines
if (m := re.match(r'.* FINISHED, result=(\d+), response=(\d+)', line))]
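# the FINISHED trace line is parsed twice: group(1) is the libcurl result
# code, group(2) is the HTTP response code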
if httpcode == 308:
assert results[0] == 65, f'{r}' # could not rewind input
else:
assert httpcodes[0] == httpcode, f'{r}'
# speed limited on put handler
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_50_put_speed_limit(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
fdata = os.path.join(env.gen_dir, 'data-100k')
up_len = 100 * 1024
speed_limit = 50 * 1024
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
with_headers=True, extra_args=[
'--limit-rate', f'{speed_limit}'
])
r.check_response(count=count, http_status=200)
assert r.responses[0]['header']['received-length'] == f'{up_len}', f'{r.responses[0]}'
up_speed = r.stats[0]['speed_upload']
assert (speed_limit * 0.5) <= up_speed <= (speed_limit * 1.5), f'{r.stats[0]}'
# speed limited on echo handler
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_07_51_echo_speed_limit(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
fdata = os.path.join(env.gen_dir, 'data-100k')
speed_limit = 50 * 1024
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
with_headers=True, extra_args=[
'--limit-rate', f'{speed_limit}'
])
r.check_response(count=count, http_status=200)
up_speed = r.stats[0]['speed_upload']
assert (speed_limit * 0.5) <= up_speed <= (speed_limit * 1.5), f'{r.stats[0]}'
# upload larger data, triggering "Expect: 100-continue" code paths
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_07_60_upload_exp100(self, env: Env, httpd, nghttpx, proto):
fdata = os.path.join(env.gen_dir, 'data-1m+')
read_delay = 1
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]'\
f'&read_delay={read_delay}s'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto, extra_args=[
'--expect100-timeout', f'{read_delay+1}'
])
r.check_stats(count=1, http_status=200, exitcode=0)
# upload larger data, triggering "Expect: 100-continue" code paths
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_07_61_upload_exp100_timeout(self, env: Env, httpd, nghttpx, proto):
fdata = os.path.join(env.gen_dir, 'data-1m+')
read_delay = 2
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]'\
f'&read_delay={read_delay}s'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto, extra_args=[
'--expect100-timeout', f'{read_delay-1}'
])
r.check_stats(count=1, http_status=200, exitcode=0)
# issue #15688: when posting a form and cr_mime_read() is called with
# length < 4, we did not make progress
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_07_62_upload_issue_15688(self, env: Env, httpd, proto):
# this length (including multipart formatting) leads to a
# client reader invocation with length 1.
upload_len = 196169
fname = f'data-{upload_len}'
env.make_data_file(indir=env.gen_dir, fname=fname, fsize=upload_len)
fdata = os.path.join(env.gen_dir, fname)
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_form(urls=[url], form={
'file': f'@{fdata}',
}, alpn_proto=proto, extra_args=[
'--max-time', '10'
])
r.check_stats(count=1, http_status=200, exitcode=0)
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_07_63_upload_exp100_paused(self, env: Env, httpd, nghttpx, proto):
read_delay = 1
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'\
f'&read_delay={read_delay}s'
upload_size = 128 * 1024
client = LocalClient(name='cli_hx_upload', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', '1',
'-S', f'{upload_size}',
'-P', '1',
'-M', 'MIME',
'-r', f'{env.domain1}:{env.port_for(proto)}:127.0.0.1',
'-V', proto, url
])
r.check_exit_code(0)
# nghttpx is the only server we have that supports TLS early data, and it
# announces a limit of 16k
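# exp_early below is the number of request bytes expected to go out as TLS
# early data: over TCP it is capped by nghttpx's announced 16k, over QUIC
# (h3) by our 128K send buffer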
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx")
@pytest.mark.parametrize("proto,upload_size,exp_early", [
pytest.param('http/1.1', 100, 203, id='h1-small-body'),
pytest.param('http/1.1', 10*1024, 10345, id='h1-medium-body'),
pytest.param('http/1.1', 32*1024, 16384, id='h1-limited-body'),
pytest.param('h2', 10*1024, 10378, id='h2-medium-body'),
pytest.param('h2', 32*1024, 16384, id='h2-limited-body'),
pytest.param('h3', 1024, 1126, id='h3-small-body'),
pytest.param('h3', 1024 * 1024, 131177, id='h3-limited-body'),
# h3: limited+body (long app data). The 0RTT size is limited by
# our sendbuf size of 128K.
])
def test_07_70_put_earlydata(self, env: Env, httpd, nghttpx, proto, upload_size, exp_early):
if not env.curl_can_early_data():
pytest.skip('TLS earlydata not implemented')
if proto == 'h3' and \
(not env.have_h3() or not env.curl_can_h3_early_data()):
pytest.skip("h3 not supported")
if proto != 'h3' and sys.platform.startswith('darwin') and env.ci_run:
pytest.skip('failing on macOS CI runners')
if proto == 'h3' and sys.platform.startswith('darwin') and env.curl_uses_lib('wolfssl'):
pytest.skip('h3 wolfssl early data failing on macOS')
if proto == 'h3' and sys.platform.startswith('darwin') and env.curl_uses_lib('gnutls'):
pytest.skip('h3 gnutls early data failing on macOS')
count = 2
# we want this test to always connect to nghttpx, since it is
# the only server we have that supports TLS earlydata
port = env.port_for(proto)
if proto != 'h3':
port = env.nghttpx_https_port
url = f'https://{env.domain1}:{port}/curltest/put'
client = LocalClient(name='cli_hx_upload', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}',
'-e', # use TLS earlydata
'-f', # forbid reuse of connections
'-l', # announce upload length, no 'Expect: 100'
'-S', f'{upload_size}',
'-r', f'{env.domain1}:{port}:127.0.0.1',
'-V', proto, url
])
r.check_exit_code(0)
self.check_downloads(client, r, [f"{upload_size}"], count)
earlydata = {}
for line in r.trace_lines:
m = re.match(r'^\[t-(\d+)] EarlyData: (-?\d+)', line)
if m:
earlydata[int(m.group(1))] = int(m.group(2))
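# transfer 0 performs the full handshake (no session ticket yet) and cannot
# send early data; transfer 1 resumes the session and may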
assert earlydata[0] == 0, f'{earlydata}\n{r.dump_logs()}'
# depending on cpu load, curl might not upload as much before
# the handshake starts and early data stops.
assert 0 < earlydata[1] <= exp_early, f'{earlydata}\n{r.dump_logs()}'
def check_downloads(self, client, r, source: List[str], count: int,
complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile), f'download {dfile} missing\n{r.dump_logs()}'
if complete:
diff = "".join(difflib.unified_diff(a=source,
b=open(dfile).readlines(),
fromfile='-',
tofile=dfile,
n=1))
assert not diff, f'download {dfile} differs:\n{diff}\n{r.dump_logs()}'
+235
View File
@@ -0,0 +1,235 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import difflib
import filecmp
import logging
import os
import re
import pytest
from testenv import Env, CurlClient, Caddy, LocalClient
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=not Env.has_caddy(), reason="missing caddy")
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
class TestCaddy:
@pytest.fixture(autouse=True, scope='class')
def caddy(self, env):
caddy = Caddy(env=env)
assert caddy.initial_start()
yield caddy
caddy.stop()
def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
data1k = 1024*'x'
flen = 0
with open(fpath, 'w') as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return flen
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, caddy):
self._make_docs_file(docs_dir=caddy.docs_dir, fname='data10k.data', fsize=10*1024)
self._make_docs_file(docs_dir=caddy.docs_dir, fname='data1.data', fsize=1024*1024)
self._make_docs_file(docs_dir=caddy.docs_dir, fname='data5.data', fsize=5*1024*1024)
self._make_docs_file(docs_dir=caddy.docs_dir, fname='data10.data', fsize=10*1024*1024)
self._make_docs_file(docs_dir=caddy.docs_dir, fname='data100.data', fsize=100*1024*1024)
env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
# download 1 file
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_08_01_download_1(self, env: Env, caddy: Caddy, proto):
if proto == 'h3' and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{caddy.port}/data.json'
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(count=1, http_status=200)
# download 1MB files sequentially
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_08_02_download_1mb_sequential(self, env: Env, caddy: Caddy, proto):
if proto == 'h3' and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
count = 50
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200, connect_count=1)
# download 1MB files in parallel
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_08_03_download_1mb_parallel(self, env: Env, caddy: Caddy, proto):
if proto == 'h3' and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
count = 20
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
r.check_response(count=count, http_status=200)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
assert r.total_connects > 1, r.dump_logs()
else:
assert r.total_connects == 1, r.dump_logs()
# download 5MB files sequentially
@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_08_04a_download_10mb_sequential(self, env: Env, caddy: Caddy, proto):
if proto == 'h3' and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
count = 40
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{caddy.port}/data5.data?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200, connect_count=1)
# download 10MB files sequentially
@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_08_04b_download_10mb_sequential(self, env: Env, caddy: Caddy, proto):
if proto == 'h3' and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
count = 20
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200, connect_count=1)
# download 10MB files in parallel
@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_08_05_download_1mb_parallel(self, env: Env, caddy: Caddy, proto):
if proto == 'h3' and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
if proto == 'http/1.1' and env.curl_uses_lib('mbedtls'):
pytest.skip("mbedtls 3.6.0 fails on 50 connections with: "
"ssl_handshake returned: (-0x7F00) SSL - Memory allocation failed")
count = 50
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
r.check_response(count=count, http_status=200)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
assert r.total_connects > 1, r.dump_logs()
else:
assert r.total_connects == 1, r.dump_logs()
# post data in parallel, check that they were echoed
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_08_06_post_parallel(self, env: Env, httpd, caddy, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
# limit since we use a separate connection in h1
count = 20
data = '0123456789'
curl = CurlClient(env=env)
url = f'https://{env.domain2}:{caddy.port}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
extra_args=['--parallel'])
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [data]
# put a large file, check that its length was echoed back
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_08_07_put_large(self, env: Env, httpd, caddy, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
# limit since we use a separate connection in h1
count = 1
fdata = os.path.join(env.gen_dir, 'data-10m')
curl = CurlClient(env=env)
url = f'https://{env.domain2}:{caddy.port}/curltest/put?id=[0-{count-1}]'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto)
exp_data = [f'{os.path.getsize(fdata)}']
r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == exp_data
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_08_08_earlydata(self, env: Env, httpd, caddy, proto):
if not env.curl_can_early_data():
pytest.skip('TLS earlydata not implemented')
if proto == 'h3' and \
(not env.have_h3() or not env.curl_can_h3_early_data()):
pytest.skip("h3 not supported")
count = 2
docname = 'data10k.data'
url = f'https://{env.domain1}:{caddy.port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}',
'-e', # use TLS earlydata
'-f', # forbid reuse of connections
'-r', f'{env.domain1}:{caddy.port}:127.0.0.1',
'-V', proto, url
])
r.check_exit_code(0)
srcfile = os.path.join(caddy.docs_dir, docname)
self.check_downloads(client, srcfile, count)
earlydata = {}
for line in r.trace_lines:
m = re.match(r'^\[t-(\d+)] EarlyData: (-?\d+)', line)
if m:
earlydata[int(m.group(1))] = int(m.group(2))
assert earlydata[0] == 0, f'{earlydata}'
if proto == 'h3':
assert earlydata[1] == 113, f'{earlydata}'
else:
# Caddy does not support early data on TCP
assert earlydata[1] == 0, f'{earlydata}'
def check_downloads(self, client, srcfile: str, count: int,
complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dfile).readlines(),
fromfile=srcfile,
tofile=dfile,
n=1))
assert False, f'download {dfile} differs:\n{diff}'
+85
View File
@@ -0,0 +1,85 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import pytest
from testenv import Env, CurlClient, LocalClient
log = logging.getLogger(__name__)
class TestPush:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, httpd):
push_dir = os.path.join(httpd.docs_dir, 'push')
if not os.path.exists(push_dir):
os.makedirs(push_dir)
env.make_data_file(indir=push_dir, fname="data1", fsize=1*1024)
env.make_data_file(indir=push_dir, fname="data2", fsize=1*1024)
env.make_data_file(indir=push_dir, fname="data3", fsize=1*1024)
def httpd_configure(self, env, httpd):
httpd.set_extra_config(env.domain1, [
'H2EarlyHints on',
'<Location /push/data1>',
' H2PushResource /push/data2',
'</Location>',
'<Location /push/data2>',
' H2PushResource /push/data1',
' H2PushResource /push/data3',
'</Location>',
])
# activate the new config
httpd.reload_if_config_changed()
# download a file that triggers a "103 Early Hints" response
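# (103 is an interim response per RFC 8297; the final 200 arrives as the
# second response, so two responses are expected below)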
def test_09_01_h2_early_hints(self, env: Env, httpd, configures_httpd):
self.httpd_configure(env, httpd)
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.https_port}/push/data1'
r = curl.http_download(urls=[url], alpn_proto='h2', with_stats=False,
with_headers=True)
r.check_exit_code(0)
assert len(r.responses) == 2, f'{r.responses}'
assert r.responses[0]['status'] == 103, f'{r.responses}'
assert 'link' in r.responses[0]['header'], f'{r.responses[0]}'
assert r.responses[0]['header']['link'] == '</push/data2>; rel=preload', f'{r.responses[0]}'
def test_09_02_h2_push(self, env: Env, httpd, configures_httpd):
self.httpd_configure(env, httpd)
# use localhost as we do not have resolve support in local client
url = f'https://localhost:{env.https_port}/push/data1'
client = LocalClient(name='cli_h2_serverpush', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[url])
r.check_exit_code(0)
assert os.path.exists(client.download_file(0))
assert os.path.exists(os.path.join(client.run_dir, 'push0')), r.dump_logs()
+388
View File
@@ -0,0 +1,388 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import filecmp
import logging
import os
import re
import sys
import pytest
from testenv import Env, CurlClient, ExecResult
log = logging.getLogger(__name__)
class TestProxy:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, httpd, nghttpx_fwd):
push_dir = os.path.join(httpd.docs_dir, 'push')
if not os.path.exists(push_dir):
os.makedirs(push_dir)
if env.have_nghttpx():
nghttpx_fwd.start_if_needed()
env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024)
env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
indir = httpd.docs_dir
env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
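# helper: extracts the ALPN protocol negotiated for the CONNECT tunnel from
# curl's verbose trace output (which is why the tests using it require
# debug/verbose curl builds, see the skip marks)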
def get_tunnel_proto_used(self, r: ExecResult):
for line in r.trace_lines:
m = re.match(r'.* CONNECT: \'(\S+)\' negotiated$', line)
if m:
return m.group(1)
assert False, f'tunnel protocol not found in:\n{"".join(r.trace_lines)}'
return None
# download via http: proxy (no tunnel)
def test_10_01_proxy_http(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=curl.get_proxy_args(proxys=False))
r.check_response(count=1, http_status=200)
# download via https: proxy (no tunnel)
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_10_02_proxys_down(self, env: Env, httpd, proto):
if proto == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
xargs = curl.get_proxy_args(proto=proto)
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=xargs)
r.check_response(count=1, http_status=200,
protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
# upload via https: with proto (no tunnel)
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.parametrize("fname, fcount", [
['data.json', 5],
['data-100k', 5],
['data-1m', 2]
])
@pytest.mark.skipif(condition=not Env.have_nghttpx(),
reason="no nghttpx available")
def test_10_02_proxys_up(self, env: Env, httpd, nghttpx, proto,
fname, fcount):
if proto == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
count = fcount
srcfile = os.path.join(httpd.docs_dir, fname)
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/curltest/echo?id=[0-{count-1}]'
xargs = curl.get_proxy_args(proto=proto)
r = curl.http_upload(urls=[url], data=f'@{srcfile}', alpn_proto=proto,
extra_args=xargs)
r.check_response(count=count, http_status=200,
protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
indata = open(srcfile).readlines()
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == indata
# download http: via http: proxytunnel
def test_10_03_proxytunnel_http(self, env: Env, httpd, nghttpx_fwd):
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
xargs = curl.get_proxy_args(proxys=False, tunnel=True)
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=xargs)
r.check_response(count=1, http_status=200)
# download http: via https: proxytunnel
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_10_04_proxy_https(self, env: Env, httpd, nghttpx_fwd):
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
xargs = curl.get_proxy_args(tunnel=True)
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=xargs)
r.check_response(count=1, http_status=200)
# download https: with proto via http: proxytunnel
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_10_05_proxytunnel_http(self, env: Env, httpd, nghttpx_fwd, proto):
curl = CurlClient(env=env)
url = f'https://localhost:{env.https_port}/data.json'
xargs = curl.get_proxy_args(proxys=False, tunnel=True)
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
extra_args=xargs)
r.check_response(count=1, http_status=200,
protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
# download https: with proto via https: proxytunnel
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_10_06_proxytunnel_https(self, env: Env, httpd, nghttpx_fwd, proto, tunnel):
if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
curl = CurlClient(env=env)
url = f'https://localhost:{env.https_port}/data.json?[0-0]'
xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
extra_args=xargs)
r.check_response(count=1, http_status=200,
protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
assert self.get_tunnel_proto_used(r) == tunnel
srcfile = os.path.join(httpd.docs_dir, 'data.json')
dfile = curl.download_file(0)
assert filecmp.cmp(srcfile, dfile, shallow=False)
# download many https: with proto via https: proxytunnel
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
@pytest.mark.parametrize("fname, fcount", [
['data.json', 100],
['data-100k', 20],
['data-1m', 5]
])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_10_07_pts_down_small(self, env: Env, httpd, nghttpx_fwd, proto,
tunnel, fname, fcount):
if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
if env.curl_uses_lib('mbedtls') and \
sys.platform.startswith('darwin') and env.ci_run:
pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
count = fcount
curl = CurlClient(env=env)
url = f'https://localhost:{env.https_port}/{fname}?[0-{count-1}]'
xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
extra_args=xargs)
r.check_response(count=count, http_status=200,
protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
assert self.get_tunnel_proto_used(r) == tunnel
srcfile = os.path.join(httpd.docs_dir, fname)
for i in range(count):
dfile = curl.download_file(i)
assert filecmp.cmp(srcfile, dfile, shallow=False)
assert r.total_connects == 1, r.dump_logs()
# upload many https: with proto via https: proxytunnel
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
@pytest.mark.parametrize("fname, fcount", [
['data.json', 50],
['data-100k', 20],
['data-1m', 5]
])
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_10_08_upload_seq_large(self, env: Env, httpd, nghttpx, proto,
tunnel, fname, fcount):
if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
if env.curl_uses_lib('mbedtls') and \
sys.platform.startswith('darwin') and env.ci_run:
pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
count = fcount
srcfile = os.path.join(httpd.docs_dir, fname)
curl = CurlClient(env=env)
url = f'https://localhost:{env.https_port}/curltest/echo?id=[0-{count-1}]'
xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
r = curl.http_upload(urls=[url], data=f'@{srcfile}', alpn_proto=proto,
extra_args=xargs)
assert self.get_tunnel_proto_used(r) == tunnel
r.check_response(count=count, http_status=200)
assert r.total_connects == 1, r.dump_logs()
indata = open(srcfile).readlines()
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == indata, f'response {i} differs'
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
@pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_10_09_reuse_server(self, env: Env, httpd, nghttpx_fwd, tunnel):
if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
curl = CurlClient(env=env)
url1 = f'https://localhost:{env.https_port}/data.json'
url2 = f'http://localhost:{env.http_port}/data.json'
xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
r = curl.http_download(urls=[url1, url2], alpn_proto='http/1.1', with_stats=True,
extra_args=xargs)
r.check_response(count=2, http_status=200)
assert self.get_tunnel_proto_used(r) == tunnel
if tunnel == 'h2':
# TODO: we would like to reuse the first connection for the
# second URL, but this is currently not possible
# assert r.total_connects == 1
assert r.total_connects == 2
else:
assert r.total_connects == 2
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
@pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_10_10_reuse_proxy(self, env: Env, httpd, nghttpx_fwd, tunnel):
# url twice via https: proxy separated with '--next', will reuse
if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
if env.curl_uses_lib('mbedtls') and \
sys.platform.startswith('darwin') and env.ci_run:
pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
curl = CurlClient(env=env)
url = f'https://localhost:{env.https_port}/data.json'
proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=proxy_args)
r1.check_response(count=1, http_status=200)
assert self.get_tunnel_proto_used(r1) == tunnel
# get the args, duplicate separated with '--next'
x2_args = r1.args[1:]
x2_args.append('--next')
x2_args.extend(proxy_args)
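# x2_args now replays the options of the first invocation (r1.args minus the
# executable) and, after '--next', applies the same proxy options to a second
# transfer of the same URL; the expectation below is that the proxy connection
# is reused for it.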
r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=x2_args)
r2.check_response(count=2, http_status=200)
assert r2.total_connects == 1
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
@pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
@pytest.mark.skipif(condition=not Env.curl_uses_lib('openssl'), reason="tls13-ciphers not supported")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_10_11_noreuse_proxy_https(self, env: Env, httpd, nghttpx_fwd, tunnel):
# different --proxy-tls13-ciphers, no reuse of connection for https:
curl = CurlClient(env=env)
if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
url = f'https://localhost:{env.https_port}/data.json'
proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=proxy_args)
r1.check_response(count=1, http_status=200)
assert self.get_tunnel_proto_used(r1) == tunnel
# get the args, duplicate separated with '--next'
x2_args = r1.args[1:]
x2_args.append('--next')
x2_args.extend(proxy_args)
x2_args.extend(['--proxy-tls13-ciphers', 'TLS_AES_256_GCM_SHA384'])
r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=x2_args)
r2.check_response(count=2, http_status=200)
assert r2.total_connects == 2
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
@pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
@pytest.mark.skipif(condition=not Env.curl_uses_lib('openssl'), reason="tls13-ciphers not supported")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_10_12_noreuse_proxy_http(self, env: Env, httpd, nghttpx_fwd, tunnel):
# different --proxy-tls13-ciphers, no reuse of connection for http:
if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=proxy_args)
r1.check_response(count=1, http_status=200)
assert self.get_tunnel_proto_used(r1) == tunnel
# get the args, duplicate separated with '--next'
x2_args = r1.args[1:]
x2_args.append('--next')
x2_args.extend(proxy_args)
x2_args.extend(['--proxy-tls13-ciphers', 'TLS_AES_256_GCM_SHA384'])
r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=x2_args)
r2.check_response(count=2, http_status=200)
assert r2.total_connects == 2
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
@pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
@pytest.mark.skipif(condition=not Env.curl_uses_lib('openssl'), reason="tls13-ciphers not supported")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_10_13_noreuse_https(self, env: Env, httpd, nghttpx_fwd, tunnel):
# different --tls13-ciphers on https: same proxy config
if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
curl = CurlClient(env=env)
url = f'https://localhost:{env.https_port}/data.json'
proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=proxy_args)
r1.check_response(count=1, http_status=200)
assert self.get_tunnel_proto_used(r1) == tunnel
# get the args, duplicate separated with '--next'
x2_args = r1.args[1:]
x2_args.append('--next')
x2_args.extend(proxy_args)
x2_args.extend(['--tls13-ciphers', 'TLS_AES_256_GCM_SHA384'])
r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=x2_args)
r2.check_response(count=2, http_status=200)
assert r2.total_connects == 2
# download via https: proxy (no tunnel) using IP address
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_10_14_proxys_ip_addr(self, env: Env, httpd, proto):
if proto == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
xargs = curl.get_proxy_args(proto=proto, use_ip=True)
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=xargs)
if env.curl_uses_lib('mbedtls') and \
not env.curl_lib_version_at_least('mbedtls', '3.5.0'):
r.check_exit_code(60) # CURLE_PEER_FAILED_VERIFICATION
else:
r.check_response(count=1, http_status=200,
protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
@@ -0,0 +1,134 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import socket
from threading import Thread
from typing import Generator
import pytest
from testenv import Env, CurlClient
log = logging.getLogger(__name__)
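# UDSFaker: a small helper that binds a Unix domain socket and answers every
# connection with a fixed HTTP/1.1 JSON response, so the tests below can check
# how curl's '--unix-socket' option behaves for different protocols without a
# real server behind the socket.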
class UDSFaker:
def __init__(self, path):
self._uds_path = path
self._done = False
self._socket = None
self._thread = None
@property
def path(self):
return self._uds_path
def start(self):
def process(self):
self._socket.listen(1)
self._process()
try:
os.unlink(self._uds_path)
except OSError:
if os.path.exists(self._uds_path):
raise
self._socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self._socket.bind(self._uds_path)
self._thread = Thread(target=process, daemon=True, args=[self])
self._thread.start()
def stop(self):
self._done = True
self._socket.close()
def _process(self):
while self._done is False:
try:
c, client_address = self._socket.accept()
try:
c.recv(16)
c.sendall("""HTTP/1.1 200 Ok
Server: UdsFaker
Content-Type: application/json
Content-Length: 19

{ "host": "faked" }""".encode())
finally:
c.close()
except ConnectionAbortedError:
self._done = True
except OSError:
self._done = True
class TestUnix:
@pytest.fixture(scope="class")
def uds_faker(self, env: Env) -> Generator[UDSFaker, None, None]:
uds_path = os.path.join(env.gen_dir, 'uds_11.sock')
faker = UDSFaker(path=uds_path)
faker.start()
yield faker
faker.stop()
# download http: via Unix socket
def test_11_01_unix_connect_http(self, env: Env, httpd, uds_faker):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_download(urls=[url], with_stats=True,
extra_args=[
'--unix-socket', uds_faker.path,
])
r.check_response(count=1, http_status=200)
# download https: via Unix socket
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_11_02_unix_connect_http(self, env: Env, httpd, uds_faker):
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.https_port}/data.json'
r = curl.http_download(urls=[url], with_stats=True,
extra_args=[
'--unix-socket', uds_faker.path,
])
r.check_response(exitcode=35, http_status=None)
# download HTTP/3 via Unix socket
@pytest.mark.skipif(condition=not Env.have_h3(), reason='h3 not supported')
def test_11_03_unix_connect_quic(self, env: Env, httpd, uds_faker):
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.https_port}/data.json'
r = curl.http_download(urls=[url], with_stats=True,
alpn_proto='h3',
extra_args=[
'--unix-socket', uds_faker.path,
])
r.check_response(exitcode=96, http_status=None)
@@ -0,0 +1,175 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
from datetime import datetime, timedelta
import pytest
from testenv import Env, CurlClient
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
class TestReuse:
# check if HTTP/1.1 handles 'Connection: close' correctly
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_12_01_h1_conn_close(self, env: Env, httpd, configures_httpd, nghttpx, proto):
httpd.reset_config()
httpd.set_extra_config('base', [
'MaxKeepAliveRequests 1',
])
httpd.reload_if_config_changed()
count = 100
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200)
# Server sends `Connection: close` on every 2nd request, requiring
# a new connection
delta = 5
assert (count/2 - delta) < r.total_connects < (count/2 + delta)
@pytest.mark.skipif(condition=Env.httpd_is_at_least('2.5.0'),
reason="httpd 2.5+ handles KeepAlives different")
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_12_02_h1_conn_timeout(self, env: Env, httpd, configures_httpd, nghttpx, proto):
httpd.reset_config()
httpd.set_extra_config('base', [
'KeepAliveTimeout 1',
])
httpd.reload_if_config_changed()
count = 5
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--rate', '30/m',
])
r.check_response(count=count, http_status=200)
# Connections time out on server before we send another request,
assert r.total_connects == count
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_03_as_follow_h2h3(self, env: Env, httpd, configures_httpd, nghttpx):
# write an alt-svc file that advises h3 instead of h2
asfile = os.path.join(env.gen_dir, 'alt-svc-12_03.txt')
self.create_asfile(asfile, f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.h3_port}')
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
'--alt-svc', f'{asfile}',
])
r.check_response(count=1, http_status=200)
assert r.stats[0]['http_version'] == '3', f'{r.stats}'
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_04_as_follow_h3h2(self, env: Env, httpd, configures_httpd, nghttpx):
count = 2
# write an alt-svc file that advises h2 instead of h3
asfile = os.path.join(env.gen_dir, 'alt-svc-12_04.txt')
ts = datetime.now() + timedelta(hours=24)
expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
with open(asfile, 'w') as fd:
fd.write(f'h3 {env.domain1} {env.https_port} h2 {env.domain1} {env.https_port} "{expires}" 0 0')
log.info(f'altsvc: {open(asfile).readlines()}')
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
'--alt-svc', f'{asfile}', '--http3'
])
r.check_response(count=count, http_status=200)
# We expect the connection to be reused and use HTTP/2
assert r.total_connects == 1
for s in r.stats:
assert s['http_version'] == '2', f'{s}'
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_05_as_follow_h3h1(self, env: Env, httpd, configures_httpd, nghttpx):
# With '--http3' an Alt-Svc redirection from h3 to h1 is allowed
count = 2
# write an alt-svc file that advises h1 instead of h3
asfile = os.path.join(env.gen_dir, 'alt-svc-12_05.txt')
ts = datetime.now() + timedelta(hours=24)
expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
with open(asfile, 'w') as fd:
fd.write(f'h3 {env.domain1} {env.https_port} http/1.1 {env.domain1} {env.https_port} "{expires}" 0 0')
log.info(f'altsvc: {open(asfile).readlines()}')
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
'--alt-svc', f'{asfile}', '--http3'
])
r.check_response(count=count, http_status=200)
# We expect the connection to be reused and use HTTP/1.1
assert r.total_connects == 1
for s in r.stats:
assert s['http_version'] == '1.1', f'{s}'
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_06_as_ignore_h3h1(self, env: Env, httpd, configures_httpd, nghttpx):
# With '--http3-only' an Alt-Svc redirection from h3 to h1 is ignored
count = 2
# write an alt-svc file that advises h1 instead of h3
asfile = os.path.join(env.gen_dir, 'alt-svc-12_05.txt')
ts = datetime.now() + timedelta(hours=24)
expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
with open(asfile, 'w') as fd:
fd.write(f'h3 {env.domain1} {env.https_port} http/1.1 {env.domain1} {env.https_port} "{expires}" 0 0')
log.info(f'altsvc: {open(asfile).readlines()}')
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
'--alt-svc', f'{asfile}', '--http3-only'
])
r.check_response(count=count, http_status=200)
# We expect the connection to stay on h3, since we used --http3-only
assert r.total_connects == 1
for s in r.stats:
assert s['http_version'] == '3', f'{s}'
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_07_as_ignore_h2h3(self, env: Env, httpd, configures_httpd, nghttpx):
# With '--http2' an Alt-Svc redirection from h2 to h3 is ignored
# write an alt-svc file that advises h3 instead of h2
asfile = os.path.join(env.gen_dir, 'alt-svc-12_03.txt')
self.create_asfile(asfile, f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.h3_port}')
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
'--alt-svc', f'{asfile}', '--http2'
])
r.check_response(count=1, http_status=200)
assert r.stats[0]['http_version'] == '2', f'{r.stats}'
def create_asfile(self, fpath, line):
ts = datetime.now() + timedelta(hours=24)
expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
with open(fpath, 'w') as fd:
fd.write(f'{line} "{expires}" 0 0')
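# the written alt-svc cache line looks like, e.g. (illustrative values):
#   h2 one.example.org 443 h3 one.example.org 444 "20250101 12:00:00" 0 0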
log.info(f'altsvc: {open(fpath).readlines()}')
@@ -0,0 +1,171 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import re
import pytest
from testenv import Env, CurlClient, ExecResult
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=Env.setup_incomplete(),
reason=f"missing: {Env.incomplete_reason()}")
class TestProxyAuth:
def httpd_configure(self, env, httpd):
httpd.set_proxy_auth(True)
httpd.reload_if_config_changed()
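# a debug build of curl logs a trace line like "* CONNECT: 'h2' negotiated"
# once the tunnel ALPN is settled; this helper extracts that value (the exact
# format is assumed from the regex below).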
def get_tunnel_proto_used(self, r: ExecResult):
for line in r.trace_lines:
m = re.match(r'.* CONNECT: \'(\S+)\' negotiated$', line)
if m:
return m.group(1)
assert False, f'tunnel protocol not found in:\n{"".join(r.trace_lines)}'
return None
# download via http: proxy (no tunnel), no auth
def test_13_01_proxy_no_auth(self, env: Env, httpd, configures_httpd):
self.httpd_configure(env, httpd)
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=curl.get_proxy_args(proxys=False))
r.check_response(count=1, http_status=407)
# download via http: proxy (no tunnel), auth
def test_13_02_proxy_auth(self, env: Env, httpd, configures_httpd):
self.httpd_configure(env, httpd)
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
xargs = curl.get_proxy_args(proxys=False)
xargs.extend(['--proxy-user', 'proxy:proxy'])
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=xargs)
r.check_response(count=1, http_status=200)
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_13_03_proxys_no_auth(self, env: Env, httpd, configures_httpd, nghttpx_fwd):
self.httpd_configure(env, httpd)
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
xargs = curl.get_proxy_args(proxys=True)
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=xargs)
r.check_response(count=1, http_status=407)
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_13_04_proxys_auth(self, env: Env, httpd, configures_httpd, nghttpx_fwd):
self.httpd_configure(env, httpd)
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
xargs = curl.get_proxy_args(proxys=True)
xargs.extend(['--proxy-user', 'proxy:proxy'])
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=xargs)
r.check_response(count=1, http_status=200)
def test_13_05_tunnel_http_no_auth(self, env: Env, httpd, configures_httpd, nghttpx_fwd):
self.httpd_configure(env, httpd)
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
xargs = curl.get_proxy_args(proxys=False, tunnel=True)
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=xargs)
# expect "COULD_NOT_CONNECT"
r.check_response(exitcode=56, http_status=None)
def test_13_06_tunnel_http_auth(self, env: Env, httpd, configures_httpd):
self.httpd_configure(env, httpd)
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
xargs = curl.get_proxy_args(proxys=False, tunnel=True)
xargs.extend(['--proxy-user', 'proxy:proxy'])
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=xargs)
r.check_response(count=1, http_status=200)
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
def test_13_07_tunnels_no_auth(self, env: Env, httpd, configures_httpd, nghttpx_fwd, proto, tunnel):
self.httpd_configure(env, httpd)
if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
curl = CurlClient(env=env)
url = f'https://localhost:{env.https_port}/data.json'
xargs = curl.get_proxy_args(proxys=True, tunnel=True, proto=tunnel)
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
extra_args=xargs)
# expect "COULD_NOT_CONNECT"
r.check_response(exitcode=56, http_status=None)
assert self.get_tunnel_proto_used(r) == tunnel
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
def test_13_08_tunnels_auth(self, env: Env, httpd, configures_httpd, nghttpx_fwd, proto, tunnel):
self.httpd_configure(env, httpd)
if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
pytest.skip('only supported with nghttp2')
curl = CurlClient(env=env)
url = f'https://localhost:{env.https_port}/data.json'
xargs = curl.get_proxy_args(proxys=True, tunnel=True, proto=tunnel)
xargs.extend(['--proxy-user', 'proxy:proxy'])
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
extra_args=xargs)
r.check_response(count=1, http_status=200,
protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
assert self.get_tunnel_proto_used(r) == tunnel
@pytest.mark.skipif(condition=not Env.curl_has_feature('SPNEGO'),
reason='curl lacks SPNEGO support')
def test_13_09_negotiate_http(self, env: Env, httpd, configures_httpd):
self.httpd_configure(env, httpd)
run_env = os.environ.copy()
run_env['https_proxy'] = f'http://127.0.0.1:{env.proxy_port}'
curl = CurlClient(env=env, run_env=run_env)
url = f'https://localhost:{env.https_port}/data.json'
r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True, extra_args=[
'--negotiate', '--proxy-user', 'proxy:proxy'
])
r1.check_response(count=1, http_status=200)
@@ -0,0 +1,141 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import pytest
from testenv import Env, CurlClient
log = logging.getLogger(__name__)
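# Digest and Basic authentication tests against the httpd test server's
# '/restricted/digest/' location, including large and oversized credentials.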
class TestAuth:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, httpd, nghttpx):
env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
# download 1 file, not authenticated
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_14_01_digest_get_noauth(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(http_status=401)
# download 1 file, authenticated
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_14_02_digest_get_auth(self, env: Env, httpd, nghttpx, proto):
if not env.curl_has_feature('digest'):
pytest.skip("curl built without digest")
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
'--digest', '--user', 'test:test'
])
r.check_response(http_status=200)
# PUT data, authenticated
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_14_03_digest_put_auth(self, env: Env, httpd, nghttpx, proto):
if not env.curl_has_feature('digest'):
pytest.skip("curl built without digest")
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_ossl_quic():
pytest.skip("openssl-quic is flaky in retrying POST")
data = '0123456789'
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto, extra_args=[
'--digest', '--user', 'test:test'
])
r.check_response(http_status=200)
# PUT data, digest auth large pw
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_14_04_digest_large_pw(self, env: Env, httpd, nghttpx, proto):
if not env.curl_has_feature('digest'):
pytest.skip("curl built without digest")
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
data = '0123456789'
password = 'x' * 65535
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto, extra_args=[
'--digest', '--user', f'test:{password}',
'--trace-config', 'http/2,http/3'
])
# digest does not submit the password, but a hash of it, so all
# works and, since the pw is not correct, we get a 401
r.check_response(http_status=401)
# PUT data, basic auth large pw
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_14_05_basic_large_pw(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and not env.curl_uses_lib('ngtcp2'):
# See <https://github.com/cloudflare/quiche/issues/1573>
pytest.skip("quiche/openssl-quic have problems with large requests")
# just large enough that nghttp2 will submit
password = 'x' * (47 * 1024)
fdata = os.path.join(env.gen_dir, 'data-10m')
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=[
'--basic', '--user', f'test:{password}',
'--trace-config', 'http/2,http/3'
])
# but apache denies on length limit
r.check_response(http_status=431)
# PUT data, basic auth with very large pw
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_14_06_basic_very_large_pw(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_lib('quiche'):
# See <https://github.com/cloudflare/quiche/issues/1573>
pytest.skip("quiche has problems with large requests")
password = 'x' * (64 * 1024)
fdata = os.path.join(env.gen_dir, 'data-10m')
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=[
'--basic', '--user', f'test:{password}'
])
# Depending on protocol, we might have an error sending or
# the server might shutdown the connection and we see the error
# on receiving
assert r.exit_code in [55, 56, 95], f'{r.dump_logs()}'
@@ -0,0 +1,115 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import re
import pytest
from testenv import Env
from testenv import CurlClient
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
class TestTracing:
# default verbose output
def test_15_01_trace_defaults(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, def_tracing=False, extra_args=[
'-v'
])
r.check_response(http_status=200)
trace = r.trace_lines
assert len(trace) > 0
# trace ids
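# with '--trace-config ids', every trace line is prefixed with the transfer
# and connection ids, e.g. '[0-0] ...' (example value)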
def test_15_02_trace_ids(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, def_tracing=False, extra_args=[
'-v', '--trace-config', 'ids'
])
r.check_response(http_status=200)
for line in r.trace_lines:
m = re.match(r'^\[0-[0x]] .+', line)
if m is None:
assert False, f'no match: {line}'
# trace ids+time
def test_15_03_trace_ids_time(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, def_tracing=False, extra_args=[
'-v', '--trace-config', 'ids,time'
])
r.check_response(http_status=200)
for line in r.trace_lines:
m = re.match(r'^([0-9:.]+) \[0-[0x]] .+', line)
if m is None:
assert False, f'no match: {line}'
# trace all
def test_15_04_trace_all(self, env: Env, httpd):
if not env.curl_is_verbose():
pytest.skip('only works for curl with verbose strings')
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, def_tracing=False, extra_args=[
'-v', '--trace-config', 'all'
])
r.check_response(http_status=200)
found_tcp = False
for line in r.trace_lines:
m = re.match(r'^([0-9:.]+) \[0-[0x]] .+', line)
if m is None:
assert False, f'no match: {line}'
m = re.match(r'^([0-9:.]+) \[0-[0x]] .+ \[TCP].+', line)
if m is not None:
found_tcp = True
assert found_tcp, f'TCP filter does not appear in trace "all": {r.stderr}'
# trace all, no TCP, no time
def test_15_05_trace_all(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, def_tracing=False, extra_args=[
'-v', '--trace-config', 'all,-tcp,-time'
])
r.check_response(http_status=200)
found_tcp = False
for line in r.trace_lines:
m = re.match(r'^\[0-[0x]] .+', line)
if m is None:
assert False, f'no match: {line}'
m = re.match(r'^\[0-[0x]] . \[TCP].+', line)
if m is not None:
found_tcp = True
if found_tcp:
assert False, f'TCP filter appears in trace "all,-tcp": {r.stderr}'
@@ -0,0 +1,177 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import pytest
from testenv import Env, CurlClient
log = logging.getLogger(__name__)
class TestInfo:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, httpd):
indir = httpd.docs_dir
env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024)
# download plain file
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_16_01_info_download(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
r.check_stats(count=count, http_status=200, exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip='127.0.0.1')
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=30, ul_size=0)
# download plain file with a 302 redirect
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_16_02_info_302_download(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json.302?[0-{count-1}]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True, extra_args=[
'--location'
])
r.check_stats(count=count, http_status=200, exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip='127.0.0.1')
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=30, ul_size=0)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_16_03_info_upload(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
fdata = os.path.join(env.gen_dir, 'data-100k')
fsize = 100 * 1024
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
with_headers=True, extra_args=[
'--trace-config', 'http/2,http/3'
])
r.check_response(count=count, http_status=200)
r.check_stats(count=count, http_status=200, exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip='127.0.0.1')
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=fsize, ul_size=fsize)
# download plain file via http: ('time_appconnect' is 0)
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_16_04_info_http_download(self, env: Env, httpd, nghttpx, proto):
count = 2
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
r.check_stats(count=count, http_status=200, exitcode=0,
remote_port=env.http_port, remote_ip='127.0.0.1')
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=30, ul_size=0)
def check_stat(self, idx, s, r, dl_size=None, ul_size=None):
self.check_stat_times(s, idx)
# we always send something
self.check_stat_positive(s, idx, 'size_request')
# we always receive response headers
self.check_stat_positive(s, idx, 'size_header')
if ul_size is not None:
assert s['size_upload'] == ul_size, f'stat #{idx}\n{r.dump_logs()}' # the file we sent
assert s['size_request'] >= s['size_upload'], \
f'stat #{idx}, "size_request" smaller than "size_upload", {s}\n{r.dump_logs()}'
if dl_size is not None:
assert s['size_download'] == dl_size, f'stat #{idx}\n{r.dump_logs()}' # the file we received
def check_stat_positive(self, s, idx, key):
assert key in s, f'stat #{idx} "{key}" missing: {s}'
assert s[key] > 0, f'stat #{idx} "{key}" not positive: {s}'
def check_stat_positive_or_0(self, s, idx, key):
assert key in s, f'stat #{idx} "{key}" missing: {s}'
assert s[key] >= 0, f'stat #{idx} "{key}" not positive: {s}'
def check_stat_zero(self, s, key):
assert key in s, f'stat "{key}" missing: {s}'
assert s[key] == 0, f'stat "{key}" not zero: {s}'
def check_stat_times(self, s, idx):
# check timings reported on a transfer for consistency
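# asserted ordering, roughly: namelookup/connect/appconnect < pretransfer,
# pretransfer <= total, starttransfer <= total, and time_queue <= starttransfer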
url = s['url_effective']
# connect time is sometimes reported as 0 by openssl-quic (sigh)
self.check_stat_positive_or_0(s, idx, 'time_connect')
# all stat keys which report timings
all_keys = {
'time_appconnect', 'time_redirect',
'time_pretransfer', 'time_starttransfer', 'time_total'
}
# stat keys where we expect a positive value
pos_keys = {'time_pretransfer', 'time_starttransfer', 'time_total', 'time_queue'}
if s['num_connects'] > 0:
if url.startswith('https:'):
pos_keys.add('time_appconnect')
if s['num_redirects'] > 0:
pos_keys.add('time_redirect')
zero_keys = all_keys - pos_keys
# assert all zeros are zeros and the others are positive
for key in zero_keys:
self.check_stat_zero(s, key)
for key in pos_keys:
self.check_stat_positive(s, idx, key)
# assert that all timers before "time_pretransfer" are less or equal
for key in ['time_appconnect', 'time_connect', 'time_namelookup']:
assert s[key] < s['time_pretransfer'], f'time "{key}" larger than ' \
f'"time_pretransfer": {s}'
# assert transfer total is after pretransfer.
# (in MOST situations, pretransfer is before starttransfer, BUT
# in protocols like HTTP we might get a server response already before
# we transition to multi state DID.)
assert s['time_pretransfer'] <= s['time_total'], f'"time_pretransfer" '\
f'greater than "time_total", {s}'
# assert that transfer start is before total
assert s['time_starttransfer'] <= s['time_total'], f'"time_starttransfer" '\
f'greater than "time_total", {s}'
if s['num_redirects'] > 0:
assert s['time_queue'] < s['time_starttransfer'], f'"time_queue" '\
f'greater/equal than "time_starttransfer", {s}'
else:
assert s['time_queue'] <= s['time_starttransfer'], f'"time_queue" '\
f'greater than "time_starttransfer", {s}'
@@ -0,0 +1,598 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import json
import logging
import os
import re
import pytest
from testenv import Env, CurlClient, LocalClient
log = logging.getLogger(__name__)
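# TLSDefs collects the TLS wire version ids (0x301..0x304 for TLSv1.0..TLSv1.3)
# and maps curl's '--tlsv1.x' / '--tls-max' arguments onto them.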
class TLSDefs:
TLS_VERSIONS = ['TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3']
TLS_VERSION_IDS = {
'TLSv1': 0x301,
'TLSv1.1': 0x302,
'TLSv1.2': 0x303,
'TLSv1.3': 0x304
}
CURL_ARG_MIN_VERSION_ID = {
'none': 0x0,
'tlsv1': 0x301,
'tlsv1.0': 0x301,
'tlsv1.1': 0x302,
'tlsv1.2': 0x303,
'tlsv1.3': 0x304,
}
CURL_ARG_MAX_VERSION_ID = {
'none': 0x0,
'1.0': 0x301,
'1.1': 0x302,
'1.2': 0x303,
'1.3': 0x304,
}
class TestSSLUse:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, httpd, nghttpx):
env.make_data_file(indir=httpd.docs_dir, fname="data-10k", fsize=10*1024)
def test_17_01_sslinfo_plain(self, env: Env, httpd):
proto = 'http/1.1'
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto)
assert r.json['HTTPS'] == 'on', f'{r.json}'
assert 'SSL_SESSION_ID' in r.json, f'{r.json}'
assert 'SSL_SESSION_RESUMED' in r.json, f'{r.json}'
assert r.json['SSL_SESSION_RESUMED'] == 'Initial', f'{r.json}'
@pytest.mark.parametrize("tls_max", ['1.2', '1.3'])
def test_17_02_sslinfo_reconnect(self, env: Env, tls_max, httpd):
proto = 'http/1.1'
count = 3
exp_resumed = 'Resumed'
xargs = ['--sessionid', '--tls-max', tls_max, f'--tlsv{tls_max}']
if env.curl_uses_lib('libressl'):
if tls_max == '1.3':
exp_resumed = 'Initial' # 1.2 works in LibreSSL, but 1.3 does not, TODO
if env.curl_uses_lib('rustls-ffi'):
exp_resumed = 'Initial' # Rustls does not support sessions, TODO
if env.curl_uses_lib('mbedtls') and tls_max == '1.3' and \
not env.curl_lib_version_at_least('mbedtls', '3.6.0'):
pytest.skip('mbedtls TLSv1.3 session resume not working in 3.6.0')
run_env = os.environ.copy()
run_env['CURL_DEBUG'] = 'ssl'
curl = CurlClient(env=env, run_env=run_env)
# tell the server to close the connection after each request
urln = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo?'\
f'id=[0-{count-1}]&close'
r = curl.http_download(urls=[urln], alpn_proto=proto, with_stats=True,
extra_args=xargs)
r.check_response(count=count, http_status=200)
# should have used one connection for each request, sessions after
# first should have been resumed
assert r.total_connects == count, r.dump_logs()
for i in range(count):
dfile = curl.download_file(i)
assert os.path.exists(dfile)
with open(dfile) as f:
djson = json.load(f)
assert djson['HTTPS'] == 'on', f'{i}: {djson}'
if i == 0:
assert djson['SSL_SESSION_RESUMED'] == 'Initial', f'{i}: {djson}\n{r.dump_logs()}'
else:
assert djson['SSL_SESSION_RESUMED'] == exp_resumed, f'{i}: {djson}\n{r.dump_logs()}'
# use host name with trailing dot, verify handshake
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_03_trailing_dot(self, env: Env, proto, httpd, nghttpx):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = f'{env.domain1}.'
url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 0, f'{r}'
assert r.json, f'{r}'
if proto != 'h3': # we proxy h3
# the SNI the server received is without trailing dot
assert r.json['SSL_TLS_SNI'] == env.domain1, f'{r.json}'
# use host name with double trailing dot, verify handshake
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_04_double_dot(self, env: Env, proto, httpd, nghttpx):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = f'{env.domain1}..'
url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'-H', f'Host: {env.domain1}',
])
if r.exit_code == 0:
assert r.json, f'{r.stdout}'
# the SNI the server received is without trailing dot
if proto != 'h3': # we proxy h3
assert r.json['SSL_TLS_SNI'] == env.domain1, f'{r.json}'
assert False, f'should not have succeeded: {r.json}'
# 7 - Rustls rejects a servername with .. during setup
# 35 - LibreSSL rejects setting an SNI name with trailing dot
# 60 - peer name matching failed against certificate
assert r.exit_code in [7, 35, 60], f'{r}'
# use ip address for connect
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_05_good_ip_addr(self, env: Env, proto, httpd, nghttpx):
if env.curl_uses_lib('mbedtls'):
pytest.skip("mbedTLS does use IP addresses in SNI")
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = '127.0.0.1'
url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 0, f'{r}'
assert r.json, f'{r}'
if proto != 'h3': # we proxy h3
# the SNI should not have been used
assert 'SSL_TLS_SNI' not in r.json, f'{r.json}'
# use IP address that is not in cert
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_05_bad_ip_addr(self, env: Env, proto,
httpd, configures_httpd,
nghttpx, configures_nghttpx):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
httpd.set_domain1_cred_name('domain1-no-ip')
httpd.reload_if_config_changed()
if proto == 'h3':
nghttpx.set_cred_name('domain1-no-ip')
nghttpx.reload_if_config_changed()
curl = CurlClient(env=env)
url = f'https://127.0.0.1:{env.port_for(proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 60, f'{r}'
# use IP address that is in cert as DNS name (not really legal)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_05_very_bad_ip_addr(self, env: Env, proto,
httpd, configures_httpd,
nghttpx, configures_nghttpx):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if env.curl_uses_lib('mbedtls'):
pytest.skip("mbedtls falsely verifies a DNS: altname as IP address")
if env.curl_uses_lib('wolfssl'):
pytest.skip("wolfSSL falsely verifies a DNS: altname as IP address")
httpd.set_domain1_cred_name('domain1-very-bad')
httpd.reload_if_config_changed()
if proto == 'h3':
nghttpx.set_cred_name('domain1-very-bad')
nghttpx.reload_if_config_changed()
curl = CurlClient(env=env)
url = f'https://127.0.0.1:{env.port_for(proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 60, f'{r}'
# use localhost for connect
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_06_localhost(self, env: Env, proto, httpd, nghttpx):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = 'localhost'
url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 0, f'{r}'
assert r.json, f'{r}'
if proto != 'h3': # we proxy h3
assert r.json['SSL_TLS_SNI'] == domain, f'{r.json}'
@staticmethod
def gen_test_17_07_list():
tls13_tests = [
['def', None, True],
['AES128SHA256', ['TLS_AES_128_GCM_SHA256'], True],
['AES128SHA384', ['TLS_AES_256_GCM_SHA384'], False],
['CHACHA20SHA256', ['TLS_CHACHA20_POLY1305_SHA256'], True],
['AES128SHA384+CHACHA20SHA256', ['TLS_AES_256_GCM_SHA384', 'TLS_CHACHA20_POLY1305_SHA256'], True],
]
tls12_tests = [
['def', None, True],
['AES128ish', ['ECDHE-ECDSA-AES128-GCM-SHA256', 'ECDHE-RSA-AES128-GCM-SHA256'], True],
['AES256ish', ['ECDHE-ECDSA-AES256-GCM-SHA384', 'ECDHE-RSA-AES256-GCM-SHA384'], False],
['CHACHA20ish', ['ECDHE-ECDSA-CHACHA20-POLY1305', 'ECDHE-RSA-CHACHA20-POLY1305'], True],
['AES256ish+CHACHA20ish', ['ECDHE-ECDSA-AES256-GCM-SHA384', 'ECDHE-RSA-AES256-GCM-SHA384',
'ECDHE-ECDSA-CHACHA20-POLY1305', 'ECDHE-RSA-CHACHA20-POLY1305'], True],
]
ret = []
for tls_id, tls_proto in {
'TLSv1.2+3': 'TLSv1.3 +TLSv1.2',
'TLSv1.3': 'TLSv1.3',
'TLSv1.2': 'TLSv1.2'}.items():
for [cid13, ciphers13, succeed13] in tls13_tests:
for [cid12, ciphers12, succeed12] in tls12_tests:
id = f'{tls_id}-{cid13}-{cid12}'
ret.append(pytest.param(tls_proto, ciphers13, ciphers12, succeed13, succeed12, id=id))
return ret
@pytest.mark.parametrize(
"tls_proto, ciphers13, ciphers12, succeed13, succeed12",
gen_test_17_07_list())
def test_17_07_ssl_ciphers(self, env: Env, httpd, configures_httpd,
tls_proto, ciphers13, ciphers12,
succeed13, succeed12):
# to test setting cipher suites, the AES 256 ciphers are disabled in the test server
httpd.set_extra_config('base', [
'SSLCipherSuite SSL'
' ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256'
':ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305',
'SSLCipherSuite TLSv1.3'
' TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256',
f'SSLProtocol {tls_proto}'
])
httpd.reload_if_config_changed()
proto = 'http/1.1'
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
# SSL backend specifics
if env.curl_uses_lib('gnutls'):
pytest.skip('GnuTLS does not support setting ciphers')
elif env.curl_uses_lib('boringssl'):
if ciphers13 is not None:
pytest.skip('BoringSSL does not support setting TLSv1.3 ciphers')
elif env.curl_uses_lib('schannel'): # not in CI, so untested
if ciphers12 is not None:
pytest.skip('Schannel does not support setting TLSv1.2 ciphers by name')
elif env.curl_uses_lib('mbedtls') and not env.curl_lib_version_at_least('mbedtls', '3.6.0'):
if tls_proto == 'TLSv1.3':
pytest.skip('mbedTLS < 3.6.0 does not support TLSv1.3')
# test
extra_args = ['--tls13-ciphers', ':'.join(ciphers13)] if ciphers13 else []
extra_args += ['--ciphers', ':'.join(ciphers12)] if ciphers12 else []
r = curl.http_get(url=url, alpn_proto=proto, extra_args=extra_args)
if tls_proto != 'TLSv1.2' and succeed13:
assert r.exit_code == 0, r.dump_logs()
assert r.json['HTTPS'] == 'on', r.dump_logs()
assert r.json['SSL_PROTOCOL'] == 'TLSv1.3', r.dump_logs()
assert ciphers13 is None or r.json['SSL_CIPHER'] in ciphers13, r.dump_logs()
elif tls_proto == 'TLSv1.2' and succeed12:
assert r.exit_code == 0, r.dump_logs()
assert r.json['HTTPS'] == 'on', r.dump_logs()
assert r.json['SSL_PROTOCOL'] == 'TLSv1.2', r.dump_logs()
assert ciphers12 is None or r.json['SSL_CIPHER'] in ciphers12, r.dump_logs()
else:
assert r.exit_code != 0, r.dump_logs()
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_08_cert_status(self, env: Env, proto, httpd, nghttpx):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if not env.curl_uses_lib('openssl') and \
not env.curl_uses_lib('gnutls') and \
not env.curl_uses_lib('quictls'):
pytest.skip("TLS library does not support --cert-status")
curl = CurlClient(env=env)
domain = 'localhost'
url = f'https://{env.authority_for(domain, proto)}/'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'--cert-status'
])
# CURLE_SSL_INVALIDCERTSTATUS, our certs have no OCSP info
assert r.exit_code == 91, f'{r}'
@staticmethod
def gen_test_17_09_list():
return [
[server_tls, min_arg, max_arg]
for server_tls in TLSDefs.TLS_VERSIONS
for min_arg in TLSDefs.CURL_ARG_MIN_VERSION_ID
for max_arg in TLSDefs.CURL_ARG_MAX_VERSION_ID
]
@pytest.mark.parametrize("server_tls, min_arg, max_arg", gen_test_17_09_list())
def test_17_09_ssl_min_max(self, env: Env, httpd, configures_httpd, server_tls, min_arg, max_arg):
# We test if curl using min/max versions arguments (and defaults) can connect
# to a server using 'server_tls' version only
httpd.set_extra_config('base', [
f'SSLProtocol {server_tls}',
'SSLCipherSuite ALL:@SECLEVEL=0',
])
httpd.reload_if_config_changed()
# curl's TLS backend supported version
if env.curl_uses_lib('gnutls') or \
env.curl_uses_lib('quiche') or \
env.curl_uses_lib('aws-lc') or \
env.curl_uses_lib('boringssl'):
curl_supported = [0x301, 0x302, 0x303, 0x304]
elif env.curl_uses_lib('openssl') and \
env.curl_lib_version_before('openssl', '3.0.0'):
curl_supported = [0x301, 0x302, 0x303, 0x304]
else: # most SSL backends dropped support for TLSv1.0, TLSv1.1
curl_supported = [0x303, 0x304]
extra_args = ['--trace-config', 'ssl']
# determine effective min/max version used by curl with these args
if max_arg != 'none':
extra_args.extend(['--tls-max', max_arg])
curl_max_ver = TLSDefs.CURL_ARG_MAX_VERSION_ID[max_arg]
else:
curl_max_ver = max(TLSDefs.TLS_VERSION_IDS.values())
if min_arg != 'none':
extra_args.append(f'--{min_arg}')
curl_min_ver = TLSDefs.CURL_ARG_MIN_VERSION_ID[min_arg]
else:
curl_min_ver = min(0x303, curl_max_ver) # TLSv1.2 is the default now
# collect all versions that curl is allowed with this command lines and supports
curl_allowed = [tid for tid in sorted(TLSDefs.TLS_VERSION_IDS.values())
if curl_min_ver <= tid <= curl_max_ver and
tid in curl_supported]
# we expect a successful transfer, when the server TLS version is allowed
server_ver = TLSDefs.TLS_VERSION_IDS[server_tls]
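# for example, '--tlsv1.2 --tls-max 1.2' narrows curl_allowed to [0x303], so a
# server restricted to TLSv1.3 is then expected to fail (illustrative case)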
# do the transfer
proto = 'http/1.1'
run_env = os.environ.copy()
if env.curl_uses_lib('gnutls'):
# we need to override any default system configuration since
# we want to test all protocol versions. Ubuntu (or the GH image)
# disables TLS1.0 and TLS1.1 system-wide, which we do not want here.
our_config = os.path.join(env.gen_dir, 'gnutls_config')
if not os.path.exists(our_config):
with open(our_config, 'w') as fd:
fd.write('# empty\n')
run_env['GNUTLS_SYSTEM_PRIORITY_FILE'] = our_config
curl = CurlClient(env=env, run_env=run_env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=extra_args)
if server_ver in curl_allowed:
assert r.exit_code == 0, f'should succeed, server={server_ver:04x}, curl=[{curl_min_ver:04x}, {curl_max_ver:04x}], allowed={curl_allowed}\n{r.dump_logs()}'
assert r.json['HTTPS'] == 'on', r.dump_logs()
assert r.json['SSL_PROTOCOL'] == server_tls, r.dump_logs()
else:
assert r.exit_code != 0, f'should fail, server={server_ver:04x}, curl=[{curl_min_ver:04x}, {curl_max_ver:04x}]\n{r.dump_logs()}'
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_17_10_h3_session_reuse(self, env: Env, httpd, nghttpx):
if not env.have_h3():
pytest.skip("h3 not supported")
if not env.curl_uses_lib('quictls') and \
not (env.curl_uses_lib('openssl') and env.curl_uses_lib('ngtcp2')) and \
not env.curl_uses_lib('gnutls') and \
not env.curl_uses_lib('wolfssl'):
pytest.skip("QUIC session reuse not implemented")
count = 2
docname = 'data-10k'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}',
'-f', # forbid reuse of connections
'-r', f'{env.domain1}:{env.port_for("h3")}:127.0.0.1',
'-V', 'h3', url
])
r.check_exit_code(0)
# check that TLS session was reused as expected
reused_session = False
for line in r.trace_lines:
if re.match(r'.*\[1-1] (\* )?SSL reusing session.*', line):
reused_session = True
assert reused_session, f'{r}\n{r.dump_logs()}'
# use host name server has no certificate for
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_11_wrong_host(self, env: Env, proto, httpd, nghttpx):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = f'insecure.{env.tld}'
url = f'https://{domain}:{env.port_for(proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 60, f'{r}'
# use host name server has no cert for with --insecure
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_12_insecure(self, env: Env, proto, httpd, nghttpx):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = f'insecure.{env.tld}'
url = f'https://{domain}:{env.port_for(proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'--insecure'
])
assert r.exit_code == 0, f'{r}'
assert r.json, f'{r}'
# connect to an expired certificate
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_17_14_expired_cert(self, env: Env, proto, httpd):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.expired_domain}:{env.port_for(proto)}/'
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 60, f'{r}' # peer failed verification
exp_trace = None
match_trace = None
if env.curl_uses_lib('openssl') or env.curl_uses_lib('quictls'):
exp_trace = r'.*SSL certificate OpenSSL verify result: certificate has expired.*$'
elif env.curl_uses_lib('gnutls'):
exp_trace = r'.*SSL certificate verification failed: certificate has expired\..*'
elif env.curl_uses_lib('wolfssl'):
exp_trace = r'.*server verification failed: certificate has expired\.$'
if exp_trace is not None:
for line in r.trace_lines:
if re.match(exp_trace, line):
match_trace = line
break
assert match_trace, f'Did not find "{exp_trace}" in trace\n{r.dump_logs()}'
@pytest.mark.skipif(condition=not Env.curl_has_feature('SSLS-EXPORT'),
reason='curl lacks SSL session export support')
def test_17_15_session_export(self, env: Env, httpd):
proto = 'http/1.1'
if env.curl_uses_lib('libressl'):
pytest.skip('LibreSSL resumption does not work in TLSv1.3')
if env.curl_uses_lib('rustls-ffi'):
pytest.skip('rustls does not expose sessions')
if env.curl_uses_lib('mbedtls') and \
not env.curl_lib_version_at_least('mbedtls', '3.6.0'):
pytest.skip('mbedtls TLSv1.3 session resume not working before 3.6.0')
run_env = os.environ.copy()
run_env['CURL_DEBUG'] = 'ssl,ssls'
# clean session file first, then reuse
session_file = os.path.join(env.gen_dir, 'test_17_15.sessions')
if os.path.exists(session_file):
os.remove(session_file)
xargs = ['--tls-max', '1.3', '--tlsv1.3', '--ssl-sessions', session_file]
curl = CurlClient(env=env, run_env=run_env)
# tell the server to close the connection after each request
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=xargs)
assert r.exit_code == 0, f'{r}'
assert r.json['HTTPS'] == 'on', f'{r.json}'
assert r.json['SSL_SESSION_RESUMED'] == 'Initial', f'{r.json}\n{r.dump_logs()}'
# ok, run again, sessions should be imported
run_dir2 = os.path.join(env.gen_dir, 'curl2')
curl = CurlClient(env=env, run_env=run_env, run_dir=run_dir2)
r = curl.http_get(url=url, alpn_proto=proto, extra_args=xargs)
assert r.exit_code == 0, f'{r}'
assert r.json['SSL_SESSION_RESUMED'] == 'Resumed', f'{r.json}\n{r.dump_logs()}'
# verify the ciphers are ignored when talking TLSv1.3 only
# see issue #16232
def test_17_16_h3_ignore_ciphers12(self, env: Env, httpd, nghttpx):
proto = 'h3'
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if env.curl_uses_lib('gnutls'):
pytest.skip("gnutls does not ignore --ciphers on TLSv1.3")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'--ciphers', 'NONSENSE'
])
assert r.exit_code == 0, f'{r}'
def test_17_17_h1_ignore_ciphers13(self, env: Env, httpd):
proto = 'http/1.1'
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'--tls13-ciphers', 'NONSENSE', '--tls-max', '1.2'
])
assert r.exit_code == 0, f'{r}'
@pytest.mark.parametrize("priority, tls_proto, ciphers, success", [
pytest.param("", "", [], False, id='prio-empty'),
pytest.param("NONSENSE", "", [], False, id='nonsense'),
pytest.param("+NONSENSE", "", [], False, id='+nonsense'),
pytest.param("NORMAL:-VERS-ALL:+VERS-TLS1.2", "TLSv1.2", ['ECDHE-RSA-CHACHA20-POLY1305'], True, id='TLSv1.2-normal-only'),
pytest.param("-VERS-ALL:+VERS-TLS1.2", "TLSv1.2", ['ECDHE-RSA-CHACHA20-POLY1305'], True, id='TLSv1.2-only'),
pytest.param("NORMAL", "TLSv1.3", ['TLS_CHACHA20_POLY1305_SHA256'], True, id='TLSv1.3-normal'),
pytest.param("NORMAL:-VERS-ALL:+VERS-TLS1.3", "TLSv1.3", ['TLS_CHACHA20_POLY1305_SHA256'], True, id='TLSv1.3-normal-only'),
pytest.param("-VERS-ALL:+VERS-TLS1.3", "TLSv1.3", ['TLS_CHACHA20_POLY1305_SHA256'], True, id='TLSv1.3-only'),
pytest.param("!CHACHA20-POLY1305", "TLSv1.3", ['TLS_AES_128_GCM_SHA256'], True, id='TLSv1.3-no-chacha'),
pytest.param("-CIPHER-ALL:+CHACHA20-POLY1305", "TLSv1.3", ['TLS_CHACHA20_POLY1305_SHA256'], True, id='TLSv1.3-only-chacha'),
pytest.param("-CIPHER-ALL:+AES-256-GCM", "", [], False, id='only-AES256'),
pytest.param("-CIPHER-ALL:+AES-128-GCM", "TLSv1.3", ['TLS_AES_128_GCM_SHA256'], True, id='TLSv1.3-only-AES128'),
pytest.param("SECURE:-CIPHER-ALL:+AES-128-GCM:-VERS-ALL:+VERS-TLS1.2", "TLSv1.2", ['ECDHE-RSA-AES128-GCM-SHA256'], True, id='TLSv1.2-secure'),
pytest.param("-MAC-ALL:+SHA256", "", [], False, id='MAC-only-SHA256'),
pytest.param("-MAC-ALL:+AEAD", "TLSv1.3", ['TLS_CHACHA20_POLY1305_SHA256'], True, id='TLSv1.3-MAC-only-AEAD'),
pytest.param("-GROUP-ALL:+GROUP-X25519", "TLSv1.3", ['TLS_CHACHA20_POLY1305_SHA256'], True, id='TLSv1.3-group-only-X25519'),
pytest.param("-GROUP-ALL:+GROUP-SECP192R1", "", [], False, id='group-only-SECP192R1'),
])
def test_17_18_gnutls_priority(self, env: Env, httpd, configures_httpd, priority, tls_proto, ciphers, success):
# to test setting cipher suites, the AES 256 ciphers are disabled in the test server
httpd.set_extra_config('base', [
'SSLCipherSuite SSL'
' ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256'
':ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305',
'SSLCipherSuite TLSv1.3'
' TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256',
])
httpd.reload_if_config_changed()
proto = 'http/1.1'
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
# SSL backend specifics
if not env.curl_uses_lib('gnutls'):
pytest.skip('curl not built with GnuTLS')
# test
extra_args = ['--ciphers', f'{priority}']
r = curl.http_get(url=url, alpn_proto=proto, extra_args=extra_args)
if success:
assert r.exit_code == 0, r.dump_logs()
assert r.json['HTTPS'] == 'on', r.dump_logs()
if tls_proto:
assert r.json['SSL_PROTOCOL'] == tls_proto, r.dump_logs()
assert r.json['SSL_CIPHER'] in ciphers, r.dump_logs()
else:
assert r.exit_code != 0, r.dump_logs()
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_19_wrong_pin(self, env: Env, proto, httpd):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if env.curl_uses_lib('rustls-ffi'):
pytest.skip('TLS backend ignores --pinnedpubkey')
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'--pinnedpubkey', 'sha256//ffff'
])
# expect NOT_IMPLEMENTED or CURLE_SSL_PINNEDPUBKEYNOTMATCH
assert r.exit_code in [2, 90], f'{r.dump_logs()}'
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_17_20_correct_pin(self, env: Env, proto, httpd):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
creds = env.get_credentials(env.domain1)
assert creds
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'--pinnedpubkey', f'sha256//{creds.pub_sha256_b64()}'
])
# expect NOT_IMPLEMENTED or OK
assert r.exit_code in [0, 2], f'{r.dump_logs()}'
+67
View File
@@ -0,0 +1,67 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import pytest
from testenv import Env, CurlClient
log = logging.getLogger(__name__)
class TestMethods:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, httpd, nghttpx):
indir = httpd.docs_dir
env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
# delete 1 resource
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_18_01_delete(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]'
r = curl.http_delete(urls=[url], alpn_proto=proto)
r.check_stats(count=count, http_status=204, exitcode=0)
# make the HTTP/2 server send
# - a HEADERS frame with 204 and eos=0
# - 10ms later, a DATA frame with length=0 and eos=1
# this should be accepted
def test_18_02_delete_h2_special(self, env: Env, httpd, nghttpx):
proto = 'h2'
count = 1
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]'\
'&chunks=1&chunk_size=0&chunk_delay=10ms'
r = curl.http_delete(urls=[url], alpn_proto=proto)
r.check_stats(count=count, http_status=204, exitcode=0)
+214
View File
@@ -0,0 +1,214 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import re
import pytest
from testenv import Env, CurlClient, LocalClient
log = logging.getLogger(__name__)
class TestShutdown:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, httpd):
indir = httpd.docs_dir
env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
# check with `tcpdump` that we see curl TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_19_01_check_tcp_rst(self, env: Env, httpd, proto):
if env.ci_run:
pytest.skip("seems not to work in CI")
# timing critical, disable trace overrides
run_env = os.environ.copy()
if 'CURL_DEBUG' in run_env:
del run_env['CURL_DEBUG']
curl = CurlClient(env=env, run_env=run_env)
port = env.port_for(alpn_proto=proto)
url = f'https://{env.domain1}:{port}/data.json?[0-1]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[
'--parallel'
])
r.check_response(http_status=200, count=2)
assert r.tcpdump
assert len(r.tcpdump.get_rsts(ports=[port])) != 0, f'Expected TCP RST packets: {r.tcpdump.stderr}'
# check with `tcpdump` that we do NOT see TCP RST when CURL_GRACEFUL_SHUTDOWN set
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_19_02_check_shutdown(self, env: Env, httpd, proto):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
run_env = os.environ.copy()
run_env.update({
'CURL_GRACEFUL_SHUTDOWN': '2000',
'CURL_DEBUG': 'ssl,tcp,lib-ids,multi'
})
curl = CurlClient(env=env, run_env=run_env)
port = env.port_for(alpn_proto=proto)
url = f'https://{env.domain1}:{port}/data.json?[0-1]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[
'--parallel'
])
r.check_response(http_status=200, count=2)
assert r.tcpdump
assert len(r.tcpdump.get_rsts(ports=[port])) == 0, 'Unexpected TCP RST packets'
# run downloads where the server closes the connection after each request
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_19_03_shutdown_by_server(self, env: Env, httpd, proto):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
if not env.curl_is_verbose():
pytest.skip('only works for curl with verbose strings')
count = 10
curl = CurlClient(env=env, run_env={
'CURL_GRACEFUL_SHUTDOWN': '2000',
'CURL_DEBUG': 'ssl,multi'
})
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak/?'\
f'id=[0-{count-1}]&with_cl&close'
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(http_status=200, count=count)
shutdowns = [line for line in r.trace_lines
if re.match(r'.*\[SHUTDOWN] shutdown, done=1', line)]
assert len(shutdowns) == count, f'{shutdowns}'
# run downloads with CURLOPT_FORBID_REUSE set, meaning *we* close
# the connection after each request
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_19_04_shutdown_by_curl(self, env: Env, httpd, proto):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
if not env.curl_is_verbose():
pytest.skip('only works for curl with verbose strings')
count = 10
docname = 'data.json'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env, run_env={
'CURL_GRACEFUL_SHUTDOWN': '2000',
'CURL_DEBUG': 'ssl,multi'
})
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-f', '-V', proto, url
])
r.check_exit_code(0)
shutdowns = [line for line in r.trace_lines
if re.match(r'.*SHUTDOWN] shutdown, done=1', line)]
assert len(shutdowns) == count, f'{shutdowns}'
# run event-based downloads with CURLOPT_FORBID_REUSE set, meaning *we* close
# the connection after each request
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_19_05_event_shutdown_by_server(self, env: Env, httpd, proto):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
if not env.curl_is_verbose():
pytest.skip('only works for curl with verbose strings')
count = 10
run_env = os.environ.copy()
# forbid connection reuse to trigger shutdowns after transfer
run_env['CURL_FORBID_REUSE'] = '1'
# make socket receives block 50% of the time to delay shutdown
run_env['CURL_DBG_SOCK_RBLOCK'] = '50'
run_env['CURL_DEBUG'] = 'ssl,multi,lib-ids'
curl = CurlClient(env=env, run_env=run_env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak/?'\
f'id=[0-{count-1}]&with_cl&'
r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
'--test-event'
])
r.check_response(http_status=200, count=count)
# check that we closed all connections
closings = [line for line in r.trace_lines
if re.match(r'.*SHUTDOWN] (force )?closing', line)]
assert len(closings) == count, f'{closings}'
# check that all connection sockets were removed from event
removes = [line for line in r.trace_lines
if re.match(r'.*socket cb: socket \d+ REMOVED', line)]
assert len(removes) == count, f'{removes}'
# check graceful shutdown on multiplexed http
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_19_06_check_shutdown(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
if not env.curl_is_verbose():
pytest.skip('only works for curl with verbose strings')
curl = CurlClient(env=env, run_env={
'CURL_GRACEFUL_SHUTDOWN': '2000',
'CURL_DEBUG': 'all'
})
url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[
'--parallel'
])
r.check_response(http_status=200, count=2)
# check connection cache closings
shutdowns = [line for line in r.trace_lines
if re.match(r'.*SHUTDOWN] shutdown, done=1', line)]
assert len(shutdowns) == 1, f'{shutdowns}'
# run connection pressure: many small transfers, no connection reuse,
# limited total connections
@pytest.mark.parametrize("proto", ['http/1.1'])
def test_19_07_shutdown_by_curl(self, env: Env, httpd, proto):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
count = 500
docname = 'data.json'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='cli_hx_download', env=env, run_env={
'CURL_GRACEFUL_SHUTDOWN': '2000',
'CURL_DEBUG': 'ssl,multi'
})
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', # that many transfers
'-f', # forbid conn reuse
'-m', '10', # max parallel
'-T', '5', # max total conns at a time
'-V', proto,
url
])
r.check_exit_code(0)
shutdowns = [line for line in r.trace_lines
if re.match(r'.*SHUTDOWN] shutdown, done=1', line)]
# we see fewer clean shutdowns, as the total connection limit forces early closes
assert len(shutdowns) < count, f'{shutdowns}'
+209
View File
@@ -0,0 +1,209 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import shutil
import socket
import subprocess
import time
from datetime import datetime, timedelta
from typing import Dict
import pytest
from testenv import Env, CurlClient, LocalClient
from testenv.ports import alloc_ports_and_do
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=not Env.curl_has_protocol('ws'),
reason='curl lacks ws protocol support')
class TestWebsockets:
PORT_SPECS = {
'ws': socket.SOCK_STREAM,
}
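# helper: poll the given port with HTTP requests until the server answers or the timeout expires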
def check_alive(self, env, port, timeout=Env.SERVER_TIMEOUT):
curl = CurlClient(env=env)
url = f'http://localhost:{port}/'
end = datetime.now() + timedelta(seconds=timeout)
while datetime.now() < end:
r = curl.http_download(urls=[url])
if r.exit_code == 0:
return True
time.sleep(.1)
return False
def _mkpath(self, path):
if not os.path.exists(path):
return os.makedirs(path)
def _rmrf(self, path):
if os.path.exists(path):
return shutil.rmtree(path)
@pytest.fixture(autouse=True, scope='class')
def ws_echo(self, env):
self.run_dir = os.path.join(env.gen_dir, 'ws_echo_server')
err_file = os.path.join(self.run_dir, 'stderr')
self._rmrf(self.run_dir)
self._mkpath(self.run_dir)
self.cmd = os.path.join(env.project_dir,
'tests/http/testenv/ws_echo_server.py')
self.wsproc = None
self.cerr = None
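# callback for alloc_ports_and_do(): start the echo server on the allocated port
# and report success only once it is reachable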
def startup(ports: Dict[str, int]) -> bool:
wargs = [self.cmd, '--port', str(ports['ws'])]
log.info(f'start_ {wargs}')
self.wsproc = subprocess.Popen(args=wargs,
cwd=self.run_dir,
stderr=self.cerr,
stdout=self.cerr)
if self.check_alive(env, ports['ws']):
env.update_ports(ports)
return True
log.error(f'not alive {wargs}')
self.wsproc.terminate()
self.wsproc = None
return False
with open(err_file, 'w') as self.cerr:
assert alloc_ports_and_do(TestWebsockets.PORT_SPECS, startup,
env.gen_root, max_tries=3)
assert self.wsproc
yield
self.wsproc.terminate()
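# a plain HTTP GET without the websocket upgrade handshake should be answered with 426 Upgrade Required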
def test_20_01_basic(self, env: Env, ws_echo):
curl = CurlClient(env=env)
url = f'http://localhost:{env.ws_port}/'
r = curl.http_download(urls=[url])
r.check_response(http_status=426)
def test_20_02_pingpong_small(self, env: Env, ws_echo):
payload = 125 * "x"
client = LocalClient(env=env, name='cli_ws_pingpong')
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
r = client.run(args=[url, payload])
r.check_exit_code(0)
# the python websocket server does not like 'large' control frames
def test_20_03_pingpong_too_large(self, env: Env, ws_echo):
payload = 127 * "x"
client = LocalClient(env=env, name='cli_ws_pingpong')
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
r = client.run(args=[url, payload])
r.check_exit_code(100) # CURLE_TOO_LARGE
@pytest.mark.parametrize("model", [
pytest.param(1, id='multi_perform'),
pytest.param(2, id='curl_ws_send+recv'),
])
def test_20_04_data_small(self, env: Env, ws_echo, model):
client = LocalClient(env=env, name='cli_ws_data')
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
r = client.run(args=[f'-{model}', '-m', str(1), '-M', str(10), url])
r.check_exit_code(0)
@pytest.mark.parametrize("model", [
pytest.param(1, id='multi_perform'),
pytest.param(2, id='curl_ws_send+recv'),
])
def test_20_05_data_med(self, env: Env, ws_echo, model):
client = LocalClient(env=env, name='cli_ws_data')
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
r = client.run(args=[f'-{model}', '-m', str(120), '-M', str(130), url])
r.check_exit_code(0)
@pytest.mark.parametrize("model", [
pytest.param(1, id='multi_perform'),
pytest.param(2, id='curl_ws_send+recv'),
])
def test_20_06_data_large(self, env: Env, ws_echo, model):
client = LocalClient(env=env, name='cli_ws_data')
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
r = client.run(args=[f'-{model}', '-m', str(65535 - 5), '-M', str(65535 + 5), url])
r.check_exit_code(0)
@pytest.mark.parametrize("model", [
pytest.param(1, id='multi_perform'),
pytest.param(2, id='curl_ws_send+recv'),
])
def test_20_07_data_large_small_recv(self, env: Env, ws_echo, model):
run_env = os.environ.copy()
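# CURL_WS_CHUNK_SIZE (a debug-build knob) limits the websocket chunk size so large frames are processed in small pieces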
run_env['CURL_WS_CHUNK_SIZE'] = '1024'
client = LocalClient(env=env, name='cli_ws_data', run_env=run_env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
r = client.run(args=[f'-{model}', '-m', str(65535 - 5), '-M', str(65535 + 5), url])
r.check_exit_code(0)
# Send large frames and simulate send blocking on 8192-byte chunks.
# Simulates the error reported in #15865
@pytest.mark.parametrize("model", [
pytest.param(1, id='multi_perform'),
pytest.param(2, id='curl_ws_send+recv'),
])
def test_20_08_data_very_large(self, env: Env, ws_echo, model):
run_env = os.environ.copy()
run_env['CURL_WS_CHUNK_EAGAIN'] = '8192'
client = LocalClient(env=env, name='cli_ws_data', run_env=run_env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
count = 10
large = 20000
r = client.run(args=[f'-{model}', '-c', str(count), '-m', str(large), url])
r.check_exit_code(0)
@pytest.mark.parametrize("model", [
pytest.param(1, id='multi_perform'),
pytest.param(2, id='curl_ws_send+recv'),
])
def test_20_09_data_empty(self, env: Env, ws_echo, model):
client = LocalClient(env=env, name='cli_ws_data')
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
count = 10
large = 0
r = client.run(args=[f'-{model}', '-c', str(count), '-m', str(large), url])
r.check_exit_code(0)
+251
View File
@@ -0,0 +1,251 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import difflib
import filecmp
import logging
import os
import shutil
import pytest
from testenv import Env, CurlClient, VsFTPD
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=not Env.has_vsftpd(), reason="missing vsftpd")
class TestVsFTPD:
@pytest.fixture(autouse=True, scope='class')
def vsftpd(self, env):
vsftpd = VsFTPD(env=env)
assert vsftpd.initial_start()
yield vsftpd
vsftpd.stop()
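# helper: create a test document of fsize bytes, written in 1k blocks of 'x'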
def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
data1k = 1024*'x'
flen = 0
with open(fpath, 'w') as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return flen
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, vsftpd):
if os.path.exists(vsftpd.docs_dir):
shutil.rmtree(vsftpd.docs_dir)
if not os.path.exists(vsftpd.docs_dir):
os.makedirs(vsftpd.docs_dir)
self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-1k', fsize=1024)
self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-10k', fsize=10*1024)
self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-1m', fsize=1024*1024)
self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-10m', fsize=10*1024*1024)
env.make_data_file(indir=env.gen_dir, fname="upload-1k", fsize=1024)
env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100*1024)
env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024*1024)
def test_30_01_list_dir(self, env: Env, vsftpd: VsFTPD):
curl = CurlClient(env=env)
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=1, http_status=226)
lines = open(os.path.join(curl.run_dir, 'download_#1.data')).readlines()
assert len(lines) == 4, f'list: {lines}'
# download 1 file, no SSL
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
def test_30_02_download_1(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
def test_30_03_download_10_serial(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
count = 10
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
assert r.total_connects == count + 1, 'should reuse the control conn'
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
def test_30_04_download_10_parallel(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
count = 10
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True, extra_args=[
'--parallel'
])
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
assert r.total_connects > count + 1, 'should have used several control conns'
@pytest.mark.parametrize("docname", [
'upload-1k', 'upload-100k', 'upload-1m'
])
def test_30_05_upload_1(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpd, docname=docname)
def _rmf(self, path):
if os.path.exists(path):
return os.remove(path)
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_30_06_shutdownh_download(self, env: Env, vsftpd: VsFTPD):
docname = 'data-1k'
curl = CurlClient(env=env)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True, with_tcpdump=True)
r.check_stats(count=count, http_status=226)
assert r.tcpdump
# vsftpd closes the control connection without niceties,
# look only at ports from the DATA connection.
data_ports = vsftpd.get_data_ports(r)
assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}'
assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets'
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_30_07_shutdownh_upload(self, env: Env, vsftpd: VsFTPD):
docname = 'upload-1k'
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, with_tcpdump=True)
r.check_stats(count=count, http_status=226)
assert r.tcpdump
# vsftpd closes the control connection without niceties,
# look only at ports from the DATA connection.
data_ports = vsftpd.get_data_ports(r)
assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}'
assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets'
def test_30_08_active_download(self, env: Env, vsftpd: VsFTPD):
docname = 'data-10k'
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1'
])
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
def test_30_09_active_up_file(self, env: Env, vsftpd: VsFTPD):
docname = 'upload-1k'
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1'
])
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpd, docname=docname)
def test_30_10_active_up_ascii(self, env: Env, vsftpd: VsFTPD):
docname = 'upload-1k'
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1', '--use-ascii'
])
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpd, docname=docname, binary=False)
def test_30_11_download_non_existing(self, env: Env, vsftpd: VsFTPD):
curl = CurlClient(env=env)
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/does-not-exist'
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_exit_code(78)
r.check_stats(count=1, exitcode=78)
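# compare each downloaded file against the source document; show a unified diff on mismatch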
def check_downloads(self, client, srcfile: str, count: int,
complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dfile).readlines(),
fromfile=srcfile,
tofile=dfile,
n=1))
assert False, f'download {dfile} differs:\n{diff}'
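# verify the uploaded file exists on the server and compare it with the source;
# for ascii uploads, tolerate a mismatch only when the line-level diff is empty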
def check_upload(self, env, vsftpd: VsFTPD, docname, binary=True):
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
assert os.path.exists(srcfile)
assert os.path.exists(dstfile)
if not filecmp.cmp(srcfile, dstfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dstfile).readlines(),
fromfile=srcfile,
tofile=dstfile,
n=1))
assert not binary and len(diff) == 0, f'upload {dstfile} differs:\n{diff}'
+282
View File
@@ -0,0 +1,282 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import difflib
import filecmp
import logging
import os
import shutil
import pytest
from testenv import Env, CurlClient, VsFTPD
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=not Env.has_vsftpd(), reason="missing vsftpd")
class TestVsFTPD:
SUPPORTS_SSL = True
@pytest.fixture(autouse=True, scope='class')
def vsftpds(self, env):
if not TestVsFTPD.SUPPORTS_SSL:
pytest.skip('vsftpd does not seem to support SSL')
vsftpds = VsFTPD(env=env, with_ssl=True)
if not vsftpds.initial_start():
vsftpds.stop()
TestVsFTPD.SUPPORTS_SSL = False
pytest.skip('vsftpd does not seem to support SSL')
yield vsftpds
vsftpds.stop()
def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
data1k = 1024*'x'
flen = 0
with open(fpath, 'w') as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return flen
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, vsftpds):
if os.path.exists(vsftpds.docs_dir):
shutil.rmtree(vsftpds.docs_dir)
if not os.path.exists(vsftpds.docs_dir):
os.makedirs(vsftpds.docs_dir)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-1k', fsize=1024)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-10k', fsize=10*1024)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-1m', fsize=1024*1024)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-10m', fsize=10*1024*1024)
env.make_data_file(indir=env.gen_dir, fname="upload-1k", fsize=1024)
env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100*1024)
env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024*1024)
def test_31_01_list_dir(self, env: Env, vsftpds: VsFTPD):
curl = CurlClient(env=env)
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_ssl_get(urls=[url], with_stats=True)
r.check_stats(count=1, http_status=226)
lines = open(os.path.join(curl.run_dir, 'download_#1.data')).readlines()
assert len(lines) == 4, f'list: {lines}'
# download 1 file
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
def test_31_02_download_1(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_ssl_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
def test_31_03_download_10_serial(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
count = 10
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_ssl_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
assert r.total_connects == count + 1, 'should reuse the control conn'
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
def test_31_04_download_10_parallel(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
count = 10
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_ssl_get(urls=[url], with_stats=True, extra_args=[
'--parallel'
])
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
assert r.total_connects > count + 1, 'should have used several control conns'
@pytest.mark.parametrize("docname", [
'upload-1k', 'upload-100k', 'upload-1m'
])
def test_31_05_upload_1(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpds, docname=docname)
def _rmf(self, path):
if os.path.exists(path):
return os.remove(path)
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_31_06_shutdownh_download(self, env: Env, vsftpds: VsFTPD):
docname = 'data-1k'
curl = CurlClient(env=env)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_ssl_get(urls=[url], with_stats=True, with_tcpdump=True)
r.check_stats(count=count, http_status=226)
# vsftpd closes the control connection without niceties,
# look only at ports from the DATA connection.
data_ports = vsftpds.get_data_ports(r)
assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}'
assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets'
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_31_07_shutdownh_upload(self, env: Env, vsftpds: VsFTPD):
docname = 'upload-1k'
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, with_tcpdump=True)
r.check_stats(count=count, http_status=226)
# vsftpd closes the control connection without niceties,
# look only at ports from the DATA connection.
data_ports = vsftpds.get_data_ports(r)
assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}'
assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets'
def test_31_08_upload_ascii(self, env: Env, vsftpds: VsFTPD):
docname = 'upload-ascii'
line_length = 21
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
env.make_data_file(indir=env.gen_dir, fname=docname, fsize=100*1024,
line_length=line_length)
srcsize = os.path.getsize(srcfile)
self._rmf(dstfile)
count = 1
curl = CurlClient(env=env)
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True,
extra_args=['--use-ascii'])
r.check_stats(count=count, http_status=226)
# expect the uploaded file to be larger by the number of converted newlines
dstsize = os.path.getsize(dstfile)
newlines = len(open(srcfile).readlines())
assert (srcsize + newlines) == dstsize, \
f'expected source with {newlines} lines to be that much larger, '\
f'instead srcsize={srcsize}, upload size={dstsize}, diff={dstsize-srcsize}'
def test_31_08_active_download(self, env: Env, vsftpds: VsFTPD):
docname = 'data-10k'
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_ssl_get(urls=[url], with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1'
])
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
def test_31_09_active_upload(self, env: Env, vsftpds: VsFTPD):
docname = 'upload-1k'
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1'
])
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpds, docname=docname)
@pytest.mark.parametrize("indata", [
pytest.param('1234567890', id='10-bytes'),
pytest.param('', id='0-bytes'),
])
def test_31_10_upload_stdin(self, env: Env, vsftpds: VsFTPD, indata):
curl = CurlClient(env=env)
docname = "upload_31_10"
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}'
r = curl.ftp_ssl_upload(urls=[url], updata=indata, with_stats=True)
r.check_stats(count=count, http_status=226)
assert os.path.exists(dstfile)
destdata = open(dstfile).readlines()
expdata = [indata] if len(indata) else []
assert expdata == destdata, f'expected: {expdata}, got: {destdata}'
def test_31_11_download_non_existing(self, env: Env, vsftpds: VsFTPD):
curl = CurlClient(env=env)
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/does-not-exist'
r = curl.ftp_ssl_get(urls=[url], with_stats=True)
r.check_exit_code(78)
r.check_stats(count=1, exitcode=78)
def check_downloads(self, client, srcfile: str, count: int,
complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dfile).readlines(),
fromfile=srcfile,
tofile=dfile,
n=1))
assert False, f'download {dfile} differs:\n{diff}'
def check_upload(self, env, vsftpd: VsFTPD, docname):
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
assert os.path.exists(srcfile)
assert os.path.exists(dstfile)
if not filecmp.cmp(srcfile, dstfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dstfile).readlines(),
fromfile=srcfile,
tofile=dstfile,
n=1))
assert False, f'upload {dstfile} differs:\n{diff}'
+294
View File
@@ -0,0 +1,294 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import difflib
import filecmp
import logging
import os
import shutil
import pytest
from testenv import Env, CurlClient, VsFTPD
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=not Env.has_vsftpd(), reason="missing vsftpd")
class TestFtpsVsFTPD:
SUPPORTS_SSL = True
@pytest.fixture(autouse=True, scope='class')
def vsftpds(self, env):
if not TestFtpsVsFTPD.SUPPORTS_SSL:
pytest.skip('vsftpd does not seem to support SSL')
vsftpds = VsFTPD(env=env, with_ssl=True, ssl_implicit=True)
if not vsftpds.initial_start():
vsftpds.stop()
TestFtpsVsFTPD.SUPPORTS_SSL = False
pytest.skip('vsftpd does not seem to support SSL')
yield vsftpds
vsftpds.stop()
def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
data1k = 1024*'x'
flen = 0
with open(fpath, 'w') as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return flen
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, vsftpds):
if os.path.exists(vsftpds.docs_dir):
shutil.rmtree(vsftpds.docs_dir)
if not os.path.exists(vsftpds.docs_dir):
os.makedirs(vsftpds.docs_dir)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-1k', fsize=1024)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-10k', fsize=10*1024)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-1m', fsize=1024*1024)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-10m', fsize=10*1024*1024)
env.make_data_file(indir=env.gen_dir, fname="upload-1k", fsize=1024)
env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100*1024)
env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024*1024)
def test_32_01_list_dir(self, env: Env, vsftpds: VsFTPD):
curl = CurlClient(env=env)
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=1, http_status=226)
lines = open(os.path.join(curl.run_dir, 'download_#1.data')).readlines()
assert len(lines) == 4, f'list: {lines}'
# download 1 file
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
def test_32_02_download_1(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
count = 1
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
def test_32_03_download_10_serial(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
count = 10
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
assert r.total_connects == count + 1, 'should reuse the control conn'
# 2 serial transfers, first with 'ftps://' and second with 'ftp://'
# we want connection reuse in this case
def test_32_03b_ftp_compat_ftps(self, env: Env, vsftpds: VsFTPD):
curl = CurlClient(env=env)
docname = 'data-1k'
count = 2
url1 = f'ftps://{env.ftp_domain}:{vsftpds.port}/{docname}'
url2 = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}'
r = curl.ftp_get(urls=[url1, url2], with_stats=True)
r.check_stats(count=count, http_status=226)
assert r.total_connects == count + 1, 'should reuse the control conn'
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
def test_32_04_download_10_parallel(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
count = 10
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True, extra_args=[
'--parallel'
])
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
assert r.total_connects > count + 1, 'should have used several control conns'
@pytest.mark.parametrize("docname", [
'upload-1k', 'upload-100k', 'upload-1m'
])
def test_32_05_upload_1(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpds, docname=docname)
def _rmf(self, path):
if os.path.exists(path):
return os.remove(path)
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_32_06_shutdownh_download(self, env: Env, vsftpds: VsFTPD):
docname = 'data-1k'
curl = CurlClient(env=env)
count = 1
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True, with_tcpdump=True)
r.check_stats(count=count, http_status=226)
# vsftpd closes the control connection without niceties,
# look only at ports from the DATA connection.
data_ports = vsftpds.get_data_ports(r)
assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}'
assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets'
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
@pytest.mark.skipif(condition=not Env.curl_is_debug(), reason="needs curl debug")
@pytest.mark.skipif(condition=not Env.curl_is_verbose(), reason="needs curl verbose strings")
def test_32_07_shutdownh_upload(self, env: Env, vsftpds: VsFTPD):
docname = 'upload-1k'
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, with_tcpdump=True)
r.check_stats(count=count, http_status=226)
# vsftpd closes the control connection without niceties,
# look only at ports from the DATA connection.
data_ports = vsftpds.get_data_ports(r)
assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}'
assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets'
def test_32_08_upload_ascii(self, env: Env, vsftpds: VsFTPD):
docname = 'upload-ascii'
line_length = 21
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
env.make_data_file(indir=env.gen_dir, fname=docname, fsize=100*1024,
line_length=line_length)
srcsize = os.path.getsize(srcfile)
self._rmf(dstfile)
count = 1
curl = CurlClient(env=env)
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True,
extra_args=['--use-ascii'])
r.check_stats(count=count, http_status=226)
# expect the uploaded file to be larger by the number of converted newlines
dstsize = os.path.getsize(dstfile)
newlines = len(open(srcfile).readlines())
assert (srcsize + newlines) == dstsize, \
f'expected source with {newlines} lines to be that much larger, '\
f'instead srcsize={srcsize}, upload size={dstsize}, diff={dstsize-srcsize}'
def test_32_08_active_download(self, env: Env, vsftpds: VsFTPD):
docname = 'data-10k'
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
count = 1
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1'
])
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
def test_32_09_active_upload(self, env: Env, vsftpds: VsFTPD):
docname = 'upload-1k'
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1'
])
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpds, docname=docname)
@pytest.mark.parametrize("indata", [
pytest.param('1234567890', id='10-bytes'),
pytest.param('', id='0-bytes'),
])
def test_32_10_upload_stdin(self, env: Env, vsftpds: VsFTPD, indata):
curl = CurlClient(env=env)
docname = "upload_31_10"
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/{docname}'
r = curl.ftp_upload(urls=[url], updata=indata, with_stats=True)
r.check_stats(count=count, http_status=226)
assert os.path.exists(dstfile)
destdata = open(dstfile).readlines()
expdata = [indata] if len(indata) else []
assert expdata == destdata, f'expected: {expdata}, got: {destdata}'
def test_32_11_download_non_existing(self, env: Env, vsftpds: VsFTPD):
curl = CurlClient(env=env)
url = f'ftps://{env.ftp_domain}:{vsftpds.port}/does-not-exist'
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_exit_code(78)
r.check_stats(count=1, exitcode=78)
def check_downloads(self, client, srcfile: str, count: int,
complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dfile).readlines(),
fromfile=srcfile,
tofile=dfile,
n=1))
assert False, f'download {dfile} differs:\n{diff}'
def check_upload(self, env, vsftpd: VsFTPD, docname):
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
assert os.path.exists(srcfile)
assert os.path.exists(dstfile)
if not filecmp.cmp(srcfile, dstfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dstfile).readlines(),
fromfile=srcfile,
tofile=dstfile,
n=1))
assert False, f'upload {dstfile} differs:\n{diff}'
+103
View File
@@ -0,0 +1,103 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
from typing import Generator
import pytest
from testenv import Env, CurlClient, Dante
log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=not Env.has_danted(), reason="missing danted")
class TestSocks:
@pytest.fixture(scope='class')
def danted(self, env: Env) -> Generator[Dante, None, None]:
danted = Dante(env=env)
assert danted.initial_start()
yield danted
danted.stop()
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, httpd):
indir = httpd.docs_dir
env.make_data_file(indir=indir, fname="data-10m", fsize=10*1024*1024)
env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
@pytest.mark.parametrize("sproto", ['socks4', 'socks5'])
def test_40_01_socks_http(self, env: Env, sproto, danted: Dante, httpd):
curl = CurlClient(env=env, socks_args=[
f'--{sproto}', f'127.0.0.1:{danted.port}'
])
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url)
r.check_response(http_status=200)
@pytest.mark.parametrize("sproto", ['socks4', 'socks5'])
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_40_02_socks_https(self, env: Env, sproto, proto, danted: Dante, httpd):
if proto == 'h3' and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env, socks_args=[
f'--{sproto}', f'127.0.0.1:{danted.port}'
])
url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
r = curl.http_get(url=url, alpn_proto=proto)
if proto == 'h3':
assert r.exit_code == 3 # unsupported combination
else:
r.check_response(http_status=200)
@pytest.mark.parametrize("sproto", ['socks4', 'socks5'])
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_40_03_dl_serial(self, env: Env, httpd, danted, proto, sproto):
count = 3
urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
curl = CurlClient(env=env, socks_args=[
f'--{sproto}', f'127.0.0.1:{danted.port}'
])
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200)
@pytest.mark.parametrize("sproto", ['socks4', 'socks5'])
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
def test_40_04_ul_serial(self, env: Env, httpd, danted, proto, sproto):
fdata = os.path.join(env.gen_dir, 'data-10m')
count = 2
curl = CurlClient(env=env, socks_args=[
f'--{sproto}', f'127.0.0.1:{danted.port}'
])
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
r.check_stats(count=count, http_status=200, exitcode=0)
indata = open(fdata).readlines()
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == indata
+39
View File
@@ -0,0 +1,39 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
# ruff: noqa: F401, E402
import pytest
pytest.register_assert_rewrite("testenv.env", "testenv.curl", "testenv.caddy",
"testenv.httpd", "testenv.nghttpx")
from .env import Env
from .certs import TestCA, Credentials
from .caddy import Caddy
from .httpd import Httpd
from .curl import CurlClient, ExecResult, RunProfile
from .client import LocalClient
from .nghttpx import Nghttpx, NghttpxQuic, NghttpxFwd
from .vsftpd import VsFTPD
from .dante import Dante
+206
View File
@@ -0,0 +1,206 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import socket
import subprocess
import time
from datetime import timedelta, datetime
from json import JSONEncoder
from typing import Dict
from .curl import CurlClient
from .env import Env
from .ports import alloc_ports_and_do
log = logging.getLogger(__name__)
class Caddy:
PORT_SPECS = {
'caddy': socket.SOCK_STREAM,
'caddys': socket.SOCK_STREAM,
}
def __init__(self, env: Env):
self.env = env
self._caddy = os.environ['CADDY'] if 'CADDY' in os.environ else env.caddy
self._caddy_dir = os.path.join(env.gen_dir, 'caddy')
self._docs_dir = os.path.join(self._caddy_dir, 'docs')
self._conf_file = os.path.join(self._caddy_dir, 'Caddyfile')
self._error_log = os.path.join(self._caddy_dir, 'caddy.log')
self._tmp_dir = os.path.join(self._caddy_dir, 'tmp')
self._process = None
self._http_port = 0
self._https_port = 0
self._rmf(self._error_log)
@property
def docs_dir(self):
return self._docs_dir
@property
def port(self) -> int:
return self._https_port
def clear_logs(self):
self._rmf(self._error_log)
def is_running(self):
if self._process:
self._process.poll()
return self._process.returncode is None
return False
def start_if_needed(self):
if not self.is_running():
return self.start()
return True
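# Ask alloc_ports_and_do() for free listener ports and try to start on them;
# the startup callback is presumably retried with fresh ports (up to
# max_tries) if the server fails to come up on the chosen ones.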
def initial_start(self):
def startup(ports: Dict[str, int]) -> bool:
self._http_port = ports['caddy']
self._https_port = ports['caddys']
if self.start():
self.env.update_ports(ports)
return True
self.stop()
self._http_port = 0
self._https_port = 0
return False
return alloc_ports_and_do(Caddy.PORT_SPECS, startup,
self.env.gen_root, max_tries=3)
def start(self, wait_live=True):
assert self._http_port > 0 and self._https_port > 0
self._mkpath(self._tmp_dir)
if self._process:
self.stop()
self._write_config()
args = [
self._caddy, 'run'
]
caddyerr = open(self._error_log, 'a')
self._process = subprocess.Popen(args=args, cwd=self._caddy_dir, stderr=caddyerr)
if self._process.returncode is not None:
return False
return not wait_live or self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT))
def stop(self, wait_dead=True):
self._mkpath(self._tmp_dir)
if self._process:
self._process.terminate()
try:
self._process.wait(timeout=1)
except Exception:
self._process.kill()
self._process = None
return not wait_dead or self.wait_dead(timeout=timedelta(seconds=5))
return True
def restart(self):
self.stop()
return self.start()
def wait_dead(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'https://{self.env.domain1}:{self.port}/'
r = curl.http_get(url=check_url)
if r.exit_code != 0:
return True
log.debug(f'waiting for caddy to stop responding: {r}')
time.sleep(.1)
log.debug(f"Server still responding after {timeout}")
return False
def wait_live(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'https://{self.env.domain1}:{self.port}/'
r = curl.http_get(url=check_url)
if r.exit_code == 0:
return True
time.sleep(.1)
log.error(f"Caddy still not responding after {timeout}")
return False
def _rmf(self, path):
if os.path.exists(path):
return os.remove(path)
def _mkpath(self, path):
if not os.path.exists(path):
return os.makedirs(path)
def _write_config(self):
domain1 = self.env.domain1
creds1 = self.env.get_credentials(domain1)
assert creds1 # convince pytype this isn't None
domain2 = self.env.domain2
creds2 = self.env.get_credentials(domain2)
assert creds2 # convince pytype this isn't None
self._mkpath(self._docs_dir)
self._mkpath(self._tmp_dir)
with open(os.path.join(self._docs_dir, 'data.json'), 'w') as fd:
data = {
'server': f'{domain1}',
}
fd.write(JSONEncoder().encode(data))
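# Generated Caddyfile: the global options pin the http/https ports and enable
# h3/h2/h1 on the TLS listener; domain1 serves static files from docs_dir,
# and, if an HTTP port is configured, domain2 reverse-proxies to it.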
with open(self._conf_file, 'w') as fd:
conf = [ # base server config
'{',
f' http_port {self._http_port}',
f' https_port {self._https_port}',
' log default {',
' level ERROR',
'}',
f' servers :{self._https_port} {{',
' protocols h3 h2 h1',
' }',
'}',
f'{domain1}:{self._https_port} {{',
' file_server * {',
f' root {self._docs_dir}',
' }',
f' tls {creds1.cert_file} {creds1.pkey_file}',
'}',
]
if self.env.http_port > 0:
conf.extend([
f'{domain2} {{',
f' reverse_proxy /* http://localhost:{self.env.http_port} {{',
' }',
f' tls {creds2.cert_file} {creds2.pkey_file}',
'}',
])
fd.write("\n".join(conf))
+569
View File
@@ -0,0 +1,569 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import base64
import ipaddress
import os
import re
from datetime import timedelta, datetime, timezone
from typing import List, Any, Optional
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives._serialization import PublicFormat
from cryptography.hazmat.primitives.asymmetric import ec, rsa
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.primitives.serialization import Encoding, PrivateFormat, NoEncryption, load_pem_private_key
from cryptography.x509 import ExtendedKeyUsageOID, NameOID
EC_SUPPORTED = {}
EC_SUPPORTED.update([(curve.name.upper(), curve) for curve in [
ec.SECP192R1,
ec.SECP224R1,
ec.SECP256R1,
ec.SECP384R1,
]])
def _private_key(key_type):
if isinstance(key_type, str):
key_type = key_type.upper()
m = re.match(r'^(RSA)?(\d+)$', key_type)
if m:
key_type = int(m.group(2))
if isinstance(key_type, int):
return rsa.generate_private_key(
public_exponent=65537,
key_size=key_type,
backend=default_backend()
)
if not isinstance(key_type, ec.EllipticCurve) and key_type in EC_SUPPORTED:
key_type = EC_SUPPORTED[key_type]
return ec.generate_private_key(
curve=key_type,
backend=default_backend()
)
class CertificateSpec:
def __init__(self, name: Optional[str] = None,
domains: Optional[List[str]] = None,
email: Optional[str] = None,
key_type: Optional[str] = None,
single_file: bool = False,
valid_from: timedelta = timedelta(days=-1),
valid_to: timedelta = timedelta(days=89),
client: bool = False,
check_valid: bool = True,
sub_specs: Optional[List['CertificateSpec']] = None):
self._name = name
self.domains = domains
self.client = client
self.email = email
self.key_type = key_type
self.single_file = single_file
self.valid_from = valid_from
self.valid_to = valid_to
self.sub_specs = sub_specs
self.check_valid = check_valid
@property
def name(self) -> Optional[str]:
if self._name:
return self._name
elif self.domains:
return self.domains[0]
return None
@property
def type(self) -> Optional[str]:
if self.domains and len(self.domains):
return "server"
elif self.client:
return "client"
elif self.name:
return "ca"
return None
class Credentials:
def __init__(self,
name: str,
cert: Any,
pkey: Any,
issuer: Optional['Credentials'] = None):
self._name = name
self._cert = cert
self._pkey = pkey
self._issuer = issuer
self._cert_file = None
self._pkey_file = None
self._store = None
self._combined_file = None
@property
def name(self) -> str:
return self._name
@property
def subject(self) -> x509.Name:
return self._cert.subject
@property
def key_type(self):
if isinstance(self._pkey, RSAPrivateKey):
return f"rsa{self._pkey.key_size}"
elif isinstance(self._pkey, EllipticCurvePrivateKey):
return f"{self._pkey.curve.name}"
else:
raise Exception(f"unknown key type: {self._pkey}")
@property
def private_key(self) -> Any:
return self._pkey
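# Base64 of the SHA-256 over the DER-encoded SubjectPublicKeyInfo, the
# format curl's --pinnedpubkey "sha256//..." option expects.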
def pub_sha256_b64(self) -> Any:
pubkey = self._pkey.public_key()
sha256 = hashes.Hash(algorithm=hashes.SHA256())
sha256.update(pubkey.public_bytes(
encoding=Encoding.DER,
format=PublicFormat.SubjectPublicKeyInfo
))
return base64.b64encode(sha256.finalize()).decode('utf8')
@property
def certificate(self) -> Any:
return self._cert
@property
def cert_pem(self) -> bytes:
return self._cert.public_bytes(Encoding.PEM)
@property
def pkey_pem(self) -> bytes:
return self._pkey.private_bytes(
Encoding.PEM,
PrivateFormat.TraditionalOpenSSL if self.key_type.startswith('rsa') else PrivateFormat.PKCS8,
NoEncryption())
@property
def issuer(self) -> Optional['Credentials']:
return self._issuer
def set_store(self, store: 'CertStore'):
self._store = store
def set_files(self, cert_file: str, pkey_file: Optional[str] = None,
combined_file: Optional[str] = None):
self._cert_file = cert_file
self._pkey_file = pkey_file
self._combined_file = combined_file
@property
def cert_file(self) -> str:
return self._cert_file
@property
def pkey_file(self) -> Optional[str]:
return self._pkey_file
@property
def combined_file(self) -> Optional[str]:
return self._combined_file
def get_first(self, name) -> Optional['Credentials']:
creds = self._store.get_credentials_for_name(name) if self._store else []
return creds[0] if len(creds) else None
def get_credentials_for_name(self, name) -> List['Credentials']:
return self._store.get_credentials_for_name(name) if self._store else []
def issue_certs(self, specs: List[CertificateSpec],
chain: Optional[List['Credentials']] = None) -> List['Credentials']:
return [self.issue_cert(spec=spec, chain=chain) for spec in specs]
def issue_cert(self, spec: CertificateSpec,
chain: Optional[List['Credentials']] = None) -> 'Credentials':
key_type = spec.key_type if spec.key_type else self.key_type
creds = None
if self._store:
creds = self._store.load_credentials(
name=spec.name, key_type=key_type, single_file=spec.single_file,
issuer=self, check_valid=spec.check_valid)
if creds is None:
creds = TestCA.create_credentials(spec=spec, issuer=self, key_type=key_type,
valid_from=spec.valid_from, valid_to=spec.valid_to)
if self._store:
self._store.save(creds, single_file=spec.single_file)
if spec.type == "ca":
self._store.save_chain(creds, "ca", with_root=True)
if spec.sub_specs:
if self._store:
sub_store = CertStore(fpath=os.path.join(self._store.path, creds.name))
creds.set_store(sub_store)
subchain = chain.copy() if chain else []
subchain.append(self)
creds.issue_certs(spec.sub_specs, chain=subchain)
return creds
class CertStore:
def __init__(self, fpath: str):
self._store_dir = fpath
if not os.path.exists(self._store_dir):
os.makedirs(self._store_dir)
self._creds_by_name = {}
@property
def path(self) -> str:
return self._store_dir
def save(self, creds: Credentials, name: Optional[str] = None,
chain: Optional[List[Credentials]] = None,
single_file: bool = False) -> None:
name = name if name is not None else creds.name
cert_file = self.get_cert_file(name=name, key_type=creds.key_type)
pkey_file = self.get_pkey_file(name=name, key_type=creds.key_type)
comb_file = self.get_combined_file(name=name, key_type=creds.key_type)
if single_file:
pkey_file = None
with open(cert_file, "wb") as fd:
fd.write(creds.cert_pem)
if chain:
for c in chain:
fd.write(c.cert_pem)
if pkey_file is None:
fd.write(creds.pkey_pem)
if pkey_file is not None:
with open(pkey_file, "wb") as fd:
fd.write(creds.pkey_pem)
with open(comb_file, "wb") as fd:
fd.write(creds.cert_pem)
if chain:
for c in chain:
fd.write(c.cert_pem)
fd.write(creds.pkey_pem)
creds.set_files(cert_file, pkey_file, comb_file)
self._add_credentials(name, creds)
def save_chain(self, creds: Credentials, infix: str, with_root=False):
name = creds.name
chain = [creds]
while creds.issuer is not None:
creds = creds.issuer
chain.append(creds)
if not with_root and len(chain) > 1:
chain = chain[:-1]
chain_file = os.path.join(self._store_dir, f'{name}-{infix}.pem')
with open(chain_file, "wb") as fd:
for c in chain:
fd.write(c.cert_pem)
def _add_credentials(self, name: str, creds: Credentials):
if name not in self._creds_by_name:
self._creds_by_name[name] = []
self._creds_by_name[name].append(creds)
def get_credentials_for_name(self, name) -> List[Credentials]:
return self._creds_by_name[name] if name in self._creds_by_name else []
def get_cert_file(self, name: str, key_type=None) -> str:
key_infix = ".{0}".format(key_type) if key_type is not None else ""
return os.path.join(self._store_dir, f'{name}{key_infix}.cert.pem')
def get_pkey_file(self, name: str, key_type=None) -> str:
key_infix = ".{0}".format(key_type) if key_type is not None else ""
return os.path.join(self._store_dir, f'{name}{key_infix}.pkey.pem')
def get_combined_file(self, name: str, key_type=None) -> str:
return os.path.join(self._store_dir, f'{name}.pem')
def load_pem_cert(self, fpath: str) -> x509.Certificate:
with open(fpath) as fd:
return x509.load_pem_x509_certificate("".join(fd.readlines()).encode())
def load_pem_pkey(self, fpath: str):
with open(fpath) as fd:
return load_pem_private_key("".join(fd.readlines()).encode(), password=None)
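# Load previously issued credentials from disk so certificates are reused
# across runs; with check_valid=True, expired or not-yet-valid certificates
# are rejected and the caller re-issues them.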
def load_credentials(self, name: str, key_type=None,
single_file: bool = False,
issuer: Optional[Credentials] = None,
check_valid: bool = False):
cert_file = self.get_cert_file(name=name, key_type=key_type)
pkey_file = cert_file if single_file else self.get_pkey_file(name=name, key_type=key_type)
comb_file = self.get_combined_file(name=name, key_type=key_type)
if os.path.isfile(cert_file) and os.path.isfile(pkey_file):
cert = self.load_pem_cert(cert_file)
pkey = self.load_pem_pkey(pkey_file)
try:
now = datetime.now(tz=timezone.utc)
if check_valid and \
((cert.not_valid_after_utc < now) or
(cert.not_valid_before_utc > now)):
return None
except AttributeError: # older python
now = datetime.now()
if check_valid and \
((cert.not_valid_after < now) or
(cert.not_valid_before > now)):
return None
creds = Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
creds.set_store(self)
creds.set_files(cert_file, pkey_file, comb_file)
self._add_credentials(name, creds)
return creds
return None
class TestCA:
@classmethod
def create_root(cls, name: str, store_dir: str, key_type: str = "rsa2048") -> Credentials:
store = CertStore(fpath=store_dir)
creds = store.load_credentials(name="ca", key_type=key_type, issuer=None)
if creds is None:
creds = TestCA._make_ca_credentials(name=name, key_type=key_type)
store.save(creds, name="ca")
creds.set_store(store)
return creds
@staticmethod
def create_credentials(spec: CertificateSpec, issuer: Credentials, key_type: Any,
valid_from: timedelta = timedelta(days=-1),
valid_to: timedelta = timedelta(days=89),
) -> Credentials:
"""
Create a certificate signed by this CA for the given domains.
:returns: the issued Credentials (certificate and private key)
"""
if spec.domains and len(spec.domains):
creds = TestCA._make_server_credentials(name=spec.name, domains=spec.domains,
issuer=issuer, valid_from=valid_from,
valid_to=valid_to, key_type=key_type)
elif spec.client:
creds = TestCA._make_client_credentials(name=spec.name, issuer=issuer,
email=spec.email, valid_from=valid_from,
valid_to=valid_to, key_type=key_type)
elif spec.name:
creds = TestCA._make_ca_credentials(name=spec.name, issuer=issuer,
valid_from=valid_from, valid_to=valid_to,
key_type=key_type)
else:
raise Exception(f"unrecognized certificate specification: {spec}")
return creds
@staticmethod
def _make_x509_name(org_name: Optional[str] = None, common_name: Optional[str] = None, parent: x509.Name = None) -> x509.Name:
name_pieces = []
if org_name:
oid = NameOID.ORGANIZATIONAL_UNIT_NAME if parent else NameOID.ORGANIZATION_NAME
name_pieces.append(x509.NameAttribute(oid, org_name))
elif common_name:
name_pieces.append(x509.NameAttribute(NameOID.COMMON_NAME, common_name))
if parent:
name_pieces.extend(list(parent))
return x509.Name(name_pieces)
@staticmethod
def _make_csr(
subject: x509.Name,
pkey: Any,
issuer_subject: Optional[Credentials],
valid_from_delta: Optional[timedelta] = None,
valid_until_delta: Optional[timedelta] = None
) -> x509.CertificateBuilder:
pubkey = pkey.public_key()
issuer_subject = issuer_subject if issuer_subject is not None else subject
valid_from = datetime.now()
if valid_from_delta is not None:
valid_from += valid_from_delta
valid_until = datetime.now()
if valid_until_delta is not None:
valid_until += valid_until_delta
return (
x509.CertificateBuilder()
.subject_name(subject)
.issuer_name(issuer_subject)
.public_key(pubkey)
.not_valid_before(valid_from)
.not_valid_after(valid_until)
.serial_number(x509.random_serial_number())
.add_extension(
x509.SubjectKeyIdentifier.from_public_key(pubkey),
critical=False,
)
)
@staticmethod
def _add_ca_usages(csr: Any) -> Any:
return csr.add_extension(
x509.BasicConstraints(ca=True, path_length=9),
critical=True,
).add_extension(
x509.KeyUsage(
digital_signature=True,
content_commitment=False,
key_encipherment=False,
data_encipherment=False,
key_agreement=False,
key_cert_sign=True,
crl_sign=True,
encipher_only=False,
decipher_only=False),
critical=True
).add_extension(
x509.ExtendedKeyUsage([
ExtendedKeyUsageOID.CLIENT_AUTH,
ExtendedKeyUsageOID.SERVER_AUTH,
ExtendedKeyUsageOID.CODE_SIGNING,
]),
critical=True
)
@staticmethod
def _add_leaf_usages(csr: Any, domains: List[str], issuer: Credentials) -> Any:
names = []
for name in domains:
m = re.match(r'dns:(.+)', name)
if m:
names.append(x509.DNSName(m.group(1)))
else:
try:
names.append(x509.IPAddress(ipaddress.ip_address(name)))
# TODO: specify specific exceptions here
except: # noqa: E722
names.append(x509.DNSName(name))
return csr.add_extension(
x509.BasicConstraints(ca=False, path_length=None),
critical=True,
).add_extension(
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
issuer.certificate.extensions.get_extension_for_class(
x509.SubjectKeyIdentifier).value),
critical=False
).add_extension(
x509.SubjectAlternativeName(names), critical=True,
).add_extension(
x509.ExtendedKeyUsage([
ExtendedKeyUsageOID.SERVER_AUTH,
]),
critical=False
)
@staticmethod
def _add_client_usages(csr: Any, issuer: Credentials, rfc82name: Optional[str] = None) -> Any:
cert = csr.add_extension(
x509.BasicConstraints(ca=False, path_length=None),
critical=True,
).add_extension(
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
issuer.certificate.extensions.get_extension_for_class(
x509.SubjectKeyIdentifier).value),
critical=False
)
if rfc82name:
# add_extension() returns a new builder, so reassign or the extension is lost
cert = cert.add_extension(
x509.SubjectAlternativeName([x509.RFC822Name(rfc82name)]),
critical=True,
)
cert = cert.add_extension(
x509.ExtendedKeyUsage([
ExtendedKeyUsageOID.CLIENT_AUTH,
]),
critical=True
)
return cert
@staticmethod
def _make_ca_credentials(name, key_type: Any,
issuer: Optional[Credentials] = None,
valid_from: timedelta = timedelta(days=-1),
valid_to: timedelta = timedelta(days=89),
) -> Credentials:
pkey = _private_key(key_type=key_type)
if issuer is not None:
issuer_subject = issuer.certificate.subject
issuer_key = issuer.private_key
else:
issuer_subject = None
issuer_key = pkey
subject = TestCA._make_x509_name(org_name=name, parent=issuer.subject if issuer else None)
csr = TestCA._make_csr(subject=subject,
issuer_subject=issuer_subject, pkey=pkey,
valid_from_delta=valid_from, valid_until_delta=valid_to)
csr = TestCA._add_ca_usages(csr)
cert = csr.sign(private_key=issuer_key,
algorithm=hashes.SHA256(),
backend=default_backend())
return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
@staticmethod
def _make_server_credentials(name: str, domains: List[str], issuer: Credentials,
key_type: Any,
valid_from: timedelta = timedelta(days=-1),
valid_to: timedelta = timedelta(days=89),
) -> Credentials:
pkey = _private_key(key_type=key_type)
subject = TestCA._make_x509_name(common_name=name, parent=issuer.subject)
csr = TestCA._make_csr(subject=subject,
issuer_subject=issuer.certificate.subject, pkey=pkey,
valid_from_delta=valid_from, valid_until_delta=valid_to)
csr = TestCA._add_leaf_usages(csr, domains=domains, issuer=issuer)
cert = csr.sign(private_key=issuer.private_key,
algorithm=hashes.SHA256(),
backend=default_backend())
return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
@staticmethod
def _make_client_credentials(name: str,
issuer: Credentials, email: Optional[str],
key_type: Any,
valid_from: timedelta = timedelta(days=-1),
valid_to: timedelta = timedelta(days=89),
) -> Credentials:
pkey = _private_key(key_type=key_type)
subject = TestCA._make_x509_name(common_name=name, parent=issuer.subject)
csr = TestCA._make_csr(subject=subject,
issuer_subject=issuer.certificate.subject, pkey=pkey,
valid_from_delta=valid_from, valid_until_delta=valid_to)
csr = TestCA._add_client_usages(csr, issuer=issuer, rfc82name=email)
cert = csr.sign(private_key=issuer.private_key,
algorithm=hashes.SHA256(),
backend=default_backend())
return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
+121
View File
@@ -0,0 +1,121 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import shutil
import subprocess
from datetime import datetime
from typing import Optional, Dict
from . import ExecResult
from .env import Env
log = logging.getLogger(__name__)
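# Runs one of the bundled libtest client programs from the build tree (the
# `libtests` binary, selected by name), capturing stdout/stderr into files
# in a per-client run directory.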
class LocalClient:
def __init__(self, name: str, env: Env, run_dir: Optional[str] = None,
timeout: Optional[float] = None,
run_env: Optional[Dict[str,str]] = None):
self.name = name
self.path = os.path.join(env.build_dir, 'tests/libtest/libtests')
self.env = env
self._run_env = run_env
self._timeout = timeout if timeout else env.test_timeout
self._curl = os.environ['CURL'] if 'CURL' in os.environ else env.curl
self._run_dir = run_dir if run_dir else os.path.join(env.gen_dir, name)
self._stdoutfile = f'{self._run_dir}/stdout'
self._stderrfile = f'{self._run_dir}/stderr'
self._rmrf(self._run_dir)
self._mkpath(self._run_dir)
@property
def run_dir(self) -> str:
return self._run_dir
@property
def stderr_file(self) -> str:
return self._stderrfile
def exists(self) -> bool:
return os.path.exists(self.path)
def download_file(self, i: int) -> str:
return os.path.join(self._run_dir, f'download_{i}.data')
def _rmf(self, path):
if os.path.exists(path):
return os.remove(path)
def _rmrf(self, path):
if os.path.exists(path):
return shutil.rmtree(path)
def _mkpath(self, path):
if not os.path.exists(path):
return os.makedirs(path)
def run(self, args):
self._rmf(self._stdoutfile)
self._rmf(self._stderrfile)
start = datetime.now()
exception = None
myargs = [self.path, self.name]
myargs.extend(args)
run_env = None
if self._run_env:
run_env = self._run_env.copy()
for key in ['CURL_DEBUG']:
if key in os.environ and key not in run_env:
run_env[key] = os.environ[key]
try:
with open(self._stdoutfile, 'w') as cout, open(self._stderrfile, 'w') as cerr:
p = subprocess.run(myargs, stderr=cerr, stdout=cout,
cwd=self._run_dir, shell=False,
input=None, env=run_env,
timeout=self._timeout)
exitcode = p.returncode
except subprocess.TimeoutExpired:
log.warning(f'Timeout after {self._timeout}s: {args}')
exitcode = -1
exception = 'TimeoutExpired'
coutput = open(self._stdoutfile).readlines()
cerrput = open(self._stderrfile).readlines()
return ExecResult(args=myargs, exit_code=exitcode, exception=exception,
stdout=coutput, stderr=cerrput,
duration=datetime.now() - start)
def dump_logs(self):
lines = []
lines.append('>>--stdout ----------------------------------------------\n')
lines.extend(open(self._stdoutfile).readlines())
lines.append('>>--stderr ----------------------------------------------\n')
lines.extend(open(self._stderrfile).readlines())
lines.append('<<-------------------------------------------------------\n')
return ''.join(lines)
File diff suppressed because it is too large
+174
View File
@@ -0,0 +1,174 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import socket
import subprocess
import time
from datetime import timedelta, datetime
from typing import Dict
from . import CurlClient
from .env import Env
from .ports import alloc_ports_and_do
log = logging.getLogger(__name__)
class Dante:
def __init__(self, env: Env):
self.env = env
self._cmd = env.danted
self._port = 0
self.name = 'danted'
self._port_skey = 'danted'
self._port_specs = {
'danted': socket.SOCK_STREAM,
}
self._dante_dir = os.path.join(env.gen_dir, self.name)
self._run_dir = os.path.join(self._dante_dir, 'run')
self._tmp_dir = os.path.join(self._dante_dir, 'tmp')
self._conf_file = os.path.join(self._dante_dir, 'test.conf')
self._dante_log = os.path.join(self._dante_dir, 'dante.log')
self._error_log = os.path.join(self._dante_dir, 'error.log')
self._pid_file = os.path.join(self._dante_dir, 'dante.pid')
self._process = None
self.clear_logs()
@property
def port(self) -> int:
return self._port
def clear_logs(self):
self._rmf(self._error_log)
self._rmf(self._dante_log)
def exists(self):
return os.path.exists(self._cmd)
def is_running(self):
if self._process:
self._process.poll()
return self._process.returncode is None
return False
def start_if_needed(self):
if not self.is_running():
return self.start()
return True
def stop(self, wait_dead=True):
self._mkpath(self._tmp_dir)
if self._process:
self._process.terminate()
self._process.wait(timeout=2)
self._process = None
return not wait_dead or True
return True
def restart(self):
self.stop()
return self.start()
def initial_start(self):
def startup(ports: Dict[str, int]) -> bool:
self._port = ports[self._port_skey]
if self.start():
self.env.update_ports(ports)
return True
self.stop()
self._port = 0
return False
return alloc_ports_and_do(self._port_specs, startup,
self.env.gen_root, max_tries=3)
def start(self, wait_live=True):
assert self._port > 0
self._mkpath(self._tmp_dir)
if self._process:
self.stop()
self._write_config()
args = [
self._cmd,
'-f', f'{self._conf_file}',
'-p', f'{self._pid_file}',
'-d', '0',
]
procerr = open(self._error_log, 'a')
self._process = subprocess.Popen(args=args, stderr=procerr)
if self._process.returncode is not None:
return False
return self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT))
def wait_live(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir,
timeout=timeout.total_seconds(), socks_args=[
'--socks5', f'127.0.0.1:{self._port}'
])
try_until = datetime.now() + timeout
while datetime.now() < try_until:
r = curl.http_get(url=f'http://{self.env.domain1}:{self.env.http_port}/')
if r.exit_code == 0:
return True
time.sleep(.1)
log.error(f"Server still not responding after {timeout}")
return False
def _rmf(self, path):
if os.path.exists(path):
return os.remove(path)
def _mkpath(self, path):
if not os.path.exists(path):
return os.makedirs(path)
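# Minimal danted configuration: listen on 127.0.0.1 at the allocated port,
# no client or SOCKS authentication, accept client connections from
# localhost only, permit connect/bindreply/udpreply to any destination,
# and log errors only.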
def _write_config(self):
conf = [
f'errorlog: {self._error_log}',
f'logoutput: {self._dante_log}',
f'internal: 127.0.0.1 port = {self._port}',
'external: 127.0.0.1',
'clientmethod: none',
'socksmethod: none',
'client pass {',
' from: 127.0.0.0/24 to: 0.0.0.0/0',
' log: error',
'}',
'socks pass {',
' from: 0.0.0.0/0 to: 0.0.0.0/0',
' command: bindreply connect udpreply',
' log: error',
'}',
'\n',
]
with open(self._conf_file, 'w') as fd:
fd.write("\n".join(conf))
+774
View File
@@ -0,0 +1,774 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import gzip
import logging
import os
import re
import shutil
import subprocess
import tempfile
from configparser import ConfigParser, ExtendedInterpolation
from datetime import timedelta
from typing import Optional, Dict, List
import pytest
from filelock import FileLock
from .certs import CertificateSpec, Credentials, TestCA
log = logging.getLogger(__name__)
def init_config_from(conf_path):
if os.path.isfile(conf_path):
config = ConfigParser(interpolation=ExtendedInterpolation())
config.read(conf_path)
return config
return None
TESTS_HTTPD_PATH = os.path.dirname(os.path.dirname(__file__))
PROJ_PATH = os.path.dirname(os.path.dirname(TESTS_HTTPD_PATH))
TOP_PATH = os.path.join(os.getcwd(), os.path.pardir)
CONFIG_PATH = os.path.join(TOP_PATH, 'tests', 'http', 'config.ini')
if not os.path.exists(CONFIG_PATH):
ALT_CONFIG_PATH = os.path.join(PROJ_PATH, 'tests', 'http', 'config.ini')
if not os.path.exists(ALT_CONFIG_PATH):
raise Exception(f'unable to find config.ini in {CONFIG_PATH} nor {ALT_CONFIG_PATH}')
TOP_PATH = PROJ_PATH
CONFIG_PATH = ALT_CONFIG_PATH
DEF_CONFIG = init_config_from(CONFIG_PATH)
CURL = os.path.join(TOP_PATH, 'src', 'curl')
CURLINFO = os.path.join(TOP_PATH, 'src', 'curlinfo')
class NghttpxUtil:
CMD = None
VERSION_FULL = None
@classmethod
def version(cls, cmd):
if cmd is None:
return None
if cls.VERSION_FULL is None or cmd != cls.CMD:
p = subprocess.run(args=[cmd, '--version'],
capture_output=True, text=True)
if p.returncode != 0:
raise RuntimeError(f'{cmd} --version failed with exit code: {p.returncode}')
cls.CMD = cmd
for line in p.stdout.splitlines(keepends=False):
if line.startswith('nghttpx '):
cls.VERSION_FULL = line
if cls.VERSION_FULL is None:
raise RuntimeError(f'{cmd}: unable to determine version')
return cls.VERSION_FULL
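# nghttpx is built with HTTP/3 support when its version banner mentions
# ngtcp2 (the QUIC library); that is what the check below keys on.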
@staticmethod
def version_with_h3(version):
return re.match(r'.* ngtcp2/\d+\.\d+\.\d+.*', version) is not None
class EnvConfig:
def __init__(self, pytestconfig: Optional[pytest.Config] = None,
testrun_uid=None,
worker_id=None):
self.pytestconfig = pytestconfig
self.testrun_uid = testrun_uid
self.worker_id = worker_id if worker_id is not None else 'master'
self.tests_dir = TESTS_HTTPD_PATH
self.gen_root = self.gen_dir = os.path.join(self.tests_dir, 'gen')
if self.worker_id != 'master':
self.gen_dir = os.path.join(self.gen_dir, self.worker_id)
self.project_dir = os.path.dirname(os.path.dirname(self.tests_dir))
self.build_dir = TOP_PATH
self.config = DEF_CONFIG
# check curl and its features
self.curl = CURL
self.curlinfo = CURLINFO
if 'CURL' in os.environ:
self.curl = os.environ['CURL']
self.curl_props = {
'version_string': '',
'version': '',
'os': '',
'fullname': '',
'features_string': '',
'features': set(),
'protocols_string': '',
'protocols': set(),
'libs': set(),
'lib_versions': set(),
}
self.curl_is_debug = False
self.curl_protos = []
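# Probe the curl under test via `curl -V` and record its version, features,
# protocols and the TLS/HTTP libraries it was built against.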
p = subprocess.run(args=[self.curl, '-V'],
capture_output=True, text=True)
if p.returncode != 0:
raise RuntimeError(f'{self.curl} -V failed with exit code: {p.returncode}')
if p.stderr.startswith('WARNING:'):
self.curl_is_debug = True
for line in p.stdout.splitlines(keepends=False):
if line.startswith('curl '):
self.curl_props['version_string'] = line
m = re.match(r'^curl (?P<version>\S+) (?P<os>\S+) (?P<libs>.*)$', line)
if m:
self.curl_props['fullname'] = m.group(0)
self.curl_props['version'] = m.group('version')
self.curl_props['os'] = m.group('os')
self.curl_props['lib_versions'] = {
lib.lower() for lib in m.group('libs').split(' ')
}
self.curl_props['libs'] = {
re.sub(r'/[a-z0-9.-]*', '', lib) for lib in self.curl_props['lib_versions']
}
if line.startswith('Features: '):
self.curl_props['features_string'] = line[10:]
self.curl_props['features'] = {
feat.lower() for feat in line[10:].split(' ')
}
if line.startswith('Protocols: '):
self.curl_props['protocols_string'] = line[11:]
self.curl_props['protocols'] = {
prot.lower() for prot in line[11:].split(' ')
}
p = subprocess.run(args=[self.curlinfo],
capture_output=True, text=True)
if p.returncode != 0:
raise RuntimeError(f'{self.curlinfo} failed with exit code: {p.returncode}')
self.curl_is_verbose = 'verbose-strings: ON' in p.stdout
self.ports = {}
self.httpd = self.config['httpd']['httpd']
self.apxs = self.config['httpd']['apxs']
if len(self.apxs) == 0:
self.apxs = None
self._httpd_version = None
self.examples_pem = {
'key': 'xxx',
'cert': 'xxx',
}
self.htdocs_dir = os.path.join(self.gen_dir, 'htdocs')
self.tld = 'http.curl.se'
self.domain1 = f"one.{self.tld}"
self.domain1brotli = f"brotli.one.{self.tld}"
self.domain2 = f"two.{self.tld}"
self.ftp_domain = f"ftp.{self.tld}"
self.proxy_domain = f"proxy.{self.tld}"
self.expired_domain = f"expired.{self.tld}"
self.cert_specs = [
CertificateSpec(domains=[self.domain1, self.domain1brotli, 'localhost', '127.0.0.1'], key_type='rsa2048'),
CertificateSpec(name='domain1-no-ip', domains=[self.domain1, self.domain1brotli], key_type='rsa2048'),
CertificateSpec(name='domain1-very-bad', domains=[self.domain1, 'dns:127.0.0.1'], key_type='rsa2048'),
CertificateSpec(domains=[self.domain2], key_type='rsa2048'),
CertificateSpec(domains=[self.ftp_domain], key_type='rsa2048'),
CertificateSpec(domains=[self.proxy_domain, '127.0.0.1'], key_type='rsa2048'),
CertificateSpec(domains=[self.expired_domain], key_type='rsa2048',
valid_from=timedelta(days=-100), valid_to=timedelta(days=-10)),
CertificateSpec(name="clientsX", sub_specs=[
CertificateSpec(name="user1", client=True),
]),
]
self.nghttpx = self.config['nghttpx']['nghttpx']
if len(self.nghttpx.strip()) == 0:
self.nghttpx = None
self._nghttpx_version = None
self.nghttpx_with_h3 = False
if self.nghttpx is not None:
try:
self._nghttpx_version = NghttpxUtil.version(self.nghttpx)
self.nghttpx_with_h3 = NghttpxUtil.version_with_h3(self._nghttpx_version)
except RuntimeError:
# not a working nghttpx
log.exception('checking nghttpx version')
self.nghttpx = None
self.caddy = self.config['caddy']['caddy']
self._caddy_version = None
if len(self.caddy.strip()) == 0:
self.caddy = None
if self.caddy is not None:
try:
p = subprocess.run(args=[self.caddy, 'version'],
capture_output=True, text=True)
if p.returncode != 0:
# not a working caddy
self.caddy = None
m = re.match(r'v?(\d+\.\d+\.\d+).*', p.stdout)
if m:
self._caddy_version = m.group(1)
else:
raise RuntimeError(f'Unable to determine caddy version from: {p.stdout}')
# TODO: specify specific exceptions here
except: # noqa: E722
self.caddy = None
self.vsftpd = self.config['vsftpd']['vsftpd']
if self.vsftpd == '':
self.vsftpd = None
self._vsftpd_version = None
if self.vsftpd is not None:
try:
with tempfile.TemporaryFile('w+') as tmp:
p = subprocess.run(args=[self.vsftpd, '-v'],
capture_output=True, text=True, stdin=tmp)
if p.returncode != 0:
# not a working vsftpd
self.vsftpd = None
if p.stderr:
ver_text = p.stderr
else:
# Oddly, some versions of vsftpd write their version to stdin (!)
# rather than stderr. If stderr is empty, rewind the temp file
# passed as stdin and use whatever was written there instead.
tmp.seek(0)
ver_text = tmp.read()
m = re.match(r'vsftpd: version (\d+\.\d+\.\d+)', ver_text)
if m:
self._vsftpd_version = m.group(1)
elif len(p.stderr) == 0:
# vsftpd does not use stdout or stderr for printing its version... -.-
self._vsftpd_version = 'unknown'
else:
raise Exception(f'Unable to determine VsFTPD version from: {p.stderr}')
except Exception:
self.vsftpd = None
self.danted = self.config['danted']['danted']
if self.danted == '':
self.danted = None
self._danted_version = None
if self.danted is not None:
try:
p = subprocess.run(args=[self.danted, '-v'],
capture_output=True, text=True)
if p.returncode != 0:
# not a working danted
self.danted = None
m = re.match(r'^Dante v(\d+\.\d+\.\d+).*', p.stdout)
if not m:
m = re.match(r'^Dante v(\d+\.\d+\.\d+).*', p.stderr)
if m:
self._danted_version = m.group(1)
else:
self.danted = None
raise Exception(f'Unable to determine danted version from: {p.stderr}')
except Exception:
self.danted = None
self._tcpdump = shutil.which('tcpdump')
@property
def httpd_version(self):
if self._httpd_version is None and self.apxs is not None:
try:
p = subprocess.run(args=[self.apxs, '-q', 'HTTPD_VERSION'],
capture_output=True, text=True)
if p.returncode != 0:
log.error(f'{self.apxs} failed to query HTTPD_VERSION: {p}')
else:
self._httpd_version = p.stdout.strip()
except Exception:
log.exception(f'{self.apxs} failed to run')
return self._httpd_version
def versiontuple(self, v):
v = re.sub(r'(\d+\.\d+(\.\d+)?)(-\S+)?', r'\1', v)
return tuple(map(int, v.split('.')))
def httpd_is_at_least(self, minv):
if self.httpd_version is None:
return False
hv = self.versiontuple(self.httpd_version)
return hv >= self.versiontuple(minv)
def caddy_is_at_least(self, minv):
if self.caddy_version is None:
return False
hv = self.versiontuple(self.caddy_version)
return hv >= self.versiontuple(minv)
def is_complete(self) -> bool:
return os.path.isfile(self.httpd) and \
self.apxs is not None and \
os.path.isfile(self.apxs)
def get_incomplete_reason(self) -> Optional[str]:
if self.httpd is None or len(self.httpd.strip()) == 0:
return 'httpd not configured, see `--with-test-httpd=<path>`'
if not os.path.isfile(self.httpd):
return f'httpd ({self.httpd}) not found'
if self.apxs is None:
return "command apxs not found (commonly provided in apache2-dev)"
if not os.path.isfile(self.apxs):
return f"apxs ({self.apxs}) not found"
return None
@property
def nghttpx_version(self):
return self._nghttpx_version
@property
def caddy_version(self):
return self._caddy_version
@property
def vsftpd_version(self):
return self._vsftpd_version
@property
def tcpdmp(self) -> Optional[str]:
return self._tcpdump
def clear_locks(self):
ca_lock = os.path.join(self.gen_root, 'ca/ca.lock')
if os.path.exists(ca_lock):
os.remove(ca_lock)
class Env:
SERVER_TIMEOUT = 30 # seconds to wait for server to come up/reload
CONFIG = EnvConfig()
@staticmethod
def setup_incomplete() -> bool:
return not Env.CONFIG.is_complete()
@staticmethod
def incomplete_reason() -> Optional[str]:
return Env.CONFIG.get_incomplete_reason()
@staticmethod
def have_nghttpx() -> bool:
return Env.CONFIG.nghttpx is not None
@staticmethod
def have_h3_server() -> bool:
return Env.CONFIG.nghttpx_with_h3
@staticmethod
def have_ssl_curl() -> bool:
return Env.curl_has_feature('ssl') or Env.curl_has_feature('multissl')
@staticmethod
def have_h2_curl() -> bool:
return 'http2' in Env.CONFIG.curl_props['features']
@staticmethod
def have_h3_curl() -> bool:
return 'http3' in Env.CONFIG.curl_props['features']
@staticmethod
def curl_uses_lib(libname: str) -> bool:
return libname.lower() in Env.CONFIG.curl_props['libs']
@staticmethod
def curl_uses_any_libs(libs: List[str]) -> bool:
for libname in libs:
if libname.lower() in Env.CONFIG.curl_props['libs']:
return True
return False
@staticmethod
def curl_uses_ossl_quic() -> bool:
if Env.have_h3_curl():
return not Env.curl_uses_lib('ngtcp2') and Env.curl_uses_lib('nghttp3')
return False
@staticmethod
def curl_version_string() -> str:
return Env.CONFIG.curl_props['version_string']
@staticmethod
def curl_features_string() -> str:
return Env.CONFIG.curl_props['features_string']
@staticmethod
def curl_has_feature(feature: str) -> bool:
return feature.lower() in Env.CONFIG.curl_props['features']
@staticmethod
def curl_protocols_string() -> str:
return Env.CONFIG.curl_props['protocols_string']
@staticmethod
def curl_has_protocol(protocol: str) -> bool:
return protocol.lower() in Env.CONFIG.curl_props['protocols']
@staticmethod
def curl_lib_version(libname: str) -> str:
prefix = f'{libname.lower()}/'
for lversion in Env.CONFIG.curl_props['lib_versions']:
if lversion.startswith(prefix):
return lversion[len(prefix):]
return 'unknown'
@staticmethod
def curl_lib_version_at_least(libname: str, min_version) -> bool:
lversion = Env.curl_lib_version(libname)
if lversion != 'unknown':
return Env.CONFIG.versiontuple(min_version) <= \
Env.CONFIG.versiontuple(lversion)
return False
@staticmethod
def curl_lib_version_before(libname: str, lib_version) -> bool:
lversion = Env.curl_lib_version(libname)
if lversion != 'unknown':
if m := re.match(r'(\d+\.\d+\.\d+).*', lversion):
lversion = m.group(1)
return Env.CONFIG.versiontuple(lib_version) > \
Env.CONFIG.versiontuple(lversion)
return False
@staticmethod
def curl_os() -> str:
return Env.CONFIG.curl_props['os']
@staticmethod
def curl_fullname() -> str:
return Env.CONFIG.curl_props['fullname']
@staticmethod
def curl_version() -> str:
return Env.CONFIG.curl_props['version']
@staticmethod
def curl_is_debug() -> bool:
return Env.CONFIG.curl_is_debug
@staticmethod
def curl_is_verbose() -> bool:
return Env.CONFIG.curl_is_verbose
@staticmethod
def curl_can_early_data() -> bool:
if Env.curl_uses_lib('gnutls'):
return Env.curl_lib_version_at_least('gnutls', '3.6.13')
return Env.curl_uses_any_libs(['wolfssl', 'quictls', 'openssl'])
@staticmethod
def curl_can_h3_early_data() -> bool:
return Env.curl_can_early_data() and \
Env.curl_uses_lib('ngtcp2')
@staticmethod
def have_h3() -> bool:
return Env.have_h3_curl() and Env.have_h3_server()
@staticmethod
def httpd_version() -> str:
return Env.CONFIG.httpd_version
@staticmethod
def nghttpx_version() -> str:
return Env.CONFIG.nghttpx_version
@staticmethod
def caddy_version() -> str:
return Env.CONFIG.caddy_version
@staticmethod
def caddy_is_at_least(minv) -> bool:
return Env.CONFIG.caddy_is_at_least(minv)
@staticmethod
def httpd_is_at_least(minv) -> bool:
return Env.CONFIG.httpd_is_at_least(minv)
@staticmethod
def has_caddy() -> bool:
return Env.CONFIG.caddy is not None
@staticmethod
def has_vsftpd() -> bool:
return Env.CONFIG.vsftpd is not None
@staticmethod
def vsftpd_version() -> str:
return Env.CONFIG.vsftpd_version
@staticmethod
def has_danted() -> bool:
return Env.CONFIG.danted is not None
@staticmethod
def tcpdump() -> Optional[str]:
return Env.CONFIG.tcpdmp
def __init__(self, pytestconfig=None, env_config=None):
if env_config:
Env.CONFIG = env_config
self._verbose = pytestconfig.option.verbose \
if pytestconfig is not None else 0
self._ca = None
self._test_timeout = 300.0 if self._verbose > 1 else 60.0 # seconds
def issue_certs(self):
if self._ca is None:
ca_dir = os.path.join(self.CONFIG.gen_root, 'ca')
os.makedirs(ca_dir, exist_ok=True)
lock_file = os.path.join(ca_dir, 'ca.lock')
with FileLock(lock_file):
self._ca = TestCA.create_root(name=self.CONFIG.tld,
store_dir=ca_dir,
key_type="rsa2048")
self._ca.issue_certs(self.CONFIG.cert_specs)
def setup(self):
os.makedirs(self.gen_dir, exist_ok=True)
os.makedirs(self.htdocs_dir, exist_ok=True)
self.issue_certs()
def get_credentials(self, domain) -> Optional[Credentials]:
creds = self.ca.get_credentials_for_name(domain)
if len(creds) > 0:
return creds[0]
return None
@property
def verbose(self) -> int:
return self._verbose
@property
def test_timeout(self) -> Optional[float]:
return self._test_timeout
@test_timeout.setter
def test_timeout(self, val: Optional[float]):
self._test_timeout = val
@property
def gen_dir(self) -> str:
return self.CONFIG.gen_dir
@property
def gen_root(self) -> str:
return self.CONFIG.gen_root
@property
def project_dir(self) -> str:
return self.CONFIG.project_dir
@property
def build_dir(self) -> str:
return self.CONFIG.build_dir
@property
def ca(self):
return self._ca
@property
def htdocs_dir(self) -> str:
return self.CONFIG.htdocs_dir
@property
def tld(self) -> str:
return self.CONFIG.tld
@property
def domain1(self) -> str:
return self.CONFIG.domain1
@property
def domain1brotli(self) -> str:
return self.CONFIG.domain1brotli
@property
def domain2(self) -> str:
return self.CONFIG.domain2
@property
def ftp_domain(self) -> str:
return self.CONFIG.ftp_domain
@property
def proxy_domain(self) -> str:
return self.CONFIG.proxy_domain
@property
def expired_domain(self) -> str:
return self.CONFIG.expired_domain
@property
def ports(self) -> Dict[str, int]:
return self.CONFIG.ports
def update_ports(self, ports: Dict[str, int]):
self.CONFIG.ports.update(ports)
@property
def http_port(self) -> int:
return self.CONFIG.ports.get('http', 0)
@property
def https_port(self) -> int:
return self.CONFIG.ports['https']
@property
def https_only_tcp_port(self) -> int:
return self.CONFIG.ports['https-tcp-only']
@property
def nghttpx_https_port(self) -> int:
return self.CONFIG.ports['nghttpx_https']
@property
def h3_port(self) -> int:
return self.https_port
@property
def proxy_port(self) -> int:
return self.CONFIG.ports['proxy']
@property
def proxys_port(self) -> int:
return self.CONFIG.ports['proxys']
@property
def ftp_port(self) -> int:
return self.CONFIG.ports['ftp']
@property
def ftps_port(self) -> int:
return self.CONFIG.ports['ftps']
@property
def h2proxys_port(self) -> int:
return self.CONFIG.ports['h2proxys']
def pts_port(self, proto: str = 'http/1.1') -> int:
# proxy tunnel port
return self.CONFIG.ports['h2proxys' if proto == 'h2' else 'proxys']
@property
def caddy(self) -> str:
return self.CONFIG.caddy
@property
def caddy_https_port(self) -> int:
return self.CONFIG.ports['caddys']
@property
def caddy_http_port(self) -> int:
return self.CONFIG.ports['caddy']
@property
def danted(self) -> str:
return self.CONFIG.danted
@property
def vsftpd(self) -> str:
return self.CONFIG.vsftpd
@property
def ws_port(self) -> int:
return self.CONFIG.ports['ws']
@property
def curl(self) -> str:
return self.CONFIG.curl
@property
def httpd(self) -> str:
return self.CONFIG.httpd
@property
def apxs(self) -> str:
return self.CONFIG.apxs
@property
def nghttpx(self) -> Optional[str]:
return self.CONFIG.nghttpx
@property
def slow_network(self) -> bool:
return "CURL_DBG_SOCK_WBLOCK" in os.environ or \
"CURL_DBG_SOCK_WPARTIAL" in os.environ
@property
def ci_run(self) -> bool:
return "CURL_CI" in os.environ
def port_for(self, alpn_proto: Optional[str] = None):
if alpn_proto is None or \
alpn_proto in ['h2', 'http/1.1', 'http/1.0', 'http/0.9']:
return self.https_port
if alpn_proto in ['h3']:
return self.h3_port
return self.http_port
def authority_for(self, domain: str, alpn_proto: Optional[str] = None):
return f'{domain}:{self.port_for(alpn_proto=alpn_proto)}'
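# Generate a text file of roughly `fsize` bytes built from fixed-length
# lines, each prefixed with a 9-digit line number, so partial or corrupted
# transfers show up clearly in diffs.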
def make_data_file(self, indir: str, fname: str, fsize: int,
line_length: int = 1024) -> str:
if line_length < 11:
raise RuntimeError('line_length less than 11 not supported')
fpath = os.path.join(indir, fname)
s10 = "0123456789"
s = round((line_length / 10) + 1) * s10
s = s[0:line_length-11]
with open(fpath, 'w') as fd:
for i in range(int(fsize / line_length)):
fd.write(f"{i:09d}-{s}\n")
remain = int(fsize % line_length)
if remain != 0:
i = int(fsize / line_length) + 1
fd.write(f"{i:09d}-{s}"[0:remain-1] + "\n")
return fpath
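# Create a small plain file plus a large all-zeros .gz variant and a .var
# type map next to it; the type map appears intended for Apache's
# mod_negotiation so the server can hand out the gzip-encoded variant.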
def make_data_gzipbomb(self, indir: str, fname: str, fsize: int) -> str:
fpath = os.path.join(indir, fname)
gzpath = f'{fpath}.gz'
varpath = f'{fpath}.var'
with open(fpath, 'w') as fd:
fd.write('not what we are looking for!\n')
count = int(fsize / 1024)
zero1k = bytearray(1024)
with gzip.open(gzpath, 'wb') as fd:
for _ in range(count):
fd.write(zero1k)
with open(varpath, 'w') as fd:
fd.write(f'URI: {fname}\n')
fd.write('\n')
fd.write(f'URI: {fname}.gz\n')
fd.write('Content-Type: text/plain\n')
fd.write('Content-Encoding: x-gzip\n')
fd.write('\n')
return fpath
+592
View File
@@ -0,0 +1,592 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import inspect
import logging
import os
import shutil
import socket
import subprocess
from datetime import timedelta, datetime
from json import JSONEncoder
import time
from typing import List, Union, Optional, Dict
import copy
from .curl import CurlClient, ExecResult
from .env import Env
from .ports import alloc_ports_and_do
log = logging.getLogger(__name__)
class Httpd:
MODULES = [
'log_config', 'logio', 'unixd', 'version', 'watchdog',
'authn_core', 'authn_file',
'authz_user', 'authz_core', 'authz_host',
'auth_basic', 'auth_digest',
'alias', 'env', 'filter', 'headers', 'mime', 'setenvif', 'negotiation',
'socache_shmcb',
'rewrite', 'http2', 'ssl', 'proxy', 'proxy_http', 'proxy_connect',
'brotli',
'mpm_event',
]
COMMON_MODULES_DIRS = [
'/usr/lib/apache2/modules', # debian
'/usr/libexec/apache2/', # macos
]
MOD_CURLTEST = None
PORT_SPECS = {
'http': socket.SOCK_STREAM,
'https': socket.SOCK_STREAM,
'https-tcp-only': socket.SOCK_STREAM,
'proxy': socket.SOCK_STREAM,
'proxys': socket.SOCK_STREAM,
}
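# The listeners above (plain HTTP, two HTTPS listeners, and a plain plus a
# TLS proxy port) get concrete port numbers assigned in initial_start().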
def __init__(self, env: Env):
self.env = env
self._apache_dir = os.path.join(env.gen_dir, 'apache')
self._run_dir = os.path.join(self._apache_dir, 'run')
self._lock_dir = os.path.join(self._apache_dir, 'locks')
self._docs_dir = os.path.join(self._apache_dir, 'docs')
self._conf_dir = os.path.join(self._apache_dir, 'conf')
self._conf_file = os.path.join(self._conf_dir, 'test.conf')
self._logs_dir = os.path.join(self._apache_dir, 'logs')
self._error_log = os.path.join(self._logs_dir, 'error_log')
self._tmp_dir = os.path.join(self._apache_dir, 'tmp')
self._basic_passwords = os.path.join(self._conf_dir, 'basic.passwords')
self._digest_passwords = os.path.join(self._conf_dir, 'digest.passwords')
self._mods_dir = None
self._auth_digest = True
self._proxy_auth_basic = False
# name used to lookup credentials for env.domain1
self._domain1_cred_name = env.domain1
self._extra_configs = {}
self._loaded_extra_configs = None
self._loaded_proxy_auth = None
self._loaded_domain1_cred_name = None
assert env.apxs
p = subprocess.run(args=[env.apxs, '-q', 'libexecdir'],
capture_output=True, text=True)
if p.returncode != 0:
raise Exception(f'{env.apxs} failed to query libexecdir: {p}')
self._mods_dir = p.stdout.strip()
if not self._mods_dir:
raise Exception('apache modules dir cannot be found')
if not os.path.exists(self._mods_dir):
raise Exception(f'apache modules dir does not exist: {self._mods_dir}')
self._maybe_running = False
self.ports = {}
self._rmf(self._error_log)
self._init_curltest()
@property
def docs_dir(self):
return self._docs_dir
def clear_logs(self):
self._rmf(self._error_log)
def exists(self):
return os.path.exists(self.env.httpd)
def set_extra_config(self, domain: str, lines: Optional[Union[str, List[str]]]):
if lines is None:
self._extra_configs.pop(domain, None)
else:
self._extra_configs[domain] = lines
def reset_config(self):
self._extra_configs = {}
self.set_proxy_auth(False)
self._domain1_cred_name = self.env.domain1
def set_proxy_auth(self, active: bool):
self._proxy_auth_basic = active
def set_domain1_cred_name(self, name):
self._domain1_cred_name = name
def _run(self, args, intext=''):
env = os.environ.copy()
env['APACHE_RUN_DIR'] = self._run_dir
env['APACHE_RUN_USER'] = os.environ['USER']
env['APACHE_LOCK_DIR'] = self._lock_dir
env['APACHE_CONFDIR'] = self._apache_dir
start = datetime.now()
p = subprocess.run(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE,
cwd=self.env.gen_dir,
input=intext.encode() if intext else None,
env=env)
return ExecResult(args=args, exit_code=p.returncode,
stdout=p.stdout.decode().splitlines(),
stderr=p.stderr.decode().splitlines(),
duration=datetime.now() - start)
def _cmd_httpd(self, cmd: str):
args = [self.env.httpd,
"-d", self._apache_dir,
"-f", self._conf_file,
"-k", cmd]
return self._run(args=args)
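# initial_start() allocates the TCP ports listed in PORT_SPECS under a file
# lock (see ports.alloc_ports_and_do) and retries the whole start up to three
# times in case a picked port is taken before httpd can bind it.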
def initial_start(self):
def startup(ports: Dict[str, int]) -> bool:
self.ports.update(ports)
if self.start():
self.env.update_ports(ports)
return True
self.stop()
self.ports.clear()
return False
return alloc_ports_and_do(Httpd.PORT_SPECS, startup,
self.env.gen_root, max_tries=3)
def start(self):
# assure ports are allocated
for key, _ in Httpd.PORT_SPECS.items():
assert self.ports[key] is not None
if self._maybe_running:
self.stop()
self._write_config()
with open(self._error_log, 'a') as fd:
fd.write('start of server\n')
with open(os.path.join(self._apache_dir, 'xxx'), 'a') as fd:
fd.write('start of server\n')
r = self._cmd_httpd('start')
if r.exit_code != 0 or len(r.stderr):
log.error(f'failed to start httpd: {r}')
self.stop()
return False
self._loaded_extra_configs = copy.deepcopy(self._extra_configs)
self._loaded_proxy_auth = self._proxy_auth_basic
return self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT))
def stop(self):
r = self._cmd_httpd('stop')
self._loaded_extra_configs = None
self._loaded_proxy_auth = None
if r.exit_code == 0:
return self.wait_dead(timeout=timedelta(seconds=Env.SERVER_TIMEOUT))
log.fatal(f'stopping httpd failed: {r}')
return r.exit_code == 0
def reload(self):
self._write_config()
r = self._cmd_httpd("graceful")
if r.exit_code != 0:
log.error(f'failed to reload httpd: {r}')
return False
self._loaded_extra_configs = copy.deepcopy(self._extra_configs)
self._loaded_proxy_auth = self._proxy_auth_basic
return self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT))
def reload_if_config_changed(self):
if self._maybe_running and \
self._loaded_extra_configs == self._extra_configs and \
self._loaded_proxy_auth == self._proxy_auth_basic and \
self._loaded_domain1_cred_name == self._domain1_cred_name:
return True
return self.reload()
def wait_dead(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
r = curl.http_get(url=f'http://{self.env.domain1}:{self.ports["http"]}/')
if r.exit_code != 0:
self._maybe_running = False
return True
time.sleep(.1)
log.debug(f"Server still responding after {timeout}")
return False
def wait_live(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir,
timeout=timeout.total_seconds())
try_until = datetime.now() + timeout
while datetime.now() < try_until:
r = curl.http_get(url=f'http://{self.env.domain1}:{self.ports["http"]}/')
if r.exit_code == 0:
self._maybe_running = True
return True
time.sleep(.1)
log.error(f"Server still not responding after {timeout}")
return False
def _rmf(self, path):
if os.path.exists(path):
return os.remove(path)
def _mkpath(self, path):
if not os.path.exists(path):
return os.makedirs(path)
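# _write_config() regenerates the complete Apache configuration: document
# roots, password files for Basic/Digest auth, LoadModule lines and the
# per-domain virtual hosts, including the CONNECT-capable forward proxies.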
def _write_config(self):
domain1 = self.env.domain1
domain1brotli = self.env.domain1brotli
creds1 = self.env.get_credentials(self._domain1_cred_name)
assert creds1 # convince pytype this isn't None
self._loaded_domain1_cred_name = self._domain1_cred_name
domain2 = self.env.domain2
creds2 = self.env.get_credentials(domain2)
assert creds2 # convince pytype this isn't None
exp_domain = self.env.expired_domain
exp_creds = self.env.get_credentials(exp_domain)
assert exp_creds # convince pytype this isn't None
proxy_domain = self.env.proxy_domain
proxy_creds = self.env.get_credentials(proxy_domain)
assert proxy_creds # convince pytype this isn't None
self._mkpath(self._conf_dir)
self._mkpath(self._docs_dir)
self._mkpath(self._logs_dir)
self._mkpath(self._tmp_dir)
self._mkpath(os.path.join(self._docs_dir, 'two'))
with open(os.path.join(self._docs_dir, 'data.json'), 'w') as fd:
data = {
'server': f'{domain1}',
}
fd.write(JSONEncoder().encode(data))
with open(os.path.join(self._docs_dir, 'two/data.json'), 'w') as fd:
data = {
'server': f'{domain2}',
}
fd.write(JSONEncoder().encode(data))
if self._proxy_auth_basic:
with open(self._basic_passwords, 'w') as fd:
fd.write('proxy:$apr1$FQfeInbs$WQZbODJlVg60j0ogEIlTW/\n')
if self._auth_digest:
with open(self._digest_passwords, 'w') as fd:
fd.write('test:restricted area:57123e269fd73d71ae0656594e938e2f\n')
self._mkpath(os.path.join(self.docs_dir, 'restricted/digest'))
with open(os.path.join(self.docs_dir, 'restricted/digest/data.json'), 'w') as fd:
fd.write('{"area":"digest"}\n')
with open(self._conf_file, 'w') as fd:
for m in self.MODULES:
if os.path.exists(os.path.join(self._mods_dir, f'mod_{m}.so')):
fd.write(f'LoadModule {m}_module "{self._mods_dir}/mod_{m}.so"\n')
if Httpd.MOD_CURLTEST is not None:
fd.write(f'LoadModule curltest_module "{Httpd.MOD_CURLTEST}"\n')
conf = [ # base server config
f'ServerRoot "{self._apache_dir}"',
'DefaultRuntimeDir logs',
'PidFile httpd.pid',
f'ServerName {self.env.tld}',
f'ErrorLog {self._error_log}',
f'LogLevel {self._get_log_level()}',
'StartServers 4',
'ReadBufferSize 16000',
'KeepAliveTimeout 30', # CI may exceed the default of 5 sec
'H2MinWorkers 16',
'H2MaxWorkers 256',
f'TypesConfig "{self._conf_dir}/mime.types"',
'SSLSessionCache "shmcb:ssl_gcache_data(32000)"',
'AddEncoding x-gzip .gz .tgz .gzip',
'AddHandler type-map .var',
]
conf.extend([f'Listen {port}' for _, port in self.ports.items()])
if 'base' in self._extra_configs:
conf.extend(self._extra_configs['base'])
conf.extend([ # plain http host for domain1
f'<VirtualHost *:{self.ports["http"]}>',
f' ServerName {domain1}',
' ServerAlias localhost',
f' DocumentRoot "{self._docs_dir}"',
' Protocols h2c http/1.1',
' H2Direct on',
])
conf.extend(self._curltest_conf(domain1))
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # https host for domain1, h1 + h2
f'<VirtualHost *:{self.ports["https"]}>',
f' ServerName {domain1}',
' ServerAlias localhost',
' Protocols h2 http/1.1',
' SSLEngine on',
f' SSLCertificateFile {creds1.cert_file}',
f' SSLCertificateKeyFile {creds1.pkey_file}',
f' DocumentRoot "{self._docs_dir}"',
])
conf.extend(self._curltest_conf(domain1))
if domain1 in self._extra_configs:
conf.extend(self._extra_configs[domain1])
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # https host for domain1, h1 + h2, tcp only
f'<VirtualHost *:{self.ports["https-tcp-only"]}>',
f' ServerName {domain1}',
' ServerAlias localhost',
' Protocols h2 http/1.1',
' SSLEngine on',
f' SSLCertificateFile {creds1.cert_file}',
f' SSLCertificateKeyFile {creds1.pkey_file}',
f' DocumentRoot "{self._docs_dir}"',
])
conf.extend(self._curltest_conf(domain1))
if domain1 in self._extra_configs:
conf.extend(self._extra_configs[domain1])
conf.extend([
'</VirtualHost>',
'',
])
# Alternate to domain1 with BROTLI compression
conf.extend([ # https host for domain1, h1 + h2
f'<VirtualHost *:{self.ports["https"]}>',
f' ServerName {domain1brotli}',
' Protocols h2 http/1.1',
' SSLEngine on',
f' SSLCertificateFile {creds1.cert_file}',
f' SSLCertificateKeyFile {creds1.pkey_file}',
f' DocumentRoot "{self._docs_dir}"',
' SetOutputFilter BROTLI_COMPRESS',
])
conf.extend(self._curltest_conf(domain1))
if domain1 in self._extra_configs:
conf.extend(self._extra_configs[domain1])
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # plain http host for domain2
f'<VirtualHost *:{self.ports["http"]}>',
f' ServerName {domain2}',
' ServerAlias localhost',
f' DocumentRoot "{self._docs_dir}"',
' Protocols h2c http/1.1',
])
conf.extend(self._curltest_conf(domain2))
conf.extend([
'</VirtualHost>',
'',
])
self._mkpath(os.path.join(self._docs_dir, 'two'))
conf.extend([ # https host for domain2, no h2
f'<VirtualHost *:{self.ports["https"]}>',
f' ServerName {domain2}',
' Protocols http/1.1',
' SSLEngine on',
f' SSLCertificateFile {creds2.cert_file}',
f' SSLCertificateKeyFile {creds2.pkey_file}',
f' DocumentRoot "{self._docs_dir}/two"',
])
conf.extend(self._curltest_conf(domain2))
if domain2 in self._extra_configs:
conf.extend(self._extra_configs[domain2])
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # https host for domain2, no h2, tcp only
f'<VirtualHost *:{self.ports["https-tcp-only"]}>',
f' ServerName {domain2}',
' Protocols http/1.1',
' SSLEngine on',
f' SSLCertificateFile {creds2.cert_file}',
f' SSLCertificateKeyFile {creds2.pkey_file}',
f' DocumentRoot "{self._docs_dir}/two"',
])
conf.extend(self._curltest_conf(domain2))
if domain2 in self._extra_configs:
conf.extend(self._extra_configs[domain2])
conf.extend([
'</VirtualHost>',
'',
])
self._mkpath(os.path.join(self._docs_dir, 'expired'))
conf.extend([ # https host for expired domain
f'<VirtualHost *:{self.ports["https"]}>',
f' ServerName {exp_domain}',
' Protocols h2 http/1.1',
' SSLEngine on',
f' SSLCertificateFile {exp_creds.cert_file}',
f' SSLCertificateKeyFile {exp_creds.pkey_file}',
f' DocumentRoot "{self._docs_dir}/expired"',
])
conf.extend(self._curltest_conf(exp_domain))
if exp_domain in self._extra_configs:
conf.extend(self._extra_configs[exp_domain])
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # http forward proxy
f'<VirtualHost *:{self.ports["proxy"]}>',
f' ServerName {proxy_domain}',
' Protocols h2c http/1.1',
' ProxyRequests On',
' H2ProxyRequests On',
' ProxyVia On',
f' AllowCONNECT {self.ports["http"]} {self.ports["https"]}',
])
conf.extend(self._get_proxy_conf())
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # https forward proxy
f'<VirtualHost *:{self.ports["proxys"]}>',
f' ServerName {proxy_domain}',
' Protocols h2 http/1.1',
' SSLEngine on',
f' SSLCertificateFile {proxy_creds.cert_file}',
f' SSLCertificateKeyFile {proxy_creds.pkey_file}',
' ProxyRequests On',
' H2ProxyRequests On',
' ProxyVia On',
f' AllowCONNECT {self.ports["http"]} {self.ports["https"]}',
])
conf.extend(self._get_proxy_conf())
conf.extend([
'</VirtualHost>',
'',
])
fd.write("\n".join(conf))
with open(os.path.join(self._conf_dir, 'mime.types'), 'w') as fd:
fd.write("\n".join([
'text/plain txt',
'text/html html',
'application/json json',
'application/x-gzip gzip',
'application/x-gzip gz',
''
]))
def _get_proxy_conf(self):
if self._proxy_auth_basic:
return [
' <Proxy "*">',
' AuthType Basic',
' AuthName "Restricted Proxy"',
' AuthBasicProvider file',
f' AuthUserFile "{self._basic_passwords}"',
' Require user proxy',
' </Proxy>',
]
else:
return [
' <Proxy "*">',
' Require ip 127.0.0.1',
' </Proxy>',
]
def _get_log_level(self):
if self.env.verbose > 3:
return 'trace2'
if self.env.verbose > 2:
return 'trace1'
if self.env.verbose > 1:
return 'debug'
return 'info'
def _curltest_conf(self, servername) -> List[str]:
lines = []
if Httpd.MOD_CURLTEST is not None:
lines.extend([
' Redirect 302 /data.json.302 /data.json',
' Redirect 301 /curltest/echo301 /curltest/echo',
' Redirect 302 /curltest/echo302 /curltest/echo',
' Redirect 303 /curltest/echo303 /curltest/echo',
' Redirect 307 /curltest/echo307 /curltest/echo',
' <Location /curltest/sslinfo>',
' SSLOptions StdEnvVars',
' SetHandler curltest-sslinfo',
' </Location>',
' <Location /curltest/echo>',
' SetHandler curltest-echo',
' </Location>',
' <Location /curltest/put>',
' SetHandler curltest-put',
' </Location>',
' <Location /curltest/tweak>',
' SetHandler curltest-tweak',
' </Location>',
' Redirect 302 /tweak /curltest/tweak',
' <Location /curltest/1_1>',
' SetHandler curltest-1_1-required',
' </Location>',
' <Location /curltest/shutdown_unclean>',
' SetHandler curltest-tweak',
' SetEnv force-response-1.0 1',
' </Location>',
' SetEnvIf Request_URI "/shutdown_unclean" ssl-unclean=1',
' RewriteEngine on',
' RewriteRule "^/curltest/put-redir-301$" "/curltest/put" [R=301]',
' RewriteRule "^/curltest/put-redir-302$" "/curltest/put" [R=302]',
' RewriteRule "^/curltest/put-redir-307$" "/curltest/put" [R=307]',
' RewriteRule "^/curltest/put-redir-308$" "/curltest/put" [R=308]',
])
if self._auth_digest:
lines.extend([
f' <Directory {self.docs_dir}/restricted/digest>',
' AuthType Digest',
' AuthName "restricted area"',
f' AuthDigestDomain "https://{servername}"',
' AuthBasicProvider file',
f' AuthUserFile "{self._digest_passwords}"',
' Require valid-user',
' </Directory>',
])
return lines
def _init_curltest(self):
if Httpd.MOD_CURLTEST is not None:
return
local_dir = os.path.dirname(inspect.getfile(Httpd))
out_dir = os.path.join(self.env.gen_dir, 'mod_curltest')
in_source = os.path.join(local_dir, 'mod_curltest/mod_curltest.c')
out_source = os.path.join(out_dir, 'mod_curltest.c')
if not os.path.exists(out_dir):
os.mkdir(out_dir)
if not os.path.exists(out_source) or \
os.stat(in_source).st_mtime > os.stat(out_source).st_mtime:
shutil.copy(in_source, out_source)
p = subprocess.run([
self.env.apxs, '-c', out_source
], capture_output=True, cwd=out_dir)
rv = p.returncode
if rv != 0:
log.error(f"compiling mod_curltest failed: {p.stderr}")
raise Exception(f"compiling mod_curltest failed: {p.stderr}")
Httpd.MOD_CURLTEST = os.path.join(out_dir, '.libs/mod_curltest.so')
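+881
View File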
@@ -0,0 +1,881 @@
/***************************************************************************
* _ _ ____ _
* Project ___| | | | _ \| |
* / __| | | | |_) | |
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
* furnished to do so, under the terms of the COPYING file.
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
* KIND, either express or implied.
*
* SPDX-License-Identifier: curl
*
***************************************************************************/
#include <assert.h>
#include <apr_optional.h>
#include <apr_optional_hooks.h>
#include <apr_strings.h>
#include <apr_cstr.h>
#include <apr_time.h>
#include <apr_want.h>
#include <httpd.h>
#include <http_protocol.h>
#include <http_request.h>
#include <http_log.h>
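/* mod_curltest: a small test module that registers the content handlers
 * curltest-echo, curltest-put, curltest-tweak, curltest-1_1-required and
 * curltest-sslinfo, which the pytest suite maps to /curltest/... locations
 * to provoke corner cases (mid-body aborts, delays, oversized headers). */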
static void curltest_hooks(apr_pool_t *pool);
static int curltest_echo_handler(request_rec *r);
static int curltest_put_handler(request_rec *r);
static int curltest_tweak_handler(request_rec *r);
static int curltest_1_1_required(request_rec *r);
static int curltest_sslinfo_handler(request_rec *r);
AP_DECLARE_MODULE(curltest) =
{
STANDARD20_MODULE_STUFF,
NULL, /* func to create per dir config */
NULL, /* func to merge per dir config */
NULL, /* func to create per server config */
NULL, /* func to merge per server config */
NULL, /* command handlers */
curltest_hooks,
#ifdef AP_MODULE_FLAG_NONE
AP_MODULE_FLAG_ALWAYS_MERGE
#endif
};
static int curltest_post_config(apr_pool_t *p, apr_pool_t *plog,
apr_pool_t *ptemp, server_rec *s)
{
void *data = NULL;
const char *key = "mod_curltest_init_counter";
(void)plog;(void)ptemp;
apr_pool_userdata_get(&data, key, s->process->pool);
if(!data) {
/* dry run */
apr_pool_userdata_set((const void *)1, key,
apr_pool_cleanup_null, s->process->pool);
return APR_SUCCESS;
}
/* mess with the overall server here */
return APR_SUCCESS;
}
static void curltest_hooks(apr_pool_t *pool)
{
ap_log_perror(APLOG_MARK, APLOG_TRACE1, 0, pool, "installing hooks");
/* Run once after configuration is set, but before mpm children initialize.
*/
ap_hook_post_config(curltest_post_config, NULL, NULL, APR_HOOK_MIDDLE);
/* curl test handlers */
ap_hook_handler(curltest_echo_handler, NULL, NULL, APR_HOOK_MIDDLE);
ap_hook_handler(curltest_put_handler, NULL, NULL, APR_HOOK_MIDDLE);
ap_hook_handler(curltest_tweak_handler, NULL, NULL, APR_HOOK_MIDDLE);
ap_hook_handler(curltest_1_1_required, NULL, NULL, APR_HOOK_MIDDLE);
ap_hook_handler(curltest_sslinfo_handler, NULL, NULL, APR_HOOK_MIDDLE);
}
#define SECS_PER_HOUR (60*60)
#define SECS_PER_DAY (24*SECS_PER_HOUR)
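/* Parse a duration such as "2", "2s", "250ms", "1m", "1h" or "1d" into an
 * apr_interval_time_t; a bare number uses def_unit (callers pass "s"). */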
static apr_status_t duration_parse(apr_interval_time_t *ptimeout,
const char *value, const char *def_unit)
{
char *endp;
apr_int64_t n;
n = apr_strtoi64(value, &endp, 10);
if(errno) {
return errno;
}
if(!endp || !*endp) {
if(!def_unit)
def_unit = "s";
}
else if(endp == value) {
return APR_EINVAL;
}
else {
def_unit = endp;
}
switch(*def_unit) {
case 'D':
case 'd':
*ptimeout = apr_time_from_sec(n * SECS_PER_DAY);
break;
case 's':
case 'S':
*ptimeout = (apr_interval_time_t) apr_time_from_sec(n);
break;
case 'h':
case 'H':
/* Time is in hours */
*ptimeout = (apr_interval_time_t) apr_time_from_sec(n * SECS_PER_HOUR);
break;
case 'm':
case 'M':
switch(*(++def_unit)) {
/* Time is in milliseconds */
case 's':
case 'S':
*ptimeout = (apr_interval_time_t) n * 1000;
break;
/* Time is in minutes */
case 'i':
case 'I':
*ptimeout = (apr_interval_time_t) apr_time_from_sec(n * 60);
break;
default:
return APR_EGENERAL;
}
break;
case 'u':
case 'U':
switch(*(++def_unit)) {
/* Time is in microseconds */
case 's':
case 'S':
*ptimeout = (apr_interval_time_t) n;
break;
default:
return APR_EGENERAL;
}
break;
default:
return APR_EGENERAL;
}
return APR_SUCCESS;
}
static int status_from_str(const char *s, apr_status_t *pstatus)
{
if(!strcmp("timeout", s)) {
*pstatus = APR_TIMEUP;
return 1;
}
else if(!strcmp("reset", s)) {
*pstatus = APR_ECONNRESET;
return 1;
}
return 0;
}
static int curltest_echo_handler(request_rec *r)
{
conn_rec *c = r->connection;
apr_bucket_brigade *bb;
apr_bucket *b;
apr_status_t rv;
char buffer[8192];
const char *ct;
apr_off_t die_after_len = -1, total_read_len = 0;
apr_time_t read_delay = 0;
int just_die = 0, die_after_100 = 0;
long l;
if(strcmp(r->handler, "curltest-echo")) {
return DECLINED;
}
if(r->method_number != M_GET && r->method_number != M_POST) {
return DECLINED;
}
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: processing");
if(r->args) {
apr_array_header_t *args = NULL;
int i;
args = apr_cstr_split(r->args, "&", 1, r->pool);
for(i = 0; i < args->nelts; ++i) {
char *s, *val, *arg = APR_ARRAY_IDX(args, i, char *);
s = strchr(arg, '=');
if(s) {
*s = '\0';
val = s + 1;
if(!strcmp("die_after", arg)) {
die_after_len = (apr_off_t)apr_atoi64(val);
continue;
}
else if(!strcmp("just_die", arg)) {
just_die = 1;
continue;
}
else if(!strcmp("die_after_100", arg)) {
die_after_100 = 1;
continue;
}
else if(!strcmp("read_delay", arg)) {
rv = duration_parse(&read_delay, val, "s");
if(APR_SUCCESS == rv) {
continue;
}
}
}
}
}
if(just_die) {
ap_log_rerror(APLOG_MARK, APLOG_INFO, 0, r,
"echo_handler: dying right away");
/* Generate no HTTP response at all. */
ap_remove_output_filter_byhandle(r->output_filters, "HTTP_HEADER");
r->connection->keepalive = AP_CONN_CLOSE;
return AP_FILTER_ERROR;
}
r->status = 200;
if(die_after_len >= 0) {
r->clength = die_after_len + 1;
r->chunked = 0;
apr_table_set(r->headers_out, "Content-Length",
apr_ltoa(r->pool, (long)r->clength));
}
else {
r->clength = -1;
r->chunked = 1;
apr_table_unset(r->headers_out, "Content-Length");
}
/* Discourage content-encodings */
apr_table_unset(r->headers_out, "Content-Encoding");
apr_table_setn(r->subprocess_env, "no-brotli", "1");
apr_table_setn(r->subprocess_env, "no-gzip", "1");
ct = apr_table_get(r->headers_in, "content-type");
ap_set_content_type(r, ct ? ct : "application/octet-stream");
if(apr_table_get(r->headers_in, "TE"))
apr_table_setn(r->headers_out, "Request-TE",
apr_table_get(r->headers_in, "TE"));
if(read_delay) {
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
"put_handler: read_delay");
apr_sleep(read_delay);
}
bb = apr_brigade_create(r->pool, c->bucket_alloc);
/* copy any request body into the response */
rv = ap_setup_client_block(r, REQUEST_CHUNKED_DECHUNK);
if(rv)
goto cleanup;
if(die_after_100) {
ap_log_rerror(APLOG_MARK, APLOG_INFO, 0, r,
"echo_handler: dying after 100-continue");
/* Generate no HTTP response at all. */
ap_remove_output_filter_byhandle(r->output_filters, "HTTP_HEADER");
r->connection->keepalive = AP_CONN_CLOSE;
return AP_FILTER_ERROR;
}
if(ap_should_client_block(r)) {
while(0 < (l = ap_get_client_block(r, &buffer[0], sizeof(buffer)))) {
total_read_len += l;
if(die_after_len >= 0 && total_read_len >= die_after_len) {
ap_log_rerror(APLOG_MARK, APLOG_INFO, 0, r,
"echo_handler: dying after %ld bytes as requested",
(long)total_read_len);
ap_pass_brigade(r->output_filters, bb);
ap_remove_output_filter_byhandle(r->output_filters, "HTTP_HEADER");
r->connection->keepalive = AP_CONN_CLOSE;
return DONE;
}
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
"echo_handler: copying %ld bytes from request body", l);
rv = apr_brigade_write(bb, NULL, NULL, buffer, l);
if(APR_SUCCESS != rv)
goto cleanup;
rv = ap_pass_brigade(r->output_filters, bb);
if(APR_SUCCESS != rv)
goto cleanup;
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
"echo_handler: passed %ld bytes from request body", l);
}
}
/* we are done */
b = apr_bucket_eos_create(c->bucket_alloc);
APR_BRIGADE_INSERT_TAIL(bb, b);
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: request read");
if(r->trailers_in && !apr_is_empty_table(r->trailers_in)) {
ap_log_rerror(APLOG_MARK, APLOG_TRACE2, 0, r,
"echo_handler: seeing incoming trailers");
apr_table_setn(r->trailers_out, "h2test-trailers-in",
apr_itoa(r->pool, 1));
}
rv = ap_pass_brigade(r->output_filters, bb);
cleanup:
if(rv == APR_SUCCESS ||
r->status != HTTP_OK ||
c->aborted) {
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "echo_handler: done");
return OK;
}
else {
/* no way to know what type of error occurred */
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "echo_handler failed");
return AP_FILTER_ERROR;
}
return DECLINED;
}
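/* curltest-tweak: shapes the response according to query parameters such as
 * status=, chunks=, chunk_size=, delay=, chunk_delay=, close_delay=, error=,
 * body_error=, x-hd=/x-hd1= (padding headers), close and with_cl, so tests
 * can provoke specific server behaviors. */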
static int curltest_tweak_handler(request_rec *r)
{
conn_rec *c = r->connection;
apr_bucket_brigade *bb;
apr_bucket *b;
apr_status_t rv;
char buffer[16*1024];
int i, chunks = 3, error_bucket = 1;
size_t chunk_size = sizeof(buffer);
const char *request_id = "none";
apr_time_t delay = 0, chunk_delay = 0, close_delay = 0;
apr_array_header_t *args = NULL;
int http_status = 200;
apr_status_t error = APR_SUCCESS, body_error = APR_SUCCESS;
int close_conn = 0, with_cl = 0;
int x_hd_len = 0, x_hd1_len = 0;
if(strcmp(r->handler, "curltest-tweak")) {
return DECLINED;
}
if(r->method_number == M_DELETE) {
http_status = 204;
chunks = 0;
}
else if(r->method_number != M_GET && r->method_number != M_POST) {
return DECLINED;
}
if(r->args) {
args = apr_cstr_split(r->args, "&", 1, r->pool);
for(i = 0; i < args->nelts; ++i) {
char *s, *val, *arg = APR_ARRAY_IDX(args, i, char *);
s = strchr(arg, '=');
if(s) {
*s = '\0';
val = s + 1;
if(!strcmp("status", arg)) {
http_status = (int)apr_atoi64(val);
if(http_status > 0) {
continue;
}
}
else if(!strcmp("chunks", arg)) {
chunks = (int)apr_atoi64(val);
if(chunks >= 0) {
continue;
}
}
else if(!strcmp("chunk_size", arg)) {
chunk_size = (int)apr_atoi64(val);
if(chunk_size >= 0) {
if(chunk_size > sizeof(buffer)) {
ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r,
"chunk_size %zu too large", chunk_size);
ap_die(HTTP_BAD_REQUEST, r);
return OK;
}
continue;
}
}
else if(!strcmp("id", arg)) {
/* just an id for repeated requests with curl's url globbing */
request_id = val;
continue;
}
else if(!strcmp("error", arg)) {
if(status_from_str(val, &error)) {
continue;
}
}
else if(!strcmp("error_bucket", arg)) {
error_bucket = (int)apr_atoi64(val);
if(error_bucket >= 0) {
continue;
}
}
else if(!strcmp("body_error", arg)) {
if(status_from_str(val, &body_error)) {
continue;
}
}
else if(!strcmp("delay", arg)) {
rv = duration_parse(&delay, val, "s");
if(APR_SUCCESS == rv) {
continue;
}
}
else if(!strcmp("chunk_delay", arg)) {
rv = duration_parse(&chunk_delay, val, "s");
if(APR_SUCCESS == rv) {
continue;
}
}
else if(!strcmp("close_delay", arg)) {
rv = duration_parse(&close_delay, val, "s");
if(APR_SUCCESS == rv) {
continue;
}
}
else if(!strcmp("x-hd", arg)) {
x_hd_len = (int)apr_atoi64(val);
continue;
}
else if(!strcmp("x-hd1", arg)) {
x_hd1_len = (int)apr_atoi64(val);
continue;
}
}
else if(!strcmp("close", arg)) {
/* we are asked to close the connection */
close_conn = 1;
continue;
}
else if(!strcmp("with_cl", arg)) {
with_cl = 1;
continue;
}
ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r, "query parameter not "
"understood: '%s' in %s",
arg, r->args);
ap_die(HTTP_BAD_REQUEST, r);
return OK;
}
}
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "error_handler: processing "
"request, %s", r->args? r->args : "(no args)");
r->status = http_status;
r->clength = with_cl ? (chunks * chunk_size) : -1;
r->chunked = (r->proto_num >= HTTP_VERSION(1, 1)) && !with_cl;
apr_table_setn(r->headers_out, "request-id", request_id);
if(r->clength >= 0) {
apr_table_set(r->headers_out, "Content-Length",
apr_ltoa(r->pool, (long)r->clength));
}
else
apr_table_unset(r->headers_out, "Content-Length");
/* Discourage content-encodings */
apr_table_unset(r->headers_out, "Content-Encoding");
if(x_hd_len > 0) {
int i, hd_len = (16 * 1024);
int n = (x_hd_len / hd_len);
char *hd_val = apr_palloc(r->pool, x_hd_len);
memset(hd_val, 'X', hd_len);
hd_val[hd_len - 1] = 0;
for(i = 0; i < n; ++i) {
apr_table_setn(r->headers_out,
apr_psprintf(r->pool, "X-Header-%d", i), hd_val);
}
if(x_hd_len % hd_len) {
hd_val[(x_hd_len % hd_len)] = 0;
apr_table_setn(r->headers_out,
apr_psprintf(r->pool, "X-Header-%d", i), hd_val);
}
}
if(x_hd1_len > 0) {
char *hd_val = apr_palloc(r->pool, x_hd1_len);
memset(hd_val, 'Y', x_hd1_len);
hd_val[x_hd1_len - 1] = 0;
apr_table_setn(r->headers_out, "X-Mega-Header", hd_val);
}
apr_table_setn(r->subprocess_env, "no-brotli", "1");
apr_table_setn(r->subprocess_env, "no-gzip", "1");
ap_set_content_type(r, "application/octet-stream");
bb = apr_brigade_create(r->pool, c->bucket_alloc);
if(delay) {
apr_sleep(delay);
}
if(error != APR_SUCCESS) {
return ap_map_http_request_error(error, HTTP_BAD_REQUEST);
}
/* flush response */
b = apr_bucket_flush_create(c->bucket_alloc);
APR_BRIGADE_INSERT_TAIL(bb, b);
rv = ap_pass_brigade(r->output_filters, bb);
if(APR_SUCCESS != rv)
goto cleanup;
memset(buffer, 'X', sizeof(buffer));
for(i = 0; i < chunks; ++i) {
if(chunk_delay) {
apr_sleep(chunk_delay);
}
rv = apr_brigade_write(bb, NULL, NULL, buffer, chunk_size);
if(APR_SUCCESS != rv)
goto cleanup;
rv = ap_pass_brigade(r->output_filters, bb);
if(APR_SUCCESS != rv)
goto cleanup;
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
"error_handler: passed %lu bytes as response body",
(unsigned long)chunk_size);
if(body_error != APR_SUCCESS) {
rv = body_error;
goto cleanup;
}
}
/* we are done */
b = apr_bucket_eos_create(c->bucket_alloc);
APR_BRIGADE_INSERT_TAIL(bb, b);
rv = ap_pass_brigade(r->output_filters, bb);
apr_brigade_cleanup(bb);
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r,
"error_handler: response passed");
cleanup:
if(close_conn) {
if(close_delay) {
b = apr_bucket_flush_create(c->bucket_alloc);
APR_BRIGADE_INSERT_TAIL(bb, b);
rv = ap_pass_brigade(r->output_filters, bb);
apr_brigade_cleanup(bb);
apr_sleep(close_delay);
}
r->connection->keepalive = AP_CONN_CLOSE;
}
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r,
"error_handler: request cleanup, r->status=%d, aborted=%d, "
"close=%d", r->status, c->aborted, close_conn);
if(rv == APR_SUCCESS) {
return OK;
}
if(error_bucket) {
http_status = ap_map_http_request_error(rv, HTTP_BAD_REQUEST);
b = ap_bucket_error_create(http_status, NULL, r->pool, c->bucket_alloc);
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r,
"error_handler: passing error bucket, status=%d",
http_status);
APR_BRIGADE_INSERT_TAIL(bb, b);
ap_pass_brigade(r->output_filters, bb);
}
return AP_FILTER_ERROR;
}
static int curltest_put_handler(request_rec *r)
{
conn_rec *c = r->connection;
apr_bucket_brigade *bb;
apr_bucket *b;
apr_status_t rv;
char buffer[128*1024];
const char *ct;
apr_off_t rbody_len = 0;
apr_off_t rbody_max_len = -1;
const char *s_rbody_len;
const char *request_id = "none";
apr_time_t read_delay = 0, chunk_delay = 0;
apr_array_header_t *args = NULL;
long l;
int i;
if(strcmp(r->handler, "curltest-put")) {
return DECLINED;
}
if(r->method_number != M_PUT) {
return DECLINED;
}
if(r->args) {
args = apr_cstr_split(r->args, "&", 1, r->pool);
for(i = 0; i < args->nelts; ++i) {
char *s, *val, *arg = APR_ARRAY_IDX(args, i, char *);
s = strchr(arg, '=');
if(s) {
*s = '\0';
val = s + 1;
if(!strcmp("id", arg)) {
/* just an id for repeated requests with curl's url globbing */
request_id = val;
continue;
}
else if(!strcmp("read_delay", arg)) {
rv = duration_parse(&read_delay, val, "s");
if(APR_SUCCESS == rv) {
continue;
}
}
else if(!strcmp("chunk_delay", arg)) {
rv = duration_parse(&chunk_delay, val, "s");
if(APR_SUCCESS == rv) {
continue;
}
}
else if(!strcmp("max_upload", arg)) {
rbody_max_len = (apr_off_t)apr_atoi64(val);
continue;
}
}
ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r, "query parameter not "
"understood: '%s' in %s",
arg, r->args);
ap_die(HTTP_BAD_REQUEST, r);
return OK;
}
}
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "put_handler: processing");
r->status = 200;
r->clength = -1;
r->chunked = 1;
apr_table_unset(r->headers_out, "Content-Length");
/* Discourage content-encodings */
apr_table_unset(r->headers_out, "Content-Encoding");
apr_table_setn(r->headers_out, "request-id", request_id);
apr_table_setn(r->subprocess_env, "no-brotli", "1");
apr_table_setn(r->subprocess_env, "no-gzip", "1");
ct = apr_table_get(r->headers_in, "content-type");
ap_set_content_type(r, ct ? ct : "text/plain");
if(read_delay) {
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
"put_handler: read_delay");
apr_sleep(read_delay);
}
bb = apr_brigade_create(r->pool, c->bucket_alloc);
/* copy any request body into the response */
rv = ap_setup_client_block(r, REQUEST_CHUNKED_DECHUNK);
if(rv)
goto cleanup;
if(ap_should_client_block(r)) {
while(0 < (l = ap_get_client_block(r, &buffer[0], sizeof(buffer)))) {
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
"put_handler: read %ld bytes from request body", l);
if(chunk_delay) {
apr_sleep(chunk_delay);
}
rbody_len += l;
if((rbody_max_len > 0) && (rbody_len > rbody_max_len)) {
r->status = 413;
break;
}
}
}
/* we are done */
s_rbody_len = apr_psprintf(r->pool, "%"APR_OFF_T_FMT, rbody_len);
apr_table_setn(r->headers_out, "Received-Length", s_rbody_len);
rv = apr_brigade_puts(bb, NULL, NULL, s_rbody_len);
if(APR_SUCCESS != rv)
goto cleanup;
b = apr_bucket_eos_create(c->bucket_alloc);
APR_BRIGADE_INSERT_TAIL(bb, b);
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "put_handler: request read");
rv = ap_pass_brigade(r->output_filters, bb);
if(r->status == 413) {
apr_sleep(apr_time_from_sec(1));
}
cleanup:
if(rv == APR_SUCCESS ||
r->status != HTTP_OK ||
c->aborted) {
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "put_handler: done");
return OK;
}
else {
/* no way to know what type of error occurred */
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "put_handler failed");
return AP_FILTER_ERROR;
}
return DECLINED;
}
static int curltest_1_1_required(request_rec *r)
{
conn_rec *c = r->connection;
apr_bucket_brigade *bb;
apr_bucket *b;
apr_status_t rv;
const char *ct;
if(strcmp(r->handler, "curltest-1_1-required")) {
return DECLINED;
}
if(HTTP_VERSION_MAJOR(r->proto_num) > 1) {
apr_table_setn(r->notes, "ssl-renegotiate-forbidden", "1");
ap_die(HTTP_FORBIDDEN, r);
return OK;
}
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "1_1_handler: processing");
r->status = 200;
r->clength = -1;
r->chunked = 1;
apr_table_unset(r->headers_out, "Content-Length");
/* Discourage content-encodings */
apr_table_unset(r->headers_out, "Content-Encoding");
apr_table_setn(r->subprocess_env, "no-brotli", "1");
apr_table_setn(r->subprocess_env, "no-gzip", "1");
ct = apr_table_get(r->headers_in, "content-type");
ap_set_content_type(r, ct ? ct : "text/plain");
bb = apr_brigade_create(r->pool, c->bucket_alloc);
/* flush response */
b = apr_bucket_flush_create(c->bucket_alloc);
APR_BRIGADE_INSERT_TAIL(bb, b);
rv = ap_pass_brigade(r->output_filters, bb);
if(APR_SUCCESS != rv)
goto cleanup;
/* we are done */
rv = apr_brigade_printf(bb, NULL, NULL, "well done!");
if(APR_SUCCESS != rv)
goto cleanup;
b = apr_bucket_eos_create(c->bucket_alloc);
APR_BRIGADE_INSERT_TAIL(bb, b);
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "1_1_handler: request read");
rv = ap_pass_brigade(r->output_filters, bb);
cleanup:
if(rv == APR_SUCCESS ||
r->status != HTTP_OK ||
c->aborted) {
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "1_1_handler: done");
return OK;
}
else {
/* no way to know what type of error occurred */
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "1_1_handler failed");
return AP_FILTER_ERROR;
}
return DECLINED;
}
static int brigade_env_var(request_rec *r, apr_bucket_brigade *bb,
const char *name)
{
const char *s;
s = apr_table_get(r->subprocess_env, name);
if(s)
return apr_brigade_printf(bb, NULL, NULL, ",\n \"%s\": \"%s\"", name, s);
return 0;
}
static int curltest_sslinfo_handler(request_rec *r)
{
conn_rec *c = r->connection;
apr_bucket_brigade *bb;
apr_bucket *b;
apr_status_t rv;
const char *request_id = NULL;
int close_conn = 0;
int i;
if(strcmp(r->handler, "curltest-sslinfo")) {
return DECLINED;
}
if(r->method_number != M_GET) {
return DECLINED;
}
if(r->args) {
apr_array_header_t *args = apr_cstr_split(r->args, "&", 1, r->pool);
for(i = 0; i < args->nelts; ++i) {
char *s, *val, *arg = APR_ARRAY_IDX(args, i, char *);
s = strchr(arg, '=');
if(s) {
*s = '\0';
val = s + 1;
if(!strcmp("id", arg)) {
/* just an id for repeated requests with curl's url globbing */
request_id = val;
continue;
}
}
else if(!strcmp("close", arg)) {
/* we are asked to close the connection */
close_conn = 1;
continue;
}
ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r, "query parameter not "
"understood: '%s' in %s",
arg, r->args);
ap_die(HTTP_BAD_REQUEST, r);
return OK;
}
}
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "sslinfo: processing");
r->status = 200;
r->clength = -1;
r->chunked = 1;
apr_table_unset(r->headers_out, "Content-Length");
/* Discourage content-encodings */
apr_table_unset(r->headers_out, "Content-Encoding");
if(request_id)
apr_table_setn(r->headers_out, "request-id", request_id);
apr_table_setn(r->subprocess_env, "no-brotli", "1");
apr_table_setn(r->subprocess_env, "no-gzip", "1");
ap_set_content_type(r, "application/json");
bb = apr_brigade_create(r->pool, c->bucket_alloc);
apr_brigade_puts(bb, NULL, NULL, "{\n \"Name\": \"SSL-Information\"");
brigade_env_var(r, bb, "HTTPS");
brigade_env_var(r, bb, "SSL_PROTOCOL");
brigade_env_var(r, bb, "SSL_CIPHER");
brigade_env_var(r, bb, "SSL_SESSION_ID");
brigade_env_var(r, bb, "SSL_SESSION_RESUMED");
brigade_env_var(r, bb, "SSL_SRP_USER");
brigade_env_var(r, bb, "SSL_SRP_USERINFO");
brigade_env_var(r, bb, "SSL_TLS_SNI");
apr_brigade_puts(bb, NULL, NULL, "}\n");
/* flush response */
b = apr_bucket_flush_create(c->bucket_alloc);
APR_BRIGADE_INSERT_TAIL(bb, b);
rv = ap_pass_brigade(r->output_filters, bb);
if(APR_SUCCESS != rv)
goto cleanup;
/* we are done */
b = apr_bucket_eos_create(c->bucket_alloc);
APR_BRIGADE_INSERT_TAIL(bb, b);
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "sslinfo_handler: request read");
rv = ap_pass_brigade(r->output_filters, bb);
cleanup:
if(close_conn)
r->connection->keepalive = AP_CONN_CLOSE;
if(rv == APR_SUCCESS ||
r->status != HTTP_OK ||
c->aborted) {
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "sslinfo_handler: done");
return OK;
}
else {
/* no way to know what type of error occurred */
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "sslinfo_handler failed");
return AP_FILTER_ERROR;
}
return DECLINED;
}
+346
View File
@@ -0,0 +1,346 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import signal
import socket
import subprocess
import time
from typing import Optional, Dict
from datetime import datetime, timedelta
from .env import Env, NghttpxUtil
from .curl import CurlClient
from .ports import alloc_ports_and_do
log = logging.getLogger(__name__)
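# Wrappers around the nghttpx proxy binary: Nghttpx holds the common process
# handling, NghttpxQuic fronts the Apache backends with TLS/h2 and (when the
# binary supports it) QUIC/h3, and NghttpxFwd runs nghttpx as an h2 forward
# proxy in front of the plain proxy port.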
class Nghttpx:
def __init__(self, env: Env, name: str, domain: str, cred_name: str):
self.env = env
self._name = name
self._domain = domain
self._port = 0
self._https_port = 0
self._cmd = env.nghttpx
self._run_dir = os.path.join(env.gen_dir, name)
self._pid_file = os.path.join(self._run_dir, 'nghttpx.pid')
self._conf_file = os.path.join(self._run_dir, 'nghttpx.conf')
self._error_log = os.path.join(self._run_dir, 'nghttpx.log')
self._stderr = os.path.join(self._run_dir, 'nghttpx.stderr')
self._tmp_dir = os.path.join(self._run_dir, 'tmp')
self._process: Optional[subprocess.Popen] = None
self._cred_name = self._def_cred_name = cred_name
self._loaded_cred_name = ''
self._version = NghttpxUtil.version(self._cmd)
def supports_h3(self):
return NghttpxUtil.version_with_h3(self._version)
def set_cred_name(self, name: str):
self._cred_name = name
def reset_config(self):
self._cred_name = self._def_cred_name
def reload_if_config_changed(self):
if self._process and self._port > 0 and \
self._loaded_cred_name == self._cred_name:
return True
return self.reload()
@property
def https_port(self):
return self._https_port
def exists(self):
return self._cmd and os.path.exists(self._cmd)
def clear_logs(self):
self._rmf(self._error_log)
self._rmf(self._stderr)
def is_running(self):
if self._process:
self._process.poll()
return self._process.returncode is None
return False
def start_if_needed(self):
if not self.is_running():
return self.start()
return True
def initial_start(self):
self._rmf(self._pid_file)
self._rmf(self._error_log)
self._mkpath(self._run_dir)
self._write_config()
def start(self, wait_live=True):
pass
def stop(self, wait_dead=True):
self._mkpath(self._tmp_dir)
if self._process:
self._process.terminate()
self._process.wait(timeout=2)
self._process = None
return not wait_dead or self.wait_dead(timeout=timedelta(seconds=5))
return True
def restart(self):
self.stop()
return self.start()
def reload(self, timeout: timedelta = timedelta(seconds=Env.SERVER_TIMEOUT)):
if self._process:
running = self._process
self._process = None
os.kill(running.pid, signal.SIGQUIT)
end_wait = datetime.now() + timedelta(seconds=5)
if not self.start(wait_live=False):
self._process = running
return False
while datetime.now() < end_wait:
try:
log.debug(f'waiting for nghttpx({running.pid}) to exit.')
running.wait(1)
log.debug(f'nghttpx({running.pid}) terminated -> {running.returncode}')
running = None
break
except subprocess.TimeoutExpired:
log.warning(f'nghttpx({running.pid}), not shut down yet.')
os.kill(running.pid, signal.SIGQUIT)
if datetime.now() >= end_wait:
log.error(f'nghttpx({running.pid}), terminate forcefully.')
os.kill(running.pid, signal.SIGKILL)
running.terminate()
running.wait(1)
return self.wait_live(timeout=timeout)
return False
def wait_dead(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
if self._https_port > 0:
check_url = f'https://{self._domain}:{self._port}/'
r = curl.http_get(url=check_url, extra_args=[
'--trace', 'curl.trace', '--trace-time',
'--connect-timeout', '1'
])
else:
check_url = f'https://{self._domain}:{self._port}/'
r = curl.http_get(url=check_url, extra_args=[
'--trace', 'curl.trace', '--trace-time',
'--http3-only', '--connect-timeout', '1'
])
if r.exit_code != 0:
return True
log.debug(f'waiting for nghttpx to stop responding: {r}')
time.sleep(.1)
log.debug(f"Server still responding after {timeout}")
return False
def wait_live(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
if self._https_port > 0:
check_url = f'https://{self._domain}:{self._port}/'
r = curl.http_get(url=check_url, extra_args=[
'--trace', 'curl.trace', '--trace-time',
'--connect-timeout', '1'
])
else:
check_url = f'https://{self._domain}:{self._port}/'
r = curl.http_get(url=check_url, extra_args=[
'--http3-only', '--trace', 'curl.trace', '--trace-time',
'--connect-timeout', '1'
])
if r.exit_code == 0:
return True
time.sleep(.1)
log.error(f"Server still not responding after {timeout}")
return False
def _rmf(self, path):
if os.path.exists(path):
return os.remove(path)
def _mkpath(self, path):
if not os.path.exists(path):
return os.makedirs(path)
def _write_config(self):
with open(self._conf_file, 'w') as fd:
fd.write('# nghttpx test config\n')
fd.write("\n".join([
'# do we need something here?'
]))
class NghttpxQuic(Nghttpx):
PORT_SPECS = {
'nghttpx_https': socket.SOCK_STREAM,
}
def __init__(self, env: Env):
super().__init__(env=env, name='nghttpx-quic',
domain=env.domain1, cred_name=env.domain1)
self._https_port = env.https_port
def initial_start(self):
super().initial_start()
def startup(ports: Dict[str, int]) -> bool:
self._port = ports['nghttpx_https']
if self.start():
self.env.update_ports(ports)
return True
self.stop()
self._port = 0
return False
return alloc_ports_and_do(NghttpxQuic.PORT_SPECS, startup,
self.env.gen_root, max_tries=3)
def start(self, wait_live=True):
self._mkpath(self._tmp_dir)
if self._process:
self.stop()
creds = self.env.get_credentials(self._cred_name)
assert creds # convince pytype this isn't None
self._loaded_cred_name = self._cred_name
args = [self._cmd, f'--frontend=*,{self._port};tls']
if self.supports_h3():
args.extend([
f'--frontend=*,{self.env.h3_port};quic',
'--frontend-quic-early-data',
])
args.extend([
f'--backend=127.0.0.1,{self.env.https_port};{self._domain};sni={self._domain};proto=h2;tls',
f'--backend=127.0.0.1,{self.env.http_port}',
'--log-level=ERROR',
f'--pid-file={self._pid_file}',
f'--errorlog-file={self._error_log}',
f'--conf={self._conf_file}',
f'--cacert={self.env.ca.cert_file}',
creds.pkey_file,
creds.cert_file,
'--frontend-http3-window-size=1M',
'--frontend-http3-max-window-size=10M',
'--frontend-http3-connection-window-size=10M',
'--frontend-http3-max-connection-window-size=100M',
# f'--frontend-quic-debug-log',
])
ngerr = open(self._stderr, 'a')
self._process = subprocess.Popen(args=args, stderr=ngerr)
if self._process.returncode is not None:
return False
return not wait_live or self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT))
class NghttpxFwd(Nghttpx):
def __init__(self, env: Env):
super().__init__(env=env, name='nghttpx-fwd',
domain=env.proxy_domain,
cred_name=env.proxy_domain)
def initial_start(self):
super().initial_start()
def startup(ports: Dict[str, int]) -> bool:
self._port = ports['h2proxys']
if self.start():
self.env.update_ports(ports)
return True
self.stop()
self._port = 0
return False
return alloc_ports_and_do({'h2proxys': socket.SOCK_STREAM},
startup, self.env.gen_root, max_tries=3)
def start(self, wait_live=True):
assert self._port > 0
self._mkpath(self._tmp_dir)
if self._process:
self.stop()
creds = self.env.get_credentials(self._cred_name)
assert creds # convince pytype this isn't None
self._loaded_cred_name = self._cred_name
args = [
self._cmd,
'--http2-proxy',
f'--frontend=*,{self._port}',
f'--backend=127.0.0.1,{self.env.proxy_port}',
'--log-level=ERROR',
f'--pid-file={self._pid_file}',
f'--errorlog-file={self._error_log}',
f'--conf={self._conf_file}',
f'--cacert={self.env.ca.cert_file}',
creds.pkey_file,
creds.cert_file,
]
ngerr = open(self._stderr, 'a')
self._process = subprocess.Popen(args=args, stderr=ngerr)
if self._process.returncode is not None:
return False
return not wait_live or self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT))
def wait_dead(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'https://{self.env.proxy_domain}:{self._port}/'
r = curl.http_get(url=check_url)
if r.exit_code != 0:
return True
log.debug(f'waiting for nghttpx-fwd to stop responding: {r}')
time.sleep(.1)
log.debug(f"Server still responding after {timeout}")
return False
def wait_live(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'https://{self.env.proxy_domain}:{self._port}/'
r = curl.http_get(url=check_url, extra_args=[
'--trace', 'curl.trace', '--trace-time'
])
if r.exit_code == 0:
return True
time.sleep(.1)
log.error(f"Server still not responding after {timeout}")
return False
+63
View File
@@ -0,0 +1,63 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import socket
from collections.abc import Callable
from typing import Dict
from filelock import FileLock
log = logging.getLogger(__name__)
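# Port allocation: bind to 127.0.0.1:0 so the kernel picks free ports, close
# the sockets again and hand the numbers to the server's startup callback. A
# FileLock serialises allocation across parallel test runs; since a port can
# still be grabbed between close() and the server's own bind(), callers retry
# via max_tries.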
def alloc_port_set(port_specs: Dict[str, int]) -> Dict[str, int]:
socks = []
ports = {}
for name, ptype in port_specs.items():
try:
s = socket.socket(type=ptype)
s.bind(('127.0.0.1', 0))
ports[name] = s.getsockname()[1]
socks.append(s)
except Exception as e:
raise e
for s in socks:
s.close()
return ports
def alloc_ports_and_do(port_spec: Dict[str, int],
do_func: Callable[[Dict[str, int]], bool],
gen_dir, max_tries=1) -> bool:
lock_file = os.path.join(gen_dir, 'ports.lock')
with FileLock(lock_file):
for _ in range(max_tries):
port_set = alloc_port_set(port_spec)
if do_func(port_set):
return True
return False
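# A minimal usage sketch (names are illustrative, mirroring httpd.py/vsftpd.py):
#
#   def startup(ports):
#       self._port = ports['http']
#       return self.start()
#
#   ok = alloc_ports_and_do({'http': socket.SOCK_STREAM}, startup,
#                           env.gen_root, max_tries=3)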
+224
View File
@@ -0,0 +1,224 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import re
import socket
import subprocess
import time
from datetime import datetime, timedelta
from typing import List, Dict
from .curl import CurlClient, ExecResult
from .env import Env
from .ports import alloc_ports_and_do
log = logging.getLogger(__name__)
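# Runs a local vsftpd instance with anonymous access rooted in a generated docs
# directory; with_ssl enables FTPS (explicit by default, implicit when
# ssl_implicit is set) using the test credentials for the ftp domain.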
class VsFTPD:
def __init__(self, env: Env, with_ssl=False, ssl_implicit=False):
self.env = env
self._cmd = env.vsftpd
self._port = 0
self._with_ssl = with_ssl
self._ssl_implicit = ssl_implicit and with_ssl
self._scheme = 'ftps' if self._ssl_implicit else 'ftp'
if self._with_ssl:
self.name = 'vsftpds'
self._port_skey = 'ftps'
self._port_specs = {
'ftps': socket.SOCK_STREAM,
}
else:
self.name = 'vsftpd'
self._port_skey = 'ftp'
self._port_specs = {
'ftp': socket.SOCK_STREAM,
}
self._vsftpd_dir = os.path.join(env.gen_dir, self.name)
self._run_dir = os.path.join(self._vsftpd_dir, 'run')
self._docs_dir = os.path.join(self._vsftpd_dir, 'docs')
self._tmp_dir = os.path.join(self._vsftpd_dir, 'tmp')
self._conf_file = os.path.join(self._vsftpd_dir, 'test.conf')
self._pid_file = os.path.join(self._vsftpd_dir, 'vsftpd.pid')
self._error_log = os.path.join(self._vsftpd_dir, 'vsftpd.log')
self._process = None
self.clear_logs()
@property
def domain(self):
return self.env.ftp_domain
@property
def docs_dir(self):
return self._docs_dir
@property
def port(self) -> int:
return self._port
def clear_logs(self):
self._rmf(self._error_log)
def exists(self):
return os.path.exists(self._cmd)
def is_running(self):
if self._process:
self._process.poll()
return self._process.returncode is None
return False
def start_if_needed(self):
if not self.is_running():
return self.start()
return True
def stop(self, wait_dead=True):
self._mkpath(self._tmp_dir)
if self._process:
self._process.terminate()
self._process.wait(timeout=2)
self._process = None
return not wait_dead or self.wait_dead(timeout=timedelta(seconds=5))
return True
def restart(self):
self.stop()
return self.start()
def initial_start(self):
def startup(ports: Dict[str, int]) -> bool:
self._port = ports[self._port_skey]
if self.start():
self.env.update_ports(ports)
return True
self.stop()
self._port = 0
return False
return alloc_ports_and_do(self._port_specs, startup,
self.env.gen_root, max_tries=3)
def start(self, wait_live=True):
assert self._port > 0
self._mkpath(self._tmp_dir)
if self._process:
self.stop()
self._write_config()
args = [
self._cmd,
f'{self._conf_file}',
]
procerr = open(self._error_log, 'a')
self._process = subprocess.Popen(args=args, stderr=procerr)
if self._process.returncode is not None:
return False
return not wait_live or self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT))
def wait_dead(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'{self._scheme}://{self.domain}:{self.port}/'
r = curl.ftp_get(urls=[check_url], extra_args=['-v'])
if r.exit_code != 0:
return True
log.debug(f'waiting for vsftpd to stop responding: {r}')
time.sleep(.1)
log.debug(f"Server still responding after {timeout}")
return False
def wait_live(self, timeout: timedelta):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'{self._scheme}://{self.domain}:{self.port}/'
r = curl.ftp_get(urls=[check_url], extra_args=[
'--trace', 'curl-start.trace', '--trace-time'
])
if r.exit_code == 0:
return True
time.sleep(.1)
log.error(f"Server still not responding after {timeout}")
return False
def _rmf(self, path):
if os.path.exists(path):
return os.remove(path)
def _mkpath(self, path):
if not os.path.exists(path):
return os.makedirs(path)
def _write_config(self):
self._mkpath(self._docs_dir)
self._mkpath(self._tmp_dir)
conf = [ # base server config
'listen=YES',
'run_as_launching_user=YES',
'#listen_address=127.0.0.1',
f'listen_port={self.port}',
'local_enable=NO',
'anonymous_enable=YES',
f'anon_root={self._docs_dir}',
'dirmessage_enable=YES',
'write_enable=YES',
'anon_upload_enable=YES',
'log_ftp_protocol=YES',
'xferlog_enable=YES',
'xferlog_std_format=NO',
f'vsftpd_log_file={self._error_log}',
'\n',
]
if self._with_ssl:
creds = self.env.get_credentials(self.domain)
assert creds # convince pytype this isn't None
conf.extend([
'ssl_enable=YES',
'debug_ssl=YES',
'allow_anon_ssl=YES',
f'rsa_cert_file={creds.cert_file}',
f'rsa_private_key_file={creds.pkey_file}',
# require_ssl_reuse=YES means ctrl and data connection need to use the same session
'require_ssl_reuse=NO',
])
if self._ssl_implicit:
conf.extend([
'implicit_ssl=YES',
])
with open(self._conf_file, 'w') as fd:
fd.write("\n".join(conf))
def get_data_ports(self, r: ExecResult) -> List[int]:
return [int(m.group(1)) for line in r.trace_lines if
(m := re.match(r'.*Established 2nd connection to .* \(\S+ port (\d+)\)', line))]
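+65
View File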
@@ -0,0 +1,65 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import argparse
import asyncio
import logging
from websockets import server
from websockets.exceptions import ConnectionClosedError
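# A tiny asyncio websocket echo server used by the websocket tests: it accepts
# connections on --port (default 9876) and sends every received message back
# unchanged.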
async def echo(websocket):
try:
async for message in websocket:
await websocket.send(message)
except ConnectionClosedError:
pass
async def run_server(port):
async with server.serve(echo, "localhost", port):
await asyncio.Future() # run forever
def main():
parser = argparse.ArgumentParser(prog='ws_echo_server', description="""
Run a websocket echo server.
""")
parser.add_argument("--port", type=int,
default=9876, help="port to listen on")
args = parser.parse_args()
logging.basicConfig(
format="%(asctime)s %(message)s",
level=logging.DEBUG,
)
asyncio.run(run_server(args.port))
if __name__ == "__main__":
main()